gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2016 The Bazel Authors. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.idea.blaze.android.run.test;

import com.android.ddmlib.testrunner.RemoteAndroidTestRunner;
import com.android.tools.idea.run.ApkProvisionException;
import com.android.tools.idea.run.ApplicationIdProvider;
import com.android.tools.idea.run.ConsolePrinter;
import com.android.tools.idea.run.tasks.LaunchContext;
import com.android.tools.idea.run.tasks.LaunchResult;
import com.android.tools.idea.run.tasks.LaunchTask;
import com.android.tools.idea.run.util.LaunchStatus;
import com.android.tools.idea.testartifacts.instrumented.AndroidTestListener;
import com.google.common.collect.ImmutableList;
import com.google.idea.blaze.android.manifest.ManifestParser;
import com.google.idea.blaze.android.run.deployinfo.BlazeAndroidDeployInfo;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.text.StringUtil;
import java.util.List;
import org.jetbrains.annotations.NotNull;

/**
 * A {@link LaunchTask} that runs an Android instrumentation test through the stock ddmlib
 * {@link RemoteAndroidTestRunner} (i.e. an {@code am instrument} invocation on the device).
 *
 * <p>Instances are created via {@link #getStockTestLaunchTask}, which validates that the
 * instrumentation runner requested by the run configuration is actually declared in the
 * merged manifest before constructing the task.
 */
class StockAndroidTestLaunchTask implements LaunchTask {
  // Identifier returned from getId() so the launch framework can distinguish this task type.
  private static final String ID = "STOCK_ANDROID_TEST";
  private static final Logger LOG = Logger.getInstance(StockAndroidTestLaunchTask.class);

  // Run-configuration state: testing type (module/package/class/method) and extra options.
  private final BlazeAndroidTestRunConfigurationState configState;
  // Fully-qualified instrumentation runner class passed to `am instrument`.
  private final String instrumentationTestRunner;
  // Package name of the test APK (the instrumentation target passed to the runner).
  private final String testApplicationId;
  // When true, the runner starts the instrumentation with -w debug so a debugger can attach.
  private final boolean waitForDebugger;

  StockAndroidTestLaunchTask(
      BlazeAndroidTestRunConfigurationState configState,
      String runner,
      String testPackage,
      boolean waitForDebugger) {
    this.configState = configState;
    this.instrumentationTestRunner = runner;
    this.waitForDebugger = waitForDebugger;
    this.testApplicationId = testPackage;
  }

  /**
   * Validates the test package and instrumentation runner, then builds the launch task.
   *
   * <p>Returns {@code null} after terminating the launch (via {@code launchStatus}) when the
   * test package name cannot be determined, when the manifest declares no instrumentation
   * runners, or when the runner named in the configuration is not among the declared ones.
   */
  public static LaunchTask getStockTestLaunchTask(
      BlazeAndroidTestRunConfigurationState configState,
      ApplicationIdProvider applicationIdProvider,
      boolean waitForDebugger,
      BlazeAndroidDeployInfo deployInfo,
      LaunchStatus launchStatus) {
    String testPackage;
    try {
      testPackage = applicationIdProvider.getTestPackageName();
      if (testPackage == null) {
        launchStatus.terminateLaunch("Unable to determine test package name", true);
        return null;
      }
    } catch (ApkProvisionException e) {
      // NOTE(review): the exception detail is dropped here; only the generic message is shown.
      launchStatus.terminateLaunch("Unable to determine test package name", true);
      return null;
    }
    List<String> availableRunners = getRunnersFromManifest(deployInfo);
    if (availableRunners.isEmpty()) {
      launchStatus.terminateLaunch(
          String.format(
              "No instrumentation test runner is defined in the manifest.\n"
                  + "At least one instrumentation tag must be defined for the\n"
                  + "\"%1$s\" package in the AndroidManifest.xml, e.g.:\n"
                  + "\n"
                  + "<manifest\n"
                  + " package=\"%1$s\"\n"
                  + " xmlns:android=\"http://schemas.android.com/apk/res/android\">\n"
                  + "\n"
                  + " <instrumentation\n"
                  + " android:name=\"androidx.test.runner.AndroidJUnitRunner\"\n"
                  + " android:targetPackage=\"%1$s\">\n"
                  + " </instrumentation>\n"
                  + "\n"
                  + "</manifest>",
              testPackage),
          true);
      // Note: Gradle users will never see the above message, so don't mention Gradle here.
      // Even if no runners are defined in build.gradle, Gradle will add a default to the manifest.
      return null;
    }
    String runner = configState.getInstrumentationRunnerClass();
    if (!StringUtil.isEmpty(runner)) {
      // A specific runner was requested; it must be one the manifest actually declares.
      if (!availableRunners.contains(runner)) {
        launchStatus.terminateLaunch(
            String.format(
                "Instrumentation test runner \"%2$s\"\n"
                    + "is not defined for the \"%1$s\" package in the manifest.\n"
                    + "Clear the 'Specific instrumentation runner' field in your configuration\n"
                    + "to default to \"%3$s\",\n"
                    + "or add the runner to your AndroidManifest.xml:\n"
                    + "\n"
                    + "<manifest\n"
                    + " package=\"%1$s\"\n"
                    + " xmlns:android=\"http://schemas.android.com/apk/res/android\">\n"
                    + "\n"
                    + " <instrumentation\n"
                    + " android:name=\"%2$s\"\n"
                    + " android:targetPackage=\"%1$s\">\n"
                    + " </instrumentation>\n"
                    + "\n"
                    + "</manifest>",
                testPackage, runner, availableRunners.get(0)),
            true);
        return null;
      }
    } else {
      // Default to the first available runner.
      runner = availableRunners.get(0);
    }
    return new StockAndroidTestLaunchTask(configState, runner, testPackage, waitForDebugger);
  }

  /**
   * Reads the instrumentation runner class names from the deploy info's merged manifest.
   *
   * <p>Manifest access requires an IntelliJ read action; when called without read access the
   * method re-invokes itself inside {@code runReadAction}. Returns an empty list when no
   * merged manifest is available.
   */
  private static ImmutableList<String> getRunnersFromManifest(
      final BlazeAndroidDeployInfo deployInfo) {
    if (!ApplicationManager.getApplication().isReadAccessAllowed()) {
      return ApplicationManager.getApplication()
          .runReadAction(
              (Computable<ImmutableList<String>>) () -> getRunnersFromManifest(deployInfo));
    }
    ManifestParser.ParsedManifest parsedManifest = deployInfo.getMergedManifest();
    if (parsedManifest != null) {
      return ImmutableList.copyOf(parsedManifest.instrumentationClassNames);
    }
    return ImmutableList.of();
  }

  @Override
  public String getDescription() {
    return "Launching instrumentation runner";
  }

  @Override
  public int getDuration() {
    // Relative duration weight used by the launch framework's progress reporting.
    return 2;
  }

  /**
   * Configures a {@link RemoteAndroidTestRunner} from the run configuration (scope, debug flag,
   * extra options) and starts it on a pooled thread. Returns immediately with success; test
   * results are streamed to the console by {@link AndroidTestListener}.
   */
  @Override
  public LaunchResult run(@NotNull LaunchContext launchContext) {
    ConsolePrinter printer = launchContext.getConsolePrinter();
    printer.stdout("Running tests\n");
    final RemoteAndroidTestRunner runner =
        new RemoteAndroidTestRunner(
            testApplicationId, instrumentationTestRunner, launchContext.getDevice());
    // Narrow the run scope according to the configured testing type.
    switch (configState.getTestingType()) {
      case BlazeAndroidTestRunConfigurationState.TEST_ALL_IN_MODULE:
        break;
      case BlazeAndroidTestRunConfigurationState.TEST_ALL_IN_PACKAGE:
        runner.setTestPackageName(configState.getPackageName());
        break;
      case BlazeAndroidTestRunConfigurationState.TEST_CLASS:
        runner.setClassName(configState.getClassName());
        break;
      case BlazeAndroidTestRunConfigurationState.TEST_METHOD:
        runner.setMethodName(configState.getClassName(), configState.getMethodName());
        break;
      default:
        LOG.error(String.format("Unrecognized testing type: %d", configState.getTestingType()));
        return LaunchResult.error("", getDescription());
    }
    runner.setDebug(waitForDebugger);
    runner.setRunOptions(configState.getExtraOptions());
    // Echo the equivalent adb command so users can reproduce the invocation manually.
    printer.stdout("$ adb shell " + runner.getAmInstrumentCommand());
    // run in a separate thread as this will block until the tests complete
    ApplicationManager.getApplication()
        .executeOnPooledThread(
            () -> {
              try {
                runner.run(new AndroidTestListener(printer));
              } catch (Exception e) {
                LOG.info(e);
                printer.stderr("Error: Unexpected exception while running tests: " + e);
              }
            });
    return LaunchResult.success();
  }

  @NotNull
  @Override
  public String getId() {
    return ID;
  }
}
/*
 * Copyright 2009-2015 DigitalGlobe, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and limitations under the License.
 */
package org.mrgeo.cmd.ingest;

import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.junit.*;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mrgeo.core.Defs;
import org.mrgeo.core.MrGeoConstants;
import org.mrgeo.core.MrGeoProperties;
import org.mrgeo.image.MrsImage;
import org.mrgeo.image.MrsImagePyramid;
import org.mrgeo.image.MrsImagePyramidMetadata;
import org.mrgeo.rasterops.OpImageRegistrar;
import org.mrgeo.hdfs.utils.HadoopFileUtils;
import org.mrgeo.junit.IntegrationTest;
import org.mrgeo.test.TestUtils;
import org.mrgeo.utils.HadoopUtils;
import org.mrgeo.utils.LongRectangle;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Properties;

/**
 * Integration tests for the {@code IngestImage} command: ingests the "all-ones" test
 * image and verifies the resulting pyramid structure, protection-level handling, the
 * {@code -sp} (skip pyramids) option, and error reporting when a required protection
 * level is missing.
 *
 * @author jason.surratt
 */
@SuppressWarnings("static-method")
public class IngestImageTest
{
  @Rule
  public TestName testname = new TestName();

  private static TestUtils testUtils;
  private static String input;
  private static Path inputHdfs;
  private static Path outputHdfs;

  private final static String all_ones = "all-ones";
  private static String all_ones_input = Defs.INPUT + all_ones + ".tif";
  private static String all_ones_output;

  private final static String aster_sample = "AsterSample";

  private static Configuration conf;
  private Properties providerProperties;

  // Saved MrGeoProperties values so teardown() can restore whatever the environment had.
  private static String origProtectionLevelRequired;
  private static String origProtectionLevelDefault;
  private static String origProtectionLevel;

  /**
   * One-time setup: snapshot the protection-level properties, create the Hadoop
   * configuration, and stage the test inputs (local path for all-ones, HDFS copy of the
   * Aster sample).
   */
  @BeforeClass
  public static void init() throws IOException
  {
    Properties props = MrGeoProperties.getInstance();
    origProtectionLevelRequired = props.getProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_REQUIRED);
    origProtectionLevelDefault = props.getProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_DEFAULT);
    origProtectionLevel = props.getProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL);

    conf = HadoopUtils.createConfiguration();

    testUtils = new TestUtils(IngestImageTest.class);
    input = testUtils.getInputLocal();
    inputHdfs = testUtils.getInputHdfs();
    outputHdfs = testUtils.getOutputHdfs();

    HadoopFileUtils.delete(inputHdfs);

    // copy test files up to HDFS
    //HadoopFileUtils.copyToHdfs(input, inputHdfs, "greece.tif");
    HadoopFileUtils.copyToHdfs(input, inputHdfs, aster_sample);

    File file = new File(all_ones_input);
    all_ones_input = "file://" + file.getAbsolutePath();
    all_ones_output = new Path(outputHdfs, all_ones).toString();
  }

  /** Restores the MrGeoProperties protection-level settings captured in init(). */
  @After
  public void teardown()
  {
    Properties props = MrGeoProperties.getInstance();
    if (origProtectionLevelRequired == null)
    {
      props.remove(MrGeoConstants.MRGEO_PROTECTION_LEVEL_REQUIRED);
    }
    else
    {
      props.setProperty(
          MrGeoConstants.MRGEO_PROTECTION_LEVEL_REQUIRED, origProtectionLevelRequired);
    }
    if (origProtectionLevelDefault == null)
    {
      props.remove(MrGeoConstants.MRGEO_PROTECTION_LEVEL_DEFAULT);
    }
    else
    {
      props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_DEFAULT, origProtectionLevelDefault);
    }
    if (origProtectionLevel == null)
    {
      props.remove(MrGeoConstants.MRGEO_PROTECTION_LEVEL);
    }
    else
    {
      props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL, origProtectionLevel);
    }
  }

  @Before
  public void setUp()
  {
    providerProperties = null;
    OpImageRegistrar.registerMrGeoOps();

    // tack on the test name to the output so each test writes to its own location
    all_ones_output = new Path(outputHdfs, testname.getMethodName()).toString();
  }

  /**
   * Verifies the fully-pyramided all-ones output: pyramid and metadata load, the
   * protection level matches, all 10 zoom levels have images, and the tile bounds at
   * max zoom are inclusive (12 tiles). Shared by the three "ingestSimple*" tests, which
   * differ only in the expected protection level.
   */
  private void verifyAllOnesPyramid(String expectedProtectionLevel) throws Exception
  {
    // We are not really interested in the actual data, just that things were built
    // (this is testing the command, not the algorithms).
    MrsImagePyramid pyramid = MrsImagePyramid.open(all_ones_output, providerProperties);
    Assert.assertNotNull("MrsImagePyramid not loaded", pyramid);

    MrsImagePyramidMetadata metadata = pyramid.getMetadata();
    Assert.assertNotNull("MrsImagePyramid metadata not loaded", metadata);
    Assert.assertEquals(expectedProtectionLevel, metadata.getProtectionLevel());
    Assert.assertEquals("Wrong number of levels", 10, metadata.getMaxZoomLevel());

    for (int level = metadata.getMaxZoomLevel(); level >= 1; level--)
    {
      MrsImage image = pyramid.getImage(level);
      Assert.assertNotNull("MrsImage image missing for level " + level, image);
      image.close();
    }

    // check that we ingested the right number of tiles - in particular, that our
    // maxTx/maxTy is inclusive
    LongRectangle tb = metadata.getTileBounds(metadata.getMaxZoomLevel());
    long numTiles = (tb.getMaxX() - tb.getMinX() + 1) * (tb.getMaxY() - tb.getMinY() + 1);
    Assert.assertEquals("Wrong number of tiles", 12L, numTiles);
  }

  @Test
  @Category(IntegrationTest.class)
  public void ingestSimple() throws Exception
  {
    String[] args = { all_ones_input, "-l", "-o", all_ones_output };

    int res = new IngestImage().run(args, conf, providerProperties);
    Assert.assertEquals("IngestImage command exited with error", 0, res);

    // No protection-level properties are set, so the ingested level should be empty.
    verifyAllOnesPyramid("");
  }

  @Test
  @Category(IntegrationTest.class)
  public void ingestSimpleWithDefaultProtectionLevel() throws Exception
  {
    String protectionLevel = "public";
    Properties props = MrGeoProperties.getInstance();
    props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_REQUIRED, "true");
    props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_DEFAULT, protectionLevel);
    props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL, "");

    String[] args = { all_ones_input, "-l", "-o", all_ones_output };

    int res = new IngestImage().run(args, conf, providerProperties);
    Assert.assertEquals("IngestImage command exited with error", 0, res);

    // With no -pl argument, the configured default protection level should be applied.
    verifyAllOnesPyramid(protectionLevel);
  }

  @Test
  @Category(IntegrationTest.class)
  public void ingestSimpleWithProtectionLevel() throws Exception
  {
    String protectionLevel = "private";
    Properties props = MrGeoProperties.getInstance();
    props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_REQUIRED, "true");
    props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_DEFAULT, "public");
    props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL, "");

    String[] args = { all_ones_input, "-l", "-o", all_ones_output, "-pl", protectionLevel };

    int res = new IngestImage().run(args, conf, providerProperties);
    Assert.assertEquals("IngestImage command exited with error", 0, res);

    // An explicit -pl argument should win over the configured default.
    verifyAllOnesPyramid(protectionLevel);
  }

  @Test
  @Category(IntegrationTest.class)
  public void ingestSkipPyramids() throws Exception
  {
    String[] args = { all_ones_input, "-o", all_ones_output, "-sp" };

    int res = new IngestImage().run(args, conf, providerProperties);
    Assert.assertEquals("IngestImage command exited with error", 0, res);

    // We are not really interested in the actual data, just that things were built
    // (this is testing the command, not the algorithms).
    MrsImagePyramid pyramid = MrsImagePyramid.open(all_ones_output, providerProperties);
    Assert.assertNotNull("MrsImagePyramid not loaded", pyramid);

    MrsImagePyramidMetadata metadata = pyramid.getMetadata();
    Assert.assertNotNull("MrsImagePyramid metadata not loaded", metadata);
    Assert.assertEquals("Wrong max zoom level", 10, metadata.getMaxZoomLevel());

    // With -sp, only the max zoom level should exist; all lower levels must be absent.
    MrsImage image = pyramid.getImage(metadata.getMaxZoomLevel());
    Assert.assertNotNull("MrsImage image missing for level " + metadata.getMaxZoomLevel(), image);
    image.close();

    for (int level = metadata.getMaxZoomLevel() - 1; level >= 1; level--)
    {
      image = pyramid.getImage(level);
      Assert.assertNull("MrsImage found for level " + level, image);
    }
  }

  @Test
  @Category(IntegrationTest.class)
  public void ingestMissingDefaultProtectionLevel() throws Exception
  {
    // Capture stdout to inspect the command's error message. FIX: the original test
    // saved System.out but never restored it, leaking the redirected stream into every
    // subsequent test in the JVM; restore it in a finally block.
    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
    PrintStream saveOut = System.out;
    System.setOut(new PrintStream(outContent));
    try
    {
      Properties props = MrGeoProperties.getInstance();
      props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_REQUIRED, "true");
      props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL_DEFAULT, "");
      props.setProperty(MrGeoConstants.MRGEO_PROTECTION_LEVEL, "");

      String[] args = { all_ones_input, "-o", all_ones_output, "-sp" };

      // Protection level is required but neither a default nor -pl was supplied; the
      // command must fail and report the missing option.
      int res = new IngestImage().run(args, conf, providerProperties);
      Assert.assertEquals(-1, res);
      Assert.assertTrue("Unexpected output: " + outContent.toString(),
          outContent.toString().contains("Missing required option: pl"));
    }
    finally
    {
      System.setOut(saveOut);
    }
  }

  // ignored because it _always_ times out during the 0.20.2 integration tests...
  @Ignore
  @Test(timeout = 250000)
  @Category(IntegrationTest.class)
  public void ingestPerformanceRegression() throws Exception
  {
    String inputAster = new Path(inputHdfs, aster_sample).toString();
    String outputAster = new Path(outputHdfs, aster_sample).toString();

    String[] args = { inputAster, "-o", outputAster, "-sp", "-l" };

    int res = new IngestImage().run(args, conf, providerProperties);
    Assert.assertEquals("IngestImageOld command exited with error", 0, res);

    MrsImagePyramid pyramid = MrsImagePyramid.open(outputAster, providerProperties);
    Assert.assertNotNull("MrsImagePyramid not loaded", pyramid);

    MrsImagePyramidMetadata metadata = pyramid.getMetadata();
    Assert.assertNotNull("MrsImagePyramid metadata not loaded", metadata);
    Assert.assertEquals("Wrong max zoom level", 12, metadata.getMaxZoomLevel());

    MrsImage image = pyramid.getImage(metadata.getMaxZoomLevel());
    Assert.assertNotNull("MrsImage image missing for level " + metadata.getMaxZoomLevel(), image);
    image.close();

    for (int level = metadata.getMaxZoomLevel() - 1; level >= 1; level--)
    {
      image = pyramid.getImage(level);
      Assert.assertNull("MrsImage found for level " + level, image);
    }
  }
}
/* * $Id$ * This file is a part of the Arakhne Foundation Classes, http://www.arakhne.org/afc * * Copyright (c) 2000-2012 Stephane GALLAND. * Copyright (c) 2005-10, Multiagent Team, Laboratoire Systemes et Transports, * Universite de Technologie de Belfort-Montbeliard. * Copyright (c) 2013-2016 The original authors, and other authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.arakhne.afc.math.geometry.d3.ifx; import java.util.ArrayList; import java.util.Iterator; import javafx.beans.binding.Bindings; import javafx.beans.property.BooleanProperty; import javafx.beans.property.ObjectProperty; import javafx.beans.property.ReadOnlyBooleanWrapper; import javafx.beans.property.ReadOnlyListProperty; import javafx.beans.property.ReadOnlyListWrapper; import javafx.beans.property.ReadOnlyObjectWrapper; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.collections.FXCollections; import org.eclipse.xtext.xbase.lib.Pure; import org.arakhne.afc.math.MathConstants; import org.arakhne.afc.math.geometry.MathFXAttributeNames; import org.arakhne.afc.math.geometry.PathElementType; import org.arakhne.afc.math.geometry.PathWindingRule; import org.arakhne.afc.math.geometry.d3.Point3D; import org.arakhne.afc.math.geometry.d3.Transform3D; import org.arakhne.afc.math.geometry.d3.ai.Path3ai; import org.arakhne.afc.math.geometry.d3.ai.PathIterator3ai; import org.arakhne.afc.vmutil.asserts.AssertMessages; import 
org.arakhne.afc.vmutil.locale.Locale; /** Path with 3 integer FX properties. * * @author $Author: sgalland$ * @author $Author: tpiotrow$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ * @since 13.0 */ public class Path3ifx extends AbstractShape3ifx<Path3ifx> implements Path3ai<Shape3ifx<?>, Path3ifx, PathElement3ifx, Point3ifx, Vector3ifx, RectangularPrism3ifx> { private static final long serialVersionUID = -5410743023218999966L; /** Array of types. */ private ReadOnlyListWrapper<PathElementType> types; /** Array of coords. */ private ReadOnlyListWrapper<Point3ifx> coords; /** Winding rule for the path. */ private ObjectProperty<PathWindingRule> windingRule; /** Indicates if the path is empty. * The path is empty when there is no point inside, or * all the points are at the same coordinate, or * when the path does not represents a drawable path * (a path with a line or a curve). */ private BooleanProperty isEmpty; /** Indicates if the path is a polyline. */ private BooleanProperty isPolyline; /** Indicates if the path is curved. */ private BooleanProperty isCurved; /** Indicates if the path is a polygon. */ private BooleanProperty isPolygon; /** Indicates if the path is multipart. */ private BooleanProperty isMultipart; /** Buffer for the bounds of the path that corresponds * to all the points added in the path. */ private ObjectProperty<RectangularPrism3ifx> logicalBounds; /** Construct an empty path. */ public Path3ifx() { this(DEFAULT_WINDING_RULE); } /** Construct a path by copying the given elements. * @param iterator the iterator that provides the elements to copy. */ public Path3ifx(Iterator<PathElement3ifx> iterator) { this(DEFAULT_WINDING_RULE, iterator); } /** Construct an empty path with the given path winding rule. * @param windingRule the path winding rule. 
*/ public Path3ifx(PathWindingRule windingRule) { assert windingRule != null : AssertMessages.notNullParameter(0); if (windingRule != DEFAULT_WINDING_RULE) { windingRuleProperty().set(windingRule); } } /** Construct a path by copying the given elements, and the given path winding rule. * @param windingRule the path winding rule. * @param iterator the iterator that provides the elements to copy. */ public Path3ifx(PathWindingRule windingRule, Iterator<PathElement3ifx> iterator) { assert windingRule != null : AssertMessages.notNullParameter(0); assert iterator != null : AssertMessages.notNullParameter(1); if (windingRule != DEFAULT_WINDING_RULE) { windingRuleProperty().set(windingRule); } add(iterator); } /** Constructor by copy. * @param path the path to copy. */ public Path3ifx(Path3ai<?, ?, ?, ?, ?, ?> path) { set(path); } @Pure @Override public boolean containsControlPoint(Point3D<?, ?> pt) { assert pt != null : AssertMessages.notNullParameter(); if (this.coords != null && !this.coords.isEmpty()) { for (int i = 0; i < this.coords.size(); i++) { final Point3ifx point = this.coords.get(i); if (point.ix() == pt.ix() && point.iy() == pt.iy() && point.iz() == pt.iz()) { return true; } } } return false; } @Override public void clear() { if (this.types != null) { this.types.clear(); } if (this.coords != null) { this.coords.clear(); } } @Pure @Override public Path3ifx clone() { final Path3ifx clone = super.clone(); clone.coords = null; if (this.coords != null && !this.coords.isEmpty()) { clone.innerCoordinatesProperty().addAll(this.coords); } clone.types = null; if (this.types != null && !this.types.isEmpty()) { clone.innerTypesProperty().addAll(this.types); } clone.windingRule = null; if (this.windingRule != null) { clone.windingRuleProperty().set(this.windingRule.get()); } clone.boundingBox = null; clone.logicalBounds = null; clone.isCurved = null; clone.isMultipart = null; clone.isPolyline = null; clone.isPolygon = null; clone.isEmpty = null; return clone; } @Pure 
@Override public int hashCode() { int bits = 1; bits = 31 * bits + ((this.coords == null) ? 0 : this.coords.hashCode()); bits = 31 * bits + ((this.types == null) ? 0 : this.types.hashCode()); bits = 31 * bits + ((this.windingRule == null) ? 0 : this.windingRule.hashCode()); return bits ^ (bits >> 31); } @Override public void translate(int dx, int dy, int dz) { for (final Point3ifx point : this.coords) { point.add(dx, dy, dz); } } @Override public void transform(Transform3D transform) { assert transform != null : AssertMessages.notNullParameter(); for (final Point3ifx point : this.coords) { transform.transform(point); } } /** Replies the isEmpty property. * * @return the isEmpty property. */ public BooleanProperty isEmptyProperty() { if (this.isEmpty == null) { this.isEmpty = new SimpleBooleanProperty(this, MathFXAttributeNames.IS_EMPTY); this.isEmpty.bind(Bindings.createBooleanBinding(() -> { final PathIterator3ai<PathElement3ifx> pi = getPathIterator(); PathElement3ifx pe; while (pi.hasNext()) { pe = pi.next(); if (pe.isDrawable()) { return false; } } return true; }, innerTypesProperty(), innerCoordinatesProperty())); } return this.isEmpty; } @Override public boolean isEmpty() { return isEmptyProperty().get(); } @Override public ObjectProperty<RectangularPrism3ifx> boundingBoxProperty() { if (this.boundingBox == null) { this.boundingBox = new ReadOnlyObjectWrapper<>(this, MathFXAttributeNames.BOUNDING_BOX); this.boundingBox.bind(Bindings.createObjectBinding(() -> { final RectangularPrism3ifx bb = getGeomFactory().newBox(); Path3ai.computeDrawableElementBoundingBox( getPathIterator(MathConstants.SPLINE_APPROXIMATION_RATIO), bb); return bb; }, innerCoordinatesProperty())); } return this.boundingBox; } /** Replies the property that corresponds to the bounding box of the control points. * * <p>The replied box is not the one corresponding to the drawable elements, as replied * by {@link #boundingBoxProperty()}. * * @return the bounding box of the control points. 
*/ public ObjectProperty<RectangularPrism3ifx> controlPointBoundingBoxProperty() { if (this.logicalBounds == null) { this.logicalBounds = new ReadOnlyObjectWrapper<>(this, MathFXAttributeNames.CONTROL_POINT_BOUNDING_BOX); this.logicalBounds.bind(Bindings.createObjectBinding(() -> { final RectangularPrism3ifx bb = getGeomFactory().newBox(); Path3ai.computeControlPointBoundingBox( getPathIterator(), bb); return bb; }, innerCoordinatesProperty())); } return this.logicalBounds; } @Override public RectangularPrism3ifx toBoundingBox() { return boundingBoxProperty().get().clone(); } @Override public void toBoundingBox(RectangularPrism3ifx box) { assert box != null : AssertMessages.notNullParameter(); box.set(boundingBoxProperty().get()); } /** Replies the windingRule property. * * @return the windingRule property. */ public ObjectProperty<PathWindingRule> windingRuleProperty() { if (this.windingRule == null) { this.windingRule = new SimpleObjectProperty<>(this, MathFXAttributeNames.WINDING_RULE, DEFAULT_WINDING_RULE); } return this.windingRule; } @Override public PathWindingRule getWindingRule() { return this.windingRule == null ? DEFAULT_WINDING_RULE : this.windingRule.get(); } @Override public void setWindingRule(PathWindingRule rule) { assert rule != null : AssertMessages.notNullParameter(); if (this.windingRule != null || rule != DEFAULT_WINDING_RULE) { windingRuleProperty().set(rule); } } /** Replies the isPolyline property. * * @return the isPolyline property. 
*/
public BooleanProperty isPolylineProperty() {
    if (this.isPolyline == null) {
        this.isPolyline = new ReadOnlyBooleanWrapper(this, MathFXAttributeNames.IS_POLYLINE, false);
        // Polyline == exactly one leading MOVE_TO followed by one or more LINE_TO elements.
        this.isPolyline.bind(Bindings.createBooleanBinding(() -> {
            boolean first = true;
            boolean hasOneLine = false;
            for (final PathElementType type : innerTypesProperty()) {
                if (first) {
                    if (type != PathElementType.MOVE_TO) {
                        return false;
                    }
                    first = false;
                } else if (type != PathElementType.LINE_TO) {
                    return false;
                } else {
                    hasOneLine = true;
                }
            }
            return hasOneLine;
        }, innerTypesProperty()));
    }
    return this.isPolyline;
}

@Override
public boolean isPolyline() {
    return isPolylineProperty().get();
}

/** Replies the isCurved property.
 *
 * @return the isCurved property.
 */
public BooleanProperty isCurvedProperty() {
    if (this.isCurved == null) {
        this.isCurved = new ReadOnlyBooleanWrapper(this, MathFXAttributeNames.IS_CURVED, false);
        // Curved == at least one CURVE_TO or QUAD_TO element.
        this.isCurved.bind(Bindings.createBooleanBinding(() -> {
            for (final PathElementType type : innerTypesProperty()) {
                if (type == PathElementType.CURVE_TO || type == PathElementType.QUAD_TO) {
                    return true;
                }
            }
            return false;
        }, innerTypesProperty()));
    }
    return this.isCurved;
}

@Override
public boolean isCurved() {
    return isCurvedProperty().get();
}

/** Replies the isMultiParts property.
 *
 * @return the isMultiParts property.
 */
public BooleanProperty isMultiPartsProperty() {
    if (this.isMultipart == null) {
        this.isMultipart = new ReadOnlyBooleanWrapper(this, MathFXAttributeNames.IS_MULTIPARTS, false);
        // Multi-part == at least two MOVE_TO elements.
        this.isMultipart.bind(Bindings.createBooleanBinding(() -> {
            boolean foundOne = false;
            for (final PathElementType type : innerTypesProperty()) {
                if (type == PathElementType.MOVE_TO) {
                    if (foundOne) {
                        return true;
                    }
                    foundOne = true;
                }
            }
            return false;
        }, innerTypesProperty()));
    }
    return this.isMultipart;
}

@Override
public boolean isMultiParts() {
    return isMultiPartsProperty().get();
}

/** Replies the isPolygon property.
 *
 * @return the isPolygon property.
 */
public BooleanProperty isPolygonProperty() {
    if (this.isPolygon == null) {
        this.isPolygon = new ReadOnlyBooleanWrapper(this, MathFXAttributeNames.IS_POLYGON, false);
        // Polygon == single part (one leading MOVE_TO, no further MOVE_TO) whose last element is CLOSE.
        this.isPolygon.bind(Bindings.createBooleanBinding(() -> {
            boolean first = true;
            boolean lastIsClose = false;
            for (final PathElementType type : innerTypesProperty()) {
                lastIsClose = false;
                if (first) {
                    if (type != PathElementType.MOVE_TO) {
                        return false;
                    }
                    first = false;
                } else if (type == PathElementType.MOVE_TO) {
                    return false;
                } else if (type == PathElementType.CLOSE) {
                    lastIsClose = true;
                }
            }
            return lastIsClose;
        }, innerTypesProperty()));
    }
    return this.isPolygon;
}

@Override
public boolean isPolygon() {
    return isPolygonProperty().get();
}

@Override
public void closePath() {
    // Add CLOSE unless the path already ends with CLOSE, or ends with a dangling MOVE_TO.
    // FIX: append through innerTypesProperty() so the lazily-created type list is
    // instantiated; the previous this.types.add(...) threw NPE when this.types was null.
    if (this.types == null || this.types.isEmpty()
            || (this.types.get(this.types.size() - 1) != PathElementType.CLOSE
                && this.types.get(this.types.size() - 1) != PathElementType.MOVE_TO)) {
        innerTypesProperty().add(PathElementType.CLOSE);
    }
}

@Override
public RectangularPrism3ifx toBoundingBoxWithCtrlPoints() {
    // Clone so callers cannot mutate the cached bounding box.
    return controlPointBoundingBoxProperty().get().clone();
}

@Override
public void toBoundingBoxWithCtrlPoints(RectangularPrism3ifx box) {
    assert box != null : AssertMessages.notNullParameter();
    box.set(controlPointBoundingBoxProperty().get());
}

@Override
public int[] toIntArray(Transform3D transform) {
    // Three slots per point: x, y, z.
    final int n = (this.coords != null) ? this.coords.size() * 3 : 0;
    final int[] clone = new int[n];
    if (n > 0) {
        final Iterator<Point3ifx> iterator = this.coords.iterator();
        int i = 0;
        while (iterator.hasNext()) {
            final Point3ifx point = iterator.next();
            if (!(transform == null || transform.isIdentity())) {
                transform.transform(point);
            }
            clone[i++] = point.ix();
            clone[i++] = point.iy();
            clone[i++] = point.iz();
        }
    }
    return clone;
}

@Override
public float[] toFloatArray(Transform3D transform) {
    // FIX: allocate 3 slots per point (x, y, z), consistent with toIntArray/toDoubleArray.
    // The previous length of size() caused an ArrayIndexOutOfBoundsException for any
    // non-empty path, since 3 values per point are written below.
    final int n = (this.coords != null) ? this.coords.size() * 3 : 0;
    final float[] clone = new float[n];
    if (n > 0) {
        final Iterator<Point3ifx> iterator = this.coords.iterator();
        int i = 0;
        while (iterator.hasNext()) {
            final Point3ifx point = iterator.next();
            if (!(transform == null || transform.isIdentity())) {
                transform.transform(point);
            }
            clone[i++] = (float) point.getX();
            clone[i++] = (float) point.getY();
            clone[i++] = (float) point.getZ();
        }
    }
    return clone;
}

@Override
public double[] toDoubleArray(Transform3D transform) {
    // Three slots per point: x, y, z.
    final int n = (this.coords != null) ? this.coords.size() * 3 : 0;
    final double[] clone = new double[n];
    if (n > 0) {
        final Iterator<Point3ifx> iterator = this.coords.iterator();
        int i = 0;
        while (iterator.hasNext()) {
            final Point3ifx point = iterator.next();
            if (!(transform == null || transform.isIdentity())) {
                transform.transform(point);
            }
            clone[i++] = point.getX();
            clone[i++] = point.getY();
            clone[i++] = point.getZ();
        }
    }
    return clone;
}

@Override
public Point3ifx[] toPointArray(Transform3D transform) {
    final int n = (this.coords != null) ? this.coords.size() : 0;
    final Point3ifx[] clone = new Point3ifx[n];
    if (n > 0) {
        final Iterator<Point3ifx> iterator = this.coords.iterator();
        // FIX: fetch each point inside the loop. The previous code pre-fetched one
        // point and then called iterator.next() again after storing the last element,
        // throwing NoSuchElementException for every non-empty path.
        for (int i = 0; i < n; ++i) {
            final Point3ifx point = iterator.next();
            if (!(transform == null || transform.isIdentity())) {
                transform.transform(point);
            }
            clone[i] = point;
        }
    }
    return clone;
}

@Override
public Point3ifx getPointAt(int index) {
    if (this.coords == null) {
        throw new IndexOutOfBoundsException();
    }
    return this.coords.get(index);
}

@Override
@Pure
public int getCurrentX() {
    if (this.coords == null) {
        throw new IndexOutOfBoundsException();
    }
    final int index = this.coords.size() - 1;
    return this.coords.get(index).ix();
}

@Override
@Pure
public int getCurrentY() {
    if (this.coords == null) {
        throw new IndexOutOfBoundsException();
    }
    final int index = this.coords.size() - 1;
    return this.coords.get(index).iy();
}

@Override
@Pure
public int getCurrentZ() {
    if (this.coords == null) {
        throw new IndexOutOfBoundsException();
    }
    final int index = this.coords.size() - 1;
    return this.coords.get(index).iz();
}

@Override
@Pure
public Point3ifx getCurrentPoint() {
    if (this.coords == null) {
        throw new IndexOutOfBoundsException();
    }
    final int baseIdx = this.coords.size() - 1;
    return this.coords.get(baseIdx);
}

@Override
public int size() {
    return (this.coords == null) ? 0 : this.coords.size();
}

@Override
@SuppressWarnings("checkstyle:magicnumber")
public void removeLast() {
    if (this.types != null && !this.types.isEmpty() && this.coords != null && !this.coords.isEmpty()) {
        final int lastIndex = this.types.size() - 1;
        final int coordSize = this.coords.size();
        // Number of coordinate points consumed by the last element determines
        // the start index of the range to drop.
        int coordIndex = coordSize;
        switch (this.types.get(lastIndex)) {
        case CLOSE:
            // no coord to remove
            break;
        case MOVE_TO:
            coordIndex = coordSize - 1;
            break;
        case LINE_TO:
            coordIndex = coordSize - 1;
            break;
        case CURVE_TO:
            coordIndex = coordSize - 3;
            break;
        case QUAD_TO:
            coordIndex = coordSize - 2;
            break;
        case ARC_TO:
        default:
            // ARC_TO is not supported by this implementation.
            throw new IllegalStateException();
        }
        this.coords.remove(coordIndex, coordSize);
        this.types.remove(lastIndex);
    } else {
        throw new IllegalStateException();
    }
}

@Override
public void moveTo(int x, int y, int z) {
    // Consecutive MOVE_TOs collapse: replace the pending move target in place.
    if (this.types != null && !this.types.isEmpty()
            && this.types.get(this.types.size() - 1) == PathElementType.MOVE_TO) {
        assert this.coords != null && !this.coords.isEmpty();
        final int idx = this.coords.size() - 1;
        this.coords.set(idx, getGeomFactory().newPoint(x, y, z));
    } else {
        innerTypesProperty().add(PathElementType.MOVE_TO);
        final ReadOnlyListWrapper<Point3ifx> coords = innerCoordinatesProperty();
        coords.add(getGeomFactory().newPoint(x, y, z));
    }
}

@Override
public void moveTo(Point3D<?, ?> position) {
    assert position != null : AssertMessages.notNullParameter();
    // Consecutive MOVE_TOs collapse: replace the pending move target in place.
    if (this.types != null && !this.types.isEmpty()
            && this.types.get(this.types.size() - 1) == PathElementType.MOVE_TO) {
        assert this.coords != null && !this.coords.isEmpty();
        final int idx = this.coords.size() - 1;
        this.coords.set(idx, getGeomFactory().convertToPoint(position));
    } else {
        innerTypesProperty().add(PathElementType.MOVE_TO);
        final ReadOnlyListWrapper<Point3ifx> coords = innerCoordinatesProperty();
        coords.add(getGeomFactory().convertToPoint(position));
    }
}

/** Ensure the path starts with a MOVE_TO; drawing elements are illegal on an empty path. */
private void ensureMoveTo() {
    if (this.types == null || this.types.isEmpty()) {
        throw new IllegalStateException(Locale.getString("E1")); //$NON-NLS-1$
    }
}

@Override
public void lineTo(int x, int y, int z) {
    ensureMoveTo();
    innerTypesProperty().add(PathElementType.LINE_TO);
    final ReadOnlyListWrapper<Point3ifx> coords = innerCoordinatesProperty();
    coords.add(getGeomFactory().newPoint(x, y, z));
}

@Override
public void lineTo(Point3D<?, ?> to) {
    assert to != null : AssertMessages.notNullParameter();
    ensureMoveTo();
    innerTypesProperty().add(PathElementType.LINE_TO);
    final ReadOnlyListWrapper<Point3ifx> coords = innerCoordinatesProperty();
    coords.add(getGeomFactory().convertToPoint(to));
}

@Override
public void quadTo(int x1, int y1, int z1, int x2, int y2, int z2) {
    ensureMoveTo();
    // QUAD_TO consumes two points: control point, then end point.
    innerTypesProperty().add(PathElementType.QUAD_TO);
    final ReadOnlyListWrapper<Point3ifx> coords = innerCoordinatesProperty();
    coords.add(getGeomFactory().newPoint(x1, y1, z1));
    coords.add(getGeomFactory().newPoint(x2, y2, z2));
}

@Override
public void quadTo(Point3D<?, ?> ctrl, Point3D<?, ?> to) {
    assert ctrl != null : AssertMessages.notNullParameter(0);
    assert to != null : AssertMessages.notNullParameter(1);
    ensureMoveTo();
    innerTypesProperty().add(PathElementType.QUAD_TO);
    final ReadOnlyListWrapper<Point3ifx> coords = innerCoordinatesProperty();
    coords.add(getGeomFactory().convertToPoint(ctrl));
    coords.add(getGeomFactory().convertToPoint(to));
}

@Override
@SuppressWarnings("checkstyle:parameternumber")
public void curveTo(int x1, int y1, int z1, int x2, int y2, int z2, int x3, int y3, int z3) {
    ensureMoveTo();
    // CURVE_TO consumes three points: two control points, then the end point.
    innerTypesProperty().add(PathElementType.CURVE_TO);
    final ReadOnlyListWrapper<Point3ifx> coords = innerCoordinatesProperty();
    coords.add(getGeomFactory().newPoint(x1, y1, z1));
    coords.add(getGeomFactory().newPoint(x2, y2, z2));
    coords.add(getGeomFactory().newPoint(x3, y3, z3));
}

@Override
@SuppressWarnings("checkstyle:parameternumber")
public void curveTo(Point3D<?, ?> ctrl1, Point3D<?, ?> ctrl2, Point3D<?, ?> to) {
    assert ctrl1 != null : AssertMessages.notNullParameter(0);
    assert ctrl2 != null : AssertMessages.notNullParameter(1);
    assert to != null : AssertMessages.notNullParameter(2);
    ensureMoveTo();
    innerTypesProperty().add(PathElementType.CURVE_TO);
    final ReadOnlyListWrapper<Point3ifx> coords = innerCoordinatesProperty();
    coords.add(getGeomFactory().convertToPoint(ctrl1));
    coords.add(getGeomFactory().convertToPoint(ctrl2));
    coords.add(getGeomFactory().convertToPoint(to));
}

/** Replies the private coordinates property, lazily creating it.
 *
 * @return the private coordinates property.
 */
protected ReadOnlyListWrapper<Point3ifx> innerCoordinatesProperty() {
    if (this.coords == null) {
        this.coords = new ReadOnlyListWrapper<>(this, MathFXAttributeNames.COORDINATES,
                FXCollections.observableList(new ArrayList<>()));
    }
    return this.coords;
}

/** Replies the coordinates property.
 *
 * @return the coordinates property.
 */
public ReadOnlyListProperty<Point3ifx> coordinatesProperty() {
    return innerCoordinatesProperty().getReadOnlyProperty();
}

@Override
public int getCoordAt(int index) {
    if (this.coords == null) {
        throw new IndexOutOfBoundsException();
    }
    // Coordinates are exposed as a flat (x, y, z, x, y, z, ...) sequence.
    final Point3ifx point = this.coords.get(index / 3);
    return index % 3 == 0 ? point.ix() : index % 3 == 1 ? point.iy() : point.iz();
}

@Override
public void setLastPoint(int x, int y, int z) {
    if (this.coords != null && !this.coords.isEmpty()) {
        final int idx = this.coords.size() - 1;
        final Point3ifx point = this.coords.get(idx);
        point.setX(x);
        point.setY(y);
        point.setZ(z);
    } else {
        throw new IllegalStateException();
    }
}

@Override
@SuppressWarnings({"checkstyle:cyclomaticcomplexity", "checkstyle:booleanexpressioncomplexity",
        "checkstyle:magicnumber"})
public boolean remove(int x, int y, int z) {
    if (this.types != null && !this.types.isEmpty() && this.coords != null && !this.coords.isEmpty()) {
        // i walks coordinate points, j walks element types; each element consumes
        // a type-dependent number of points.
        for (int i = 0, j = 0; i < this.coords.size() && j < this.types.size(); ++j) {
            final Point3ifx point = this.coords.get(i);
            switch (this.types.get(j)) {
            case MOVE_TO:
                //$FALL-THROUGH$
            case LINE_TO:
                if (x == point.ix() && y == point.iy() && z == point.iz()) {
                    this.coords.remove(i);
                    this.types.remove(j);
                    return true;
                }
                i++;
                break;
            case CURVE_TO:
                final Point3ifx p2 = this.coords.get(i + 1);
                final Point3ifx p3 = this.coords.get(i + 2);
                if ((x == point.ix() && y == point.iy() && z == point.iz())
                        || (x == p2.ix() && y == p2.iy() && z == p2.iz())
                        || (x == p3.ix() && y == p3.iy() && z == p3.iz())) {
                    this.coords.remove(i, i + 3);
                    this.types.remove(j);
                    return true;
                }
                i += 3;
                break;
            case QUAD_TO:
                final Point3ifx pt = this.coords.get(i + 1);
                if ((x == point.ix() && y == point.iy() && z == point.iz())
                        || (x == pt.ix() && y == pt.iy() && z == pt.iz())) {
                    this.coords.remove(i, i + 2);
                    this.types.remove(j);
                    return true;
                }
                i += 2;
                break;
            case CLOSE:
                break;
            case ARC_TO:
                // NOTE(review): remove(Point3D) throws IllegalStateException for ARC_TO
                // while this overload silently skips it — confirm which is intended.
            default:
                break;
            }
        }
    }
    return false;
}

/** Remove the point from this path.
 *
 * <p>If the given point does not match exactly a point in the path, nothing is removed.
 *
 * @param point the point to remove.
 * @return <code>true</code> if the point was removed; <code>false</code> otherwise.
 */
@SuppressWarnings({"checkstyle:magicnumber", "checkstyle:cyclomaticcomplexity"})
public boolean remove(Point3D<?, ?> point) {
    if (this.types != null && !this.types.isEmpty() && this.coords != null && !this.coords.isEmpty()) {
        // i walks coordinate points, j walks element types; each element consumes
        // a type-dependent number of points.
        for (int i = 0, j = 0; i < this.coords.size() && j < this.types.size(); j++) {
            final Point3ifx currentPoint = this.coords.get(i);
            switch (this.types.get(j)) {
            case MOVE_TO:
                //$FALL-THROUGH$
            case LINE_TO:
                if (point.equals(currentPoint)) {
                    this.coords.remove(i);
                    this.types.remove(j);
                    return true;
                }
                i++;
                break;
            case CURVE_TO:
                final Point3ifx p2 = this.coords.get(i + 1);
                final Point3ifx p3 = this.coords.get(i + 2);
                if ((point.equals(currentPoint)) || (point.equals(p2)) || (point.equals(p3))) {
                    this.coords.remove(i, i + 3);
                    this.types.remove(j);
                    return true;
                }
                i += 3;
                break;
            case QUAD_TO:
                final Point3ifx pt = this.coords.get(i + 1);
                if ((point.equals(currentPoint)) || (point.equals(pt))) {
                    this.coords.remove(i, i + 2);
                    this.types.remove(j);
                    return true;
                }
                i += 2;
                break;
            case CLOSE:
                break;
            case ARC_TO:
                throw new IllegalStateException();
            default:
                break;
            }
        }
    }
    return false;
}

@Override
public void set(Path3ifx path) {
    assert path != null : AssertMessages.notNullParameter();
    clear();
    add(path.getPathIterator());
}

/** Replies the private types property, lazily creating it.
 *
 * @return the private types property.
 */
protected ReadOnlyListWrapper<PathElementType> innerTypesProperty() {
    if (this.types == null) {
        this.types = new ReadOnlyListWrapper<>(this, MathFXAttributeNames.TYPES,
                FXCollections.observableList(new ArrayList<>()));
    }
    return this.types;
}

/** Replies the types property.
 *
 * @return the types property.
 */
public ReadOnlyListProperty<PathElementType> typesProperty() {
    return innerTypesProperty().getReadOnlyProperty();
}

@Override
public int getPathElementCount() {
    return this.types == null ? 0 : innerTypesProperty().size();
}

@Override
public PathElementType getPathElementTypeAt(int index) {
    if (this.types == null) {
        throw new IndexOutOfBoundsException();
    }
    return this.types.get(index);
}

}
/*
 * Copyright 2012, Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.LinkBench;

import java.io.File;
import java.io.IOException;
import java.util.Properties;

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.EnhancedPatternLayout;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Layout;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

/** Helpers for reading typed LinkBench configuration values and setting up logging. */
public class ConfigUtil {
  public static final String linkbenchHomeEnvVar = "LINKBENCH_HOME";
  public static final String LINKBENCH_LOGGER = "com.facebook.linkbench";

  /**
   * @return null if not set, or if not valid path
   */
  public static String findLinkBenchHome() {
    // FIX: use the declared constant instead of repeating the literal, so the
    // environment-variable name cannot drift out of sync with linkbenchHomeEnvVar.
    String linkBenchHome = System.getenv(linkbenchHomeEnvVar);
    if (linkBenchHome != null && linkBenchHome.length() > 0) {
      File dir = new File(linkBenchHome);
      if (dir.exists() && dir.isDirectory()) {
        return linkBenchHome;
      }
    }
    return null;
  }

  /**
   * Map the configured debug level to a log4j level.
   *
   * <p>Accepts either a numeric level (0 = INFO, 1 = DEBUG, 2+ = TRACE) or a
   * log4j level name (e.g. "WARN"). Defaults to DEBUG when unset.
   *
   * @throws LinkBenchConfigError if the value is neither numeric nor a level name
   */
  public static Level getDebugLevel(Properties props)
      throws LinkBenchConfigError {
    if (props == null) {
      return Level.DEBUG;
    }
    String levStr = props.getProperty(Config.DEBUGLEVEL);

    if (levStr == null) {
      return Level.DEBUG;
    }

    try {
      int level = Integer.parseInt(levStr);
      if (level <= 0) {
        return Level.INFO;
      } else if (level == 1) {
        return Level.DEBUG;
      } else {
        return Level.TRACE;
      }
    } catch (NumberFormatException e) {
      // Not numeric: fall back to parsing a symbolic level name.
      Level lev = Level.toLevel(levStr, null);
      if (lev != null) {
        return lev;
      } else {
        throw new LinkBenchConfigError("Invalid setting for debug level: " +
                                       levStr);
      }
    }
  }

  /**
   * Setup log4j to log to stderr with a timestamp and thread id
   * Could add in configuration from file later if it was really necessary
   * @param props
   * @param logFile if not null, info logging will be diverted to this file
   * @throws IOException
   * @throws Exception
   */
  public static void setupLogging(Properties props, String logFile)
      throws LinkBenchConfigError, IOException {
    Layout fmt = new EnhancedPatternLayout("%p %d [%t]: %m%n%throwable{30}");
    Level logLevel = ConfigUtil.getDebugLevel(props);
    // Replace any appenders configured elsewhere so output goes only where we direct it.
    Logger.getRootLogger().removeAllAppenders();
    Logger lbLogger = Logger.getLogger(LINKBENCH_LOGGER);
    lbLogger.setLevel(logLevel);
    ConsoleAppender console = new ConsoleAppender(fmt, "System.err");

    /* If logfile is specified, put full stream in logfile and only
     * print important messages to terminal */
    if (logFile != null) {
      console.setThreshold(Level.WARN); // Only print salient messages
      lbLogger.addAppender(new FileAppender(fmt, logFile));
    }
    lbLogger.addAppender(console);
  }

  /**
   * Look up key in props, failing if not present
   * @param props
   * @param key
   * @return
   * @throws LinkBenchConfigError thrown if key not present
   */
  public static String getPropertyRequired(Properties props, String key)
      throws LinkBenchConfigError {
    String v = props.getProperty(key);
    if (v == null) {
      throw new LinkBenchConfigError("Expected configuration key " + key +
                                     " to be defined");
    }
    return v;
  }

  public static int getInt(Properties props, String key)
      throws LinkBenchConfigError {
    return getInt(props, key, null);
  }

  /**
   * Retrieve a config key and convert to integer
   * @param props
   * @param key
   * @param defaultVal default value if key not present (null means required)
   * @return a non-null string value
   * @throws LinkBenchConfigError if not present or not integer
   */
  public static int getInt(Properties props, String key, Integer defaultVal)
      throws LinkBenchConfigError {
    if (defaultVal != null && !props.containsKey(key)) {
      return defaultVal;
    }
    String v = getPropertyRequired(props, key);
    try {
      return Integer.parseInt(v);
    } catch (NumberFormatException e) {
      throw new LinkBenchConfigError("Expected configuration key " + key +
                                     " to be integer, but was '" + v + "'");
    }
  }

  public static long getLong(Properties props, String key)
      throws LinkBenchConfigError {
    return getLong(props, key, null);
  }

  /**
   * Retrieve a config key and convert to long integer
   * @param props
   * @param key
   * @param defaultVal default value if key not present
   * @return
   * @throws LinkBenchConfigError if not present or not integer
   */
  public static long getLong(Properties props, String key, Long defaultVal)
      throws LinkBenchConfigError {
    if (defaultVal != null && !props.containsKey(key)) {
      return defaultVal;
    }
    String v = getPropertyRequired(props, key);
    try {
      return Long.parseLong(v);
    } catch (NumberFormatException e) {
      throw new LinkBenchConfigError("Expected configuration key " + key +
                                     " to be long integer, but was '" + v + "'");
    }
  }

  public static double getDouble(Properties props, String key)
      throws LinkBenchConfigError {
    return getDouble(props, key, null);
  }

  /**
   * Retrieve a config key and convert to double
   * @param props
   * @param key
   * @param defaultVal default value if key not present
   * @return
   * @throws LinkBenchConfigError if not present or not double
   */
  public static double getDouble(Properties props, String key,
                                 Double defaultVal) throws LinkBenchConfigError {
    if (defaultVal != null && !props.containsKey(key)) {
      return defaultVal;
    }
    String v = getPropertyRequired(props, key);
    try {
      return Double.parseDouble(v);
    } catch (NumberFormatException e) {
      throw new LinkBenchConfigError("Expected configuration key " + key +
                                     " to be double, but was '" + v + "'");
    }
  }

  /**
   * Retrieve a config key and convert to boolean.
   * Valid boolean strings are "true" or "false", case insensitive
   * @param props
   * @param key
   * @return
   * @throws LinkBenchConfigError if not present or not boolean
   */
  public static boolean getBool(Properties props, String key)
      throws LinkBenchConfigError {
    String v = getPropertyRequired(props, key).trim().toLowerCase();
    // Parse manually since parseBoolean accepts many things as "false"
    if (v.equals("true")) {
      return true;
    } else if (v.equals("false")) {
      return false;
    } else {
      throw new LinkBenchConfigError("Expected configuration key " + key +
                                     " to be true or false, but was '" + v + "'");
    }
  }
}
package com.thinkaurelius.titan.diskstorage.cache;

import com.thinkaurelius.titan.core.attribute.Duration;
import com.thinkaurelius.titan.diskstorage.BackendException;
import com.thinkaurelius.titan.diskstorage.Entry;
import com.thinkaurelius.titan.diskstorage.EntryList;
import com.thinkaurelius.titan.diskstorage.StaticBuffer;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.*;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.cache.CacheTransaction;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.cache.KCVSCache;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.inmemory.InMemoryStoreManager;
import com.thinkaurelius.titan.diskstorage.util.BufferUtil;
import com.thinkaurelius.titan.diskstorage.util.StandardBaseTransactionConfig;
import com.thinkaurelius.titan.diskstorage.util.StaticArrayEntry;
import com.thinkaurelius.titan.diskstorage.util.WriteByteBuffer;
import com.thinkaurelius.titan.diskstorage.util.time.StandardDuration;
import com.thinkaurelius.titan.diskstorage.util.time.TimestampProvider;
import com.thinkaurelius.titan.diskstorage.util.time.Timestamps;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

import static org.junit.Assert.*;

/**
 * Abstract base test for {@link KCVSCache} implementations. Subclasses supply the
 * concrete cache via {@link #getCache}; the tests wrap an in-memory store in
 * {@link CounterKCVS} so they can count how many slice reads reach the backing
 * store and thereby verify cache hits, misses, and invalidation.
 *
 * @author Matthias Broecheler (me@matthiasb.com)
 */
public abstract class KCVSCacheTest {

    public static final String STORE_NAME = "store";
    public static final TimestampProvider times = Timestamps.MICRO;
    // Upper bound passed to CacheTransaction for flushing writes.
    public static final Duration MAX_WRITE_TIME = new StandardDuration(100, TimeUnit.MILLISECONDS);

    public KeyColumnValueStoreManager storeManager;
    public CounterKCVS store;
    public KCVSCache cache;

    /** Fresh in-memory store + counting wrapper + cache under test before each test. */
    @Before
    public void setup() throws Exception {
        storeManager = new InMemoryStoreManager();
        store = new CounterKCVS(storeManager.openDatabase(STORE_NAME));
        cache = getCache(store);
    }

    /** Subclasses return the cache implementation under test, wrapping the given store. */
    public abstract KCVSCache getCache(KeyColumnValueStore store);

    /** Opens a raw store transaction; wraps the checked BackendException as unchecked. */
    public StoreTransaction getStoreTx() {
        try {
            return storeManager.beginTransaction(StandardBaseTransactionConfig.of(times));
        } catch (BackendException se) {
            throw new RuntimeException(se);
        }
    }

    /** Opens a cache transaction over a fresh store transaction (non-batching). */
    public CacheTransaction getCacheTx() {
        CacheTransaction cacheTx = new CacheTransaction(getStoreTx(), storeManager, 1024, MAX_WRITE_TIME, false);
        return cacheTx;
    }

    @After
    public void shutdown() throws Exception {
        cache.close();
        storeManager.close();
    }

    /** Populates keys 1..numKeys, each with columns 1..numCols where value == column. */
    public void loadStore(int numKeys, int numCols) {
        StoreTransaction tx = getStoreTx();
        try {
            for (int i=1;i<=numKeys;i++) {
                List<Entry> adds = new ArrayList<Entry>(numCols);
                for (int j=1;j<=numCols;j++) adds.add(getEntry(j,j));
                store.mutate(BufferUtil.getIntBuffer(i),adds,KeyColumnValueStore.NO_DELETIONS,tx);
            }
            tx.commit();
        } catch (BackendException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Verifies (a) that repeated reads are served from the cache (store call count
     * only grows when the cache is cleared) and (b) that mutations through the cache
     * invalidate the affected key so subsequent reads see the update.
     */
    @Test
    public void testSmallCache() throws Exception {
        final int numKeys = 100, numCols = 10;
        final int repeats = 100, clearEvery = 20, numMulti = 10;
        assertTrue(numCols>=10); //Assumed below
        loadStore(numKeys,numCols);
        //Repeatedly read from cache and clear in between
        int calls = 0;
        assertEquals(calls,store.getSliceCalls());
        for (int t=0;t<repeats;t++) {
            if (t%clearEvery==0) {
                cache.clearCache();
                // NOTE(review): expected-call accounting — two single-key slice reads per
                // key plus one multi-key query reach the store after each clear; presumably
                // the multi-query counts as one store call. Confirm against the cache impl.
                calls+=numKeys*2+1;
            }
            CacheTransaction tx = getCacheTx();
            for (int i=1;i<=numKeys;i++) {
                assertEquals(10,cache.getSlice(getQuery(i,0,numCols+1).setLimit(10),tx).size());
                assertEquals(3,cache.getSlice(getQuery(i,2,5),tx).size());
            }
            //Multi-query
            List<StaticBuffer> keys = new ArrayList<StaticBuffer>();
            for (int i=10;i<10+numMulti;i++) keys.add(BufferUtil.getIntBuffer(i));
            Map<StaticBuffer,EntryList> result = cache.getSlice(keys,getQuery(4,9),tx);
            assertEquals(keys.size(),result.size());
            for (StaticBuffer key : keys) assertTrue(result.containsKey(key));
            for (EntryList r : result.values()) {
                assertEquals(5,r.size());
            }
            tx.commit();
            // Between clears, every read above must be a cache hit.
            assertEquals(calls,store.getSliceCalls());
        }

        store.resetCounter();
        //Check invalidation
        StaticBuffer key = BufferUtil.getIntBuffer(23);
        List<StaticBuffer> keys = new ArrayList<StaticBuffer>();
        keys.add(key);
        keys.add(BufferUtil.getIntBuffer(12));
        keys.add(BufferUtil.getIntBuffer(5));
        //Read
        CacheTransaction tx = getCacheTx();
        assertEquals(numCols,cache.getSlice(new KeySliceQuery(key,getQuery(0,numCols+1)),tx).size());
        Map<StaticBuffer,EntryList> result = cache.getSlice(keys,getQuery(2,8),tx);
        assertEquals(keys.size(),result.size());
        assertEquals(6,result.get(key).size());
        //Update: delete every odd column of 'key' through the cache.
        List<Entry> dels = new ArrayList<Entry>(numCols/2);
        for (int j=1;j<=numCols;j=j+2) dels.add(getEntry(j,j));
        cache.mutateEntries(key, KeyColumnValueStore.NO_ADDITIONS, dels, tx);
        tx.commit();
        assertEquals(2,store.getSliceCalls());
        //Ensure updates are correctly read
        tx = getCacheTx();
        assertEquals(numCols/2,cache.getSlice(new KeySliceQuery(key,getQuery(0,numCols+1)),tx).size());
        result = cache.getSlice(keys,getQuery(2,8),tx);
        assertEquals(keys.size(),result.size());
        // Columns 2..7 of 'key' minus the deleted odd ones (3, 5, 7) leaves 3.
        assertEquals(3,result.get(key).size());
        tx.commit();
        // The mutation invalidated 'key', so both post-update reads hit the store again.
        assertEquals(4,store.getSliceCalls());
    }

    /** Slice over [startCol, endCol) for the given integer key. */
    public static KeySliceQuery getQuery(int key, int startCol, int endCol) {
        return new KeySliceQuery(BufferUtil.getIntBuffer(key),getQuery(startCol, endCol));
    }

    /** Column-range slice over [startCol, endCol). */
    public static SliceQuery getQuery(int startCol, int endCol) {
        return new SliceQuery(BufferUtil.getIntBuffer(startCol),BufferUtil.getIntBuffer(endCol));
    }

    /** Entry whose column and value are 4-byte big-endian ints; value starts at offset 4. */
    public static Entry getEntry(int col, int val) {
        return new StaticArrayEntry(new WriteByteBuffer(4 * 2).putInt(col).putInt(val).getStaticBuffer(), 4);
    }

    /**
     * Delegating KeyColumnValueStore that counts getSlice invocations, so tests can
     * distinguish cache hits (count unchanged) from misses (count increments).
     */
    public static class CounterKCVS implements KeyColumnValueStore {

        private final KeyColumnValueStore store;
        private final AtomicLong getSliceCounter;

        public CounterKCVS(KeyColumnValueStore store) {
            this.store = store;
            getSliceCounter = new AtomicLong(0);
        }

        public long getSliceCalls() {
            return getSliceCounter.get();
        }

        public void resetCounter() {
            getSliceCounter.set(0);
        }

        @Override
        public EntryList getSlice(KeySliceQuery query, StoreTransaction txh) throws BackendException {
            getSliceCounter.incrementAndGet();
            return store.getSlice(query,txh);
        }

        @Override
        public Map<StaticBuffer, EntryList> getSlice(List<StaticBuffer> keys, SliceQuery query, StoreTransaction txh) throws BackendException {
            // A multi-key query counts as a single store call.
            getSliceCounter.incrementAndGet();
            return store.getSlice(keys,query,txh);
        }

        @Override
        public void mutate(StaticBuffer key, List<Entry> additions, List<StaticBuffer> deletions, StoreTransaction txh) throws BackendException {
            store.mutate(key,additions,deletions,txh);
        }

        @Override
        public void acquireLock(StaticBuffer key, StaticBuffer column, StaticBuffer expectedValue, StoreTransaction txh) throws BackendException {
            store.acquireLock(key,column,expectedValue,txh);
        }

        @Override
        public KeyIterator getKeys(KeyRangeQuery query, StoreTransaction txh) throws BackendException {
            return store.getKeys(query,txh);
        }

        @Override
        public KeyIterator getKeys(SliceQuery query, StoreTransaction txh) throws BackendException {
            return store.getKeys(query,txh);
        }

        @Override
        public String getName() {
            return store.getName();
        }

        @Override
        public void close() throws BackendException {
            store.close();
        }
    }
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.xdebugger.impl.breakpoints;

import com.intellij.execution.impl.ConsoleViewUtil;
import com.intellij.ide.startup.StartupManagerEx;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.actionSystem.ex.ActionManagerEx;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.colors.EditorColorsAdapter;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.event.*;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.markup.MarkupEditorFilterFactory;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.project.DumbAwareRunnable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.AsyncResult;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileEvent;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.VirtualFileUrlChangeAdapter;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.util.Consumer;
import com.intellij.util.SmartList;
import com.intellij.util.containers.BidirectionalMap;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.Update;
import com.intellij.xdebugger.XDebuggerManager;
import com.intellij.xdebugger.breakpoints.SuspendPolicy;
import com.intellij.xdebugger.breakpoints.XBreakpoint;
import com.intellij.xdebugger.breakpoints.XLineBreakpoint;
import com.intellij.xdebugger.impl.XSourcePositionImpl;
import com.intellij.xdebugger.impl.ui.DebuggerUIUtil;
import gnu.trove.TIntHashSet;
import org.jetbrains.annotations.NotNull;

import java.awt.event.MouseEvent;
import java.util.Collection;
import java.util.List;

/**
 * Tracks line breakpoints per document, keeps their UI/positions up to date on
 * document and VFS changes, and toggles breakpoints on gutter mouse clicks.
 * UI refreshes are coalesced through a {@link MergingUpdateQueue}.
 *
 * @author nik
 */
public class XLineBreakpointManager {
  // Maps each breakpoint to the document it lives in; reverse lookup
  // (document -> breakpoints) is used when documents change.
  private final BidirectionalMap<XLineBreakpointImpl, Document> myBreakpoints = new BidirectionalMap<XLineBreakpointImpl, Document>();
  // Coalesces breakpoint UI updates (300ms merge window).
  private final MergingUpdateQueue myBreakpointsUpdateQueue;
  private final Project myProject;
  private final XDependentBreakpointManager myDependentBreakpointManager;
  private final StartupManagerEx myStartupManager;

  public XLineBreakpointManager(Project project, final XDependentBreakpointManager dependentBreakpointManager, final StartupManager startupManager) {
    myProject = project;
    myDependentBreakpointManager = dependentBreakpointManager;
    myStartupManager = (StartupManagerEx)startupManager;

    // The default (template) project has no editors or files; skip listener wiring.
    if (!myProject.isDefault()) {
      EditorEventMulticaster editorEventMulticaster = EditorFactory.getInstance().getEventMulticaster();
      editorEventMulticaster.addDocumentListener(new MyDocumentListener(), project);
      editorEventMulticaster.addEditorMouseListener(new MyEditorMouseListener(), project);
      editorEventMulticaster.addEditorMouseMotionListener(new MyEditorMouseMotionListener(), project);

      final MyDependentBreakpointListener myDependentBreakpointListener = new MyDependentBreakpointListener();
      myDependentBreakpointManager.addListener(myDependentBreakpointListener);
      Disposer.register(project, new Disposable() {
        @Override
        public void dispose() {
          myDependentBreakpointManager.removeListener(myDependentBreakpointListener);
        }
      });

      // Keep breakpoint file URLs in sync with file moves/renames, and drop
      // breakpoints whose file is deleted.
      VirtualFileManager.getInstance().addVirtualFileListener(new VirtualFileUrlChangeAdapter() {
        @Override
        protected void fileUrlChanged(String oldUrl, String newUrl) {
          for (XLineBreakpointImpl breakpoint : myBreakpoints.keySet()) {
            final String url = breakpoint.getFileUrl();
            // Prefix match: a directory rename affects every breakpoint under it.
            if (FileUtil.startsWith(url, oldUrl)) {
              breakpoint.setFileUrl(newUrl + url.substring(oldUrl.length()));
            }
          }
        }

        @Override
        public void fileDeleted(@NotNull VirtualFileEvent event) {
          List<XBreakpoint<?>> toRemove = new SmartList<XBreakpoint<?>>();
          for (XLineBreakpointImpl breakpoint : myBreakpoints.keySet()) {
            if (breakpoint.getFileUrl().equals(event.getFile().getUrl())) {
              toRemove.add(breakpoint);
            }
          }
          removeBreakpoints(toRemove);
        }
      }, project);
    }
    myBreakpointsUpdateQueue = new MergingUpdateQueue("XLine breakpoints", 300, true, null, project);

    // Update breakpoints colors if global color schema was changed
    final EditorColorsManager colorsManager = EditorColorsManager.getInstance();
    if (colorsManager != null) { // in some debugger tests EditorColorsManager component isn't loaded
      final MyEditorColorsListener myColorsSchemeListener = new MyEditorColorsListener();
      Disposer.register(project, new Disposable() {
        @Override
        public void dispose() {
          colorsManager.removeEditorColorsListener(myColorsSchemeListener);
        }
      });
      colorsManager.addEditorColorsListener(myColorsSchemeListener);
    }
  }

  /** Refreshes all breakpoint gutter icons, deferring until project startup finished. */
  public void updateBreakpointsUI() {
    if (myProject.isDefault()) return;

    Runnable runnable = new DumbAwareRunnable() {
      @Override
      public void run() {
        for (XLineBreakpointImpl breakpoint : myBreakpoints.keySet()) {
          breakpoint.updateUI();
        }
      }
    };

    if (ApplicationManager.getApplication().isUnitTestMode() || myStartupManager.startupActivityPassed()) {
      runnable.run();
    }
    else {
      myStartupManager.registerPostStartupActivity(runnable);
    }
  }

  public void registerBreakpoint(XLineBreakpointImpl breakpoint, final boolean initUI) {
    if (initUI) {
      breakpoint.updateUI();
    }
    Document document = breakpoint.getDocument();
    if (document != null) {
      myBreakpoints.put(breakpoint, document);
    }
  }

  public void unregisterBreakpoint(final XLineBreakpointImpl breakpoint) {
    RangeHighlighter highlighter = breakpoint.getHighlighter();
    // NOTE(review): the breakpoint is removed from the map only when it has a
    // highlighter — presumably mirroring the registerBreakpoint document check; confirm.
    if (highlighter != null) {
      myBreakpoints.remove(breakpoint);
    }
  }

  /**
   * Re-validates all breakpoints in the changed document; removes those that became
   * invalid or collapsed onto a line already holding another breakpoint.
   */
  private void updateBreakpoints(@NotNull Document document) {
    Collection<XLineBreakpointImpl> breakpoints = myBreakpoints.getKeysByValue(document);
    if (breakpoints == null) {
      return;
    }

    TIntHashSet lines = new TIntHashSet();
    List<XBreakpoint<?>> toRemove = new SmartList<XBreakpoint<?>>();
    for (XLineBreakpointImpl breakpoint : breakpoints) {
      breakpoint.updatePosition();
      // lines.add returns false when the line was already taken -> duplicate.
      if (!breakpoint.isValid() || !lines.add(breakpoint.getLine())) {
        toRemove.add(breakpoint);
      }
    }
    removeBreakpoints(toRemove);
  }

  private void removeBreakpoints(final List<? extends XBreakpoint<?>> toRemove) {
    if (toRemove.isEmpty()) {
      return;
    }
    // Breakpoint removal mutates the model; must run inside a write action.
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
      @Override
      public void run() {
        for (XBreakpoint<?> breakpoint : toRemove) {
          XDebuggerManager.getInstance(myProject).getBreakpointManager().removeBreakpoint(breakpoint);
        }
      }
    });
  }

  /** Updates the breakpoint UI immediately on EDT, otherwise via the merge queue. */
  public void breakpointChanged(final XLineBreakpointImpl breakpoint) {
    if (ApplicationManager.getApplication().isDispatchThread()) {
      breakpoint.updateUI();
    }
    else {
      queueBreakpointUpdate(breakpoint);
    }
  }

  public void queueBreakpointUpdate(final XBreakpoint<?> slave) {
    if (slave instanceof XLineBreakpointImpl<?>) {
      queueBreakpointUpdate((XLineBreakpointImpl<?>)slave);
    }
  }

  // Skip waiting 300ms
  public void queueBreakpointUpdate(@NotNull final XLineBreakpointImpl<?> breakpoint) {
    myBreakpointsUpdateQueue.queue(new Update(breakpoint) {
      @Override
      public void run() {
        breakpoint.updateUI();
      }
    });
  }

  public void queueAllBreakpointsUpdate() {
    myBreakpointsUpdateQueue.queue(new Update("all breakpoints") {
      @Override
      public void run() {
        for (XLineBreakpointImpl breakpoint : myBreakpoints.keySet()) {
          breakpoint.updateUI();
        }
      }
    });
  }

  /** Re-validates a document's breakpoints after edits, coalesced per document. */
  private class MyDocumentListener extends DocumentAdapter {
    @Override
    public void documentChanged(final DocumentEvent e) {
      final Document document = e.getDocument();
      Collection<XLineBreakpointImpl> breakpoints = myBreakpoints.getKeysByValue(document);
      if (breakpoints != null && !breakpoints.isEmpty()) {
        myBreakpointsUpdateQueue.queue(new Update(document) {
          @Override
          public void run() {
            updateBreakpoints(document);
          }
        });
      }
    }
  }

  // Set when the mouse moves with a button down; used to suppress breakpoint
  // toggling at the end of a drag-selection.
  private boolean myDragDetected = false;

  private class MyEditorMouseMotionListener extends EditorMouseMotionAdapter {
    @Override
    public void mouseDragged(EditorMouseEvent e) {
      myDragDetected = true;
    }
  }

  /** Toggles a line breakpoint on a plain left-click in the editor gutter. */
  private class MyEditorMouseListener extends EditorMouseAdapter {
    @Override
    public void mousePressed(EditorMouseEvent e) {
      myDragDetected = false;
    }

    @Override
    public void mouseClicked(final EditorMouseEvent e) {
      final Editor editor = e.getEditor();
      final MouseEvent mouseEvent = e.getMouseEvent();
      // Ignore anything that is not a plain left-click in this project's gutter:
      // popup triggers, modifier clicks, diff viewers, console editors, and the
      // tail end of a drag selection.
      if (mouseEvent.isPopupTrigger() ||
          mouseEvent.isMetaDown() || mouseEvent.isControlDown() ||
          mouseEvent.getButton() != MouseEvent.BUTTON1 ||
          MarkupEditorFilterFactory.createIsDiffFilter().avaliableIn(editor) ||
          !isInsideGutter(e, editor) ||
          ConsoleViewUtil.isConsoleViewEditor(editor) ||
          !isFromMyProject(editor) ||
          (editor.getSelectionModel().hasSelection() && myDragDetected)
        ) {
        return;
      }

      PsiDocumentManager.getInstance(myProject).commitAndRunReadAction(new Runnable() {
        @Override
        public void run() {
          final int line = EditorUtil.yPositionToLogicalLine(editor, mouseEvent);
          final Document document = editor.getDocument();
          final VirtualFile file = FileDocumentManager.getInstance().getFile(document);
          if (line >= 0 && line < document.getLineCount() && file != null) {
            ApplicationManager.getApplication().invokeLater(new Runnable() {
              @Override
              public void run() {
                if (!myProject.isDisposed() && myProject.isInitialized() && file.isValid()) {
                  ActionManagerEx.getInstanceEx().fireBeforeActionPerformed(IdeActions.ACTION_TOGGLE_LINE_BREAKPOINT, e.getMouseEvent());
                  // Alt-click requests a temporary breakpoint.
                  AsyncResult<XLineBreakpoint> result = XBreakpointUtil.toggleLineBreakpoint(
                    myProject, XSourcePositionImpl.create(file, line), editor, mouseEvent.isAltDown(), false);
                  result.doWhenDone(new Consumer<XLineBreakpoint>() {
                    @Override
                    public void consume(XLineBreakpoint breakpoint) {
                      // Shift-click creates a non-suspending logging breakpoint and
                      // immediately opens its editor balloon.
                      if (!mouseEvent.isAltDown() && mouseEvent.isShiftDown() && breakpoint != null) {
                        breakpoint.setSuspendPolicy(SuspendPolicy.NONE);
                        String selection = editor.getSelectionModel().getSelectedText();
                        if (selection != null) {
                          breakpoint.setLogExpression(selection);
                        }
                        else {
                          breakpoint.setLogMessage(true);
                        }
                        // edit breakpoint
                        DebuggerUIUtil.showXBreakpointEditorBalloon(myProject, mouseEvent.getPoint(), ((EditorEx)editor).getGutterComponentEx(), false, breakpoint);
                      }
                    }
                  });
                }
              }
            });
          }
        }
      });
    }

    /** True when the click landed in the line-marker/folding gutter strip, left of the whitespace separator. */
    private boolean isInsideGutter(EditorMouseEvent e, Editor editor) {
      if (e.getArea() != EditorMouseEventArea.LINE_MARKERS_AREA &&
          e.getArea() != EditorMouseEventArea.FOLDING_OUTLINE_AREA) {
        return false;
      }
      return e.getMouseEvent().getX() <= ((EditorEx)editor).getGutterComponentEx().getWhitespaceSeparatorOffset();
    }
  }

  /** True when the editor belongs to this project, directly or via an open file editor. */
  private boolean isFromMyProject(@NotNull Editor editor) {
    if (myProject == editor.getProject()) {
      return true;
    }

    for (FileEditor fileEditor : FileEditorManager.getInstance(myProject).getAllEditors()) {
      if (fileEditor instanceof TextEditor && ((TextEditor)fileEditor).getEditor().equals(editor)) {
        return true;
      }
    }
    return false;
  }

  /** Refreshes a slave breakpoint's UI when its master relationship changes. */
  private class MyDependentBreakpointListener implements XDependentBreakpointListener {
    @Override
    public void dependencySet(final XBreakpoint<?> slave, final XBreakpoint<?> master) {
      queueBreakpointUpdate(slave);
    }

    @Override
    public void dependencyCleared(final XBreakpoint<?> breakpoint) {
      queueBreakpointUpdate(breakpoint);
    }
  }

  /** Repaints breakpoint icons when the global editor color scheme changes. */
  private class MyEditorColorsListener extends EditorColorsAdapter {
    @Override
    public void globalSchemeChange(EditorColorsScheme scheme) {
      updateBreakpointsUI();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.curator.discovery;

import com.google.common.base.Preconditions;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.leader.LeaderLatch;
import org.apache.curator.framework.recipes.leader.LeaderLatchListener;
import org.apache.curator.framework.recipes.leader.Participant;
import org.apache.druid.concurrent.LifecycleLock;
import org.apache.druid.discovery.DruidLeaderSelector;
import org.apache.druid.guice.annotations.Self;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.guava.CloseQuietly;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.server.DruidNode;

import javax.annotation.Nullable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicReference;

/**
 * {@link DruidLeaderSelector} built on Curator's {@link LeaderLatch} recipe.
 * A latch is created eagerly in the constructor so {@link #getCurrentLeader()}
 * works before this node joins the election; the node only participates once
 * {@link #registerListener} starts a latch that has a listener attached.
 */
public class CuratorDruidLeaderSelector implements DruidLeaderSelector
{
  private static final EmittingLogger log = new EmittingLogger(CuratorDruidLeaderSelector.class);

  // Guards the start/stop lifecycle of this selector.
  private final LifecycleLock lifecycleLock = new LifecycleLock();

  private final DruidNode self;
  private final CuratorFramework curator;
  private final String latchPath;

  // Single-threaded executor on which LeaderLatch callbacks run; created in
  // registerListener().
  private ExecutorService listenerExecutor;

  private DruidLeaderSelector.Listener listener = null;
  // Holds the currently active latch; swapped atomically when the latch is
  // recreated after a becomeLeader() failure.
  private final AtomicReference<LeaderLatch> leaderLatch = new AtomicReference<>();

  // volatile: read by isLeader()/localTerm() from arbitrary threads, written
  // only from the listener executor.
  private volatile boolean leader = false;
  private volatile int term = 0;

  public CuratorDruidLeaderSelector(CuratorFramework curator, @Self DruidNode self, String latchPath)
  {
    this.curator = curator;
    this.self = self;
    this.latchPath = latchPath;

    // Creating a LeaderLatch here allows us to query for the current leader. We will not be considered for leadership
    // election until LeaderLatch.start() is called in registerListener(). This allows clients to observe the current
    // leader without being involved in the election.
    this.leaderLatch.set(createNewLeaderLatch());
  }

  /** Builds a latch whose participant id is this node's scheme://host:port. */
  private LeaderLatch createNewLeaderLatch()
  {
    return new LeaderLatch(curator, latchPath, self.getServiceScheme() + "://" + self.getHostAndPortToUse());
  }

  /**
   * Builds a latch wired to the registered {@link #listener}, publishes it in
   * {@link #leaderLatch}, and returns the PREVIOUS latch (so callers can close
   * it). Note: the returned value is the old latch, not the new one.
   */
  private LeaderLatch createNewLeaderLatchWithListener()
  {
    final LeaderLatch newLeaderLatch = createNewLeaderLatch();

    newLeaderLatch.addListener(
        new LeaderLatchListener()
        {
          @Override
          public void isLeader()
          {
            try {
              if (leader) {
                log.warn("I'm being asked to become leader. But I am already the leader. Ignored event.");
                return;
              }

              leader = true;
              term++;
              listener.becomeLeader();
            }
            catch (Exception ex) {
              log.makeAlert(ex, "listener becomeLeader() failed. Unable to become leader").emit();

              // give others a chance to become leader.
              // createNewLeaderLatchWithListener() installs a fresh latch and
              // returns the old one, which is then closed to drop leadership.
              final LeaderLatch oldLatch = createNewLeaderLatchWithListener();
              CloseQuietly.close(oldLatch);
              leader = false;
              try {
                //Small delay before starting the latch so that others waiting are chosen to become leader.
                Thread.sleep(ThreadLocalRandom.current().nextInt(1000, 5000));
                leaderLatch.get().start();
              }
              catch (Exception e) {
                // If an exception gets thrown out here, then the node will zombie out 'cause it won't be looking for
                // the latch anymore. I don't believe it's actually possible for an Exception to throw out here, but
                // Curator likes to have "throws Exception" on methods so it might happen...
                log.makeAlert(e, "I am a zombie").emit();
              }
            }
          }

          @Override
          public void notLeader()
          {
            try {
              if (!leader) {
                log.warn("I'm being asked to stop being leader. But I am not the leader. Ignored event.");
                return;
              }

              leader = false;
              listener.stopBeingLeader();
            }
            catch (Exception ex) {
              log.makeAlert(ex, "listener.stopBeingLeader() failed. Unable to stopBeingLeader").emit();
            }
          }
        },
        listenerExecutor
    );

    return leaderLatch.getAndSet(newLeaderLatch);
  }

  /**
   * Returns the id of the current leader as reported by ZooKeeper, or null
   * when no participant is leader. Wraps Curator failures in RuntimeException.
   */
  @Nullable
  @Override
  public String getCurrentLeader()
  {
    try {
      final LeaderLatch latch = leaderLatch.get();

      Participant participant = latch.getLeader();
      if (participant.isLeader()) {
        return participant.getId();
      }

      return null;
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /** True while this node holds leadership (volatile read, may lag ZK slightly). */
  @Override
  public boolean isLeader()
  {
    return leader;
  }

  /** Number of times this node has become leader locally. */
  @Override
  public int localTerm()
  {
    return term;
  }

  /**
   * Registers the listener and joins the election. May be called at most once;
   * subsequent calls fail with ISE via the lifecycle lock.
   */
  @Override
  public void registerListener(DruidLeaderSelector.Listener listener)
  {
    Preconditions.checkArgument(listener != null, "listener is null.");

    if (!lifecycleLock.canStart()) {
      throw new ISE("can't start.");
    }
    try {
      this.listener = listener;
      this.listenerExecutor = Execs.singleThreaded(
          StringUtils.format(
              "LeaderSelector[%s]",
              StringUtils.encodeForFormat(latchPath)
          )
      );

      createNewLeaderLatchWithListener();
      leaderLatch.get().start();

      lifecycleLock.started();
    }
    catch (Exception ex) {
      throw new RuntimeException(ex);
    }
    finally {
      lifecycleLock.exitStart();
    }
  }

  /** Leaves the election: closes the active latch and stops the callback executor. */
  @Override
  public void unregisterListener()
  {
    if (!lifecycleLock.canStop()) {
      throw new ISE("can't stop.");
    }
    CloseQuietly.close(leaderLatch.get());
    listenerExecutor.shutdownNow();
  }
}
/**
 * Copyright [2014] Gaurav Gupta
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.netbeans.jpa.modeler.reveng.database;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.netbeans.api.project.SourceGroup;
import org.netbeans.jpa.modeler.source.SourceGroups;
import org.netbeans.jpa.modeler.reveng.database.generator.IPersistenceModelGenerator;
import org.netbeans.modules.j2ee.persistence.dd.JavaPersistenceQLKeywords;
import org.netbeans.modules.j2ee.persistence.entitygenerator.EntityMember;
import org.netbeans.modules.j2ee.persistence.wizard.fromdb.Table;
import org.netbeans.modules.j2ee.persistence.wizard.fromdb.TableClosure;
import org.openide.filesystems.FileObject;
import org.openide.util.ChangeSupport;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;

/**
 * Wizard model for the set of database tables selected for entity generation.
 * Tracks a proposed Java class name per table, validates those names (valid
 * identifier, not a JPA QL keyword, not already present in the target folder),
 * and notifies registered listeners whenever the selection, a class name, or
 * the validation problems change.
 */
public final class SelectedTables {

    // not private because used in tests
    enum Problem { NO_JAVA_IDENTIFIER, JPA_QL_IDENTIFIER, ALREADY_EXISTS };

    private final IPersistenceModelGenerator persistenceGen;
    // Overrides for proposed class names, keyed by table; tables without an
    // entry fall back to a name derived from the table name (see getClassName).
    private final Map<Table, String> table2ClassName = new HashMap<Table, String>();
    // TreeMap so getFirstProblemEntry() deterministically reports the problem
    // of the smallest table in the natural ordering.
    private final Map<Table, Set<Problem>> table2Problems = new TreeMap<Table, Set<Problem>>();
//    private final Map<Table, UpdateType> table2UpdateType = new HashMap<Table, UpdateType>();
    private final ChangeListener tableClosureListener = new TableClosureListener();
    private final ChangeSupport changeSupport = new ChangeSupport(this);

    private TableClosure tableClosure;
    private SourceGroup location;
    private String packageName;
    private FileObject targetFolder;
    // Tables whose class names were validated in the last pass; used to
    // validate only the delta when the selection changes.
    private Set<Table> validatedTables = Collections.emptySet();

    public SelectedTables(IPersistenceModelGenerator persistenceGen, TableClosure tableClosure, SourceGroup location, String packageName) throws IOException {
//        assert persistenceGen != null;
        this.persistenceGen = persistenceGen;
        setTableClosureAndTargetFolder(tableClosure, location, packageName);
    }

    /**
     * Sets the new table closure and target folder at once. This is needed in
     * order to avoid multiple validations in the table closure and the target
     * folder were set separately.
     */
    public void setTableClosureAndTargetFolder(TableClosure tableClosure, SourceGroup location, String packageName) throws IOException {
        assert tableClosure != null;
        boolean tableClosureChanged = changeTableClosure(tableClosure);
        boolean targetFolderChanged = changeTargetFolder(location, packageName);
        if (tableClosureChanged || targetFolderChanged) {
            revalidateTables();
        }
    }

    /**
     * Sets the new target folder.
     */
    public void setTargetFolder(SourceGroup location, String packageName) throws IOException {
        if (changeTargetFolder(location, packageName)) {
            revalidateTables();
        }
    }

    /**
     * Sets the new table closure, returning true if the new table closure was
     * different than the current value and false otherwise.
     */
    private boolean changeTableClosure(TableClosure tableClosure) {
        if (!tableClosure.equals(this.tableClosure)) {
            if (this.tableClosure != null) {
                // stop listening to the closure being replaced
                this.tableClosure.removeChangeListener(tableClosureListener);
            }
            this.tableClosure = tableClosure;
            // proposed class names belong to the old closure; drop them
            table2ClassName.clear();
//            table2UpdateType.clear();
            this.tableClosure.addChangeListener(tableClosureListener);
            return true;
        }
        return false;
    }

    /**
     * Sets the new target folder, returning true if the new target folder was
     * different from the current value and false otherwise.
     */
    private boolean changeTargetFolder(SourceGroup location, String packageName) throws IOException {
        if (!Utilities.compareObjects(location, this.location) || !Utilities.compareObjects(packageName, this.packageName)) {
            this.location = location;
            this.packageName = packageName;
            if (location != null && packageName != null) {
                targetFolder = SourceGroups.getFolderForPackage(location, packageName, false);
            } else {
                targetFolder = null;
            }
            return true;
        }
        return false;
    }

    /**
     * Adds a new change listener, which will be called when the list of
     * selected tables changes, when a class name for a table changes or when
     * the return value of {@link #getFirstProblemDisplayName} changes.
     */
    public void addChangeListener(ChangeListener listener) {
        changeSupport.addChangeListener(listener);
    }

    /**
     * Removes a change listener.
     */
    public void removeChangeListener(ChangeListener listener) {
        changeSupport.removeChangeListener(listener);
    }

    // Full revalidation: forget all cached results and validate every
    // currently selected table.
    private void revalidateTables() {
        validatedTables = Collections.emptySet();
        table2Problems.clear();
        validateTables();
    }

    // Incremental validation: only tables added to / removed from the
    // selection since the last pass are (re)processed. Always fires a change.
    private void validateTables() {
        Set<Table> addedTables = new HashSet<Table>(tableClosure.getSelectedTables());
        addedTables.removeAll(validatedTables);

        Set<Table> removedTables = new HashSet<Table>(validatedTables);
        removedTables.removeAll(tableClosure.getSelectedTables());

        for (Table table : removedTables) {
            table2Problems.remove(table);
        }
        for (Table table : addedTables) {
            putProblems(table, validateClassName(getClassName(table)));
        }

        validatedTables = new HashSet<Table>(tableClosure.getSelectedTables());
        changeSupport.fireChange();
    }

    /**
     * Returns a sorted list of selected tables. This returns the same tables
     * as {@link TableClosure#getSelectedTables}.
     */
    public List<Table> getTables() {
        List<Table> result = new ArrayList<Table>(tableClosure.getSelectedTables());
        Collections.sort(result);
        return result;
    }

    /**
     * Returns the class name for the given table. Falls back to a name derived
     * from the table name when no explicit name has been set.
     */
    public String getClassName(Table table) {
        assert table != null;
        String className = table2ClassName.get(table);
        if (className == null) {
            className = EntityMember.makeClassName(table.getName());
//            String exClassName = persistenceGen.getFQClassName(table.getName());
//            if (exClassName != null) {
//                int i = exClassName.lastIndexOf('.');
//                if (i > -1) {
//                    exClassName = exClassName.substring(i + 1);
//                }
//                className = persistenceGen.generateEntityName(exClassName);
//            } else {
//                className = EntityMember.makeClassName(table.getName());
//                className = persistenceGen.generateEntityName(className);
//            }
        }
        return className;
    }

    /**
     * Sets the class name for the given table, revalidates it and notifies
     * listeners.
     */
    public void setClassName(Table table, String className) {
        assert table != null;
        assert className != null;
        table2ClassName.put(table, className);
        putProblems(table, validateClassName(className));
        changeSupport.fireChange();
    }

//    public void setUpdateType(Table table, UpdateType updateType) {
//        assert table != null;
//        assert updateType != null;
//
//        table2UpdateType.put(table, updateType);
//        changeSupport.fireChange();
//    }
//
//    public UpdateType getUpdateType(Table table) {
//        assert table != null;
//        UpdateType ut = table2UpdateType.get(table);
//
//        if (table.getDisabledReason() instanceof Table.ExistingDisabledReason) {
//            if (ut == null || ut == UpdateType.NEW) {
//                table2UpdateType.remove(table);
//                ut = UpdateType.UPDATE;
//            }
//        } else {
//            if (ut != null) {
//                table2UpdateType.remove(table);
//            }
//            ut = UpdateType.NEW;
//        }
//
//        return ut;
//    }

    FileObject getTargetFolder() {
        return targetFolder;
    }

    // Stores or clears the problem set for a table; an empty set means the
    // table has no entry at all (hasProblem() relies on this).
    private void putProblems(Table table, Set<Problem> problems) {
        if (problems.isEmpty()) {
            table2Problems.remove(table);
        } else {
            table2Problems.put(table, problems);
        }
    }

    // Validates a proposed class name; returns the (possibly empty) set of
    // problems found.
    private Set<Problem> validateClassName(String className) {
        Set<Problem> problems = EnumSet.noneOf(Problem.class);
        if (!Utilities.isJavaIdentifier(className)) {
            problems.add(Problem.NO_JAVA_IDENTIFIER);
        }
        if (JavaPersistenceQLKeywords.isKeyword(className)) {
            problems.add(Problem.JPA_QL_IDENTIFIER);
        }
        /* commented to have an ability to update entity classes
        if (targetFolder != null && targetFolder.getFileObject(className, "java") != null) { // NOI18N
            problems.add(Problem.ALREADY_EXISTS);
        }
        */
        return problems;
    }

    // Appends '1' to any proposed class name that collides with an existing
    // .java file in the target package.
    // NOTE(review): a single '1' suffix may still collide (e.g. "Name1.java"
    // already present); repeated invocation would be needed — verify intent.
    public void ensureUniqueClassNames() {
        // make sure proposed class names are unique in the target package
        Set<Table> tables = tableClosure.getSelectedTables();
        if (targetFolder != null) {
            for (Table t : tables) {
                String className = getClassName(t);
                boolean existingFile = (targetFolder.getFileObject(className, "java") != null); // NOI18N
                if (existingFile) {
                    setClassName(t, className + '1');
                }
            }
        }
    }

    /**
     * Returns the display name of the first problem regarding a table or its
     * class name (the class name may be an invalid Java identifier or a JPA QL
     * reserved keyword, or the class might exist in the target folder).
     */
    public String getFirstProblemDisplayName() {
        Map.Entry<Table, Set<Problem>> firstProblemEntry = getFirstProblemEntry();
        if (firstProblemEntry == null) {
            return null;
        }
        return getProblemDisplayNameForTable(firstProblemEntry.getKey(), firstProblemEntry.getValue());
    }

    // First entry of the (sorted) problem map, or null when everything is valid.
    private Map.Entry<Table, Set<Problem>> getFirstProblemEntry() {
        Set<Map.Entry<Table, Set<Problem>>> problemEntries = table2Problems.entrySet();
        if (problemEntries.isEmpty()) {
            return null;
        }
        return problemEntries.iterator().next();
    }

    private String getProblemDisplayNameForTable(Table table, Set<Problem> problems) {
        Problem problem = problems.iterator().next();
        return getProblemDisplayName(problem, getClassName(table));
    }

    /** Display name of the given table's first problem, or null when it has none. */
    public String getProblemDisplayNameForTable(Table table) {
        Set<Problem> problems = table2Problems.get(table);
        return ((problems != null) ? getProblemDisplayNameForTable(table, problems) : null);
    }

    /** True when the given table currently has at least one validation problem. */
    public boolean hasProblem(Table table) {
        return table2Problems.containsKey(table);
    }

    /**
     * Not private because used in tests.
     */
    Problem getFirstProblem() {
        Map.Entry<Table, Set<Problem>> firstProblemEntry = getFirstProblemEntry();
        if (firstProblemEntry == null) {
            return null;
        }
        Set<Problem> problems = firstProblemEntry.getValue();
        return problems.iterator().next();
    }

    // Resolves a Problem constant to its localized message for the given class name.
    private static String getProblemDisplayName(Problem problem, String className) {
        switch (problem) {
            case NO_JAVA_IDENTIFIER:
                return NbBundle.getMessage(SelectedTables.class, "ERR_NoJavaIdentifier", className);
            case JPA_QL_IDENTIFIER:
                return NbBundle.getMessage(SelectedTables.class, "ERR_ReservedQLKeyword", className);
            case ALREADY_EXISTS:
                return NbBundle.getMessage(SelectedTables.class, "ERR_AlreadyExists", className);
            default:
                assert false : problem + " should be handled in getProblemDisplayName()"; // NOI18N
        }
        return null;
    }

    // Revalidates the selection whenever the underlying table closure changes.
    private final class TableClosureListener implements ChangeListener {
        public void stateChanged(ChangeEvent event) {
            validateTables();
        }
    }
}
/*
 * Copyright (c) 2000, 2003, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package com.sun.corba.se.impl.encoding;

import java.nio.ByteBuffer;
import com.sun.corba.se.impl.encoding.BufferManagerWrite;
import com.sun.corba.se.impl.orbutil.ORBUtility;
import com.sun.corba.se.pept.transport.ByteBufferPool;
import com.sun.corba.se.spi.orb.ORB;

// Wrapper around a marshal ByteBuffer that keeps a few invariants:
//  - the wrapped buffer's position mirrors this.index (every call to
//    position(int) updates both),
//  - this.buflen mirrors the wrapped buffer's limit(), i.e. the usable length,
//  - 'index' is the next empty slot in the buffer.
// The constructors seed position from their index argument and expect the
// buffer's limit to already point at the end of the data.
public class ByteBufferWithInfo
{
    private ORB orb;
    private boolean debug;

    // REVISIT - index should eventually be replaced with byteBuffer.position()
    private int index;                 // Current empty position in buffer.

    // REVISIT - CHANGE THESE TO PRIVATE
    public ByteBuffer byteBuffer;      // Marshal buffer.
    public int buflen;                 // Total length of buffer. // Unnecessary...
    public int needed;                 // How many more bytes are needed on overflow.
    public boolean fragmented;         // Did the overflow operation fragment?

    /**
     * Wraps an existing buffer; 'index' becomes both this object's write
     * cursor and the buffer's position. buflen is taken from the buffer's
     * current limit (left at 0 for a null buffer).
     */
    public ByteBufferWithInfo(org.omg.CORBA.ORB orb, ByteBuffer byteBuffer, int index)
    {
        this.orb = (ORB) orb;
        this.debug = this.orb.transportDebugFlag;
        this.byteBuffer = byteBuffer;
        if (byteBuffer != null) {
            this.buflen = byteBuffer.limit();
        }
        position(index);
        this.needed = 0;
        this.fragmented = false;
    }

    /** Wraps an existing buffer starting at position 0. */
    public ByteBufferWithInfo(org.omg.CORBA.ORB orb, ByteBuffer byteBuffer)
    {
        this(orb, byteBuffer, 0);
    }

    /** Allocates a fresh, pooled buffer sized by the buffer manager. */
    public ByteBufferWithInfo(org.omg.CORBA.ORB orb, BufferManagerWrite bufferManager)
    {
        this(orb, bufferManager, true);
    }

    // Right now, EncapsOutputStream's do not use pooled byte buffers.
    // EncapsOutputStream's is the only one that does not use pooled
    // byte buffers. Hence, the reason for the boolean 'usePooledByteBuffers'.
    // See EncapsOutputStream for additional information.
    public ByteBufferWithInfo(org.omg.CORBA.ORB orb,
                              BufferManagerWrite bufferManager,
                              boolean usePooledByteBuffers)
    {
        this.orb = (ORB) orb;
        this.debug = this.orb.transportDebugFlag;

        int bufferSize = bufferManager.getBufferSize();

        if (usePooledByteBuffers) {
            ByteBufferPool pool = this.orb.getByteBufferPool();
            this.byteBuffer = pool.getByteBuffer(bufferSize);

            if (debug) {
                // identity hash doubles as a cheap buffer id for tracing
                int bbAddress = System.identityHashCode(byteBuffer);
                dprint("constructor (ORB, BufferManagerWrite) - got "
                       + "ByteBuffer id (" + bbAddress + ") from ByteBufferPool.");
            }
        } else {
            // don't allocate from pool, allocate non-direct ByteBuffer
            this.byteBuffer = ByteBuffer.allocate(bufferSize);
        }

        position(0);
        this.buflen = bufferSize;
        this.byteBuffer.limit(this.buflen);
        this.needed = 0;
        this.fragmented = false;
    }

    // Shallow copy constructor: shares the underlying ByteBuffer.
    public ByteBufferWithInfo(ByteBufferWithInfo bbwi)
    {
        this.orb = bbwi.orb;
        this.debug = bbwi.debug;
        this.byteBuffer = bbwi.byteBuffer;
        this.buflen = bbwi.buflen;
        this.byteBuffer.limit(this.buflen);
        position(bbwi.position());
        this.needed = bbwi.needed;
        this.fragmented = bbwi.fragmented;
    }

    // So IIOPOutputStream seems more intuitive
    public int getSize()
    {
        return position();
    }

    // accessor to buflen
    public int getLength()
    {
        return buflen;
    }

    // get position in this buffer
    public int position()
    {
        // REVISIT - should eventually return byteBuffer.position() instead of
        //           this.index, once every read/write/get/put path keeps the
        //           two in sync.
        return index;
    }

    // set position in this buffer
    public void position(int newPosition)
    {
        // REVISIT - should eventually update only byteBuffer.position(), in
        //           conjunction with the change to position() above.
        byteBuffer.position(newPosition);
        index = newPosition;
    }

    // mutator to buflen
    public void setLength(int theLength)
    {
        buflen = theLength;
        byteBuffer.limit(buflen);
    }

    /**
     * Replaces the buffer with a pooled one large enough for position() +
     * needed bytes, copies the existing contents over, and returns the old
     * buffer to the pool. (This code used to live in CDROutputStream.grow.)
     */
    public void growBuffer(ORB orb)
    {
        // Double the capacity until position() + needed fits strictly below it.
        int newLength = byteBuffer.limit() * 2;
        while (position() + needed >= newLength) {
            newLength *= 2;
        }

        ByteBufferPool pool = orb.getByteBufferPool();
        ByteBuffer biggerBuffer = pool.getByteBuffer(newLength);

        if (debug) {
            int newbbAddress = System.identityHashCode(biggerBuffer);
            dprint("growBuffer() - got ByteBuffer id ("
                   + newbbAddress + ") from ByteBufferPool.");
        }

        // copy all existing bytes into the replacement buffer
        byteBuffer.position(0);
        biggerBuffer.put(byteBuffer);

        // return 'old' byteBuffer reference to the ByteBuffer pool
        if (debug) {
            int bbAddress = System.identityHashCode(byteBuffer);
            dprint("growBuffer() - releasing ByteBuffer id ("
                   + bbAddress + ") to ByteBufferPool.");
        }
        pool.releaseByteBuffer(byteBuffer);

        // adopt the larger buffer; limit and buflen must both become newLength
        byteBuffer = biggerBuffer;
        buflen = newLength;
        byteBuffer.limit(buflen);
    }

    public String toString()
    {
        return "ByteBufferWithInfo:"
            + " buflen = " + buflen
            + " byteBuffer.limit = " + byteBuffer.limit()
            + " index = " + index
            + " position = " + position()
            + " needed = " + needed
            + " byteBuffer = " + (byteBuffer == null ? "null" : "not null")
            + " fragmented = " + fragmented;
    }

    protected void dprint(String msg)
    {
        ORBUtility.dprint("ByteBufferWithInfo", msg);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

/**
 * HexBinary.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: SNAPSHOT  Built on : Dec 21, 2007 (04:03:30 LKT)
 *
 * NOTE(review): generated ADB bean — edits here will be lost on regeneration.
 */

package org.apache.axis2.databinding.types.xsd;

/**
 * HexBinary bean class
 */
public class HexBinary implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = hexBinary
Namespace URI = http://www.w3.org/2001/XMLSchema
Namespace Prefix = ns1
*/

// Returns a prefix for the namespace: the conventional "xsd" for the XML
// Schema namespace, otherwise a fresh unique prefix.
private static java.lang.String generatePrefix(java.lang.String namespace) {
    if(namespace.equals("http://www.w3.org/2001/XMLSchema")){
        return "xsd";
    }
    return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}

/**
 * field for HexBinary
 */
protected org.apache.axis2.databinding.types.HexBinary localHexBinary ;

/**
 * Auto generated getter method
 * @return org.apache.axis2.databinding.types.HexBinary
 */
public org.apache.axis2.databinding.types.HexBinary getHexBinary(){
    return localHexBinary;
}

/**
 * Auto generated setter method
 * @param param HexBinary
 */
public void setHexBinary(org.apache.axis2.databinding.types.HexBinary param){
    this.localHexBinary=param;
}

// NOTE(review): throws NullPointerException when localHexBinary is unset —
// behavior of the generated code, kept as-is.
public java.lang.String toString(){
    return localHexBinary.toString();
}

/**
 * isReaderMTOMAware
 * @return true if the reader supports MTOM
 */
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
    boolean isReaderMTOMAware = false;
    try{
        isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
    }catch(java.lang.IllegalArgumentException e){
        // reader does not recognize the property; treat as not MTOM-aware
        isReaderMTOMAware = false;
    }
    return isReaderMTOMAware;
}

/**
 * Builds a lazily-serialized OM element backed by this bean.
 *
 * @param parentQName
 * @param factory
 * @return org.apache.axiom.om.OMElement
 */
public org.apache.axiom.om.OMElement getOMElement (
        final javax.xml.namespace.QName parentQName,
        final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
    org.apache.axiom.om.OMDataSource dataSource =
            new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
        public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
            HexBinary.this.serialize(parentQName,factory,xmlWriter);
        }
    };
    return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
            parentQName,factory,dataSource);
}

// Convenience overload: serialize without an explicit xsi:type attribute.
public void serialize(final javax.xml.namespace.QName parentQName,
                      final org.apache.axiom.om.OMFactory factory,
                      org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
    serialize(parentQName,factory,xmlWriter,false);
}

// Writes this bean as an element named parentQName; when serializeType is
// true an xsi:type="…:hexBinary" attribute is emitted as well.
public void serialize(final javax.xml.namespace.QName parentQName,
                      final org.apache.axiom.om.OMFactory factory,
                      org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                      boolean serializeType)
        throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{

    java.lang.String prefix = null;
    java.lang.String namespace = null;

    prefix = parentQName.getPrefix();
    namespace = parentQName.getNamespaceURI();

    if ((namespace != null) && (namespace.trim().length() > 0)) {
        java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
        if (writerPrefix != null) {
            // namespace already bound on the writer — reuse its prefix
            xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
        } else {
            if (prefix == null) {
                prefix = generatePrefix(namespace);
            }
            xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
    } else {
        // no namespace: write an unqualified element
        xmlWriter.writeStartElement(parentQName.getLocalPart());
    }

    if (serializeType){
        java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://www.w3.org/2001/XMLSchema");
        if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
            writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    namespacePrefix+":hexBinary",
                    xmlWriter);
        } else {
            writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
                    "hexBinary",
                    xmlWriter);
        }
    }

    if (localHexBinary==null){
        // write the nil attribute
        throw new org.apache.axis2.databinding.ADBException("hexBinary cannot be null!!");
    }else{
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localHexBinary));
    }

    xmlWriter.writeEndElement();
}

/**
 * Util method to write an attribute with the ns prefix
 */
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (xmlWriter.getPrefix(namespace) == null) {
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    xmlWriter.writeAttribute(namespace,attName,attValue);
}

/**
 * Util method to write an attribute without the ns prefix
 */
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
                            java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName,attValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace,attName,attValue);
    }
}

/**
 * Util method to write an attribute without the ns prefix
 */
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                 javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

    java.lang.String attributeNamespace = qname.getNamespaceURI();
    java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
    if (attributePrefix == null) {
        attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
    }
    java.lang.String attributeValue;
    if (attributePrefix.trim().length() > 0) {
        attributeValue = attributePrefix + ":" + qname.getLocalPart();
    } else {
        attributeValue = qname.getLocalPart();
    }

    if (namespace.equals("")) {
        xmlWriter.writeAttribute(attName, attributeValue);
    } else {
        registerPrefix(xmlWriter, namespace);
        xmlWriter.writeAttribute(namespace, attName, attributeValue);
    }
}

/**
 * method to handle Qnames
 */
private void writeQName(javax.xml.namespace.QName qname,
                        javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    java.lang.String namespaceURI = qname.getNamespaceURI();
    if (namespaceURI != null) {
        java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
        if (prefix == null) {
            prefix = generatePrefix(namespaceURI);
            xmlWriter.writeNamespace(prefix, namespaceURI);
            xmlWriter.setPrefix(prefix,namespaceURI);
        }

        if (prefix.trim().length() > 0){
            xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        } else {
            // i.e this is the default namespace
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    } else {
        xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
    }
}

private void writeQNames(javax.xml.namespace.QName[] qnames,
                         javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {

    if (qnames != null) {
        // we have to store this data until last moment since it is not possible to write any
        // namespace data after writing the charactor data
        java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
        java.lang.String namespaceURI = null;
        java.lang.String prefix = null;

        for (int i = 0; i < qnames.length; i++) {
            if (i > 0) {
                stringToWrite.append(" ");
            }
            namespaceURI = qnames[i].getNamespaceURI();
            if (namespaceURI != null) {
                prefix = xmlWriter.getPrefix(namespaceURI);
                if ((prefix == null) || (prefix.length() == 0)) {
                    prefix = generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix,namespaceURI);
                }

                if (prefix.trim().length() > 0){
                    stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        }
        xmlWriter.writeCharacters(stringToWrite.toString());
    }
}

/**
 * Register a namespace prefix
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix == null) {
        prefix = generatePrefix(namespace);
        // keep generating until the prefix is unused in the current context
        while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
            prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    return prefix;
}

/**
 * databinding method to get an XML representation of this object
 *
 * NOTE(review): this method continues beyond the visible portion of the file;
 * only its opening statements appear here.
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
        throws org.apache.axis2.databinding.ADBException{

    java.util.ArrayList elementList = new java.util.ArrayList();
    java.util.ArrayList attribList = new java.util.ArrayList();
elementList.add(org.apache.axis2.databinding.utils.reader.ADBXMLStreamReader.ELEMENT_TEXT); if (localHexBinary != null){ elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localHexBinary)); } else { throw new org.apache.axis2.databinding.ADBException("hexBinary cannot be null!!"); } return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray()); } /** * Factory class that keeps the parse method */ public static class Factory{ public static HexBinary fromString(java.lang.String value, java.lang.String namespaceURI){ HexBinary returnValue = new HexBinary(); returnValue.setHexBinary( org.apache.axis2.databinding.utils.ConverterUtil.convertToHexBinary(value)); return returnValue; } public static HexBinary fromString(javax.xml.stream.XMLStreamReader xmlStreamReader, java.lang.String content) { if (content.indexOf(":") > -1){ java.lang.String prefix = content.substring(0,content.indexOf(":")); java.lang.String namespaceUri = xmlStreamReader.getNamespaceContext().getNamespaceURI(prefix); return HexBinary.Factory.fromString(content,namespaceUri); } else { return HexBinary.Factory.fromString(content,""); } } /** * static method to create the object * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element * Postcondition: If this object is an element, the reader is positioned at its end element * If this object is a complex type, the reader is positioned at the end element of its outer element */ public static HexBinary parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{ HexBinary object = new HexBinary(); int event; java.lang.String nillableValue = null; java.lang.String prefix =""; java.lang.String namespaceuri =""; try { while 
(!reader.isStartElement() && !reader.isEndElement()) reader.next(); if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){ java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type"); if (fullTypeName!=null){ java.lang.String nsPrefix = null; if (fullTypeName.indexOf(":") > -1){ nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":")); } nsPrefix = nsPrefix==null?"":nsPrefix; java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1); if (!"hexBinary".equals(type)){ //find namespace for the prefix java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix); return (HexBinary)org.apache.axis2.databinding.types.xsd.ExtensionMapper.getTypeObject( nsUri,type,reader); } } } // Note all attributes that were handled. Used to differ normal attributes // from anyAttributes. java.util.Vector handledAttributes = new java.util.Vector(); while(!reader.isEndElement()) { if (reader.isStartElement() || reader.hasText()){ if (reader.isStartElement() || reader.hasText()){ java.lang.String content = reader.getElementText(); object.setHexBinary( org.apache.axis2.databinding.utils.ConverterUtil.convertToHexBinary(content)); } // End of if for expected property start element else{ // A start element we are not expecting indicates an invalid parameter was passed throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName()); } } else { reader.next(); } } // end of while loop } catch (javax.xml.stream.XMLStreamException e) { throw new java.lang.Exception(e); } return object; } }//end of factory class }
/* * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.test.database.auto; import com.orientechnologies.orient.client.db.ODatabaseHelper; import com.orientechnologies.orient.core.command.OCommandExecutor; import com.orientechnologies.orient.core.command.OCommandRequestText; import com.orientechnologies.orient.core.db.ODatabase; import com.orientechnologies.orient.core.db.ODatabaseListener; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; import com.orientechnologies.orient.core.hook.ODocumentHookAbstract; import com.orientechnologies.orient.core.hook.ORecordHook; import com.orientechnologies.orient.core.record.impl.ODocument; import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery; import com.orientechnologies.orient.core.tx.OTransaction.TXTYPE; import com.tinkerpop.blueprints.impls.orient.OrientBaseGraph; import com.tinkerpop.blueprints.impls.orient.OrientGraph; import com.tinkerpop.blueprints.impls.orient.OrientVertex; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Optional; import org.testng.annotations.Parameters; import org.testng.annotations.Test; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import 
java.util.concurrent.atomic.AtomicInteger;

/**
 * Tests the right calls of all the db's listener API.
 *
 * @author Sylvain Spinelli
 */
public class DbListenerTest extends DocumentDBBaseTest {

  // Counters incremented by DbListener; each test asserts how many times each
  // lifecycle callback fired.
  protected int onAfterTxCommit = 0;
  protected int onAfterTxRollback = 0;
  protected int onBeforeTxBegin = 0;
  protected int onBeforeTxCommit = 0;
  protected int onBeforeTxRollback = 0;
  protected int onClose = 0;
  protected int onCreate = 0;
  protected int onDelete = 0;
  protected int onOpen = 0;
  protected int onCorruption = 0;
  // Last command text and result captured by DbListener's command hooks.
  protected String command;
  protected Object commandResult;

  /** Record hook that remembers, per document, which fields changed on update. */
  public class DocumentChangeListener {
    final Map<ODocument, List<String>> changes = new HashMap<ODocument, List<String>>();

    public DocumentChangeListener(OrientBaseGraph g) {
      this(g.getRawGraph());
    }

    public DocumentChangeListener(final ODatabaseDocumentTx db) {
      db.registerHook(new ODocumentHookAbstract(db) {

        @Override
        public ORecordHook.DISTRIBUTED_EXECUTION_MODE getDistributedExecutionMode() {
          return ORecordHook.DISTRIBUTED_EXECUTION_MODE.SOURCE_NODE;
        }

        @Override
        public void onRecordAfterUpdate(ODocument iDocument) {
          List<String> changedFields = new ArrayList<String>();
          for (String f : iDocument.getDirtyFields()) {
            changedFields.add(f);

            // NOTE(review): oldValue/newValue feed only the commented-out
            // debug output below and are otherwise unused.
            final Object oldValue = iDocument.getOriginalValue(f);
            final Object newValue = iDocument.field(f);

            // System.out.println("Field " + f + " Old: " + oldValue + " -> " + newValue);
          }
          changes.put(iDocument, changedFields);
        }
      });
    }

    public Map<ODocument, List<String>> getChanges() {
      return changes;
    }
  }

  /** Listener that counts every database lifecycle callback into the outer fields. */
  public class DbListener implements ODatabaseListener {
    @Override
    public void onAfterTxCommit(ODatabase iDatabase) {
      onAfterTxCommit++;
    }

    @Override
    public void onAfterTxRollback(ODatabase iDatabase) {
      onAfterTxRollback++;
    }

    @Override
    public void onBeforeTxBegin(ODatabase iDatabase) {
      onBeforeTxBegin++;
    }

    @Override
    public void onBeforeTxCommit(ODatabase iDatabase) {
      onBeforeTxCommit++;
    }

    @Override
    public void onBeforeTxRollback(ODatabase iDatabase) {
      onBeforeTxRollback++;
    }

    @Override
    public void onClose(ODatabase iDatabase) {
      onClose++;
    }

    // Records the SQL text of every command about to run.
    @Override
    public void onBeforeCommand(OCommandRequestText iCommand, OCommandExecutor executor) {
      command = iCommand.getText();
    }

    // Records the result of the last completed command.
    @Override
    public void onAfterCommand(OCommandRequestText iCommand, OCommandExecutor executor, Object result) {
      commandResult = result;
    }

    @Override
    public void onCreate(ODatabase iDatabase) {
      onCreate++;
    }

    @Override
    public void onDelete(ODatabase iDatabase) {
      onDelete++;
    }

    @Override
    public void onOpen(ODatabase iDatabase) {
      onOpen++;
    }

    @Override
    public boolean onCorruptionRepairDatabase(ODatabase iDatabase, final String iReason,
        String iWhatWillbeFixed) {
      onCorruption++;
      return true;
    }
  }

  @Parameters(value = "url")
  public DbListenerTest(@Optional String url) {
    super(url);
  }

  // Base-class lifecycle hooks are intentionally disabled: each test creates
  // and deletes its own database instead.
  @AfterClass
  @Override
  public void afterClass() throws Exception {
  }

  @BeforeMethod
  @Override
  public void beforeMethod() throws Exception {
  }

  @AfterMethod
  @Override
  public void afterMethod() throws Exception {
  }

  /** Verifies listener callbacks for create/open/tx/delete against an embedded db. */
  @Test
  public void testEmbeddedDbListeners() throws IOException {
    if (database.getURL().startsWith("remote:"))
      return;

    if (database.exists())
      ODatabaseHelper.deleteDatabase(database, getStorageType());

    database.registerListener(new DbListener());

    // Snapshot counters: earlier operations in this method may already have
    // bumped them.
    int curOnclose = onClose;
    int curCreate = onCreate;
    int curDelete = onDelete;

    ODatabaseHelper.createDatabase(database, url, getStorageType());
    Assert.assertEquals(onCreate, curCreate + 1);

    database.open("admin", "admin");
    Assert.assertEquals(onOpen, 1);

    database.begin(TXTYPE.OPTIMISTIC);
    Assert.assertEquals(onBeforeTxBegin, 1);

    database.newInstance().save();
    database.commit();
    Assert.assertEquals(onBeforeTxCommit, 1);
    Assert.assertEquals(onAfterTxCommit, 1);

    database.begin(TXTYPE.OPTIMISTIC);
    Assert.assertEquals(onBeforeTxBegin, 2);

    database.newInstance().save();
    database.rollback();
    Assert.assertEquals(onBeforeTxRollback, 1);
    Assert.assertEquals(onAfterTxRollback, 1);

    ODatabaseHelper.deleteDatabase(database, getStorageType());
    Assert.assertEquals(onClose, curOnclose + 1);
    Assert.assertEquals(onDelete, curDelete + 1);

    // Leave a fresh database behind for subsequent tests.
    ODatabaseHelper.createDatabase(database, url, getStorageType());
  }

  /** Same callback checks as above, but against a remote server. */
  @Test
  public void testRemoteDbListeners() throws IOException {
    if (!database.getURL().startsWith("remote:"))
      return;

    if (database.exists())
      ODatabaseHelper.deleteDatabase(database, getStorageType());

    ODatabaseHelper.createDatabase(database, url, getStorageType());

    database.registerListener(new DbListener());

    database.open("admin", "admin");
    Assert.assertEquals(onOpen, 1);

    database.begin(TXTYPE.OPTIMISTIC);
    Assert.assertEquals(onBeforeTxBegin, 1);

    database.newInstance().save();
    database.commit();
    Assert.assertEquals(onBeforeTxCommit, 1);
    Assert.assertEquals(onAfterTxCommit, 1);

    database.begin(TXTYPE.OPTIMISTIC);
    Assert.assertEquals(onBeforeTxBegin, 2);

    database.newInstance().save();
    database.rollback();
    Assert.assertEquals(onBeforeTxRollback, 1);
    Assert.assertEquals(onAfterTxRollback, 1);

    database.close();
    Assert.assertEquals(onClose, 1);
  }

  /** A hook registered after the first commit must see exactly one later change. */
  @Test
  public void testEmbeddedDbListenersTxRecords() throws IOException {
    if (database.getURL().startsWith("remote:"))
      return;

    if (database.exists())
      ODatabaseHelper.deleteDatabase(database, getStorageType());
    ODatabaseHelper.createDatabase(database, url, getStorageType());

    // NOTE(review): recordedChanges is never used in this test.
    final AtomicInteger recordedChanges = new AtomicInteger();

    database.open("admin", "admin");

    database.begin(TXTYPE.OPTIMISTIC);
    ODocument rec = database.newInstance().field("name", "Jay").save();
    database.commit();

    final DocumentChangeListener cl = new DocumentChangeListener(database);

    database.begin(TXTYPE.OPTIMISTIC);
    rec.field("surname", "Miner").save();
    database.commit();

    Assert.assertEquals(cl.getChanges().size(), 1);

    ODatabaseHelper.deleteDatabase(database, getStorageType());
    ODatabaseHelper.createDatabase(database, url, getStorageType());
  }

  /** Same single-change expectation, exercised through the graph (Blueprints) API. */
  @Test
  public void testEmbeddedDbListenersGraph() throws IOException {
    if (database.getURL().startsWith("remote:"))
      return;

    if (database.exists())
      ODatabaseHelper.deleteDatabase(database, getStorageType());
    ODatabaseHelper.createDatabase(database, url, getStorageType());

    database.open("admin", "admin");
    OrientGraph g = new OrientGraph(database);
    OrientVertex v = g.addVertex(null);
    v.setProperty("name", "Jay");
    g.commit();

    final DocumentChangeListener cl = new DocumentChangeListener(g);

    v.setProperty("surname", "Miner");
    // shutdown() commits pending changes, which is what triggers the hook.
    g.shutdown();

    Assert.assertEquals(cl.getChanges().size(), 1);

    ODatabaseHelper.deleteDatabase(database, getStorageType());
    ODatabaseHelper.createDatabase(database, url, getStorageType());
  }

  /** Verifies the before/after command hooks see the query text and its result. */
  @Test
  public void testEmbeddedDbListenersCommands() throws IOException {
    if (database.getURL().startsWith("remote:"))
      return;

    if (database.exists())
      ODatabaseHelper.deleteDatabase(database, getStorageType());
    ODatabaseHelper.createDatabase(database, url, getStorageType());

    // NOTE(review): recordedChanges is never used in this test.
    final AtomicInteger recordedChanges = new AtomicInteger();

    database.open("admin", "admin");

    database.registerListener(new DbListener());

    String iText = "select from OUser";
    Object execute = database.command(new OSQLSynchQuery<Object>(iText)).execute();

    Assert.assertEquals(execute, commandResult);
    Assert.assertEquals(iText, command);

    ODatabaseHelper.deleteDatabase(database, getStorageType());
    ODatabaseHelper.createDatabase(database, url, getStorageType());
  }
}
package ca.uhn.fhirtest.config; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.config.BaseJavaConfigR4; import ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect; import ca.uhn.fhir.jpa.validation.ValidationSettings; import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor; import ca.uhn.fhir.validation.ResultSeverityEnum; import ca.uhn.fhirtest.interceptor.PublicSecurityInterceptor; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; import org.hibernate.search.engine.cfg.BackendSettings; import org.hl7.fhir.dstu2.model.Subscription; import org.hl7.fhir.r5.utils.validation.constants.ReferenceValidationPolicy; import org.springframework.beans.factory.annotation.Autowire; import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.config.ConfigurableListableBeanFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Lazy; import org.springframework.context.annotation.Primary; import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; import javax.persistence.EntityManagerFactory; import javax.sql.DataSource; import 
java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * Spring configuration for the public HAPI FHIR R4 test server: DAO settings,
 * data source (Derby in local test mode, Postgres otherwise), JPA/Hibernate
 * setup, Lucene indexing and request validation.
 */
@Configuration
@Import(CommonConfig.class)
@EnableTransactionManagement()
public class TestR4Config extends BaseJavaConfigR4 {

  // Property placeholders, resolved by propertySourcesPlaceholderConfigurer().
  public static final String FHIR_DB_USERNAME = "${fhir.db.username}";
  public static final String FHIR_DB_PASSWORD = "${fhir.db.password}";
  public static final String FHIR_LUCENE_LOCATION_R4 = "${fhir.lucene.location.r4}";
  // Upper bound up to which search results are counted exactly.
  public static final Integer COUNT_SEARCH_RESULTS_UP_TO = 50000;

  @Value(TestR4Config.FHIR_DB_USERNAME)
  private String myDbUsername;

  @Value(TestR4Config.FHIR_DB_PASSWORD)
  private String myDbPassword;

  @Value(FHIR_LUCENE_LOCATION_R4)
  private String myFhirLuceneLocation;

  /** DAO settings: subscription channels, deletes, external references, expunge. */
  @Bean
  public DaoConfig daoConfig() {
    DaoConfig retVal = new DaoConfig();
    retVal.addSupportedSubscriptionType(Subscription.SubscriptionChannelType.EMAIL);
    retVal.addSupportedSubscriptionType(Subscription.SubscriptionChannelType.RESTHOOK);
    retVal.addSupportedSubscriptionType(Subscription.SubscriptionChannelType.WEBSOCKET);
    retVal.setWebsocketContextPath("/websocketR4");
    retVal.setAllowContainsSearches(true);
    retVal.setAllowMultipleDelete(true);
    retVal.setAllowInlineMatchUrlReferences(false);
    retVal.setAllowExternalReferences(true);
    // Treat this server's own public base URLs as local references.
    retVal.getTreatBaseUrlsAsLocal().add("http://hapi.fhir.org/baseR4");
    retVal.getTreatBaseUrlsAsLocal().add("https://hapi.fhir.org/baseR4");
    retVal.getTreatBaseUrlsAsLocal().add("http://fhirtest.uhn.ca/baseR4");
    retVal.getTreatBaseUrlsAsLocal().add("https://fhirtest.uhn.ca/baseR4");
    retVal.setIndexMissingFields(DaoConfig.IndexEnabledEnum.ENABLED);
    retVal.setCountSearchResultsUpTo(TestR4Config.COUNT_SEARCH_RESULTS_UP_TO);
    retVal.setFetchSizeDefaultMaximum(10000);
    retVal.setExpungeEnabled(true);
    retVal.setFilterParameterEnabled(true);
    retVal.setDefaultSearchParamsCanBeOverridden(false);
    retVal.getModelConfig().setIndexOnContainedResources(true);
    return retVal;
  }

  @Bean
  public ModelConfig modelConfig() {
    return daoConfig().getModelConfig();
  }

  @Override
  @Bean
  public ValidationSettings validationSettings() {
    ValidationSettings retVal = super.validationSettings();
    retVal.setLocalReferenceValidationDefaultPolicy(ReferenceValidationPolicy.CHECK_VALID);
    return retVal;
  }

  /**
   * Connection pool: in-memory Derby when running in local test mode, Postgres
   * otherwise. The pool is wrapped in a proxy that logs slow queries and
   * captures per-thread query counts.
   */
  @Bean(name = "myPersistenceDataSourceR4", destroyMethod = "close")
  public DataSource dataSource() {
    BasicDataSource retVal = new BasicDataSource();
    if (CommonConfig.isLocalTestMode()) {
      retVal.setUrl("jdbc:derby:memory:fhirtest_r4;create=true");
    } else {
      retVal.setDriver(new org.postgresql.Driver());
      retVal.setUrl("jdbc:postgresql://localhost/fhirtest_r4");
    }
    retVal.setUsername(myDbUsername);
    retVal.setPassword(myDbPassword);
    retVal.setDefaultQueryTimeout(20);
    retVal.setTestOnBorrow(true);

    DataSource dataSource = ProxyDataSourceBuilder
      .create(retVal)
      // .logQueryBySlf4j(SLF4JLogLevel.INFO, "SQL")
      .logSlowQueryBySlf4j(10000, TimeUnit.MILLISECONDS)
      .afterQuery(new CurrentThreadCaptureQueriesListener())
      .countQuery()
      .build();

    return dataSource;
  }

  @Override
  @Bean(autowire = Autowire.BY_TYPE)
  public DatabaseBackedPagingProvider databaseBackedPagingProvider() {
    DatabaseBackedPagingProvider retVal = super.databaseBackedPagingProvider();
    retVal.setDefaultPageSize(20);
    retVal.setMaximumPageSize(500);
    return retVal;
  }

  @Override
  @Bean
  public LocalContainerEntityManagerFactoryBean entityManagerFactory(
      ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
    LocalContainerEntityManagerFactoryBean retVal =
        super.entityManagerFactory(theConfigurableListableBeanFactory);
    retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
    retVal.setDataSource(dataSource());
    retVal.setJpaProperties(jpaProperties());
    return retVal;
  }

  // Hibernate core + Hibernate Search (Lucene backend) properties.
  private Properties jpaProperties() {
    Properties extraProperties = new Properties();
    if (CommonConfig.isLocalTestMode()) {
      extraProperties.put("hibernate.dialect", DerbyTenSevenHapiFhirDialect.class.getName());
    } else {
      extraProperties.put("hibernate.dialect", HapiFhirPostgres94Dialect.class.getName());
    }
    extraProperties.put("hibernate.format_sql", "false");
    extraProperties.put("hibernate.show_sql", "false");
    extraProperties.put("hibernate.hbm2ddl.auto", "update");
    extraProperties.put("hibernate.jdbc.batch_size", "20");
    extraProperties.put("hibernate.cache.use_query_cache", "false");
    extraProperties.put("hibernate.cache.use_second_level_cache", "false");
    extraProperties.put("hibernate.cache.use_structured_entries", "false");
    extraProperties.put("hibernate.cache.use_minimal_puts", "false");
    extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
    extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER),
        HapiLuceneAnalysisConfigurer.class.getName());
    extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE),
        "local-filesystem");
    extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT),
        myFhirLuceneLocation);
    extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION),
        "LUCENE_CURRENT");
    return extraProperties;
  }

  /**
   * Bean which validates incoming requests
   */
  @Bean
  @Lazy
  public RequestValidatingInterceptor requestValidatingInterceptor() {
    RequestValidatingInterceptor requestValidator = new RequestValidatingInterceptor();
    // Never reject a request: validation outcomes are reported via response
    // headers only.
    requestValidator.setFailOnSeverity(null);
    requestValidator.setAddResponseHeaderOnSeverity(null);
    requestValidator.setAddResponseOutcomeHeaderOnSeverity(ResultSeverityEnum.INFORMATION);
    requestValidator.addValidatorModule(instanceValidator());
    requestValidator.setIgnoreValidatorExceptions(true);
    return requestValidator;
  }

  @Bean
  public PublicSecurityInterceptor securityInterceptor() {
    return new PublicSecurityInterceptor();
  }

  @Bean
  @Primary
  public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) {
    JpaTransactionManager retVal = new JpaTransactionManager();
    retVal.setEntityManagerFactory(entityManagerFactory);
    return retVal;
  }

  /**
   * This lets the "@Value" fields reference properties from the properties file
   */
  @Bean
  public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
    return new PropertySourcesPlaceholderConfigurer();
  }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.ui.impl; import com.intellij.ide.DataManager; import com.intellij.ide.impl.TypeSafeDataProviderAdapter; import com.intellij.ide.ui.UISettings; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.CommonDataKeys; import com.intellij.openapi.actionSystem.DataProvider; import com.intellij.openapi.actionSystem.TypeSafeDataProvider; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.DialogWrapperDialog; import com.intellij.openapi.ui.DialogWrapperPeer; import com.intellij.openapi.ui.popup.StackingPopupDispatcher; import com.intellij.openapi.util.ActionCallback; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.wm.IdeFrame; import com.intellij.openapi.wm.WindowManager; import com.intellij.openapi.wm.ex.WindowManagerEx; import com.intellij.openapi.wm.impl.IdeFrameImpl; import com.intellij.openapi.wm.impl.IdeGlassPaneEx; import com.intellij.ui.FocusTrackback; import com.intellij.ui.ScreenUtil; import com.intellij.ui.components.JBLayeredPane; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.jetbrains.annotations.NonNls; 
import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.awt.event.*; import java.awt.image.BufferedImage; import java.lang.ref.WeakReference; /** * @author spleaner */ public class GlassPaneDialogWrapperPeer extends DialogWrapperPeer implements FocusTrackbackProvider { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.ui.impl.GlassPaneDialogWrapperPeer"); private DialogWrapper myWrapper; private WindowManagerEx myWindowManager; private Project myProject; private MyDialog myDialog; private boolean myCanBeParent; private String myTitle; public GlassPaneDialogWrapperPeer(DialogWrapper wrapper, Project project, boolean canBeParent) throws GlasspanePeerUnavailableException { myWrapper = wrapper; myCanBeParent = canBeParent; myWindowManager = null; Application application = ApplicationManager.getApplication(); if (application != null && application.hasComponent(WindowManager.class)) { myWindowManager = (WindowManagerEx) WindowManager.getInstance(); } Window window = null; if (myWindowManager != null) { if (project == null) { project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext()); } myProject = project; window = myWindowManager.suggestParentWindow(project); if (window == null) { Window focusedWindow = myWindowManager.getMostRecentFocusedWindow(); if (focusedWindow instanceof IdeFrameImpl) { window = focusedWindow; } } } Window owner; if (window != null) { owner = window; } else { owner = JOptionPane.getRootFrame(); } createDialog(owner); } public GlassPaneDialogWrapperPeer(DialogWrapper wrapper, boolean canBeParent) throws GlasspanePeerUnavailableException { this(wrapper, (Project)null, canBeParent); } public GlassPaneDialogWrapperPeer(DialogWrapper wrapper, @NotNull Component parent, boolean canBeParent) throws GlasspanePeerUnavailableException { myWrapper = wrapper; myCanBeParent = canBeParent; if (!parent.isShowing() && parent != 
JOptionPane.getRootFrame()) { throw new IllegalArgumentException("parent must be showing: " + parent); } myWindowManager = null; Application application = ApplicationManager.getApplication(); if (application != null && application.hasComponent(WindowManager.class)) { myWindowManager = (WindowManagerEx) WindowManager.getInstance(); } Window owner = parent instanceof Window ? (Window) parent : (Window) SwingUtilities.getAncestorOfClass(Window.class, parent); if (!(owner instanceof Dialog) && !(owner instanceof Frame)) { owner = JOptionPane.getRootFrame(); } createDialog(owner); } private void createDialog(final Window owner) throws GlasspanePeerUnavailableException { Window active = KeyboardFocusManager.getCurrentKeyboardFocusManager().getActiveWindow(); if (!(active instanceof JDialog) && owner instanceof IdeFrame) { Component glassPane; // Not all successor of IdeFrame are frames if (owner instanceof JFrame) { glassPane = ((JFrame)owner).getGlassPane(); } else if (owner instanceof JDialog) { glassPane = ((JDialog)owner).getGlassPane(); } else { throw new IllegalStateException("Cannot find glass pane for " + owner.getClass().getName()); } assert glassPane instanceof IdeGlassPaneEx : "GlassPane should be instance of IdeGlassPane!"; myDialog = new MyDialog((IdeGlassPaneEx) glassPane, myWrapper, myProject); } else { throw new GlasspanePeerUnavailableException(); } } @Override public FocusTrackback getFocusTrackback() { if (myDialog != null) { return myDialog.getFocusTrackback(); } return null; } @Override public void setUndecorated(final boolean undecorated) { LOG.assertTrue(undecorated, "Decorated dialogs are not supported!"); } @Override public void addMouseListener(final MouseListener listener) { throw new UnsupportedOperationException("Not implemented in " + getClass().getCanonicalName()); } @Override public void addMouseListener(final MouseMotionListener listener) { throw new UnsupportedOperationException("Not implemented in " + getClass().getCanonicalName()); } 
// Key listeners are not supported by this lightweight glass-pane peer.
@Override
public void addKeyListener(final KeyListener listener) {
  throw new UnsupportedOperationException("Not implemented in " + getClass().getCanonicalName());
}

// Window-stacking operations have no meaning for a glass-pane hosted dialog.
@Override
public void toFront() {
  throw new UnsupportedOperationException("Not implemented in " + getClass().getCanonicalName());
}

@Override
public void toBack() {
  throw new UnsupportedOperationException("Not implemented in " + getClass().getCanonicalName());
}

// Disposes the hosted panel and clears all references; must run on the EDT.
@Override
public void dispose() {
  LOG.assertTrue(EventQueue.isDispatchThread(), "Access is allowed from event dispatch thread only");

  if (myDialog != null) {
    Disposer.dispose(myDialog);
    myDialog = null;
    myProject = null;
    myWindowManager = null;
  }
}

@Override
public Container getContentPane() {
  return myDialog.getContentPane();
}

@Override
public Window getOwner() {
  throw new UnsupportedOperationException("Not implemented in " + getClass().getCanonicalName());
}

// There is no real heavyweight window behind this peer.
@Override
public Window getWindow() {
  return null;
}

@Override
public JRootPane getRootPane() {
  if (myDialog == null) {
    return null;
  }

  return myDialog.getRootPane();
}

@Override
public Dimension getSize() {
  return myDialog.getSize();
}

// The title passed to setTitle() is stored but never rendered by this peer.
@Override
public String getTitle() {
  return "";
}

@Override
public Dimension getPreferredSize() {
  return myDialog.getPreferredSize();
}

// Glass-pane dialogs are always modal; only assert that the caller agrees.
@Override
public void setModal(final boolean modal) {
  LOG.assertTrue(modal, "Can't be non modal!");
}

@Override
public boolean isModal() {
  return true;
}

@Override
public boolean isVisible() {
  return myDialog != null && myDialog.isVisible();
}

@Override
public boolean isShowing() {
  return myDialog != null && myDialog.isShowing();
}

@Override
public void setSize(final int width, final int height) {
  myDialog.setSize(width, height);
}

@Override
public void setTitle(final String title) {
  myTitle = title;
}

// NOTE(review): isResizable() below returns void, so it cannot report anything;
// the peer interface presumably intended a boolean query — confirm upstream.
@Override public void isResizable() { } @Override public void setResizable(final boolean resizable) { throw new UnsupportedOperationException("Not implemented in " + getClass().getCanonicalName()); } @NotNull @Override public Point getLocation() { return myDialog.getLocation(); } @Override public void setLocation(@NotNull final Point p) { setLocation(p.x, p.y); } @Override public void setLocation(final int x, final int y) { if (myDialog == null || !myDialog.isShowing()) { return; } final Point _p = new Point(x, y); final JRootPane pane = SwingUtilities.getRootPane(myDialog); SwingUtilities.convertPointFromScreen(_p, pane); final Insets insets = myDialog.getInsets(); // todo: fix coords to include shadow (border) paddings // todo: reimplement dragging in every client to calculate window position properly int _x = _p.x - insets.left; int _y = _p.y - insets.top; final Container container = myDialog.getTransparentPane(); _x = _x > 0 ? (_x + myDialog.getWidth() < container.getWidth() ? _x : container.getWidth() - myDialog.getWidth()) : 0; _y = _y > 0 ? (_y + myDialog.getHeight() < container.getHeight() ? 
_y : container.getHeight() - myDialog.getHeight()) : 0; myDialog.setLocation(_x, _y); } @Override public ActionCallback show() { LOG.assertTrue(EventQueue.isDispatchThread(), "Access is allowed from event dispatch thread only"); hidePopupsIfNeeded(); myDialog.setVisible(true); return ActionCallback.DONE; } @Override public void setContentPane(final JComponent content) { myDialog.setContentPane(content); } @Override public void centerInParent() { if (myDialog != null) { myDialog.center(); } } @Override public void validate() { if (myDialog != null) { myDialog.resetSizeCache(); myDialog.invalidate(); } } @Override public void repaint() { if (myDialog != null) { myDialog.repaint(); } } @Override public void pack() { } @Override public void setAppIcons() { throw new UnsupportedOperationException("Not implemented in " + getClass().getCanonicalName()); } @Override public boolean isHeadless() { return DialogWrapperPeerImpl.isHeadlessEnv(); } //[kirillk] for now it only deals with the TaskWindow under Mac OS X: modal dialogs are shown behind JBPopup //hopefully this whole code will go away private void hidePopupsIfNeeded() { if (!SystemInfo.isMac) return; StackingPopupDispatcher.getInstance().hidePersistentPopups(); Disposer.register(myDialog, new Disposable() { @Override public void dispose() { StackingPopupDispatcher.getInstance().restorePersistentPopups(); } }); } private static class MyDialog extends JPanel implements Disposable, DialogWrapperDialog, DataProvider, FocusTrackback.Provider { private final WeakReference<DialogWrapper> myDialogWrapper; private final IdeGlassPaneEx myPane; private JComponent myContentPane; private MyRootPane myRootPane; private BufferedImage shadow; private final JLayeredPane myTransparentPane; private JButton myDefaultButton; private Dimension myShadowSize = null; private final Container myWrapperPane; private Component myPreviouslyFocusedComponent; private Dimension myCachedSize = null; private MyDialog(IdeGlassPaneEx pane, DialogWrapper 
wrapper, Project project) { setLayout(new BorderLayout()); setOpaque(false); setBorder(BorderFactory.createEmptyBorder(ShadowBorderPainter.TOP_SIZE, ShadowBorderPainter.SIDE_SIZE, ShadowBorderPainter.BOTTOM_SIZE, ShadowBorderPainter.SIDE_SIZE)); myPane = pane; myDialogWrapper = new WeakReference<DialogWrapper>(wrapper); // myProject = new WeakReference<Project>(project); myRootPane = new MyRootPane(this); // be careful with DialogWrapper.dispose()! Disposer.register(this, myRootPane); myContentPane = new JPanel(); myContentPane.setOpaque(true); add(myContentPane, BorderLayout.CENTER); myTransparentPane = createTransparentPane(); myWrapperPane = createWrapperPane(); myWrapperPane.add(this); setFocusCycleRoot(true); } public void resetSizeCache() { myCachedSize = null; } private Container createWrapperPane() { final JPanel result = new JPanel() { @Override public void doLayout() { synchronized (getTreeLock()) { final Container container = getParent(); if (container != null) { final Component[] components = getComponents(); LOG.assertTrue(components.length == 1); for (Component c : components) { Point location; if (myCachedSize == null) { myCachedSize = c.getPreferredSize(); location = getLocationInCenter(myCachedSize, c.getLocation()); } else { location = c.getLocation(); } final double _width = myCachedSize.getWidth(); final double _height = myCachedSize.getHeight(); final DialogWrapper dialogWrapper = myDialogWrapper.get(); if (dialogWrapper != null) { final int width = (int) (_width * dialogWrapper.getHorizontalStretch()); final int height = (int) (_height * dialogWrapper.getVerticalStretch()); c.setBounds((int) location.getX(), (int) location.getY(), width, height); } else { c.setBounds((int) location.getX(), (int) location.getY(), (int) _width, (int) _height); } } } } super.doLayout(); } }; result.setLayout(null); result.setOpaque(false); // to not pass events through transparent pane result.addMouseListener(new MouseAdapter() { }); 
result.addMouseMotionListener(new MouseMotionAdapter() { }); return result; } public Container getTransparentPane() { return myTransparentPane; } private TransparentLayeredPane getExistingTransparentPane() { for (int i = 0; i < myPane.getComponentCount(); i++) { Component c = myPane.getComponent(i); if (c instanceof TransparentLayeredPane) { return (TransparentLayeredPane) c; } } return null; } private boolean isTransparentPaneExist() { for (int i = 0; i < myPane.getComponentCount(); i++) { Component c = myPane.getComponent(i); if (c instanceof TransparentLayeredPane) { return true; } } return false; } @Override public void setVisible(final boolean show) { if (show) { if (!isTransparentPaneExist()) { myPane.add(myTransparentPane); } else { myPreviouslyFocusedComponent = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); } myTransparentPane.add(myWrapperPane); myTransparentPane.setLayer(myWrapperPane, myTransparentPane.getComponentCount() - 1); if (!myTransparentPane.isVisible()) { myTransparentPane.setVisible(true); } } super.setVisible(show); if (show) { myTransparentPane.revalidate(); myTransparentPane.repaint(); } else { myTransparentPane.remove(myWrapperPane); myTransparentPane.revalidate(); myTransparentPane.repaint(); if (myPreviouslyFocusedComponent != null) { myPreviouslyFocusedComponent.requestFocus(); myPreviouslyFocusedComponent = null; } if (myTransparentPane.getComponentCount() == 0) { myTransparentPane.setVisible(false); myPane.remove(myTransparentPane); } } } @Override public void paint(final Graphics g) { UISettings.setupAntialiasing(g); super.paint(g); } private JLayeredPane createTransparentPane() { JLayeredPane pane = getExistingTransparentPane(); if (pane == null) { pane = new TransparentLayeredPane(); } return pane; } @Override protected void paintComponent(final Graphics g) { final Graphics2D g2 = (Graphics2D) g; if (shadow != null) { UIUtil.drawImage(g2, shadow, 0, 0, null); } super.paintComponent(g); } @Override public 
void setBounds(final int x, final int y, final int width, final int height) { super.setBounds(x, y, width, height); if (myShadowSize == null || !myShadowSize.equals(getSize())) { createShadow(); myShadowSize = getSize(); } } @Override public void setLocation(final int x, final int y) { final Container p = myTransparentPane; if (p != null) { final Dimension s = p.getSize(); final int _x = (int) (x + getWidth() > s.getWidth() ? s.getWidth() - getWidth() : x); final int _y = (int) (y + getHeight() > s.getHeight() ? s.getHeight() - getHeight() : y); super.setLocation(_x, _y); } else { super.setLocation(x, y); } } private void createShadow() { if (!UISettings.isRemoteDesktopConnected() && !JBUI.isHiDPI()) { shadow = ShadowBorderPainter.createShadow(this, getWidth(), getHeight()); } } @Override public void dispose() { remove(getContentPane()); repaint(); final Runnable disposer = new Runnable() { @Override public void run() { setVisible(false); } }; if (EventQueue.isDispatchThread()) { disposer.run(); } else { //noinspection SSBasedInspection SwingUtilities.invokeLater(disposer); } myRootPane = null; } public void setContentPane(JComponent content) { if (myContentPane != null) { remove(myContentPane); myContentPane = null; } myContentPane = content; myContentPane.setOpaque(true); // should be opaque add(myContentPane, BorderLayout.CENTER); } public JComponent getContentPane() { return myContentPane; } @Override public JRootPane getRootPane() { return myRootPane; } @Override public DialogWrapper getDialogWrapper() { return myDialogWrapper.get(); } @Override public Object getData(@NonNls final String dataId) { final DialogWrapper wrapper = myDialogWrapper.get(); if (wrapper instanceof DataProvider) { return ((DataProvider) wrapper).getData(dataId); } else if (wrapper instanceof TypeSafeDataProvider) { TypeSafeDataProviderAdapter adapter = new TypeSafeDataProviderAdapter((TypeSafeDataProvider) wrapper); return adapter.getData(dataId); } return null; } @Override public void 
setSize(int width, int height) { Point location = getLocation(); Rectangle rect = new Rectangle(location.x, location.y, width, height); ScreenUtil.fitToScreen(rect); if (location.x != rect.x || location.y != rect.y) { setLocation(rect.x, rect.y); } super.setSize(rect.width, rect.height); } @Override public FocusTrackback getFocusTrackback() { return null; } @Nullable private Point getLocationInCenter(Dimension size, @Nullable Point _default) { if (myTransparentPane != null) { final Dimension d = myTransparentPane.getSize(); return new Point((d.width - size.width) / 2, (d.height - size.height) / 2); } return _default; } public void center() { final Point location = getLocationInCenter(getSize(), null); if (location != null) { setLocation(location); repaint(); } } public void setDefaultButton(final JButton defaultButton) { //((JComponent)myPane).getRootPane().setDefaultButton(defaultButton); myDefaultButton = defaultButton; } } private static class MyRootPane extends JRootPane implements Disposable { private MyDialog myDialog; private MyRootPane(final MyDialog dialog) { myDialog = dialog; } @Override protected JLayeredPane createLayeredPane() { JLayeredPane p = new JBLayeredPane(); p.setName(this.getName()+".layeredPane"); return p; } @Override public void dispose() { myDialog = null; } @Override public void registerKeyboardAction(final ActionListener anAction, final String aCommand, final KeyStroke aKeyStroke, final int aCondition) { myDialog.registerKeyboardAction(anAction, aCommand, aKeyStroke, aCondition); } @Override public void unregisterKeyboardAction(final KeyStroke aKeyStroke) { myDialog.unregisterKeyboardAction(aKeyStroke); } @Override public void setDefaultButton(final JButton defaultButton) { myDialog.setDefaultButton(defaultButton); } } public static class GlasspanePeerUnavailableException extends Exception { } public static class TransparentLayeredPane extends JBLayeredPane { private TransparentLayeredPane() { setLayout(new BorderLayout()); 
setOpaque(false); // to not pass events through transparent pane addMouseListener(new MouseAdapter() { }); addMouseMotionListener(new MouseMotionAdapter() { }); } @Override public void addNotify() { final Container container = getParent(); if (container != null) { setBounds(0, 0, container.getWidth(), container.getHeight()); } super.addNotify(); } @Override public boolean isOptimizedDrawingEnabled() { return getComponentCount() <= 1; } } }
/*
 * Copyright (c) 2002-2018 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.test.ha;

import org.hamcrest.CoreMatchers;
import org.hamcrest.Matchers;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.util.logging.Level;

import org.neo4j.cluster.ClusterSettings;
import org.neo4j.cluster.client.Clusters;
import org.neo4j.graphdb.DependencyResolver;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Transaction;
import org.neo4j.graphdb.TransactionTerminatedException;
import org.neo4j.graphdb.TransientTransactionFailureException;
import org.neo4j.graphdb.factory.TestHighlyAvailableGraphDatabaseFactory;
import org.neo4j.helpers.collection.MapUtil;
import org.neo4j.io.fs.DefaultFileSystemAbstraction;
import org.neo4j.io.fs.FileUtils;
import org.neo4j.io.pagecache.PageCache;
import org.neo4j.kernel.api.exceptions.Status;
import org.neo4j.kernel.ha.HaSettings;
import org.neo4j.kernel.ha.HighlyAvailableGraphDatabase;
import org.neo4j.kernel.impl.ha.ClusterManager;
import org.neo4j.kernel.impl.store.MetaDataStore;
import org.neo4j.kernel.impl.store.TransactionId;
import org.neo4j.kernel.impl.storemigration.LogFiles;
import org.neo4j.kernel.impl.transaction.log.TransactionIdStore;
import org.neo4j.test.LoggerRule;
import org.neo4j.test.TargetDirectory;

import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.neo4j.helpers.Exceptions.rootCause;
import static org.neo4j.helpers.collection.MapUtil.stringMap;
import static org.neo4j.kernel.impl.ha.ClusterManager.allSeesAllAsAvailable;
import static org.neo4j.kernel.impl.ha.ClusterManager.clusterOfSize;
import static org.neo4j.kernel.impl.ha.ClusterManager.clusterWithAdditionalArbiters;
import static org.neo4j.kernel.impl.ha.ClusterManager.masterAvailable;
import static org.neo4j.kernel.impl.ha.ClusterManager.masterSeesSlavesAsAvailable;
import static org.neo4j.kernel.impl.ha.ClusterManager.provided;
import static org.neo4j.kernel.impl.pagecache.StandalonePageCacheFactory.createPageCache;
import static org.neo4j.kernel.impl.store.MetaDataStore.Position.LAST_TRANSACTION_COMMIT_TIMESTAMP;

/**
 * Integration tests for Neo4j HA clusters: formation, replication from master
 * to slaves, master failover, port-conflict handling, and recovery of the
 * last-committed-transaction timestamp. Each test builds a throwaway cluster
 * via {@link ClusterManager} and shuts it down in a finally block.
 */
public class ClusterTest
{
    @Rule
    public LoggerRule logging = new LoggerRule( Level.OFF );
    @Rule
    public TargetDirectory.TestDirectory testDirectory = TargetDirectory.testDirForTest( getClass() );

    /** Writes on the master and verifies the property replicates to a slave (tx_push_factor=2). */
    @Test
    public void testCluster() throws Throwable
    {
        ClusterManager clusterManager = new ClusterManager.Builder( testDirectory.directory( "testCluster" ) )
                .withSharedConfig( MapUtil.stringMap(
                        HaSettings.ha_server.name(), "localhost:6001-6005",
                        HaSettings.tx_push_factor.name(), "2" ) )
                .withProvider( clusterOfSize( 3 ) )
                .build();
        try
        {
            clusterManager.start();
            clusterManager.getDefaultCluster().await( allSeesAllAsAvailable() );

            long nodeId;
            HighlyAvailableGraphDatabase master = clusterManager.getDefaultCluster().getMaster();
            try ( Transaction tx = master.beginTx() )
            {
                Node node = master.createNode();
                nodeId = node.getId();
                node.setProperty( "foo", "bar" );
                tx.success();
            }

            HighlyAvailableGraphDatabase slave = clusterManager.getDefaultCluster().getAnySlave();
            try ( Transaction transaction = slave.beginTx() )
            {
                Node node = slave.getNodeById( nodeId );
                assertThat( node.getProperty( "foo" ).toString(), CoreMatchers.equalTo( "bar" ) );
            }
        }
        finally
        {
            clusterManager.safeShutdown();
        }
    }

    /** Same replication check, but with cluster members addressed by hostname. */
    @Test
    public void testClusterWithHostnames() throws Throwable
    {
        Clusters.Cluster cluster = new Clusters.Cluster( "neo4j.ha" );
        for ( int i = 0; i < 3; i++ )
        {
            cluster.getMembers().add( new Clusters.Member( "localhost:" + (5001 + i), true ) );
        }
        final Clusters clusters = new Clusters();
        clusters.getClusters().add( cluster );

        ClusterManager clusterManager = new ClusterManager.Builder( testDirectory.directory( "testCluster" ) )
                .withProvider( provided( clusters ) )
                .withSharedConfig( stringMap(
                        HaSettings.ha_server.name(), "localhost:6001-6005",
                        HaSettings.tx_push_factor.name(), "2" ) ).build();
        try
        {
            clusterManager.start();
            clusterManager.getDefaultCluster().await( allSeesAllAsAvailable() );

            long nodeId;
            HighlyAvailableGraphDatabase master = clusterManager.getDefaultCluster().getMaster();
            try ( Transaction tx = master.beginTx() )
            {
                Node node = master.createNode();
                nodeId = node.getId();
                node.setProperty( "foo", "bar" );
                tx.success();
            }

            HighlyAvailableGraphDatabase anySlave = clusterManager.getDefaultCluster().getAnySlave();
            try ( Transaction ignore = anySlave.beginTx() )
            {
                Node node = anySlave.getNodeById( nodeId );
                assertThat( node.getProperty( "foo" ).toString(), CoreMatchers.equalTo( "bar" ) );
            }
        }
        finally
        {
            clusterManager.safeShutdown();
        }
    }

    /** Same replication check, but binding HA servers to the wildcard address 0.0.0.0. */
    @Test
    public void testClusterWithWildcardIP() throws Throwable
    {
        Clusters.Cluster cluster = new Clusters.Cluster( "neo4j.ha" );
        for ( int i = 0; i < 3; i++ )
        {
            cluster.getMembers().add( new Clusters.Member( (5001 + i), true ) );
        }
        final Clusters clusters = new Clusters();
        clusters.getClusters().add( cluster );

        ClusterManager clusterManager = new ClusterManager.Builder( testDirectory.directory( "testCluster" ) )
                .withProvider( provided( clusters ) )
                .withSharedConfig( stringMap(
                        HaSettings.ha_server.name(), "0.0.0.0:6001-6005",
                        HaSettings.tx_push_factor.name(), "2" ) ).build();
        try
        {
            clusterManager.start();
            clusterManager.getDefaultCluster().await( allSeesAllAsAvailable() );

            long nodeId;
            HighlyAvailableGraphDatabase master = clusterManager.getDefaultCluster().getMaster();
            try ( Transaction tx = master.beginTx() )
            {
                Node node = master.createNode();
                nodeId = node.getId();
                node.setProperty( "foo", "bar" );
                tx.success();
            }

            HighlyAvailableGraphDatabase anySlave = clusterManager.getDefaultCluster().getAnySlave();
            try ( Transaction ignore = anySlave.beginTx() )
            {
                Node node = anySlave.getNodeById( nodeId );
                assertThat( node.getProperty( "foo" ).toString(), CoreMatchers.equalTo( "bar" ) );
            }
        }
        finally
        {
            clusterManager.safeShutdown();
        }
    }

    /** A second instance reusing the same cluster port must fail to start. */
    @Test
    public void testInstancesWithConflictingClusterPorts() throws Throwable
    {
        HighlyAvailableGraphDatabase first = null;
        try
        {
            String masterStoreDir =
                    testDirectory.directory( "testConflictingClusterPortsMaster" ).getAbsolutePath();
            first = (HighlyAvailableGraphDatabase) new TestHighlyAvailableGraphDatabaseFactory().
                    newHighlyAvailableDatabaseBuilder( masterStoreDir )
                    .setConfig( ClusterSettings.initial_hosts, "127.0.0.1:5001" )
                    .setConfig( ClusterSettings.cluster_server, "127.0.0.1:5001" )
                    .setConfig( ClusterSettings.server_id, "1" )
                    .setConfig( HaSettings.ha_server, "127.0.0.1:6666" )
                    .newGraphDatabase();

            try
            {
                String slaveStoreDir =
                        testDirectory.directory( "testConflictingClusterPortsSlave" ).getAbsolutePath();
                HighlyAvailableGraphDatabase failed =
                        (HighlyAvailableGraphDatabase) new TestHighlyAvailableGraphDatabaseFactory().
                        newHighlyAvailableDatabaseBuilder( slaveStoreDir )
                        .setConfig( ClusterSettings.initial_hosts, "127.0.0.1:5001" )
                        .setConfig( ClusterSettings.cluster_server, "127.0.0.1:5001" )
                        .setConfig( ClusterSettings.server_id, "2" )
                        .setConfig( HaSettings.ha_server, "127.0.0.1:6667" )
                        .newGraphDatabase();
                failed.shutdown();
                fail("Should not start when ports conflict");
            }
            catch ( Exception e )
            {
                // good
            }
        }
        finally
        {
            if ( first != null )
            {
                first.shutdown();
            }
        }
    }

    /** A second instance reusing the same HA server port must fail to start. */
    @Test
    public void testInstancesWithConflictingHaPorts() throws Throwable
    {
        HighlyAvailableGraphDatabase first = null;
        try
        {
            String storeDir = testDirectory.directory( "testConflictingHaPorts" ).getAbsolutePath();
            first = (HighlyAvailableGraphDatabase) new TestHighlyAvailableGraphDatabaseFactory().
                    newHighlyAvailableDatabaseBuilder( storeDir )
                    .setConfig( ClusterSettings.initial_hosts, "127.0.0.1:5001" )
                    .setConfig( ClusterSettings.cluster_server, "127.0.0.1:5001" )
                    .setConfig( ClusterSettings.server_id, "1" )
                    .setConfig( HaSettings.ha_server, "127.0.0.1:6666" )
                    .newGraphDatabase();

            try
            {
                HighlyAvailableGraphDatabase failed =
                        (HighlyAvailableGraphDatabase) new TestHighlyAvailableGraphDatabaseFactory().
                        newHighlyAvailableDatabaseBuilder( storeDir )
                        .setConfig( ClusterSettings.initial_hosts, "127.0.0.1:5001" )
                        .setConfig( ClusterSettings.cluster_server, "127.0.0.1:5002" )
                        .setConfig( ClusterSettings.server_id, "2" )
                        .setConfig( HaSettings.ha_server, "127.0.0.1:6666" )
                        .newGraphDatabase();
                failed.shutdown();
                fail( "Should not start when ports conflict" );
            }
            catch ( Exception e )
            {
                // good
            }
        }
        finally
        {
            if ( first != null )
            {
                first.shutdown();
            }
        }
    }

    /** Kills the master of a 4-instance cluster and verifies a new master is elected and writable. */
    @Test
    public void given4instanceClusterWhenMasterGoesDownThenElectNewMaster() throws Throwable
    {
        ClusterManager clusterManager = new ClusterManager.Builder( testDirectory.directory( "4instances" ) )
                .withProvider( ClusterManager.clusterOfSize( 4 ) ).build();
        try
        {
            clusterManager.start();
            ClusterManager.ManagedCluster cluster = clusterManager.getDefaultCluster();
            cluster.await( allSeesAllAsAvailable() );

            logging.getLogger().info( "STOPPING MASTER" );
            cluster.shutdown( cluster.getMaster() );
            logging.getLogger().info( "STOPPED MASTER" );

            cluster.await( ClusterManager.masterAvailable() );

            GraphDatabaseService master = cluster.getMaster();
            logging.getLogger().info( "CREATE NODE" );
            try ( Transaction tx = master.beginTx() )
            {
                master.createNode();
                logging.getLogger().info( "CREATED NODE" );
                tx.success();
            }

            logging.getLogger().info( "STOPPING CLUSTER" );
        }
        finally
        {
            clusterManager.safeShutdown();
        }
    }

    /** An empty initial_hosts list should still form a single-instance cluster. */
    @Test
    public void givenEmptyHostListWhenClusterStartupThenFormClusterWithSingleInstance() throws Exception
    {
        HighlyAvailableGraphDatabase db = (HighlyAvailableGraphDatabase) new TestHighlyAvailableGraphDatabaseFactory().
                newHighlyAvailableDatabaseBuilder( testDirectory.directory( "singleinstance" ).getAbsolutePath() ).
                setConfig( ClusterSettings.server_id, "1" ).
                setConfig( ClusterSettings.initial_hosts, "" ).
                newGraphDatabase();

        try
        {
            assertTrue( "Single instance cluster was not formed in time", db.isAvailable( 1_000 ) );
        }
        finally
        {
            db.shutdown();
        }
    }

    /**
     * A transaction open on a slave while the master dies must not block the
     * master switch; the open transaction is terminated instead and closing it
     * surfaces a TransientTransactionFailureException.
     */
    @Test
    public void givenClusterWhenMasterGoesDownAndTxIsRunningThenDontWaitToSwitch() throws Throwable
    {
        ClusterManager clusterManager = new ClusterManager.Builder( testDirectory.directory( "waitfortx" ) )
                .withProvider( ClusterManager.clusterOfSize( 3 ) ).build();
        try
        {
            clusterManager.start();
            ClusterManager.ManagedCluster cluster = clusterManager.getDefaultCluster();
            cluster.await( allSeesAllAsAvailable() );

            HighlyAvailableGraphDatabase slave = cluster.getAnySlave();

            Transaction tx = slave.beginTx();
            // Do a little write operation so that all "write" aspects of this tx is initializes properly
            slave.createNode();

            // Shut down master while we're keeping this transaction open
            cluster.shutdown( cluster.getMaster() );

            cluster.await( masterAvailable() );
            cluster.await( masterSeesSlavesAsAvailable( 1 ) );
            // Ending up here means that we didn't wait for this transaction to complete

            tx.success();

            try
            {
                tx.close();
                fail( "Exception expected" );
            }
            catch ( Exception e )
            {
                assertThat( e, instanceOf( TransientTransactionFailureException.class ) );
                Throwable rootCause = rootCause( e );
                assertThat( rootCause, instanceOf( TransactionTerminatedException.class ) );
                assertThat( ((TransactionTerminatedException)rootCause).status(),
                        Matchers.<Status>equalTo( Status.General.DatabaseUnavailable ) );
            }
        }
        finally
        {
            clusterManager.stop();
        }
    }

    /**
     * Clearing the commit-timestamp field in a slave's neostore and restarting
     * it should re-derive the timestamp (from logs) to match the master's.
     */
    @Test
    public void lastTxCommitTimestampShouldGetInitializedOnSlaveIfNotPresent() throws Throwable
    {
        ClusterManager clusterManager = new ClusterManager.Builder( testDirectory.directory( "lastTxTimestamp" ) )
                .withProvider( ClusterManager.clusterOfSize( 3 ) ).build();

        try
        {
            clusterManager.start();
            ClusterManager.ManagedCluster cluster = clusterManager.getDefaultCluster();
            cluster.await( allSeesAllAsAvailable() );

            runSomeTransactions( cluster.getMaster() );
            cluster.sync();

            HighlyAvailableGraphDatabase slave = cluster.getAnySlave();
            File storeDir = new File( slave.getStoreDir() );
            ClusterManager.RepairKit slaveRepairKit = cluster.shutdown( slave );

            clearLastTransactionCommitTimestampField( storeDir );

            HighlyAvailableGraphDatabase repairedSlave = slaveRepairKit.repair();
            cluster.await( allSeesAllAsAvailable() );

            assertEquals( lastCommittedTxTimestamp( cluster.getMaster() ), lastCommittedTxTimestamp( repairedSlave ) );
        }
        finally
        {
            clusterManager.stop();
        }
    }

    /**
     * With both the neostore field cleared AND the transaction logs deleted,
     * the restarted slave has no source for the timestamp and must report
     * UNKNOWN_TX_COMMIT_TIMESTAMP.
     */
    @Test
    public void lastTxCommitTimestampShouldBeUnknownAfterStartIfNoFiledOrLogsPresent() throws Throwable
    {
        ClusterManager clusterManager = new ClusterManager.Builder( testDirectory.directory( "lastTxTimestamp" ) )
                .withProvider( ClusterManager.clusterOfSize( 3 ) ).build();

        try
        {
            clusterManager.start();
            ClusterManager.ManagedCluster cluster = clusterManager.getDefaultCluster();
            cluster.await( allSeesAllAsAvailable() );

            runSomeTransactions( cluster.getMaster() );
            cluster.sync();

            HighlyAvailableGraphDatabase slave = cluster.getAnySlave();
            File storeDir = new File( slave.getStoreDir() );
            ClusterManager.RepairKit slaveRepairKit = cluster.shutdown( slave );

            clearLastTransactionCommitTimestampField( storeDir );
            deleteLogs( storeDir );

            HighlyAvailableGraphDatabase repairedSlave = slaveRepairKit.repair();
            cluster.await( allSeesAllAsAvailable() );

            assertEquals( TransactionIdStore.UNKNOWN_TX_COMMIT_TIMESTAMP, lastCommittedTxTimestamp( repairedSlave ) );
        }
        finally
        {
            clusterManager.stop();
        }
    }

    // Deletes all transaction log files from the given store directory.
    private static void deleteLogs( File storeDir )
    {
        for ( File file : storeDir.listFiles( LogFiles.FILENAME_FILTER ) )
        {
            FileUtils.deleteFile( file );
        }
    }

    // Commits 10 transactions of 10 node creations each, to advance the tx id/timestamp.
    private static void runSomeTransactions( HighlyAvailableGraphDatabase db )
    {
        for ( int i = 0; i < 10; i++ )
        {
            try ( Transaction tx = db.beginTx() )
            {
                for ( int j = 0; j < 10; j++ )
                {
                    db.createNode();
                }
                tx.success();
            }
        }
    }

    // Resets the LAST_TRANSACTION_COMMIT_TIMESTAMP record in neostore to its base value.
    private static void clearLastTransactionCommitTimestampField( File storeDir ) throws IOException
    {
        try ( PageCache pageCache = createPageCache( new DefaultFileSystemAbstraction() ) )
        {
            File neoStore = new File( storeDir, MetaDataStore.DEFAULT_NAME );
            MetaDataStore.setRecord( pageCache, neoStore, LAST_TRANSACTION_COMMIT_TIMESTAMP,
                    MetaDataStore.BASE_TX_COMMIT_TIMESTAMP );
        }
    }

    // Reads the last committed transaction's commit timestamp from the live database.
    private static long lastCommittedTxTimestamp( HighlyAvailableGraphDatabase db )
    {
        DependencyResolver resolver = db.getDependencyResolver();
        MetaDataStore metaDataStore = resolver.resolveDependency( MetaDataStore.class );
        TransactionId txInfo = metaDataStore.getLastCommittedTransaction();
        return txInfo.commitTimestamp();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.gora.accumulo.store;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.accumulo.core.Constants;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.IsolatedScanner;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.MutationsRejectedException;
import org.apache.accumulo.core.client.RowIterator;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.TableDeletedException;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.TableOfflineException;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.impl.Tables;
import org.apache.accumulo.core.client.impl.TabletLocator;
import org.apache.accumulo.core.client.mock.MockConnector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.mock.MockTabletLocator;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.KeyExtent;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.SortedKeyIterator;
import org.apache.accumulo.core.iterators.user.TimestampFilter;
import org.apache.accumulo.core.master.state.tables.TableState;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.accumulo.core.security.thrift.AuthInfo;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.core.util.TextUtil;
import org.apache.accumulo.core.util.UtilWaitThread;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.util.Utf8;
import org.apache.gora.accumulo.encoders.Encoder;
import org.apache.gora.accumulo.query.AccumuloQuery;
import org.apache.gora.accumulo.query.AccumuloResult;
import org.apache.gora.persistency.ListGenericArray;
import org.apache.gora.persistency.Persistent;
import org.apache.gora.persistency.State;
import org.apache.gora.persistency.StateManager;
import org.apache.gora.persistency.StatefulHashMap;
import org.apache.gora.persistency.StatefulMap;
import org.apache.gora.persistency.impl.PersistentBase;
import org.apache.gora.query.PartitionQuery;
import org.apache.gora.query.Query;
import org.apache.gora.query.Result;
import org.apache.gora.query.impl.PartitionQueryImpl;
import org.apache.gora.store.DataStoreFactory;
import org.apache.gora.store.impl.DataStoreBase;
import org.apache.gora.util.AvroUtils;
import org.apache.hadoop.io.Text;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;

/**
 * Gora {@link org.apache.gora.store.DataStore} backed by Apache Accumulo.
 * Serialization between Avro/Java values and Accumulo byte arrays is delegated
 * to a pluggable {@link Encoder} (configured via the mapping file; defaults to
 * the binary encoder).
 */
public class AccumuloStore<K,T extends PersistentBase> extends DataStoreBase<K,T> {

  protected static final String MOCK_PROPERTY = "accumulo.mock";
  protected static final String INSTANCE_NAME_PROPERTY = "accumulo.instance";
  protected static final String ZOOKEEPERS_NAME_PROPERTY = "accumulo.zookeepers";
  protected static final String USERNAME_PROPERTY = "accumulo.user";
  protected static final String PASSWORD_PROPERTY = "accumulo.password";
  protected static final String DEFAULT_MAPPING_FILE = "gora-accumulo-mapping.xml";

  private Connector conn;
  private BatchWriter batchWriter;   // lazily created in getBatchWriter()
  private AccumuloMapping mapping;
  private AuthInfo authInfo;
  private Encoder encoder;           // value codec chosen from the mapping

  /** Decodes {@code data} into the Java value for the given Avro schema using this store's encoder. */
  public Object fromBytes(Schema schema, byte data[]) {
    return fromBytes(encoder, schema, data);
  }

  /**
   * Decodes {@code data} according to the Avro schema type. Supports the scalar
   * types plus STRING (Utf8), BYTES (ByteBuffer) and ENUM (ordinal-encoded int);
   * any other schema type is rejected with IllegalArgumentException.
   */
  public static Object fromBytes(Encoder encoder, Schema schema, byte data[]) {
    switch (schema.getType()) {
      case BOOLEAN:
        return encoder.decodeBoolean(data);
      case DOUBLE:
        return encoder.decodeDouble(data);
      case FLOAT:
        return encoder.decodeFloat(data);
      case INT:
        return encoder.decodeInt(data);
      case LONG:
        return encoder.decodeLong(data);
      case STRING:
        return new Utf8(data);
      case BYTES:
        return ByteBuffer.wrap(data);
      case ENUM:
        return AvroUtils.getEnumValue(schema, encoder.decodeInt(data));
    }
    throw new IllegalArgumentException("Unknown type " + schema.getType());
  }

  /** Decodes a row key from bytes using this store's encoder. */
  public K fromBytes(Class<K> clazz, byte[] val) {
    return fromBytes(encoder, clazz, val);
  }

  /**
   * Decodes {@code val} into an instance of {@code clazz} (primitive wrappers,
   * String or Utf8). Encoder IOExceptions are rethrown as RuntimeException.
   */
  @SuppressWarnings("unchecked")
  public static <K> K fromBytes(Encoder encoder, Class<K> clazz, byte[] val) {
    try {
      if (clazz.equals(Byte.TYPE) || clazz.equals(Byte.class)) {
        return (K) Byte.valueOf(encoder.decodeByte(val));
      } else if (clazz.equals(Boolean.TYPE) || clazz.equals(Boolean.class)) {
        return (K) Boolean.valueOf(encoder.decodeBoolean(val));
      } else if (clazz.equals(Short.TYPE) || clazz.equals(Short.class)) {
        return (K) Short.valueOf(encoder.decodeShort(val));
      } else if (clazz.equals(Integer.TYPE) || clazz.equals(Integer.class)) {
        return (K) Integer.valueOf(encoder.decodeInt(val));
      } else if (clazz.equals(Long.TYPE) || clazz.equals(Long.class)) {
        return (K) Long.valueOf(encoder.decodeLong(val));
      } else if (clazz.equals(Float.TYPE) || clazz.equals(Float.class)) {
        return (K) Float.valueOf(encoder.decodeFloat(val));
      } else if (clazz.equals(Double.TYPE) || clazz.equals(Double.class)) {
        return (K) Double.valueOf(encoder.decodeDouble(val));
      } else if (clazz.equals(String.class)) {
        return (K) new String(val, "UTF-8");
      } else if (clazz.equals(Utf8.class)) {
        return (K) new Utf8(val);
      }
      throw new IllegalArgumentException("Unknown type " + clazz.getName());
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }
  }

  // Returns b itself when it already covers exactly [offset, offset+len);
  // otherwise copies that range into a fresh array.
  private static byte[] copyIfNeeded(byte b[], int offset, int len) {
    if (len != b.length || offset != 0) {
      byte copy[] = new byte[len];
      System.arraycopy(b, offset, copy, 0, copy.length);
      b = copy;
    }
    return b;
  }

  /** Encodes a value into Accumulo bytes using this store's encoder. */
  public byte[] toBytes(Object o) {
    return toBytes(encoder, o);
  }

  /**
   * Encodes a supported value (String/Utf8, ByteBuffer, numeric wrappers,
   * Boolean, or an Enum's ordinal) into bytes. ByteBuffers are encoded from
   * position to limit without consuming the buffer. Encoder IOExceptions are
   * rethrown as RuntimeException; unsupported types raise
   * IllegalArgumentException.
   */
  public static byte[] toBytes(Encoder encoder, Object o) {
    try {
      if (o instanceof String) {
        return ((String) o).getBytes("UTF-8");
      } else if (o instanceof Utf8) {
        return copyIfNeeded(((Utf8) o).getBytes(), 0, ((Utf8) o).getLength());
      } else if (o instanceof ByteBuffer) {
        return copyIfNeeded(((ByteBuffer) o).array(), ((ByteBuffer) o).arrayOffset() + ((ByteBuffer) o).position(), ((ByteBuffer) o).remaining());
      } else if (o instanceof Long) {
        return encoder.encodeLong((Long) o);
      } else if (o instanceof Integer) {
        return encoder.encodeInt((Integer) o);
      } else if (o instanceof Short) {
        return encoder.encodeShort((Short) o);
      } else if (o instanceof Byte) {
        return encoder.encodeByte((Byte) o);
      } else if (o instanceof Boolean) {
        return encoder.encodeBoolean((Boolean) o);
      } else if (o instanceof Float) {
        return encoder.encodeFloat((Float) o);
      } else if (o instanceof Double) {
        return encoder.encodeDouble((Double) o);
      } else if (o instanceof Enum) {
        return encoder.encodeInt(((Enum) o).ordinal());
      }
    } catch (IOException ioe) {
      throw new RuntimeException(ioe);
    }

    throw new IllegalArgumentException("Uknown type " + o.getClass().getName());
  }

  // Lazily creates the shared BatchWriter for the mapped table
  // (10 MB buffer, 60 s max latency, 4 writer threads).
  private BatchWriter getBatchWriter() throws IOException {
    if (batchWriter == null)
      try {
        batchWriter = conn.createBatchWriter(mapping.tableName, 10000000, 60000l, 4);
      } catch (TableNotFoundException e) {
        throw new IOException(e);
      }
    return batchWriter;
  }

  /**
   * Initializes the store: reads the mapping file, instantiates the configured
   * encoder (falling back to the binary encoder), and connects to either a
   * mock or a real ZooKeeper-backed Accumulo instance depending on the
   * {@code accumulo.mock} property.
   */
  @Override
  public void initialize(Class<K> keyClass, Class<T> persistentClass, Properties properties) throws IOException {
    super.initialize(keyClass, persistentClass, properties);

    String mock = DataStoreFactory.findProperty(properties, this, MOCK_PROPERTY, null);
    String mappingFile = DataStoreFactory.getMappingFile(properties, this, DEFAULT_MAPPING_FILE);
    String user = DataStoreFactory.findProperty(properties, this, USERNAME_PROPERTY, null);
    String password = DataStoreFactory.findProperty(properties, this, PASSWORD_PROPERTY, null);

    mapping = readMapping(mappingFile);

    if (mapping.encoder == null || mapping.encoder.equals("")) {
      encoder = new org.apache.gora.accumulo.encoders.BinaryEncoder();
    } else {
      try {
        encoder = (Encoder) getClass().getClassLoader().loadClass(mapping.encoder).newInstance();
      } catch (InstantiationException e) {
        throw new IOException(e);
      } catch (IllegalAccessException e) {
        throw new IOException(e);
      } catch (ClassNotFoundException e) {
        throw new IOException(e);
      }
    }

    try {
      if (mock == null || !mock.equals("true")) {
        String instance = DataStoreFactory.findProperty(properties, this,
INSTANCE_NAME_PROPERTY, null); String zookeepers = DataStoreFactory.findProperty(properties, this, ZOOKEEPERS_NAME_PROPERTY, null); conn = new ZooKeeperInstance(instance, zookeepers).getConnector(user, password); authInfo = new AuthInfo(user, ByteBuffer.wrap(password.getBytes()), conn.getInstance().getInstanceID()); } else { conn = new MockInstance().getConnector(user, password); } if (autoCreateSchema) createSchema(); } catch (AccumuloException e) { throw new IOException(e); } catch (AccumuloSecurityException e) { throw new IOException(e); } } protected AccumuloMapping readMapping(String filename) throws IOException { try { AccumuloMapping mapping = new AccumuloMapping(); DocumentBuilder db = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document dom = db.parse(getClass().getClassLoader().getResourceAsStream(filename)); Element root = dom.getDocumentElement(); NodeList nl = root.getElementsByTagName("class"); for (int i = 0; i < nl.getLength(); i++) { Element classElement = (Element) nl.item(i); if (classElement.getAttribute("keyClass").equals(keyClass.getCanonicalName()) && classElement.getAttribute("name").equals(persistentClass.getCanonicalName())) { mapping.tableName = getSchemaName(classElement.getAttribute("table"), persistentClass); mapping.encoder = classElement.getAttribute("encoder"); NodeList fields = classElement.getElementsByTagName("field"); for (int j = 0; j < fields.getLength(); j++) { Element fieldElement = (Element) fields.item(j); String name = fieldElement.getAttribute("name"); String family = fieldElement.getAttribute("family"); String qualifier = fieldElement.getAttribute("qualifier"); if (qualifier.equals("")) qualifier = null; Pair<Text,Text> col = new Pair<Text,Text>(new Text(family), qualifier == null ? 
null : new Text(qualifier)); mapping.fieldMap.put(name, col); mapping.columnMap.put(col, name); } } } nl = root.getElementsByTagName("table"); for (int i = 0; i < nl.getLength(); i++) { Element tableElement = (Element) nl.item(i); if (tableElement.getAttribute("name").equals(mapping.tableName)) { NodeList configs = tableElement.getElementsByTagName("config"); for (int j = 0; j < configs.getLength(); j++) { Element configElement = (Element) configs.item(j); String key = configElement.getAttribute("key"); String val = configElement.getAttribute("value"); mapping.tableConfig.put(key, val); } } } return mapping; } catch (Exception ex) { throw new IOException(ex); } } @Override public String getSchemaName() { return mapping.tableName; } @Override public void createSchema() throws IOException { try { conn.tableOperations().create(mapping.tableName); Set<Entry<String,String>> es = mapping.tableConfig.entrySet(); for (Entry<String,String> entry : es) { conn.tableOperations().setProperty(mapping.tableName, entry.getKey(), entry.getValue()); } } catch (AccumuloException e) { throw new IOException(e); } catch (AccumuloSecurityException e) { throw new IOException(e); } catch (TableExistsException e) { return; } } @Override public void deleteSchema() throws IOException { try { if (batchWriter != null) batchWriter.close(); batchWriter = null; conn.tableOperations().delete(mapping.tableName); } catch (AccumuloException e) { throw new IOException(e); } catch (AccumuloSecurityException e) { throw new IOException(e); } catch (TableNotFoundException e) { return; } } @Override public boolean schemaExists() throws IOException { return conn.tableOperations().exists(mapping.tableName); } public ByteSequence populate(Iterator<Entry<Key,Value>> iter, T persistent) throws IOException { ByteSequence row = null; Map currentMap = null; ArrayList currentArray = null; Text currentFam = null; int currentPos = 0; Schema currentSchema = null; Field currentField = null; while (iter.hasNext()) { 
Entry<Key,Value> entry = iter.next(); if (currentMap != null) { if (currentFam.equals(entry.getKey().getColumnFamily())) { currentMap.put(new Utf8(entry.getKey().getColumnQualifierData().toArray()), fromBytes(currentSchema, entry.getValue().get())); continue; } else { persistent.put(currentPos, currentMap); currentMap = null; } } else if (currentArray != null) { if (currentFam.equals(entry.getKey().getColumnFamily())) { currentArray.add(fromBytes(currentSchema, entry.getValue().get())); continue; } else { persistent.put(currentPos, new ListGenericArray<T>(currentField.schema(), currentArray)); currentArray = null; } } if (row == null) row = entry.getKey().getRowData(); String fieldName = mapping.columnMap.get(new Pair<Text,Text>(entry.getKey().getColumnFamily(), entry.getKey().getColumnQualifier())); if (fieldName == null) fieldName = mapping.columnMap.get(new Pair<Text,Text>(entry.getKey().getColumnFamily(), null)); Field field = fieldMap.get(fieldName); switch (field.schema().getType()) { case MAP: currentMap = new StatefulHashMap(); currentPos = field.pos(); currentFam = entry.getKey().getColumnFamily(); currentSchema = field.schema().getValueType(); currentMap.put(new Utf8(entry.getKey().getColumnQualifierData().toArray()), fromBytes(currentSchema, entry.getValue().get())); break; case ARRAY: currentArray = new ArrayList(); currentPos = field.pos(); currentFam = entry.getKey().getColumnFamily(); currentSchema = field.schema().getElementType(); currentField = field; currentArray.add(fromBytes(currentSchema, entry.getValue().get())); break; case RECORD: SpecificDatumReader reader = new SpecificDatumReader(field.schema()); byte[] val = entry.getValue().get(); // TODO reuse decoder BinaryDecoder decoder = DecoderFactory.defaultFactory().createBinaryDecoder(val, null); persistent.put(field.pos(), reader.read(null, decoder)); break; default: persistent.put(field.pos(), fromBytes(field.schema(), entry.getValue().get())); } } if (currentMap != null) { 
persistent.put(currentPos, currentMap); } else if (currentArray != null) { persistent.put(currentPos, new ListGenericArray<T>(currentField.schema(), currentArray)); } persistent.clearDirty(); return row; } private void setFetchColumns(Scanner scanner, String fields[]) { fields = getFieldsToQuery(fields); for (String field : fields) { Pair<Text,Text> col = mapping.fieldMap.get(field); if (col.getSecond() == null) { scanner.fetchColumnFamily(col.getFirst()); } else { scanner.fetchColumn(col.getFirst(), col.getSecond()); } } } @Override public T get(K key, String[] fields) throws IOException { try { // TODO make isolated scanner optional? Scanner scanner = new IsolatedScanner(conn.createScanner(mapping.tableName, Constants.NO_AUTHS)); Range rowRange = new Range(new Text(toBytes(key))); scanner.setRange(rowRange); setFetchColumns(scanner, fields); T persistent = newPersistent(); ByteSequence row = populate(scanner.iterator(), persistent); if (row == null) return null; return persistent; } catch (TableNotFoundException e) { return null; } } @Override public void put(K key, T val) throws IOException { Mutation m = new Mutation(new Text(toBytes(key))); Schema schema = val.getSchema(); StateManager stateManager = val.getStateManager(); Iterator<Field> iter = schema.getFields().iterator(); int count = 0; for (int i = 0; iter.hasNext(); i++) { Field field = iter.next(); if (!stateManager.isDirty(val, i)) { continue; } Object o = val.get(i); Pair<Text,Text> col = mapping.fieldMap.get(field.name()); switch (field.schema().getType()) { case MAP: if (o instanceof StatefulMap) { StatefulMap map = (StatefulMap) o; Set<?> es = map.states().entrySet(); for (Object entry : es) { Object mapKey = ((Entry) entry).getKey(); State state = (State) ((Entry) entry).getValue(); switch (state) { case NEW: case DIRTY: m.put(col.getFirst(), new Text(toBytes(mapKey)), new Value(toBytes(map.get(mapKey)))); count++; break; case DELETED: m.putDelete(col.getFirst(), new Text(toBytes(mapKey))); 
count++; break; } } } else { Map map = (Map) o; Set<?> es = map.entrySet(); for (Object entry : es) { Object mapKey = ((Entry) entry).getKey(); Object mapVal = ((Entry) entry).getValue(); m.put(col.getFirst(), new Text(toBytes(mapKey)), new Value(toBytes(mapVal))); count++; } } break; case ARRAY: GenericArray array = (GenericArray) o; int j = 0; for (Object item : array) { m.put(col.getFirst(), new Text(toBytes(j++)), new Value(toBytes(item))); count++; } break; case RECORD: SpecificDatumWriter writer = new SpecificDatumWriter(field.schema()); ByteArrayOutputStream os = new ByteArrayOutputStream(); BinaryEncoder encoder = new BinaryEncoder(os); writer.write(o, encoder); encoder.flush(); m.put(col.getFirst(), col.getSecond(), new Value(os.toByteArray())); break; default: m.put(col.getFirst(), col.getSecond(), new Value(toBytes(o))); count++; } } if (count > 0) try { getBatchWriter().addMutation(m); } catch (MutationsRejectedException e) { throw new IOException(e); } } @Override public boolean delete(K key) throws IOException { Query<K,T> q = newQuery(); q.setKey(key); return deleteByQuery(q) > 0; } @Override public long deleteByQuery(Query<K,T> query) throws IOException { try { Scanner scanner = createScanner(query); // add iterator that drops values on the server side scanner.addScanIterator(new IteratorSetting(Integer.MAX_VALUE, SortedKeyIterator.class)); RowIterator iterator = new RowIterator(scanner.iterator()); long count = 0; while (iterator.hasNext()) { Iterator<Entry<Key,Value>> row = iterator.next(); Mutation m = null; while (row.hasNext()) { Entry<Key,Value> entry = row.next(); Key key = entry.getKey(); if (m == null) m = new Mutation(key.getRow()); // TODO optimize to avoid continually creating column vis? 
prob does not matter for empty m.putDelete(key.getColumnFamily(), key.getColumnQualifier(), new ColumnVisibility(key.getColumnVisibility()), key.getTimestamp()); } getBatchWriter().addMutation(m); count++; } return count; } catch (TableNotFoundException e) { // TODO return 0? throw new IOException(e); } catch (MutationsRejectedException e) { throw new IOException(e); } } private Range createRange(Query<K,T> query) { Text startRow = null; Text endRow = null; if (query.getStartKey() != null) startRow = new Text(toBytes(query.getStartKey())); if (query.getEndKey() != null) endRow = new Text(toBytes(query.getEndKey())); return new Range(startRow, true, endRow, true); } private Scanner createScanner(Query<K,T> query) throws TableNotFoundException { // TODO make isolated scanner optional? Scanner scanner = new IsolatedScanner(conn.createScanner(mapping.tableName, Constants.NO_AUTHS)); setFetchColumns(scanner, query.getFields()); scanner.setRange(createRange(query)); if (query.getStartTime() != -1 || query.getEndTime() != -1) { IteratorSetting is = new IteratorSetting(30, TimestampFilter.class); if (query.getStartTime() != -1) TimestampFilter.setStart(is, query.getStartTime(), true); if (query.getEndTime() != -1) TimestampFilter.setEnd(is, query.getEndTime(), true); scanner.addScanIterator(is); } return scanner; } @Override public Result<K,T> execute(Query<K,T> query) throws IOException { try { Scanner scanner = createScanner(query); return new AccumuloResult<K,T>(this, query, scanner); } catch (TableNotFoundException e) { // TODO return empty result? 
throw new IOException(e); } } @Override public Query<K,T> newQuery() { return new AccumuloQuery<K,T>(this); } Text pad(Text key, int bytes) { if (key.getLength() < bytes) key = new Text(key); while (key.getLength() < bytes) { key.append(new byte[] {0}, 0, 1); } return key; } @Override public List<PartitionQuery<K,T>> getPartitions(Query<K,T> query) throws IOException { try { TabletLocator tl; if (conn instanceof MockConnector) tl = new MockTabletLocator(); else tl = TabletLocator.getInstance(conn.getInstance(), authInfo, new Text(Tables.getTableId(conn.getInstance(), mapping.tableName))); Map<String,Map<KeyExtent,List<Range>>> binnedRanges = new HashMap<String,Map<KeyExtent,List<Range>>>(); tl.invalidateCache(); while (tl.binRanges(Collections.singletonList(createRange(query)), binnedRanges).size() > 0) { // TODO log? if (!Tables.exists(conn.getInstance(), Tables.getTableId(conn.getInstance(), mapping.tableName))) throw new TableDeletedException(Tables.getTableId(conn.getInstance(), mapping.tableName)); else if (Tables.getTableState(conn.getInstance(), Tables.getTableId(conn.getInstance(), mapping.tableName)) == TableState.OFFLINE) throw new TableOfflineException(conn.getInstance(), Tables.getTableId(conn.getInstance(), mapping.tableName)); UtilWaitThread.sleep(100); tl.invalidateCache(); } List<PartitionQuery<K,T>> ret = new ArrayList<PartitionQuery<K,T>>(); Text startRow = null; Text endRow = null; if (query.getStartKey() != null) startRow = new Text(toBytes(query.getStartKey())); if (query.getEndKey() != null) endRow = new Text(toBytes(query.getEndKey())); //hadoop expects hostnames, accumulo keeps track of IPs... 
so need to convert HashMap<String,String> hostNameCache = new HashMap<String,String>(); for (Entry<String,Map<KeyExtent,List<Range>>> entry : binnedRanges.entrySet()) { String ip = entry.getKey().split(":", 2)[0]; String location = hostNameCache.get(ip); if (location == null) { InetAddress inetAddress = InetAddress.getByName(ip); location = inetAddress.getHostName(); hostNameCache.put(ip, location); } Map<KeyExtent,List<Range>> tablets = entry.getValue(); for (KeyExtent ke : tablets.keySet()) { K startKey = null; if (startRow == null || !ke.contains(startRow)) { if (ke.getPrevEndRow() != null) { startKey = followingKey(encoder, getKeyClass(), TextUtil.getBytes(ke.getPrevEndRow())); } } else { startKey = fromBytes(getKeyClass(), TextUtil.getBytes(startRow)); } K endKey = null; if (endRow == null || !ke.contains(endRow)) { if (ke.getEndRow() != null) endKey = lastPossibleKey(encoder, getKeyClass(), TextUtil.getBytes(ke.getEndRow())); } else { endKey = fromBytes(getKeyClass(), TextUtil.getBytes(endRow)); } PartitionQueryImpl pqi = new PartitionQueryImpl<K,T>(query, startKey, endKey, new String[] {location}); ret.add(pqi); } } return ret; } catch (TableNotFoundException e) { throw new IOException(e); } catch (AccumuloException e) { throw new IOException(e); } catch (AccumuloSecurityException e) { throw new IOException(e); } } static <K> K lastPossibleKey(Encoder encoder, Class<K> clazz, byte[] er) { if (clazz.equals(Byte.TYPE) || clazz.equals(Byte.class)) { throw new UnsupportedOperationException(); } else if (clazz.equals(Boolean.TYPE) || clazz.equals(Boolean.class)) { throw new UnsupportedOperationException(); } else if (clazz.equals(Short.TYPE) || clazz.equals(Short.class)) { return fromBytes(encoder, clazz, encoder.lastPossibleKey(2, er)); } else if (clazz.equals(Integer.TYPE) || clazz.equals(Integer.class)) { return fromBytes(encoder, clazz, encoder.lastPossibleKey(4, er)); } else if (clazz.equals(Long.TYPE) || clazz.equals(Long.class)) { return fromBytes(encoder, 
clazz, encoder.lastPossibleKey(8, er)); } else if (clazz.equals(Float.TYPE) || clazz.equals(Float.class)) { return fromBytes(encoder, clazz, encoder.lastPossibleKey(4, er)); } else if (clazz.equals(Double.TYPE) || clazz.equals(Double.class)) { return fromBytes(encoder, clazz, encoder.lastPossibleKey(8, er)); } else if (clazz.equals(String.class)) { throw new UnsupportedOperationException(); } else if (clazz.equals(Utf8.class)) { return fromBytes(encoder, clazz, er); } throw new IllegalArgumentException("Unknown type " + clazz.getName()); } /** * @param keyClass * @param bytes * @return */ static <K> K followingKey(Encoder encoder, Class<K> clazz, byte[] per) { if (clazz.equals(Byte.TYPE) || clazz.equals(Byte.class)) { return (K) Byte.valueOf(encoder.followingKey(1, per)[0]); } else if (clazz.equals(Boolean.TYPE) || clazz.equals(Boolean.class)) { throw new UnsupportedOperationException(); } else if (clazz.equals(Short.TYPE) || clazz.equals(Short.class)) { return fromBytes(encoder, clazz, encoder.followingKey(2, per)); } else if (clazz.equals(Integer.TYPE) || clazz.equals(Integer.class)) { return fromBytes(encoder, clazz, encoder.followingKey(4, per)); } else if (clazz.equals(Long.TYPE) || clazz.equals(Long.class)) { return fromBytes(encoder, clazz, encoder.followingKey(8, per)); } else if (clazz.equals(Float.TYPE) || clazz.equals(Float.class)) { return fromBytes(encoder, clazz, encoder.followingKey(4, per)); } else if (clazz.equals(Double.TYPE) || clazz.equals(Double.class)) { return fromBytes(encoder, clazz, encoder.followingKey(8, per)); } else if (clazz.equals(String.class)) { throw new UnsupportedOperationException(); } else if (clazz.equals(Utf8.class)) { return fromBytes(encoder, clazz, Arrays.copyOf(per, per.length + 1)); } throw new IllegalArgumentException("Unknown type " + clazz.getName()); } @Override public void flush() throws IOException { try { if (batchWriter != null) { batchWriter.flush(); } } catch (MutationsRejectedException e) { throw new 
IOException(e); } } @Override public void close() throws IOException { try { if (batchWriter != null) { batchWriter.close(); batchWriter = null; } } catch (MutationsRejectedException e) { throw new IOException(e); } } }
package com.graphlib.graph.layout;

import java.util.ArrayList;
import java.util.List;

import com.graphlib.graph.layout.Spline.BezierCurve;

/**
 * A polygon represents a closed two-dimensional region bounded by a set of line
 * segments. The endpoints of the line segments form the vertices of the
 * polygon.
 *
 * @author Chitresh Kakwani
 *
 */
public final class Polygon {

    // Vertices in order; consecutive points (wrapping around) form the edges.
    private List<Point> points = new ArrayList<>();

    /** Creates an empty polygon with no vertices. */
    public Polygon() {
    }

    /** Copy constructor: takes a snapshot of the other polygon's vertices. */
    public Polygon(Polygon poly) {
        setPoints(poly.getPoints());
    }

    /** Appends a vertex to the polygon's boundary. */
    public void addPoint(Point p) {
        points.add(p);
    }

    /** Removes and returns the vertex at the given index. */
    public Point removePoint(int index) {
        return points.remove(index);
    }

    /** Returns the vertex at the given index. */
    public Point getPoint(int index) {
        return points.get(index);
    }

    /** Returns a defensive copy of the vertex list. */
    public List<Point> getPoints() {
        List<Point> res = new ArrayList<>();
        res.addAll(points);
        return res;
    }

    /** Replaces all vertices with a copy of the given list. */
    public void setPoints(List<Point> points) {
        this.points.clear();
        this.points.addAll(points);
    }

    /** Returns the number of vertices. */
    public int getNumPoints() {
        return points.size();
    }

    /**
     * Checks if the line segment formed by joining the points at the given
     * indices lies entirely within the polygon or not.
     *
     * @param p1Index
     * @param p2Index
     * @return True if the line segment lies entirely within the polygon, false
     *         otherwise
     */
    public boolean isDiagonal(int p1Index, int p2Index) {
        Point p1 = points.get(p1Index);
        Point p2 = points.get(p2Index);
        boolean result;

        /*
         * Neighborhood test: verify the segment leaves p1 towards the polygon
         * interior, using the orientation of p1's two boundary neighbors.
         */
        Point ccwNeighbor = points.get((p1Index + 1) % points.size());
        Point cwNeighbor = points.get((p1Index + points.size() - 1) % points.size());

        if (Point.getOrientation(cwNeighbor, p1, ccwNeighbor) == Point.Orientation.COUNTER_CLOCKWISE) {
            // p1 is a convex vertex: p2 must lie inside the cone formed by its neighbors.
            result = (Point.getOrientation(p1, p2, cwNeighbor) == Point.Orientation.COUNTER_CLOCKWISE)
                    && (Point.getOrientation(p2, p1, ccwNeighbor) == Point.Orientation.COUNTER_CLOCKWISE);
        } else {
            /*
             * p1 is a reflex vertex. (cwNeighbor, p1, ccwNeighbor) are assumed
             * to be non colinear.
             */
            result = Point.getOrientation(p1, p2, ccwNeighbor) == Point.Orientation.CLOCKWISE;
        }

        if (!result) {
            return false;
        }

        /*
         * Check if the line segment formed by the given points intersects any
         * edge of the polygon. If it does, then the line segment is not
         * entirely inside the polygon. Edges sharing an endpoint with the
         * segment are skipped.
         */
        for (int i = 0; i < points.size(); i++) {
            Point j1 = points.get(i);
            Point j2 = points.get((i + 1) % points.size());
            if (!(j1.equals(p1) || j2.equals(p1) || j1.equals(p2) || j2.equals(p2))) {
                if (Point.intersects(p1, p2, j1, j2)) {
                    return false;
                }
            }
        }

        return true;
    }

    /**
     * Decomposes the polygon into a set of triangles with pairwise
     * non-intersecting interiors whose union is the polygon.
     *
     * @return List of triangles whose union is the polygon
     */
    public List<Triangle> triangulate() {
        return triangulateRec(this);
    }

    /**
     * Recursive ear-clipping step: finds a vertex whose two neighbors form a
     * diagonal, cuts off that triangle ("ear"), and recurses on the remaining
     * polygon until only a triangle is left.
     *
     * @param poly polygon to triangulate (not modified; copies are made)
     * @return triangles covering {@code poly}
     */
    private static List<Triangle> triangulateRec(Polygon poly) {
        List<Triangle> triangles = new ArrayList<>();

        if (poly.getNumPoints() > 3) {
            for (int pnli = 0; pnli < poly.getNumPoints(); pnli++) {
                int pnlip1 = (pnli + 1) % poly.getNumPoints();
                int pnlip2 = (pnli + 2) % poly.getNumPoints();
                if (poly.isDiagonal(pnli, pnlip2)) {
                    // (pnli, pnlip1, pnlip2) is an ear: record it and clip pnlip1.
                    triangles.add(new Triangle(poly.getPoint(pnli), poly.getPoint(pnlip1), poly.getPoint(pnlip2)));
                    Polygon poly2 = new Polygon(poly);
                    poly2.removePoint(pnlip1);
                    triangles.addAll(triangulateRec(poly2));
                    return triangles;
                }
            }
            // A simple polygon always has an ear; reaching here means bad input.
            throw new IllegalStateException("Polygon triangulation failure: " + poly.getPoints());
        } else {
            triangles.add(new Triangle(poly.getPoint(0), poly.getPoint(1), poly.getPoint(2)));
        }

        return triangles;
    }

    /**
     * Builds the dual graph of this polygon's triangulation: one vertex per
     * triangle, and a pair of directed {@link Diagonal} edges (one in each
     * direction) between every two triangles that share an edge.
     */
    public DualGraph getDualGraph() {
        DualGraph dg = new DualGraph();
        List<Triangle> triangles = triangulate();
        for (Triangle t : triangles) {
            dg.addVertex(t);
            for (Triangle t1 : triangles) {
                if (!t1.equals(t)) {
                    dg.addVertex(t1);
                    Point[] sharedEdge = t.getSharedEdge(t1);
                    if (sharedEdge != null) {
                        dg.addEdge(new Diagonal(t, t1, sharedEdge[0], sharedEdge[1]));
                        dg.addEdge(new Diagonal(t1, t, sharedEdge[0], sharedEdge[1]));
                    }
                }
            }
        }
        return dg;
    }

    /**
     * Checks if the given Bezier curve lies inside the polygon or not assuming
     * the first and last control points of the curve are inside the polygon.
     *
     * @param curve Bezier curve to test for containment
     * @return true if the curve is inside the polygon, false otherwise
     */
    public boolean containsCurve(BezierCurve curve) {
        Point[] line = new Point[2];
        // Test the curve against every polygon edge for a genuine crossing.
        for (int i = 0; i < points.size(); i++) {
            line[0] = points.get(i);
            line[1] = points.get((i + 1) % points.size());
            List<Double> roots = curve.getLineIntersectionRoots(line[0], line[1]);
            if (roots == null) {
                continue;
            }

            for (int r = 0; r < roots.size(); r++) {
                if (roots.get(r) < GraphLayoutParameters.DOUBLE_PRECISION
                        || roots.get(r) > 1 - GraphLayoutParameters.DOUBLE_PRECISION) {
                    /*
                     * Skip the roots that are approximately 0 or 1. They
                     * correspond to (x,y) values on the curve near first and
                     * last control points. Since the first and last control
                     * points of the curve are known to be inside the polygon at
                     * this point, the roots with values approximately 0 or 1
                     * are going to be inside the polygon as well.
                     */
                    continue;
                }

                Point intersectionPoint = curve.getCurvePoint(roots.get(r));

                if (intersectionPoint.distanceSquared(line[0]) < Point.PRECISION
                        || intersectionPoint.distanceSquared(line[1]) < Point.PRECISION) {
                    /*
                     * If the intersection point lies approximately on the
                     * endpoints of the line segment, i.e., corners of the
                     * polygon, it can be ignored.
                     */
                    continue;
                }

                return false;
            }
        }
        return true;
    }
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.common.geo.builders; import org.elasticsearch.core.Tuple; import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.LinearRing; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Polygon; import org.locationtech.spatial4j.exception.InvalidShapeException; import org.locationtech.spatial4j.shape.jts.JtsGeometry; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Objects; import java.util.concurrent.atomic.AtomicBoolean; /** * The {@link PolygonBuilder} implements the groundwork to create polygons. This contains * Methods to wrap polygons at the dateline and building shapes from the data held by the * builder. 
*/ public class PolygonBuilder extends ShapeBuilder<JtsGeometry, org.elasticsearch.geometry.Geometry, PolygonBuilder> { public static final GeoShapeType TYPE = GeoShapeType.POLYGON; private static final Coordinate[][] EMPTY = new Coordinate[0][]; private Orientation orientation = Orientation.RIGHT; // line string defining the shell of the polygon private LineStringBuilder shell; // List of line strings defining the holes of the polygon private final List<LineStringBuilder> holes = new ArrayList<>(); public PolygonBuilder(LineStringBuilder lineString, Orientation orientation, boolean coerce) { this.orientation = orientation; if (coerce) { lineString.close(); } validateLinearRing(lineString); this.shell = lineString; } public PolygonBuilder(LineStringBuilder lineString, Orientation orientation) { this(lineString, orientation, false); } public PolygonBuilder(CoordinatesBuilder coordinates, Orientation orientation) { this(new LineStringBuilder(coordinates), orientation, false); } public PolygonBuilder(CoordinatesBuilder coordinates) { this(coordinates, Orientation.RIGHT); } /** * Read from a stream. 
     */
    // Stream constructor: reads shell ring, orientation, then a vint-counted list of hole rings,
    // in exactly the order writeTo(StreamOutput) emits them.
    public PolygonBuilder(StreamInput in) throws IOException {
        shell = new LineStringBuilder(in);
        orientation = Orientation.readFrom(in);
        int holes = in.readVInt();
        for (int i = 0; i < holes; i++) {
            hole(new LineStringBuilder(in));
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Mirror of the StreamInput constructor: shell, orientation, hole count, then each hole.
        shell.writeTo(out);
        orientation.writeTo(out);
        out.writeVInt(holes.size());
        for (LineStringBuilder hole : holes) {
            hole.writeTo(out);
        }
    }

    /** @return the ring orientation configured for this builder */
    public Orientation orientation() {
        return this.orientation;
    }

    /**
     * Add a new hole to the polygon
     * @param hole linear ring defining the hole
     * @return this
     */
    public PolygonBuilder hole(LineStringBuilder hole) {
        return this.hole(hole, false);
    }

    /**
     * Add a new hole to the polygon
     * @param hole linear ring defining the hole
     * @param coerce if set to true, it will try to close the hole by adding starting point as end point
     * @return this
     */
    public PolygonBuilder hole(LineStringBuilder hole, boolean coerce) {
        if (coerce) {
            hole.close();
        }
        validateLinearRing(hole);
        holes.add(hole);
        return this;
    }

    /**
     * @return the list of holes defined for this polygon
     */
    public List<LineStringBuilder> holes() {
        return this.holes;
    }

    /**
     * @return the list of points of the shell for this polygon
     */
    public LineStringBuilder shell() {
        return this.shell;
    }

    /**
     * Close the shell of the polygon
     */
    public PolygonBuilder close() {
        shell.close();
        return this;
    }

    // Throws IllegalArgumentException unless the ring has >= 4 points and is explicitly closed
    // (first point equals last point).
    private static void validateLinearRing(LineStringBuilder lineString) {
        /**
         * Per GeoJSON spec (http://geojson.org/geojson-spec.html#linestring)
         * A LinearRing is closed LineString with 4 or more positions. The first and last positions
         * are equivalent (they represent equivalent points). Though a LinearRing is not explicitly
         * represented as a GeoJSON geometry type, it is referred to in the Polygon geometry type definition.
         */
        List<Coordinate> points = lineString.coordinates;
        if (points.size() < 4) {
            throw new IllegalArgumentException(
                "invalid number of points in LinearRing (found [" + points.size() + "] - must be >= 4)");
        }
        if (points.get(0).equals(points.get(points.size() - 1)) == false) {
            throw new IllegalArgumentException("invalid LinearRing found (coordinates are not closed)");
        }
    }

    /**
     * Validates only 1 vertex is tangential (shared) between the interior and exterior of a polygon
     */
    protected void validateHole(LineStringBuilder shell, LineStringBuilder hole) {
        HashSet<Coordinate> exterior = Sets.newHashSet(shell.coordinates);
        HashSet<Coordinate> interior = Sets.newHashSet(hole.coordinates);
        exterior.retainAll(interior);
        if (exterior.size() >= 2) {
            throw new InvalidShapeException("Invalid polygon, interior cannot share more than one point with the exterior");
        }
    }

    /**
     * The coordinates setup by the builder will be assembled to a polygon. The result will consist of
     * a set of polygons. Each of these components holds a list of linestrings defining the polygon: the
     * first set of coordinates will be used as the shell of the polygon. The others are defined to holes
     * within the polygon.
     * This Method also wraps the polygons at the dateline. In order to this fact the result may
     * contains more polygons and less holes than defined in the builder it self.
     *
     * @return coordinates of the polygon
     */
    public Coordinate[][][] coordinates() {
        // Phase 1: clean the rings (drop duplicate/coplanar points) and validate holes against the shell.
        LineStringBuilder shell = filterRing(this.shell);
        LineStringBuilder[] holes = new LineStringBuilder[this.holes.size()];
        int numEdges = shell.coordinates.size() - 1; // Last point is repeated
        for (int i = 0; i < this.holes.size(); i++) {
            holes[i] = filterRing(this.holes.get(i));
            numEdges += holes[i].coordinates.size() - 1;
            validateHole(shell, holes[i]);
        }

        // Phase 2: build the edge graph — component 0 is the shell, components 1..n are holes.
        Edge[] edges = new Edge[numEdges];
        Edge[] holeComponents = new Edge[holes.length];
        final AtomicBoolean translated = new AtomicBoolean(false);
        int offset = createEdges(0, orientation, shell, null, edges, 0, translated);
        for (int i = 0; i < holes.length; i++) {
            int length = createEdges(i+1, orientation, shell, holes[i], edges, offset, translated);
            holeComponents[i] = edges[offset];
            offset += length;
        }

        // Phase 3: split/merge components at both sides of the dateline, then assemble the result.
        int numHoles = holeComponents.length;
        numHoles = merge(edges, 0, intersections(+DATELINE, edges), holeComponents, numHoles);
        numHoles = merge(edges, 0, intersections(-DATELINE, edges), holeComponents, numHoles);
        return compose(edges, holeComponents, numHoles);
    }

    /**
     * This method removes duplicated points and coplanar points on vertical lines (vertical lines
     * do not cross the dateline).
     */
    private static LineStringBuilder filterRing(LineStringBuilder linearRing) {
        int numPoints = linearRing.coordinates.size();
        List<Coordinate> coordinates = new ArrayList<>();
        // First and last points are always kept, so the ring stays closed.
        coordinates.add(linearRing.coordinates.get(0));
        for (int i = 1; i < numPoints - 1; i++) {
            if (linearRing.coordinates.get(i - 1).x == linearRing.coordinates.get(i).x) {
                if (linearRing.coordinates.get(i - 1).y == linearRing.coordinates.get(i).y) {
                    // same point
                    continue;
                }
                if (linearRing.coordinates.get(i - 1).x == linearRing.coordinates.get(i + 1).x
                        && linearRing.coordinates.get(i - 1).y > linearRing.coordinates.get(i).y
                            != linearRing.coordinates.get(i + 1).y > linearRing.coordinates.get(i).y) {
                    // coplanar
                    continue;
                }
            }
            coordinates.add(linearRing.coordinates.get(i));
        }
        coordinates.add(linearRing.coordinates.get(numPoints - 1));
        return new LineStringBuilder(coordinates);
    }

    @Override
    public JtsGeometry buildS4J() {
        return jtsGeometry(buildS4JGeometry(FACTORY, wrapdateline));
    }

    @Override
    public org.elasticsearch.geometry.Geometry buildGeometry() {
        return toPolygonGeometry();
    }

    // Emits the shell coordinates followed by each hole's coordinates into the XContent builder.
    protected XContentBuilder coordinatesArray(XContentBuilder builder, Params params) throws IOException {
        shell.coordinatesToXcontent(builder, true);
        for (LineStringBuilder hole : holes) {
            hole.coordinatesToXcontent(builder, true);
        }
        return builder;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName());
        builder.field(ShapeParser.FIELD_ORIENTATION.getPreferredName(), orientation.name().toLowerCase(Locale.ROOT));
        builder.startArray(ShapeParser.FIELD_COORDINATES.getPreferredName());
        coordinatesArray(builder, params);
        builder.endArray();
        builder.endObject();
        return builder;
    }

    // When fixDateline is set, splits the polygon at the dateline first; a single resulting
    // component yields a Polygon, multiple components yield a MultiPolygon.
    public Geometry buildS4JGeometry(GeometryFactory factory, boolean fixDateline) {
        if (fixDateline) {
            Coordinate[][][] polygons = coordinates();
            return polygons.length == 1
                ? polygonS4J(factory, polygons[0])
                : multipolygonS4J(factory, polygons);
        } else {
            return toPolygonS4J(factory);
        }
    }

    public Polygon toPolygonS4J() {
        return toPolygonS4J(FACTORY);
    }

    protected Polygon toPolygonS4J(GeometryFactory factory) {
        final LinearRing shell = linearRingS4J(factory, this.shell.coordinates);
        final LinearRing[] holes = new LinearRing[this.holes.size()];
        Iterator<LineStringBuilder> iterator = this.holes.iterator();
        for (int i = 0; iterator.hasNext(); i++) {
            holes[i] = linearRingS4J(factory, iterator.next().coordinates);
        }
        return factory.createPolygon(shell, holes);
    }

    public org.elasticsearch.geometry.Polygon toPolygonGeometry() {
        final List<org.elasticsearch.geometry.LinearRing> holes = new ArrayList<>(this.holes.size());
        for (int i = 0; i < this.holes.size(); ++i) {
            holes.add(linearRing(this.holes.get(i).coordinates));
        }
        return new org.elasticsearch.geometry.Polygon(linearRing(this.shell.coordinates), holes);
    }

    // Splits the coordinate list into parallel x/y arrays as required by the geometry LinearRing.
    protected static org.elasticsearch.geometry.LinearRing linearRing(List<Coordinate> coordinates) {
        return new org.elasticsearch.geometry.LinearRing(coordinates.stream().mapToDouble(i -> i.x).toArray(),
            coordinates.stream().mapToDouble(i -> i.y).toArray()
        );
    }

    protected static LinearRing linearRingS4J(GeometryFactory factory, List<Coordinate> coordinates) {
        return factory.createLinearRing(coordinates.toArray(new Coordinate[coordinates.size()]));
    }

    @Override
    public GeoShapeType type() {
        return TYPE;
    }

    @Override
    public int numDimensions() {
        if (shell == null) {
            throw new IllegalStateException("unable to get number of dimensions, "
                + "Polygon has not yet been initialized");
        }
        return shell.numDimensions();
    }

    // Builds a JTS polygon from one component: ring 0 is the shell, remaining rings are holes.
    protected static Polygon polygonS4J(GeometryFactory factory, Coordinate[][] polygon) {
        LinearRing shell = factory.createLinearRing(polygon[0]);
        LinearRing[] holes;

        if (polygon.length > 1) {
            holes = new LinearRing[polygon.length - 1];
            for (int i = 0; i < holes.length; i++) {
                holes[i] = factory.createLinearRing(polygon[i + 1]);
            }
        } else {
            holes = null;
        }
        return factory.createPolygon(shell, holes);
    }

    /**
     * Create a Multipolygon from a set of coordinates. Each primary array contains a polygon which
     * in turn contains an array of linestrings. These line Strings are represented as an array of
     * coordinates. The first linestring will be the shell of the polygon the others define holes
     * within the polygon.
     *
     * @param factory {@link GeometryFactory} to use
     * @param polygons definition of polygons
     * @return a new Multipolygon
     */
    protected static MultiPolygon multipolygonS4J(GeometryFactory factory, Coordinate[][][] polygons) {
        Polygon[] polygonSet = new Polygon[polygons.length];
        for (int i = 0; i < polygonSet.length; i++) {
            polygonSet[i] = polygonS4J(factory, polygons[i]);
        }
        return factory.createMultiPolygon(polygonSet);
    }

    /**
     * This method sets the component id of all edges in a ring to a given id and shifts the
     * coordinates of this component according to the dateline
     *
     * @param edge An arbitrary edge of the component
     * @param id id to apply to the component
     * @param edges a list of edges to which all edges of the component will be added (could be <code>null</code>)
     * @return number of edges that belong to this component
     */
    private static int component(final Edge edge, final int id, final ArrayList<Edge> edges, double[] partitionPoint) {
        // find a coordinate that is not part of the dateline
        Edge any = edge;
        while (any.coordinate.x == +DATELINE || any.coordinate.x == -DATELINE) {
            if ((any = any.next) == edge) {
                break;
            }
        }

        double shiftOffset = any.coordinate.x > DATELINE ? DATELINE : (any.coordinate.x < -DATELINE ? -DATELINE : 0);
        if (debugEnabled()) {
            LOGGER.debug("shift: [{}]", shiftOffset);
        }

        // run along the border of the component, collect the
        // edges, shift them according to the dateline and
        // update the component id
        int length = 0, connectedComponents = 0;
        // if there are two connected components, splitIndex keeps track of where to split the edge array
        // start at 1 since the source coordinate is shared
        int splitIndex = 1;
        Edge current = edge;
        Edge prev = edge;
        // bookkeep the source and sink of each visited coordinate
        HashMap<Coordinate, Tuple<Edge, Edge>> visitedEdge = new HashMap<>();
        do {
            current.coordinate = shift(current.coordinate, shiftOffset);
            current.component = id;

            if (edges != null) {
                // found a closed loop - we have two connected components so we need to slice into two distinct components
                if (visitedEdge.containsKey(current.coordinate)) {
                    partitionPoint[0] = current.coordinate.x;
                    partitionPoint[1] = current.coordinate.y;
                    partitionPoint[2] = current.coordinate.z;
                    if (connectedComponents > 0 && current.next != edge) {
                        throw new InvalidShapeException("Shape contains more than one shared point");
                    }

                    // a negative id flags the edge as visited for the edges(...) method.
                    // since we're splitting connected components, we want the edges method to visit
                    // the newly separated component
                    final int visitID = -id;
                    Edge firstAppearance = visitedEdge.get(current.coordinate).v2();
                    // correct the graph pointers by correcting the 'next' pointer for both the
                    // first appearance and this appearance of the edge
                    Edge temp = firstAppearance.next;
                    firstAppearance.next = current.next;
                    current.next = temp;
                    current.component = visitID;
                    // backtrack until we get back to this coordinate, setting the visit id to
                    // a non-visited value (anything positive)
                    do {
                        prev.component = visitID;
                        prev = visitedEdge.get(prev.coordinate).v1();
                        ++splitIndex;
                    } while (current.coordinate.equals(prev.coordinate) == false);
                    ++connectedComponents;
                } else {
                    visitedEdge.put(current.coordinate, new Tuple<Edge, Edge>(prev, current));
                }
                edges.add(current);
                prev = current;
            }
            length++;
        } while (connectedComponents == 0 && (current = current.next) != edge);

        return (splitIndex != 1) ? length - splitIndex : length;
    }

    /**
     * Compute all coordinates of a component
     * @param component an arbitrary edge of the component
     * @param coordinates Array of coordinates to write the result to
     * @return the coordinates parameter
     */
    private static Coordinate[] coordinates(Edge component, Coordinate[] coordinates, double[] partitionPoint) {
        for (int i = 0; i < coordinates.length; i++) {
            coordinates[i] = (component = component.next).coordinate;
        }
        // First and last coordinates must be equal
        if (coordinates[0].equals(coordinates[coordinates.length - 1]) == false) {
            // partitionPoint[2] is NaN when no shared point was recorded by component(...);
            // include the z value in the message only when it is meaningful.
            if (Double.isNaN(partitionPoint[2])) {
                throw new InvalidShapeException("Self-intersection at or near point ["
                    + partitionPoint[0] + "," + partitionPoint[1] + "]");
            } else {
                throw new InvalidShapeException("Self-intersection at or near point ["
                    + partitionPoint[0] + "," + partitionPoint[1] + "," + partitionPoint[2] + "]");
            }
        }
        return coordinates;
    }

    // Converts the per-component linestring lists into the final Coordinate[component][ring][point] array.
    private static Coordinate[][][] buildCoordinates(List<List<Coordinate[]>> components) {
        Coordinate[][][] result = new Coordinate[components.size()][][];
        for (int i = 0; i < result.length; i++) {
            List<Coordinate[]> component = components.get(i);
            result[i] = component.toArray(new Coordinate[component.size()][]);
        }

        if (debugEnabled()) {
            for (int i = 0; i < result.length; i++) {
                LOGGER.debug("Component [{}]:", i);
                for (int j = 0; j < result[i].length; j++) {
                    LOGGER.debug("\t{}", Arrays.toString(result[i][j]));
                }
            }
        }
        return result;
    }

    // Materializes each hole component into its coordinate ring, marking the component as visited.
    private static Coordinate[][] holes(Edge[] holes, int numHoles) {
        if (numHoles == 0) {
            return EMPTY;
        }
        final Coordinate[][] points = new Coordinate[numHoles][];

        for (int i = 0; i < numHoles; i++) {
            double[] partitionPoint = new double[3];
            int length = component(holes[i], -(i + 1), null, partitionPoint); // mark as visited by inverting the sign
            points[i] = coordinates(holes[i], new Coordinate[length + 1], partitionPoint);
        }
        return points;
    }

    // Walks the not-yet-visited (component >= 0) edges, turning each into a new output component
    // whose first linestring is the component's shell ring.
    private static Edge[] edges(Edge[] edges, int numHoles, List<List<Coordinate[]>> components) {
        ArrayList<Edge> mainEdges = new ArrayList<>(edges.length);

        for (int i = 0; i < edges.length; i++) {
            if (edges[i].component >= 0) {
                double[] partitionPoint = new double[3];
                int length = component(edges[i], -(components.size() + numHoles + 1), mainEdges, partitionPoint);
                List<Coordinate[]> component = new ArrayList<>();
                component.add(coordinates(edges[i], new Coordinate[length + 1], partitionPoint));
                components.add(component);
            }
        }

        return mainEdges.toArray(new Edge[mainEdges.size()]);
    }

    // Extracts shell components and hole rings from the edge graph, assigns each hole to its
    // containing component, and returns the assembled coordinate arrays.
    private static Coordinate[][][] compose(Edge[] edges, Edge[] holes, int numHoles) {
        final List<List<Coordinate[]>> components = new ArrayList<>();
        assign(holes, holes(holes, numHoles), numHoles, edges(edges, numHoles, components), components);
        return buildCoordinates(components);
    }

    private static void assign(Edge[] holes, Coordinate[][] points, int numHoles, Edge[] edges,
                               List<List<Coordinate[]>> components) {
        // Assign Hole to related components
        // To find the new component the hole belongs to all intersections of the
        // polygon edges with a vertical line are calculated. This vertical line
        // is an arbitrary point of the hole. The polygon edge next to this point
        // is part of the polygon the hole belongs to.
        if (debugEnabled()) {
            LOGGER.debug("Holes: {}", Arrays.toString(holes));
        }
        for (int i = 0; i < numHoles; i++) {
            // To do the assignment we assume (and later, elsewhere, check) that each hole is within
            // a single component, and the components do not overlap. Based on this assumption, it's
            // enough to find a component that contains some vertex of the hole, and
            // holes[i].coordinate is such a vertex, so we use that one.

            // First, we sort all the edges according to their order of intersection with the line
            // of longitude through holes[i].coordinate, in order from south to north. Edges that do
            // not intersect this line are sorted to the end of the array and of no further interest
            // here.
            final Edge current = new Edge(holes[i].coordinate, holes[i].next);
            current.intersect = current.coordinate;
            final int intersections = intersections(current.coordinate.x, edges);

            if (intersections == 0) {
                // There were no edges that intersect the line of longitude through
                // holes[i].coordinate, so there's no way this hole is within the polygon.
                throw new InvalidShapeException("Invalid shape: Hole is not within polygon");
            }

            // Next we do a binary search to find the position of holes[i].coordinate in the array.
            // The binary search returns the index of an exact match, or (-insertionPoint - 1) if
            // the vertex lies between the intersections of edges[insertionPoint] and
            // edges[insertionPoint+1]. The latter case is vastly more common.
            final int pos;
            boolean sharedVertex = false;
            if (((pos = Arrays.binarySearch(edges, 0, intersections, current, INTERSECTION_ORDER)) >= 0)
                && (sharedVertex = (edges[pos].intersect.compareTo(current.coordinate) == 0)) == false) {
                // The binary search returned an exact match, but we checked again using compareTo()
                // and it didn't match after all.

                // TODO Can this actually happen? Needs a test to exercise it, or else needs to be removed.
                throw new InvalidShapeException("Invalid shape: Hole is not within polygon");
            }

            final int index;
            if (sharedVertex) {
                // holes[i].coordinate lies exactly on an edge.
                index = 0; // TODO Should this be pos instead of 0? This assigns exact matches to the southernmost component.
            } else if (pos == -1) {
                // holes[i].coordinate is strictly south of all intersections. Assign it to the
                // southernmost component, and allow later validation to spot that it is not
                // entirely within the chosen component.
                index = 0;
            } else {
                // holes[i].coordinate is strictly north of at least one intersection. Assign it to
                // the component immediately to its south.
                index = -(pos + 2);
            }

            final int component = -edges[index].component - numHoles - 1;

            if (debugEnabled()) {
                LOGGER.debug("\tposition ({}) of edge {}: {}", index, current, edges[index]);
                LOGGER.debug("\tComponent: {}", component);
                LOGGER.debug("\tHole intersections ({}): {}", current.coordinate.x, Arrays.toString(edges));
            }

            components.get(component).add(points[i]);
        }
    }

    private static int merge(Edge[] intersections, int offset, int length, Edge[] holes, int numHoles) {
        // Intersections appear pairwise. On the first edge the inner of
        // of the polygon is entered. On the second edge the outer face
        // is entered. Other kinds of intersections are discard by the
        // intersection function

        for (int i = 0; i < length; i += 2) {
            Edge e1 = intersections[offset + i + 0];
            Edge e2 = intersections[offset + i + 1];

            // If two segments are connected maybe a hole must be deleted
            // Since Edges of components appear pairwise we need to check
            // the second edge only (the first edge is either polygon or
            // already handled)
            if (e2.component > 0) {
                //TODO: Check if we could save the set null step
                numHoles--;
                holes[e2.component - 1] = holes[numHoles];
                holes[numHoles] = null;
            }
            // only connect edges if intersections are pairwise
            // 1. per the comment above, the edge array is sorted by y-value of the intersection
            // with the dateline. Two edges have the same y intercept when they cross the
            // dateline thus they appear sequentially (pairwise) in the edge array. Two edges
            // do not have the same y intercept when we're forming a multi-poly from a poly
            // that wraps the dateline (but there are 2 ordered intercepts).
            // The connect method creates a new edge for these paired edges in the linked list.
            // For boundary conditions (e.g., intersect but not crossing) there is no sibling edge
            // to connect. Thus the first logic check enforces the pairwise rule
            // 2. the second logic check ensures the two candidate edges aren't already connected by an
            // existing edge along the dateline - this is necessary due to a logic change in
            // ShapeBuilder.intersection that computes dateline edges as valid intersect points
            // in support of OGC standards
            if (e1.intersect != Edge.MAX_COORDINATE && e2.intersect != Edge.MAX_COORDINATE
                && (e1.next.next.coordinate.equals3D(e2.coordinate) && Math.abs(e1.next.coordinate.x) == DATELINE
                && Math.abs(e2.coordinate.x) == DATELINE) == false) {
                connect(e1, e2);
            }
        }
        return numHoles;
    }

    private static void connect(Edge in, Edge out) {
        assert in != null && out != null;
        assert in != out;
        // Connecting two Edges by inserting the point at
        // dateline intersection and connect these by adding
        // two edges between this points. One per direction
        if (in.intersect != in.next.coordinate) {
            // NOTE: the order of the object creation is crucial here! Don't change it!
            // first edge has no point on dateline
            Edge e1 = new Edge(in.intersect, in.next);

            if (out.intersect != out.next.coordinate) {
                // second edge has no point on dateline
                Edge e2 = new Edge(out.intersect, out.next);
                in.next = new Edge(in.intersect, e2, in.intersect);
            } else {
                // second edge intersects with dateline
                in.next = new Edge(in.intersect, out.next, in.intersect);
            }
            out.next = new Edge(out.intersect, e1, out.intersect);
        } else if (in.next != out && in.coordinate != out.intersect) {
            // first edge intersects with dateline
            Edge e2 = new Edge(out.intersect, in.next, out.intersect);

            if (out.intersect != out.next.coordinate) {
                // second edge has no point on dateline
                Edge e1 = new Edge(out.intersect, out.next);
                in.next = new Edge(in.intersect, e1, in.intersect);

            } else {
                // second edge intersects with dateline
                in.next = new Edge(in.intersect, out.next, in.intersect);
            }
            out.next = e2;
        }
    }

    // Builds the edge linked-list for one ring (shell when hole == null, otherwise the given hole)
    // into edges[offset..], returning the number of edges created.
    private static int createEdges(int component, Orientation orientation, LineStringBuilder shell,
                                   LineStringBuilder hole, Edge[] edges, int offset, final AtomicBoolean translated) {
        // inner rings (holes) have an opposite direction than the outer rings
        // XOR will invert the orientation for outer ring cases (Truth Table: T/T = F, T/F = T, F/T = T, F/F = F)
        boolean direction = (component == 0 ^ orientation == Orientation.RIGHT);
        // set the points array accordingly (shell or hole)
        Coordinate[] points = (hole != null) ? hole.coordinates(false) : shell.coordinates(false);
        ring(component, direction, orientation == Orientation.LEFT, points, 0, edges, offset, points.length - 1, translated);
        return points.length - 1;
    }

    /**
     * Create a connected list of a list of coordinates
     *
     * @param component
     *            component id the created edges belong to (0 = shell)
     * @param direction
     *            direction of the ring
     * @param handedness
     *            true when the builder orientation is LEFT
     * @param points
     *            array of point
     * @param offset
     *            index of the first point
     * @param edges
     *            Array of edges to write the result to
     * @param toffset
     *            index of the first edge in the result
     * @param length
     *            number of points
     * @param translated
     *            shared flag recording whether the shell was translated across the dateline
     * @return Array of edges
     */
    private static Edge[] ring(int component, boolean direction, boolean handedness,
                               Coordinate[] points, int offset, Edge[] edges, int toffset, int length,
                               final AtomicBoolean translated) {

        // Shoelace formula: signedArea's sign gives the ring's winding; also track the x-range.
        double signedArea = 0;
        double minX = Double.POSITIVE_INFINITY;
        double maxX = Double.NEGATIVE_INFINITY;
        for (int i = offset; i < offset + length; i++) {
            signedArea += points[i].x * points[i + 1].y - points[i].y * points[i + 1].x;
            minX = Math.min(minX, points[i].x);
            maxX = Math.max(maxX, points[i].x);
        }
        if (signedArea == 0) {
            // Points are collinear or self-intersection
            throw new InvalidShapeException("Cannot determine orientation: signed area equal to 0");
        }
        boolean orientation = signedArea < 0;

        // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness)
        // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards
        // thus if orientation is computed as cw, the logic will translate points across dateline
        // and convert to a right handed system

        // calculate range
        final double rng = maxX - minX;
        // translate the points if the following is true
        //   1.  shell orientation is cw and range is greater than a hemisphere (180 degrees) but not spanning 2 hemispheres
        //       (translation would result in a collapsed poly)
        //   2.  the shell of the candidate hole has been translated (to preserve the coordinate system)
        boolean incorrectOrientation = component == 0 && handedness != orientation;
        if ((incorrectOrientation && (rng > DATELINE && rng != 2 * DATELINE)) || (translated.get() && component != 0)) {
            translate(points);
            // flip the translation bit if the shell is being translated
            if (component == 0) {
                translated.set(true);
            }
            // correct the orientation post translation (ccw for shell, cw for holes)
            if (component == 0 || (component != 0 && handedness == orientation)) {
                orientation = orientation == false;
            }
        }
        return concat(component, direction ^ orientation, points, offset, edges, toffset, length);
    }

    /**
     * Concatenate a set of points to a polygon
     *
     * @param component
     *            component id of the polygon
     * @param direction
     *            direction of the ring
     * @param points
     *            list of points to concatenate
     * @param pointOffset
     *            index of the first point
     * @param edges
     *            Array of edges to write the result to
     * @param edgeOffset
     *            index of the first edge in the result
     * @param length
     *            number of points to use
     * @return the edges created
     */
    private static Edge[] concat(int component, boolean direction, Coordinate[] points, final int pointOffset,
                                 Edge[] edges, final int edgeOffset, int length) {
        assert edges.length >= length + edgeOffset;
        assert points.length >= length + pointOffset;
        edges[edgeOffset] = new Edge(points[pointOffset], null);
        for (int i = 1; i < length; i++) {
            if (direction) {
                edges[edgeOffset + i] = new Edge(points[pointOffset + i], edges[edgeOffset + i - 1]);
                edges[edgeOffset + i].component = component;
            } else if (edges[edgeOffset + i - 1].coordinate.equals(points[pointOffset + i]) == false) {
                edges[edgeOffset + i - 1].next = edges[edgeOffset + i] = new Edge(points[pointOffset + i], null);
                edges[edgeOffset + i - 1].component = component;
            } else {
                throw new InvalidShapeException("Provided shape has duplicate consecutive coordinates at: "
                    + points[pointOffset + i]);
            }
        }

        // Close the ring by linking the last edge back to the first (or vice versa for reversed rings).
        if (direction) {
            edges[edgeOffset].setNext(edges[edgeOffset + length - 1]);
            edges[edgeOffset].component = component;
        } else {
            edges[edgeOffset + length - 1].setNext(edges[edgeOffset]);
            edges[edgeOffset + length - 1].component = component;
        }

        return edges;
    }

    /**
     * Transforms coordinates in the eastern hemisphere (-180:0) to a (180:360) range
     */
    private static void translate(Coordinate[] points) {
        for (Coordinate c : points) {
            if (c.x < 0) {
                c.x += 2 * DATELINE;
            }
        }
    }

    @Override
    protected StringBuilder contentToWKT() {
        // WKT polygon body: "(shell, hole1, hole2, ...)"; the outer keyword is added elsewhere.
        StringBuilder sb = new StringBuilder();
        sb.append('(');
        sb.append(ShapeBuilder.coordinateListToWKT(shell.coordinates));
        for (LineStringBuilder hole : holes) {
            sb.append(", ");
            sb.append(ShapeBuilder.coordinateListToWKT(hole.coordinates));
        }
        sb.append(')');
        return sb;
    }

    @Override
    public int hashCode() {
        return Objects.hash(shell, holes, orientation);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        PolygonBuilder other = (PolygonBuilder) obj;
        return Objects.equals(shell, other.shell)
            && Objects.equals(holes, other.holes)
            && Objects.equals(orientation, other.orientation);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.markup.parser.filter;

import java.text.ParseException;

import org.apache.wicket.Application;
import org.apache.wicket.WicketRuntimeException;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.MarkupElement;
import org.apache.wicket.markup.WicketTag;
import org.apache.wicket.markup.parser.AbstractMarkupFilter;
import org.apache.wicket.util.collections.ArrayListStack;
import org.apache.wicket.util.string.StringValueConversionException;
import org.apache.wicket.util.string.Strings;
import org.apache.wicket.util.value.IValueMap;

/**
 * This is a markup inline filter. It identifies xml tags which include a href attribute and which
 * are not Wicket specific components and flags these tags (ComponentTag) as autolink enabled. A
 * component resolver will later resolve the href and assign a BookmarkablePageLink to it
 * (automatically).
 * <p>
 * An application setting is used as default value, which might be modified for specific regions.
 * These regions are identified by &lt;wicket:link&gt; tags with an optional 'autolink' attribute.
 * The default value for the attribute is true, thus enabling autolinking. An open-close
 * &lt;wicket:link/&gt; tag will change the autolink status until the end of the markup document or
 * the next &lt;wicket:link&gt; tag respectively. &lt;wicket:link&gt; regions may be nested.
 *
 * @author Juergen Donnerstag
 */
public class WicketLinkTagHandler extends AbstractMarkupFilter
{
	/** The id of autolink components */
	public static final String AUTOLINK_ID = "_autolink_";

	static
	{
		// register "wicket:link"
		WicketTagIdentifier.registerWellKnownTagName("link");
	}

	/** Allow to have link regions within link regions */
	private ArrayListStack<Boolean> autolinkStatus;

	/** Current status */
	private boolean autolinking = true;

	/**
	 * Construct. Initializes the autolink default from the application's markup settings.
	 */
	public WicketLinkTagHandler()
	{
		setAutomaticLinking(Application.get().getMarkupSettings().getAutomaticLinking());
	}

	/**
	 * Set the default value for autolinking
	 * 
	 * @param enable
	 *            if true, autolinks are enabled
	 */
	public void setAutomaticLinking(final boolean enable)
	{
		autolinking = enable;
	}

	/**
	 * Get the next MarkupElement from the parent MarkupFilter and handles it if the specific filter
	 * criteria are met. Depending on the filter, it may return the MarkupElement unchanged,
	 * modified or removed by asking the parent handler for the next tag.
	 * 
	 * @see org.apache.wicket.markup.parser.IMarkupFilter#nextTag()
	 * @return Return the next eligible MarkupElement
	 */
	public final MarkupElement nextTag() throws ParseException
	{
		// Get next tag. Null, if no more tag available
		final ComponentTag tag = (ComponentTag)getParent().nextTag();
		if (tag == null)
		{
			return tag;
		}

		// Only xml tags not already identified as Wicket components will be
		// considered for autolinking. This is because it is assumed that Wicket
		// components like images or all other kind of Wicket Links will handle
		// it themselves.
		// Subclass analyzeAutolinkCondition() to implement your own
		// implementation and register the new tag handler with the markup
		// parser through Application.newMarkupParser().
		if ((autolinking == true) && (analyzeAutolinkCondition(tag) == true))
		{
			// Mark it as autolink enabled
			tag.enableAutolink(true);

			// Just a dummy name. The ComponentTag will not be forwarded.
			tag.setId(AUTOLINK_ID);
			tag.setAutoComponentTag(true);
			tag.setModified(true);
			return tag;
		}

		// For all <wicket:link ..> tags which probably change the
		// current autolink status.
		if (tag instanceof WicketTag)
		{
			final WicketTag wtag = (WicketTag)tag;
			if (wtag.isLinkTag())
			{
				// Beginning of the region
				if (tag.isOpen() || tag.isOpenClose())
				{
					if (tag.isOpen())
					{
						if (autolinkStatus == null)
						{
							autolinkStatus = new ArrayListStack<Boolean>();
						}

						// remember the current setting to be reset after the
						// region
						autolinkStatus.push(autolinking);
					}

					// html allows to represent true in different ways
					final String autolink = tag.getAttributes().getString("autolink");
					try
					{
						// missing attribute defaults to autolinking enabled
						autolinking = Strings.isEmpty(autolink) || Strings.isTrue(autolink);
					}
					catch (StringValueConversionException e)
					{
						throw new WicketRuntimeException("Invalid autolink attribute value \"" +
							autolink + "\"");
					}
				}
				else if (tag.isClose())
				{
					// Restore the autolink setting from before the region.
					// Guard against an unbalanced </wicket:link> with no matching open
					// tag: without the check, autolinkStatus would be null (or empty)
					// and pop() would fail with an unhelpful runtime exception.
					if ((autolinkStatus != null) && (autolinkStatus.isEmpty() == false))
					{
						autolinking = autolinkStatus.pop();
					}
				}

				return wtag;
			}
		}

		return tag;
	}

	/**
	 * Analyze the tag. If return value == true, a autolink component will be created.
	 * <p>
	 * Subclass analyzeAutolinkCondition() to implement your own implementation and register the new
	 * tag handler with the markup parser through Application.newMarkupParser().
	 * 
	 * @param tag
	 *            The current tag being parsed
	 * @return If true, tag will become auto-component
	 */
	protected boolean analyzeAutolinkCondition(final ComponentTag tag)
	{
		// Only tags without an id yet are candidates; check href first, then src.
		if (tag.getId() == null)
		{
			IValueMap attributes = tag.getAttributes();
			String ref = attributes.getString("href");
			if (checkRef(ref))
			{
				return true;
			}
			ref = attributes.getString("src");
			if (checkRef(ref))
			{
				return true;
			}
		}

		return false;
	}

	/**
	 * @param ref
	 *            the attribute value to check
	 * @return true if ref is not null and does not contain a namespace/scheme separator (':')
	 */
	private final boolean checkRef(String ref)
	{
		return (ref != null) && (ref.indexOf(":") == -1);
	}
}
package alien4cloud.authorization; import alien4cloud.application.ApplicationEnvironmentService; import alien4cloud.dao.IGenericSearchDAO; import alien4cloud.model.application.ApplicationEnvironment; import alien4cloud.model.application.EnvironmentType; import alien4cloud.security.AbstractSecurityEnabledResource; import alien4cloud.security.ISecurityEnabledResource; import alien4cloud.security.Permission; import alien4cloud.security.Subject; import alien4cloud.security.groups.IAlienGroupDao; import alien4cloud.security.model.Group; import alien4cloud.security.model.User; import alien4cloud.security.users.IAlienUserDao; import com.google.common.collect.Sets; import lombok.AllArgsConstructor; import lombok.NoArgsConstructor; import org.alien4cloud.alm.events.AfterPermissionRevokedEvent; import org.alien4cloud.alm.events.BeforePermissionRevokedEvent; import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.ArrayUtils; import org.elasticsearch.common.collect.Lists; import org.springframework.context.ApplicationEventPublisher; import org.springframework.stereotype.Service; import javax.annotation.Resource; import javax.inject.Inject; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; /** * Service managing permissions to resources */ @Service @AllArgsConstructor @NoArgsConstructor public class ResourcePermissionService { @Resource(name = "alien-es-dao") private IGenericSearchDAO alienDAO; @Inject private IAlienUserDao alienUserDao; @Inject private IAlienGroupDao alienGroupDao; @Inject private ApplicationEnvironmentService applicationEnvironmentService; @Inject private ApplicationEventPublisher publisher; /** * Add admin permission to the given resource for the given subject. 
* * @param resource the resource to secure * @param subjects list of subjects */ public void grantPermission(ISecurityEnabledResource resource, Subject subjectType, String... subjects) { grantPermission(resource, (resource1 -> alienDAO.save(resource1)), subjectType, subjects); } /** * Add admin permission to the given resource for the given subject. * * @param resource the resource to secure * @param saver a callback to save the resource after modification * @param subjects list of subjects */ public void grantPermission(ISecurityEnabledResource resource, IResourceSaver saver, Subject subjectType, String... subjects) { Arrays.stream(subjects).forEach(subject -> resource.addPermissions(subjectType, subject, Sets.newHashSet(Permission.ADMIN))); if (saver != null) { saver.save(resource); } } /** * Revoke admin permission from the given resource from the given subjects. * * @param resource the resource to revoke * @param subjectType the type of the subject * @param subjects the subjects from which the permissions are revoked */ public void revokePermission(ISecurityEnabledResource resource, Subject subjectType, String... subjects) { revokePermission(resource, (resource1 -> alienDAO.save(resource1)), subjectType, subjects); } /** * Revoke admin permission from the given resource from the given subjects. * * @param resource the resource to revoke * @param saver a callback to save the resource after modification * @param subjectType the type of the subject * @param subjects the subjects from which the permissions are revoked */ public void revokePermission(ISecurityEnabledResource resource, IResourceSaver saver, Subject subjectType, String... 
subjects) { publisher.publishEvent(new BeforePermissionRevokedEvent(this, new BeforePermissionRevokedEvent.OnResource(resource.getClass(), resource.getId()), subjectType, subjects)); Arrays.stream(subjects).forEach(subject -> resource.removePermissions(subjectType, subject, Sets.newHashSet(Permission.ADMIN))); if (saver != null) { saver.save(resource); } publisher.publishEvent(new AfterPermissionRevokedEvent(this, new BeforePermissionRevokedEvent.OnResource(resource.getClass(), resource.getId()), subjectType, subjects)); } /** * Check if the given subject has admin privilege on the given resource. * * @param resource the resource * @param subjectType subject's type * @param subject the subject's id * * @return true if the subject has admin privilege, false otherwise */ private boolean hasPermission(ISecurityEnabledResource resource, Subject subjectType, String subject) { return resource.getPermissions(subjectType, subject).contains(Permission.ADMIN); } /** * Checks if any of the given subjects has admin privilege on the given resource. * * @param resource the resource * @param subjects the subjects' ids * @return true if any of the subjects has admin privilege, false otherwise */ public boolean anyHasPermission(ISecurityEnabledResource resource, Map<Subject, Set<String>> subjects) { return subjects.entrySet().stream() .anyMatch(subjectEntry -> subjectEntry.getValue().stream().anyMatch(subject -> hasPermission(resource, subjectEntry.getKey(), subject))); } /** * Checks if all the given subjects have admin privilege on the given resource. 
* * @param resource the resource * @param subjects the subjects' ids * @return true if all the subjects have admin privilege, false otherwise */ public boolean allHavePermission(ISecurityEnabledResource resource, Map<Subject, Set<String>> subjects) { return subjects.entrySet().stream() .allMatch(subjectEntry -> subjectEntry.getValue().stream().allMatch(subject -> hasPermission(resource, subjectEntry.getKey(), subject))); } /** * Get summary infos of all authorized users of the resource * * @param resource * @return */ // TODO consider enabling pagination here public List<User> getAuthorizedUsers(AbstractSecurityEnabledResource resource) { List<User> userDTOs = Lists.newArrayList(); if (MapUtils.isNotEmpty(resource.getUserPermissions())) { List<User> users = alienUserDao.find(resource.getUserPermissions().keySet().toArray(new String[resource.getUserPermissions().size()])); users.sort(Comparator.comparing(User::getUsername)); userDTOs.addAll(users); } return userDTOs; } /** * Get summary infos of all authorized groups of the resource * * @param resource * @return */ public List<Group> getAuthorizedGroups(AbstractSecurityEnabledResource resource) { List<Group> groupDTOS = Lists.newArrayList(); if (resource.getGroupPermissions() != null && resource.getGroupPermissions().size() > 0) { List<Group> groups = alienGroupDao.find(resource.getGroupPermissions().keySet().toArray(new String[resource.getGroupPermissions().size()])); groups.sort(Comparator.comparing(Group::getName)); groupDTOS.addAll(groups); } return groupDTOS; } public interface IResourceSaver { void save(ISecurityEnabledResource resource); } public void revokeAuthorizedEnvironmentsAndEnvironmentTypesPerApplication(AbstractSecurityEnabledResource resource, String[] applicationsToDelete, String[] environmentsToDelete, String[] environmentTypesToDelete) { IResourceSaver noSave = null; if (ArrayUtils.isNotEmpty(applicationsToDelete)) { revokePermission(resource, noSave, Subject.APPLICATION, applicationsToDelete); } 
if (ArrayUtils.isNotEmpty(environmentsToDelete)) { revokePermission(resource, noSave, Subject.ENVIRONMENT, environmentsToDelete); } if (ArrayUtils.isNotEmpty(environmentTypesToDelete)) { revokePermission(resource, noSave, Subject.ENVIRONMENT_TYPE, environmentTypesToDelete); } alienDAO.save(resource); } public void grantAuthorizedEnvironmentsAndEnvTypesPerApplication(AbstractSecurityEnabledResource resource, String[] applicationsToAdd, String[] environmentsToAdd, String[] environmentTypesToAdd) { List<String> envIds = Lists.newArrayList(); IResourceSaver noSave = null; if (ArrayUtils.isNotEmpty(applicationsToAdd)) { grantPermission(resource, noSave, Subject.APPLICATION, applicationsToAdd); // when an app is added, all eventual existing env authorizations are removed for (String applicationToAddId : applicationsToAdd) { ApplicationEnvironment[] aes = applicationEnvironmentService.getByApplicationId(applicationToAddId); for (ApplicationEnvironment ae : aes) { envIds.add(ae.getId()); } } if (!envIds.isEmpty()) { revokePermission(resource, noSave, Subject.ENVIRONMENT, envIds.toArray(new String[envIds.size()])); } // remove all all eventual existing env type authorizations Set<String> envTypes = Sets.newHashSet(); for (String applicationToAddId : applicationsToAdd) { for (EnvironmentType envType : EnvironmentType.values()) { envTypes.add(applicationToAddId + ":" + envType.toString()); } } if (!envTypes.isEmpty()) { revokePermission(resource, noSave, Subject.ENVIRONMENT_TYPE, envTypes.toArray(new String[envTypes.size()])); } } if (ArrayUtils.isNotEmpty(environmentsToAdd)) { List<String> envToAddSet = Arrays.stream(environmentsToAdd).filter(env -> !envIds.contains(env)).collect(Collectors.toList()); grantPermission(resource, noSave, Subject.ENVIRONMENT, envToAddSet.toArray(new String[envToAddSet.size()])); } if (ArrayUtils.isNotEmpty(environmentTypesToAdd)) { grantPermission(resource, noSave, Subject.ENVIRONMENT_TYPE, environmentTypesToAdd); } alienDAO.save(resource); } }
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * DescribeSnapshotsRestorableByType.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.6 Built on : Aug 30, 2011 (10:01:01 CEST)
 */

package com.amazon.ec2;

/**
 * DescribeSnapshotsRestorableByType bean class.
 *
 * Axis2 ADB data-bound bean for the DescribeSnapshotsRestorableByType schema type; holds a single
 * required string property "user". Auto-generated — do not hand-edit.
 */
public class DescribeSnapshotsRestorableByType implements org.apache.axis2.databinding.ADBBean {
    /* This type was generated from the piece of schema that had
       name = DescribeSnapshotsRestorableByType
       Namespace URI = http://ec2.amazonaws.com/doc/2012-08-15/
       Namespace Prefix = ns1
    */

    // Returns the canonical "ns1" prefix for the EC2 namespace, or a fresh unique prefix otherwise.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://ec2.amazonaws.com/doc/2012-08-15/")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for User
     */
    protected java.lang.String localUser;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getUser() {
        return localUser;
    }

    /**
     * Auto generated setter method
     * @param param User
     */
    public void setUser(java.lang.String param) {
        this.localUser = param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // Readers that do not know the property throw; treat them as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OM element backed by {@link #serialize}.
     *
     * @param parentQName
     * @param factory
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException {

        org.apache.axiom.om.OMDataSource dataSource =
                new org.apache.axis2.databinding.ADBDataSource(this, parentQName) {

                    public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                            throws javax.xml.stream.XMLStreamException {
                        DescribeSnapshotsRestorableByType.this.serialize(parentQName, factory, xmlWriter);
                    }
                };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName, factory, dataSource);
    }

    // Convenience overload: serialize without an explicit xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    // Writes this bean as <parentQName><user>...</user></parentQName>, registering namespace
    // prefixes as needed. Throws ADBException when the required "user" property is null.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {

        java.lang.String prefix = null;
        java.lang.String namespace = null;

        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();

        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }

                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        if (serializeType) {
            java.lang.String namespacePrefix = registerPrefix(xmlWriter, "http://ec2.amazonaws.com/doc/2012-08-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        namespacePrefix + ":DescribeSnapshotsRestorableByType",
                        xmlWriter);
            } else {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        "DescribeSnapshotsRestorableByType",
                        xmlWriter);
            }
        }

        namespace = "http://ec2.amazonaws.com/doc/2012-08-15/";
        if (!namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);

            if (prefix == null) {
                prefix = generatePrefix(namespace);

                xmlWriter.writeStartElement(prefix, "user", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace, "user");
            }
        } else {
            xmlWriter.writeStartElement("user");
        }

        if (localUser == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("user cannot be null!!");
        } else {
            xmlWriter.writeCharacters(localUser);
        }

        xmlWriter.writeEndElement();

        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }

        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }

            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes an array of QNames as a space-separated list, registering prefixes first because no
    // namespace declarations may be written after the character data.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }

                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);

        if (prefix == null) {
            prefix = generatePrefix(namespace);

            // keep generating until the prefix is not already bound on the writer
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }

            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }

        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException {

        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/", "user"));

        if (localUser != null) {
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localUser));
        } else {
            throw new org.apache.axis2.databinding.ADBException("user cannot be null!!");
        }

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {

        /**
         * static method to create the object
         * Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         * If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         * If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static DescribeSnapshotsRestorableByType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
            DescribeSnapshotsRestorableByType object =
                    new DescribeSnapshotsRestorableByType();

            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";
            try {

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // An xsi:type naming a different type delegates parsing to the mapped subtype.
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                    java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName != null) {
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1) {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;

                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                        if (!"DescribeSnapshotsRestorableByType".equals(type)) {
                            // find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (DescribeSnapshotsRestorableByType) com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri, type, reader);
                        }
                    }
                }

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                reader.next();

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                if (reader.isStartElement()
                        && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2012-08-15/", "user").equals(reader.getName())) {

                    java.lang.String content = reader.getElementText();

                    object.setUser(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));

                    reader.next();

                } // End of if for expected property start element
                else {
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }

    }// end of factory class

}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.network.v2018_06_01.implementation; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewaySku; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewaySslPolicy; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayOperationalState; import java.util.List; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayIPConfiguration; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayAuthenticationCertificate; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewaySslCertificate; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayFrontendIPConfiguration; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayFrontendPort; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayProbe; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayBackendAddressPool; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayBackendHttpSettings; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayHttpListener; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayUrlPathMap; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayRequestRoutingRule; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayRedirectConfiguration; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayWebApplicationFirewallConfiguration; import com.microsoft.azure.management.network.v2018_06_01.ApplicationGatewayAutoscaleConfiguration; import com.fasterxml.jackson.annotation.JsonProperty; import 
com.microsoft.rest.serializer.JsonFlatten; import com.microsoft.rest.SkipParentValidation; import com.microsoft.azure.Resource; /** * Application gateway resource. */ @JsonFlatten @SkipParentValidation public class ApplicationGatewayInner extends Resource { /** * SKU of the application gateway resource. */ @JsonProperty(value = "properties.sku") private ApplicationGatewaySku sku; /** * SSL policy of the application gateway resource. */ @JsonProperty(value = "properties.sslPolicy") private ApplicationGatewaySslPolicy sslPolicy; /** * Operational state of the application gateway resource. Possible values * include: 'Stopped', 'Starting', 'Running', 'Stopping'. */ @JsonProperty(value = "properties.operationalState", access = JsonProperty.Access.WRITE_ONLY) private ApplicationGatewayOperationalState operationalState; /** * Subnets of application the gateway resource. */ @JsonProperty(value = "properties.gatewayIPConfigurations") private List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations; /** * Authentication certificates of the application gateway resource. */ @JsonProperty(value = "properties.authenticationCertificates") private List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates; /** * SSL certificates of the application gateway resource. */ @JsonProperty(value = "properties.sslCertificates") private List<ApplicationGatewaySslCertificate> sslCertificates; /** * Frontend IP addresses of the application gateway resource. */ @JsonProperty(value = "properties.frontendIPConfigurations") private List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations; /** * Frontend ports of the application gateway resource. */ @JsonProperty(value = "properties.frontendPorts") private List<ApplicationGatewayFrontendPort> frontendPorts; /** * Probes of the application gateway resource. 
*/ @JsonProperty(value = "properties.probes") private List<ApplicationGatewayProbe> probes; /** * Backend address pool of the application gateway resource. */ @JsonProperty(value = "properties.backendAddressPools") private List<ApplicationGatewayBackendAddressPool> backendAddressPools; /** * Backend http settings of the application gateway resource. */ @JsonProperty(value = "properties.backendHttpSettingsCollection") private List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection; /** * Http listeners of the application gateway resource. */ @JsonProperty(value = "properties.httpListeners") private List<ApplicationGatewayHttpListener> httpListeners; /** * URL path map of the application gateway resource. */ @JsonProperty(value = "properties.urlPathMaps") private List<ApplicationGatewayUrlPathMap> urlPathMaps; /** * Request routing rules of the application gateway resource. */ @JsonProperty(value = "properties.requestRoutingRules") private List<ApplicationGatewayRequestRoutingRule> requestRoutingRules; /** * Redirect configurations of the application gateway resource. */ @JsonProperty(value = "properties.redirectConfigurations") private List<ApplicationGatewayRedirectConfiguration> redirectConfigurations; /** * Web application firewall configuration. */ @JsonProperty(value = "properties.webApplicationFirewallConfiguration") private ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration; /** * Whether HTTP2 is enabled on the application gateway resource. */ @JsonProperty(value = "properties.enableHttp2") private Boolean enableHttp2; /** * Whether FIPS is enabled on the application gateway resource. */ @JsonProperty(value = "properties.enableFips") private Boolean enableFips; /** * Autoscale Configuration. */ @JsonProperty(value = "properties.autoscaleConfiguration") private ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration; /** * Resource GUID property of the application gateway resource. 
*/ @JsonProperty(value = "properties.resourceGuid") private String resourceGuid; /** * Provisioning state of the application gateway resource. Possible values * are: 'Updating', 'Deleting', and 'Failed'. */ @JsonProperty(value = "properties.provisioningState") private String provisioningState; /** * A unique read-only string that changes whenever the resource is updated. */ @JsonProperty(value = "etag") private String etag; /** * A list of availability zones denoting where the resource needs to come * from. */ @JsonProperty(value = "zones") private List<String> zones; /** * Resource ID. */ @JsonProperty(value = "id") private String id; /** * Get sKU of the application gateway resource. * * @return the sku value */ public ApplicationGatewaySku sku() { return this.sku; } /** * Set sKU of the application gateway resource. * * @param sku the sku value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withSku(ApplicationGatewaySku sku) { this.sku = sku; return this; } /** * Get sSL policy of the application gateway resource. * * @return the sslPolicy value */ public ApplicationGatewaySslPolicy sslPolicy() { return this.sslPolicy; } /** * Set sSL policy of the application gateway resource. * * @param sslPolicy the sslPolicy value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withSslPolicy(ApplicationGatewaySslPolicy sslPolicy) { this.sslPolicy = sslPolicy; return this; } /** * Get operational state of the application gateway resource. Possible values include: 'Stopped', 'Starting', 'Running', 'Stopping'. * * @return the operationalState value */ public ApplicationGatewayOperationalState operationalState() { return this.operationalState; } /** * Get subnets of application the gateway resource. 
* * @return the gatewayIPConfigurations value */ public List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations() { return this.gatewayIPConfigurations; } /** * Set subnets of application the gateway resource. * * @param gatewayIPConfigurations the gatewayIPConfigurations value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withGatewayIPConfigurations(List<ApplicationGatewayIPConfiguration> gatewayIPConfigurations) { this.gatewayIPConfigurations = gatewayIPConfigurations; return this; } /** * Get authentication certificates of the application gateway resource. * * @return the authenticationCertificates value */ public List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates() { return this.authenticationCertificates; } /** * Set authentication certificates of the application gateway resource. * * @param authenticationCertificates the authenticationCertificates value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withAuthenticationCertificates(List<ApplicationGatewayAuthenticationCertificate> authenticationCertificates) { this.authenticationCertificates = authenticationCertificates; return this; } /** * Get sSL certificates of the application gateway resource. * * @return the sslCertificates value */ public List<ApplicationGatewaySslCertificate> sslCertificates() { return this.sslCertificates; } /** * Set sSL certificates of the application gateway resource. * * @param sslCertificates the sslCertificates value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withSslCertificates(List<ApplicationGatewaySslCertificate> sslCertificates) { this.sslCertificates = sslCertificates; return this; } /** * Get frontend IP addresses of the application gateway resource. 
* * @return the frontendIPConfigurations value */ public List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations() { return this.frontendIPConfigurations; } /** * Set frontend IP addresses of the application gateway resource. * * @param frontendIPConfigurations the frontendIPConfigurations value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withFrontendIPConfigurations(List<ApplicationGatewayFrontendIPConfiguration> frontendIPConfigurations) { this.frontendIPConfigurations = frontendIPConfigurations; return this; } /** * Get frontend ports of the application gateway resource. * * @return the frontendPorts value */ public List<ApplicationGatewayFrontendPort> frontendPorts() { return this.frontendPorts; } /** * Set frontend ports of the application gateway resource. * * @param frontendPorts the frontendPorts value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withFrontendPorts(List<ApplicationGatewayFrontendPort> frontendPorts) { this.frontendPorts = frontendPorts; return this; } /** * Get probes of the application gateway resource. * * @return the probes value */ public List<ApplicationGatewayProbe> probes() { return this.probes; } /** * Set probes of the application gateway resource. * * @param probes the probes value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withProbes(List<ApplicationGatewayProbe> probes) { this.probes = probes; return this; } /** * Get backend address pool of the application gateway resource. * * @return the backendAddressPools value */ public List<ApplicationGatewayBackendAddressPool> backendAddressPools() { return this.backendAddressPools; } /** * Set backend address pool of the application gateway resource. * * @param backendAddressPools the backendAddressPools value to set * @return the ApplicationGatewayInner object itself. 
*/ public ApplicationGatewayInner withBackendAddressPools(List<ApplicationGatewayBackendAddressPool> backendAddressPools) { this.backendAddressPools = backendAddressPools; return this; } /** * Get backend http settings of the application gateway resource. * * @return the backendHttpSettingsCollection value */ public List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection() { return this.backendHttpSettingsCollection; } /** * Set backend http settings of the application gateway resource. * * @param backendHttpSettingsCollection the backendHttpSettingsCollection value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withBackendHttpSettingsCollection(List<ApplicationGatewayBackendHttpSettings> backendHttpSettingsCollection) { this.backendHttpSettingsCollection = backendHttpSettingsCollection; return this; } /** * Get http listeners of the application gateway resource. * * @return the httpListeners value */ public List<ApplicationGatewayHttpListener> httpListeners() { return this.httpListeners; } /** * Set http listeners of the application gateway resource. * * @param httpListeners the httpListeners value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withHttpListeners(List<ApplicationGatewayHttpListener> httpListeners) { this.httpListeners = httpListeners; return this; } /** * Get uRL path map of the application gateway resource. * * @return the urlPathMaps value */ public List<ApplicationGatewayUrlPathMap> urlPathMaps() { return this.urlPathMaps; } /** * Set uRL path map of the application gateway resource. * * @param urlPathMaps the urlPathMaps value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withUrlPathMaps(List<ApplicationGatewayUrlPathMap> urlPathMaps) { this.urlPathMaps = urlPathMaps; return this; } /** * Get request routing rules of the application gateway resource. 
* * @return the requestRoutingRules value */ public List<ApplicationGatewayRequestRoutingRule> requestRoutingRules() { return this.requestRoutingRules; } /** * Set request routing rules of the application gateway resource. * * @param requestRoutingRules the requestRoutingRules value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withRequestRoutingRules(List<ApplicationGatewayRequestRoutingRule> requestRoutingRules) { this.requestRoutingRules = requestRoutingRules; return this; } /** * Get redirect configurations of the application gateway resource. * * @return the redirectConfigurations value */ public List<ApplicationGatewayRedirectConfiguration> redirectConfigurations() { return this.redirectConfigurations; } /** * Set redirect configurations of the application gateway resource. * * @param redirectConfigurations the redirectConfigurations value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withRedirectConfigurations(List<ApplicationGatewayRedirectConfiguration> redirectConfigurations) { this.redirectConfigurations = redirectConfigurations; return this; } /** * Get web application firewall configuration. * * @return the webApplicationFirewallConfiguration value */ public ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration() { return this.webApplicationFirewallConfiguration; } /** * Set web application firewall configuration. * * @param webApplicationFirewallConfiguration the webApplicationFirewallConfiguration value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withWebApplicationFirewallConfiguration(ApplicationGatewayWebApplicationFirewallConfiguration webApplicationFirewallConfiguration) { this.webApplicationFirewallConfiguration = webApplicationFirewallConfiguration; return this; } /** * Get whether HTTP2 is enabled on the application gateway resource. 
* * @return the enableHttp2 value */ public Boolean enableHttp2() { return this.enableHttp2; } /** * Set whether HTTP2 is enabled on the application gateway resource. * * @param enableHttp2 the enableHttp2 value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withEnableHttp2(Boolean enableHttp2) { this.enableHttp2 = enableHttp2; return this; } /** * Get whether FIPS is enabled on the application gateway resource. * * @return the enableFips value */ public Boolean enableFips() { return this.enableFips; } /** * Set whether FIPS is enabled on the application gateway resource. * * @param enableFips the enableFips value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withEnableFips(Boolean enableFips) { this.enableFips = enableFips; return this; } /** * Get autoscale Configuration. * * @return the autoscaleConfiguration value */ public ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration() { return this.autoscaleConfiguration; } /** * Set autoscale Configuration. * * @param autoscaleConfiguration the autoscaleConfiguration value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withAutoscaleConfiguration(ApplicationGatewayAutoscaleConfiguration autoscaleConfiguration) { this.autoscaleConfiguration = autoscaleConfiguration; return this; } /** * Get resource GUID property of the application gateway resource. * * @return the resourceGuid value */ public String resourceGuid() { return this.resourceGuid; } /** * Set resource GUID property of the application gateway resource. * * @param resourceGuid the resourceGuid value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withResourceGuid(String resourceGuid) { this.resourceGuid = resourceGuid; return this; } /** * Get provisioning state of the application gateway resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. 
* * @return the provisioningState value */ public String provisioningState() { return this.provisioningState; } /** * Set provisioning state of the application gateway resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. * * @param provisioningState the provisioningState value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withProvisioningState(String provisioningState) { this.provisioningState = provisioningState; return this; } /** * Get a unique read-only string that changes whenever the resource is updated. * * @return the etag value */ public String etag() { return this.etag; } /** * Set a unique read-only string that changes whenever the resource is updated. * * @param etag the etag value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withEtag(String etag) { this.etag = etag; return this; } /** * Get a list of availability zones denoting where the resource needs to come from. * * @return the zones value */ public List<String> zones() { return this.zones; } /** * Set a list of availability zones denoting where the resource needs to come from. * * @param zones the zones value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withZones(List<String> zones) { this.zones = zones; return this; } /** * Get resource ID. * * @return the id value */ public String id() { return this.id; } /** * Set resource ID. * * @param id the id value to set * @return the ApplicationGatewayInner object itself. */ public ApplicationGatewayInner withId(String id) { this.id = id; return this; } }
package org.apache.velocity.runtime.log; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.IOException; import java.lang.reflect.Field; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.apache.log4j.RollingFileAppender; import org.apache.velocity.runtime.RuntimeConstants; import org.apache.velocity.runtime.RuntimeServices; import org.apache.velocity.util.ExceptionUtils; /** * Implementation of a simple log4j system that will either latch onto * an existing category, or just do a simple rolling file log. * * @author <a href="mailto:geirm@apache.org>Geir Magnusson Jr.</a> * @author <a href="mailto:dlr@finemaltcoding.com>Daniel L. 
Rall</a> * @author <a href="mailto:nbubna@apache.org>Nathan Bubna</a> * @version $Id: Log4JLogChute.java 730039 2008-12-30 03:53:19Z byron $ * @since Velocity 1.5 * @since 1.5 */ public class Log4JLogChute implements LogChute { public static final String RUNTIME_LOG_LOG4J_LOGGER = "runtime.log.logsystem.log4j.logger"; public static final String RUNTIME_LOG_LOG4J_LOGGER_LEVEL = "runtime.log.logsystem.log4j.logger.level"; private RuntimeServices rsvc = null; private boolean hasTrace = false; private RollingFileAppender appender = null; /** * <a href="http://jakarta.apache.org/log4j/">Log4J</a> logging API. */ protected Logger logger = null; /** * @see org.apache.velocity.runtime.log.LogChute#init(org.apache.velocity.runtime.RuntimeServices) */ public void init(RuntimeServices rs) throws Exception { rsvc = rs; /* first see if there is a category specified and just use that - it allows * the application to make us use an existing logger */ String name = (String)rsvc.getProperty(RUNTIME_LOG_LOG4J_LOGGER); if (name != null) { logger = Logger.getLogger(name); log(DEBUG_ID, "Log4JLogChute using logger '" + name + '\''); } else { // create a logger with this class name to avoid conflicts logger = Logger.getLogger(this.getClass().getName()); // if we have a file property, then create a separate // rolling file log for velocity messages only String file = rsvc.getString(RuntimeConstants.RUNTIME_LOG); if (file != null && file.length() > 0) { initAppender(file); } } /* get and set specified level for this logger */ String lvl = rsvc.getString(RUNTIME_LOG_LOG4J_LOGGER_LEVEL); if (lvl != null) { Level level = Level.toLevel(lvl); logger.setLevel(level); } /* Ok, now let's see if this version of log4j supports the trace level. 
*/ try { Field traceLevel = Level.class.getField("TRACE"); // we'll never get here in pre 1.2.12 log4j hasTrace = true; } catch (NoSuchFieldException e) { log(DEBUG_ID, "The version of log4j being used does not support the \"trace\" level."); } } // This tries to create a file appender for the specified file name. private void initAppender(String file) throws Exception { try { // to add the appender PatternLayout layout = new PatternLayout("%d - %m%n"); this.appender = new RollingFileAppender(layout, file, true); // if we successfully created the file appender, // configure it and set the logger to use only it appender.setMaxBackupIndex(1); appender.setMaximumFileSize(100000); // don't inherit appenders from higher in the logger heirarchy logger.setAdditivity(false); logger.addAppender(appender); log(DEBUG_ID, "Log4JLogChute initialized using file '"+file+'\''); } catch (IOException ioe) { rsvc.getLog().error("Could not create file appender '"+file+'\'', ioe); throw ExceptionUtils.createRuntimeException("Error configuring Log4JLogChute : ", ioe); } } /** * logs messages * * @param level severity level * @param message complete error message */ public void log(int level, String message) { switch (level) { case LogChute.WARN_ID: logger.warn(message); break; case LogChute.INFO_ID: logger.info(message); break; case LogChute.TRACE_ID: if (hasTrace) { logger.trace(message); } else { logger.debug(message); } break; case LogChute.ERROR_ID: logger.error(message); break; case LogChute.DEBUG_ID: default: logger.debug(message); break; } } /** * @see org.apache.velocity.runtime.log.LogChute#log(int, java.lang.String, java.lang.Throwable) */ public void log(int level, String message, Throwable t) { switch (level) { case LogChute.WARN_ID: logger.warn(message, t); break; case LogChute.INFO_ID: logger.info(message, t); break; case LogChute.TRACE_ID: if (hasTrace) { logger.trace(message, t); } else { logger.debug(message, t); } break; case LogChute.ERROR_ID: logger.error(message, 
t); break; case LogChute.DEBUG_ID: default: logger.debug(message, t); break; } } /** * @see org.apache.velocity.runtime.log.LogChute#isLevelEnabled(int) */ public boolean isLevelEnabled(int level) { switch (level) { case LogChute.DEBUG_ID: return logger.isDebugEnabled(); case LogChute.INFO_ID: return logger.isInfoEnabled(); case LogChute.TRACE_ID: if (hasTrace) { return logger.isTraceEnabled(); } else { return logger.isDebugEnabled(); } case LogChute.WARN_ID: return logger.isEnabledFor(Level.WARN); case LogChute.ERROR_ID: // can't be disabled in log4j return logger.isEnabledFor(Level.ERROR); default: return true; } } /** * Also do a shutdown if the object is destroy()'d. * @throws Throwable */ protected void finalize() throws Throwable { shutdown(); } /** Close all destinations*/ public void shutdown() { if (appender != null) { logger.removeAppender(appender); appender.close(); appender = null; } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.query.netty; import org.apache.flink.runtime.io.network.netty.NettyBufferPool; import org.apache.flink.runtime.query.KvStateRegistry; import org.apache.flink.runtime.query.KvStateServerAddress; import org.apache.flink.runtime.query.netty.message.KvStateRequest; import org.apache.flink.util.Preconditions; import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.netty.bootstrap.ServerBootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelInitializer; import io.netty.channel.ChannelOption; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.SocketChannel; import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.handler.codec.LengthFieldBasedFrameDecoder; import io.netty.handler.stream.ChunkedWriteHandler; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetAddress; import java.net.InetSocketAddress; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; /** * Netty-based server answering {@link KvStateRequest} 
messages. * * <p>Requests are handled by asynchronous query tasks (see {@link KvStateServerHandler.AsyncKvStateQueryTask}) * that are executed by a separate query Thread pool. This pool is shared among * all TCP connections. * * <p>The incoming pipeline looks as follows: * <pre> * Socket.read() -> LengthFieldBasedFrameDecoder -> KvStateServerHandler * </pre> * * <p>Received binary messages are expected to contain a frame length field. Netty's * {@link LengthFieldBasedFrameDecoder} is used to fully receive the frame before * giving it to our {@link KvStateServerHandler}. * * <p>Connections are established and closed by the client. The server only * closes the connection on a fatal failure that cannot be recovered. A * server-side connection close is considered a failure by the client. */ public class KvStateServer { private static final Logger LOG = LoggerFactory.getLogger(KvStateServer.class); /** Server config: low water mark. */ private static final int LOW_WATER_MARK = 8 * 1024; /** Server config: high water mark. */ private static final int HIGH_WATER_MARK = 32 * 1024; /** Netty's ServerBootstrap. */ private final ServerBootstrap bootstrap; /** Query executor thread pool. */ private final ExecutorService queryExecutor; /** Address of this server. */ private KvStateServerAddress serverAddress; /** * Creates the {@link KvStateServer}. * * <p>The server needs to be started via {@link #start()} in order to bind * to the configured bind address. * * @param bindAddress Address to bind to * @param bindPort Port to bind to. Pick random port if 0. 
* @param numEventLoopThreads Number of event loop threads * @param numQueryThreads Number of query threads * @param kvStateRegistry KvStateRegistry to query for KvState instances * @param stats Statistics tracker */ public KvStateServer( InetAddress bindAddress, int bindPort, int numEventLoopThreads, int numQueryThreads, KvStateRegistry kvStateRegistry, KvStateRequestStats stats) { Preconditions.checkArgument(bindPort >= 0 && bindPort <= 65536, "Port " + bindPort + " is out of valid port range (0-65536)."); Preconditions.checkArgument(numEventLoopThreads >= 1, "Non-positive number of event loop threads."); Preconditions.checkArgument(numQueryThreads >= 1, "Non-positive number of query threads."); Preconditions.checkNotNull(kvStateRegistry, "KvStateRegistry"); Preconditions.checkNotNull(stats, "KvStateRequestStats"); NettyBufferPool bufferPool = new NettyBufferPool(numEventLoopThreads); ThreadFactory threadFactory = new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("Flink KvStateServer EventLoop Thread %d") .build(); NioEventLoopGroup nioGroup = new NioEventLoopGroup(numEventLoopThreads, threadFactory); queryExecutor = createQueryExecutor(numQueryThreads); // Shared between all channels KvStateServerHandler serverHandler = new KvStateServerHandler( kvStateRegistry, queryExecutor, stats); bootstrap = new ServerBootstrap() // Bind address and port .localAddress(bindAddress, bindPort) // NIO server channels .group(nioGroup) .channel(NioServerSocketChannel.class) // Server channel Options .option(ChannelOption.ALLOCATOR, bufferPool) // Child channel options .childOption(ChannelOption.ALLOCATOR, bufferPool) .childOption(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, HIGH_WATER_MARK) .childOption(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, LOW_WATER_MARK) // See initializer for pipeline details .childHandler(new KvStateServerChannelInitializer(serverHandler)); } /** * Starts the server by binding to the configured bind address (blocking). 
* * @throws InterruptedException If interrupted during the bind operation */ public void start() throws InterruptedException { Channel channel = bootstrap.bind().sync().channel(); InetSocketAddress localAddress = (InetSocketAddress) channel.localAddress(); serverAddress = new KvStateServerAddress(localAddress.getAddress(), localAddress.getPort()); } /** * Returns the address of this server. * * @return Server address * @throws IllegalStateException If server has not been started yet */ public KvStateServerAddress getAddress() { if (serverAddress == null) { throw new IllegalStateException("KvStateServer not started yet."); } return serverAddress; } /** * Shuts down the server and all related thread pools. */ public void shutDown() { if (bootstrap != null) { EventLoopGroup group = bootstrap.group(); if (group != null) { group.shutdownGracefully(0, 10, TimeUnit.SECONDS); } } if (queryExecutor != null) { queryExecutor.shutdown(); } serverAddress = null; } /** * Creates a thread pool for the query execution. * * @param numQueryThreads Number of query threads. * @return Thread pool for query execution */ private static ExecutorService createQueryExecutor(int numQueryThreads) { ThreadFactory threadFactory = new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("Flink KvStateServer Query Thread %d") .build(); return Executors.newFixedThreadPool(numQueryThreads, threadFactory); } /** * Channel pipeline initializer. * * <p>The request handler is shared, whereas the other handlers are created * per channel. */ private static final class KvStateServerChannelInitializer extends ChannelInitializer<SocketChannel> { /** The shared request handler. */ private final KvStateServerHandler sharedRequestHandler; /** * Creates the channel pipeline initializer with the shared request handler. * * @param sharedRequestHandler Shared request handler. 
*/ public KvStateServerChannelInitializer(KvStateServerHandler sharedRequestHandler) { this.sharedRequestHandler = Preconditions.checkNotNull(sharedRequestHandler, "Request handler"); } @Override protected void initChannel(SocketChannel ch) throws Exception { ch.pipeline() .addLast(new ChunkedWriteHandler()) .addLast(new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 4, 0, 4)) .addLast(sharedRequestHandler); } } }
package cc.blynk.integration; import cc.blynk.common.model.messages.Message; import cc.blynk.common.model.messages.protocol.appllication.GetGraphDataResponseMessage; import cc.blynk.integration.model.ClientPair; import cc.blynk.integration.model.TestHardClient; import cc.blynk.server.TransportTypeHolder; import cc.blynk.server.core.application.AppServer; import cc.blynk.server.core.hardware.HardwareServer; import io.netty.channel.ChannelFuture; import org.apache.commons.io.FileUtils; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.runners.MockitoJUnitRunner; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.zip.InflaterInputStream; import static cc.blynk.common.enums.Command.HARDWARE; import static cc.blynk.common.enums.Response.*; import static cc.blynk.common.model.messages.MessageFactory.produce; import static org.junit.Assert.*; import static org.mockito.Mockito.*; /** * The Blynk Project. * Created by Dmitriy Dumanskiy. * Created on 2/2/2015. 
* */ @RunWith(MockitoJUnitRunner.class) public class MainWorkflowTest extends IntegrationBase { private AppServer appServer; private HardwareServer hardwareServer; private ClientPair clientPair; private static String decompress(byte[] bytes) { InputStream in = new InflaterInputStream(new ByteArrayInputStream(bytes)); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { byte[] buffer = new byte[4096]; int len; while((len = in.read(buffer)) > 0) { baos.write(buffer, 0, len); } return new String(baos.toByteArray()); } catch (IOException e) { throw new AssertionError(e); } } @Before public void init() throws Exception { initServerStructures(); FileUtils.deleteDirectory(fileManager.getDataDir().toFile()); hardwareServer = new HardwareServer(properties, userRegistry, sessionsHolder, stats, notificationsProcessor, new TransportTypeHolder(properties), storageDao); appServer = new AppServer(properties, userRegistry, sessionsHolder, stats, new TransportTypeHolder(properties), storageDao); new Thread(hardwareServer).start(); new Thread(appServer).start(); //todo improve this //wait util server starts. 
sleep(500); clientPair = initAppAndHardPair(); } @After public void shutdown() { appServer.stop(); hardwareServer.stop(); clientPair.stop(); } @Test public void testConnectAppAndHardware() throws Exception { // we just test that app and hardware can actually connect } @Test public void testHardwareDeviceWentOffline() throws Exception { String newProfile = readTestUserProfile("user_profile_json_3_dashes.txt"); clientPair.appClient.send("saveProfile " + newProfile); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, OK))); ChannelFuture channelFuture = clientPair.hardwareClient.stop(); channelFuture.await(); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(0, DEVICE_WENT_OFFLINE))); } @Test public void testPingCommandWorks() throws Exception { clientPair.appClient.send("ping"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, OK))); } @Test public void testPingCommandOk() throws Exception { clientPair.appClient.send("ping"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, OK))); clientPair.appClient.reset(); clientPair.appClient.send("ping"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, OK))); } @Test public void testGetGraphEmptyData() throws Exception { clientPair.appClient.send("getgraphdata 1 d 8"); ArgumentCaptor<GetGraphDataResponseMessage> objectArgumentCaptor = ArgumentCaptor.forClass(GetGraphDataResponseMessage.class); verify(clientPair.appClient.responseMock, timeout(1000)).channelRead(any(), objectArgumentCaptor.capture()); List<GetGraphDataResponseMessage> arguments = objectArgumentCaptor.getAllValues(); GetGraphDataResponseMessage graphMessage = arguments.get(0); assertNotNull(graphMessage); assertEquals(1, graphMessage.id); assertEquals(0, graphMessage.length); } @Test public void testGetAllGraphData() throws Exception { for (int i = 0; i < 1000; i++) { 
clientPair.hardwareClient.send("hardware aw 8 " + i); } verify(clientPair.appClient.responseMock, timeout(1000).times(1000)).channelRead(any(), any()); clientPair.appClient.reset(); clientPair.appClient.send("getgraphdata 1 a 8"); ArgumentCaptor<GetGraphDataResponseMessage> objectArgumentCaptor = ArgumentCaptor.forClass(GetGraphDataResponseMessage.class); verify(clientPair.appClient.responseMock, timeout(1000)).channelRead(any(), objectArgumentCaptor.capture()); List<GetGraphDataResponseMessage> arguments = objectArgumentCaptor.getAllValues(); GetGraphDataResponseMessage graphMessage = arguments.get(0); assertEquals(1, graphMessage.id); String result = decompress(graphMessage.data); String[] splitted = result.split("\0"); assertEquals(2000, splitted.length); for (int i = 0; i < 1000; i++) { assertEquals(String.valueOf(i), splitted[i * 2]); } } @Test public void testAppSendAnyHardCommandAndBack() throws Exception { clientPair.appClient.send("hardware 1 1"); verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, HARDWARE, "1 1".replaceAll(" ", "\0")))); clientPair.hardwareClient.send("hardware ar 1"); ArgumentCaptor<Message> objectArgumentCaptor = ArgumentCaptor.forClass(Message.class); verify(clientPair.appClient.responseMock, timeout(500).times(1)).channelRead(any(), objectArgumentCaptor.capture()); List<Message> arguments = objectArgumentCaptor.getAllValues(); Message hardMessage = arguments.get(0); assertEquals(1, hardMessage.id); assertEquals(HARDWARE, hardMessage.command); assertEquals(4, hardMessage.length); assertEquals("ar 1".replaceAll(" ", "\0"), hardMessage.body); } @Test public void testAppNoActiveDashForHard() throws Exception { clientPair.hardwareClient.send("hardware aw 1 1"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, HARDWARE, "aw 1 1".replaceAll(" ", "\0")))); clientPair.appClient.send("deactivate 1"); verify(clientPair.appClient.responseMock, 
timeout(500)).channelRead(any(), eq(produce(1, OK))); clientPair.hardwareClient.send("hardware aw 1 1"); verify(clientPair.appClient.responseMock, timeout(500).times(0)).channelRead(any(), eq(produce(2, NO_ACTIVE_DASHBOARD))); } @Test public void testAppChangeActiveDash() throws Exception { clientPair.hardwareClient.send("hardware aw 1 1"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, HARDWARE, "aw 1 1".replaceAll(" ", "\0")))); clientPair.appClient.send("deactivate 1"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, OK))); String newProfile = readTestUserProfile("user_profile_json_3_dashes.txt"); clientPair.appClient.send("saveProfile " + newProfile); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(2, OK))); clientPair.hardwareClient.send("hardware aw 1 1"); verify(clientPair.appClient.responseMock, timeout(500).times(0)).channelRead(any(), eq(produce(2, NO_ACTIVE_DASHBOARD))); clientPair.appClient.send("activate 2"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(3, OK))); clientPair.hardwareClient.send("hardware aw 1 1"); verify(clientPair.appClient.responseMock, timeout(500).times(0)).channelRead(any(), eq(produce(3, NO_ACTIVE_DASHBOARD))); clientPair.appClient.send("activate 1"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(4, OK))); clientPair.hardwareClient.send("hardware aw 1 1"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(4, HARDWARE, "aw 1 1".replaceAll(" ", "\0")))); } @Test public void testPushWhenHardwareOffline() throws Exception { ChannelFuture channelFuture = clientPair.hardwareClient.stop(); channelFuture.await(); verify(notificationsProcessor, timeout(500)).push(any(), any(), eq("Your UNO went offline. 
\"My Dashboard\" project is disconnected.")); } @Test public void testPushHandler() throws Exception { clientPair.hardwareClient.send("push Yo!"); verify(notificationsProcessor, timeout(500)).push(any(), any(), eq("Yo!"), eq(1)); } @Test public void testAppSendWriteHardCommandNotGraphAndBack() throws Exception { clientPair.appClient.send("hardware ar 11"); verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, HARDWARE, "ar 11".replaceAll(" ", "\0")))); String body = "aw 11 333"; clientPair.hardwareClient.send("hardware " + body); ArgumentCaptor<Message> objectArgumentCaptor = ArgumentCaptor.forClass(Message.class); verify(clientPair.appClient.responseMock, timeout(500).times(1)).channelRead(any(), objectArgumentCaptor.capture()); List<Message> arguments = objectArgumentCaptor.getAllValues(); Message hardMessage = arguments.get(0); assertEquals(1, hardMessage.id); assertEquals(HARDWARE, hardMessage.command); assertEquals(body.length(), hardMessage.length); assertTrue(hardMessage.body.startsWith(body.replaceAll(" ", "\0"))); } @Test public void testActivateWorkflow() throws Exception { clientPair.appClient.send("activate 2"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, ILLEGAL_COMMAND))); clientPair.appClient.send("deactivate"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(2, OK))); clientPair.appClient.send("hardware ar 1 1"); //todo check no response verify(clientPair.appClient.responseMock, never()).channelRead(any(), eq(produce(3, OK))); clientPair.appClient.send("activate 1"); verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(4, OK))); clientPair.appClient.send("hardware ar 1 1"); verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(5, HARDWARE, "ar 1 1".replaceAll(" ", "\0")))); String userProfileWithGraph = readTestUserProfile(); 
// NOTE(review): this region begins mid-method — the statements below are the tail of a
// test whose start lies before this chunk; they are left untouched (only reformatted).
clientPair.appClient.send("saveProfile " + userProfileWithGraph);
verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(6, OK)));
clientPair.appClient.send("hardware ar 1 1");
verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(7, HARDWARE, "ar 1 1".replaceAll(" ", "\0"))));
}

/**
 * Invalid tweet requests from the hardware client must be rejected:
 * empty body, blank body, body over 140 chars, and tweeting while the
 * dashboard is deactivated.
 */
@Test
public void testTweetNotWorks() throws Exception {
    reset(notificationsProcessor);

    // no body at all
    clientPair.hardwareClient.send("tweet");
    verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, NOTIFICATION_INVALID_BODY_EXCEPTION)));

    // blank body (single trailing space)
    clientPair.hardwareClient.send("tweet ");
    verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(2, NOTIFICATION_INVALID_BODY_EXCEPTION)));

    // 141 characters — one over the classic tweet limit
    StringBuilder a = new StringBuilder();
    for (int i = 0; i < 141; i++) {
        a.append("a");
    }
    clientPair.hardwareClient.send("tweet " + a);
    verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(3, NOTIFICATION_INVALID_BODY_EXCEPTION)));

    // deactivate the dashboard, after which tweeting is not authorized
    clientPair.appClient.send("deactivate 1");
    verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, OK)));
    clientPair.hardwareClient.send("tweet yo");
    verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(4, NOTIFICATION_NOT_AUTHORIZED_EXCEPTION)));
}

/**
 * A valid tweet is forwarded to the notifications processor with the stored
 * token/secret; an immediate second tweet trips the quota limit.
 */
@Test
public void testTweetWorks() throws Exception {
    reset(notificationsProcessor);
    String userProfileWithTwit = readTestUserProfile();
    clientPair.appClient.send("saveProfile " + userProfileWithTwit);
    verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, OK)));

    clientPair.hardwareClient.send("tweet yo");
    verify(notificationsProcessor, timeout(500)).twit(any(), eq("token"), eq("secret"), eq("yo"), eq(1));

    // second tweet inside the quota window is rejected
    clientPair.hardwareClient.send("tweet yo");
    verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(2, QUOTA_LIMIT_EXCEPTION)));
}

/**
 * A hardware "aw" write triggered after an app-side "ar" read must be routed
 * back to the app client; the routed message carries a timestamp suffix
 * appended to the original body.
 */
@Test
public void testAppSendWriteHardCommandForGraphAndBack() throws Exception {
    String userProfileWithGraph = readTestUserProfile();
    clientPair.appClient.send("saveProfile " + userProfileWithGraph);
    verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, OK)));
    reset(clientPair.appClient.responseMock);
    clientPair.appClient.reset();

    clientPair.appClient.send("hardware ar 8");
    verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, HARDWARE, "ar 8".replaceAll(" ", "\0"))));

    String body = "aw 8 333";
    clientPair.hardwareClient.send("hardware " + body);

    // capture the message routed back to the app side and inspect it field by field
    ArgumentCaptor<Message> objectArgumentCaptor = ArgumentCaptor.forClass(Message.class);
    verify(clientPair.appClient.responseMock, timeout(500).times(1)).channelRead(any(), objectArgumentCaptor.capture());
    List<Message> arguments = objectArgumentCaptor.getAllValues();
    Message hardMessage = arguments.get(0);
    assertEquals(1, hardMessage.id);
    assertEquals(HARDWARE, hardMessage.command);
    //"aw 11 333".length + ts.length + separator
    assertEquals(body.length() + 14, hardMessage.length);
    assertTrue(hardMessage.body.startsWith(body.replaceAll(" ", "\0")));
}

/**
 * A "pm" (pin mode) command sent while the hardware is offline is answered with
 * DEVICE_NOT_IN_NETWORK and then replayed to the hardware once it reconnects.
 */
@Test
@Ignore("Randomly fails on travis") //todo resolve it.
//todo more tests for that
public void testSendPinModeCommandWhenHardwareGoesOnline() throws Exception {
    ChannelFuture channelFuture = clientPair.hardwareClient.stop();
    channelFuture.await();
    if (!channelFuture.isDone()) {
        throw new RuntimeException("Error closing hard cahnnel.");
    }

    String body = "pm 13 in";
    clientPair.appClient.send("hardware " + body);
    verify(clientPair.appClient.responseMock, timeout(1000)).channelRead(any(), eq(produce(1, DEVICE_NOT_IN_NETWORK)));

    // reconnect a fresh hardware client and expect the buffered pin-mode command
    TestHardClient hardClient = new TestHardClient(host, hardPort);
    hardClient.start(null);
    hardClient.send("login " + clientPair.token);
    verify(hardClient.responseMock, timeout(2000)).channelRead(any(), eq(produce(1, OK)));
    verify(hardClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, HARDWARE, body.replaceAll(" ", "\0"))));
    verify(hardClient.responseMock, times(2)).channelRead(any(), any());
}

/**
 * An empty "pm" command is answered with DEVICE_NOT_IN_NETWORK while offline
 * but is NOT replayed to the hardware on reconnect (only the login OK arrives).
 */
@Test
public void testSendEmptyPinModeCommandWhenHardwareGoesOnline() throws Exception {
    ChannelFuture channelFuture = clientPair.hardwareClient.stop();
    channelFuture.await();
    if (!channelFuture.isDone()) {
        throw new RuntimeException("Error closing hard cahnnel.");
    }

    String body = "pm";
    clientPair.appClient.send("hardware " + body);
    verify(clientPair.appClient.responseMock, timeout(2000)).channelRead(any(), eq(produce(1, DEVICE_NOT_IN_NETWORK)));

    TestHardClient hardClient = new TestHardClient(host, hardPort);
    hardClient.start(null);
    hardClient.send("login " + clientPair.token);
    verify(hardClient.responseMock, timeout(2000)).channelRead(any(), eq(produce(1, OK)));
    // exactly one message (the login OK) — the empty pm command must not be forwarded
    verify(hardClient.responseMock, times(1)).channelRead(any(), any());
}

/** 100 app-side commands are all routed through to the hardware client. */
@Test
public void testConnectAppAndHardwareAndSendCommands() throws Exception {
    for (int i = 0; i < 100; i++) {
        clientPair.appClient.send("hardware 1 1");
    }
    verify(clientPair.hardwareClient.responseMock, timeout(500).times(100)).channelRead(any(), any());
}

/**
 * Exceeding the default per-second message quota yields QUOTA_LIMIT_EXCEPTION
 * once, and further messages inside the penalty window are dropped silently.
 */
@Test
@Ignore
public void testTryReachQuotaLimit() throws Exception {
    String body = "ar 100 100";

    //within 1 second sending more messages than default limit 100.
    for (int i = 0; i < 1000 / 9; i++) {
        clientPair.appClient.send("hardware " + body, 1);
        sleep(9);
    }

    verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, QUOTA_LIMIT_EXCEPTION)));
    verify(clientPair.hardwareClient.responseMock, atLeast(100)).channelRead(any(), eq(produce(1, HARDWARE, body.replaceAll(" ", "\0"))));

    clientPair.appClient.reset();
    clientPair.hardwareClient.reset();

    //check no more accepted
    for (int i = 0; i < 10; i++) {
        clientPair.appClient.send("hardware " + body, 1);
        sleep(9);
    }

    verify(clientPair.appClient.responseMock, times(0)).channelRead(any(), eq(produce(1, QUOTA_LIMIT_EXCEPTION)));
    verify(clientPair.hardwareClient.responseMock, times(0)).channelRead(any(), eq(produce(1, HARDWARE, body.replaceAll(" ", "\0"))));
}

/**
 * Two independent app/hardware client pairs don't interfere: each hardware
 * client receives exactly its own pair's messages, with sequential ids.
 */
@Test
public void test2ClientPairsWorkCorrectly() throws Exception {
    final int ITERATIONS = 100;
    ClientPair clientPair2 = initAppAndHardPair("localhost", appPort, hardPort, "dima2@mail.ua 1", null, properties);

    String body = "ar 1";
    for (int i = 1; i <= ITERATIONS; i++) {
        clientPair.appClient.send("hardware " + body);
        clientPair2.appClient.send("hardware " + body);
    }

    verify(clientPair.hardwareClient.responseMock, timeout(500).times(ITERATIONS)).channelRead(any(), any());
    verify(clientPair2.hardwareClient.responseMock, timeout(500).times(ITERATIONS)).channelRead(any(), any());

    for (int i = 1; i <= ITERATIONS; i++) {
        verify(clientPair.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(i, HARDWARE, body.replaceAll(" ", "\0"))));
        verify(clientPair2.hardwareClient.responseMock, timeout(500)).channelRead(any(), eq(produce(i, HARDWARE, body.replaceAll(" ", "\0"))));
    }
}

/**
 * After a quota violation and a cool-down period, flooding again re-triggers
 * the quota warning (limit state resets over time).
 */
@Test
@Ignore("hard to test this case...")
public void testTryReachQuotaLimitAndWarningExceededLimit() throws Exception {
    String body = "ar 100 100";

    //within 1 second sending more messages than default limit 100.
    for (int i = 0; i < 1000 / 9; i++) {
        clientPair.appClient.send("hardware " + body, 1);
        sleep(9);
    }

    verify(clientPair.appClient.responseMock, timeout(1000)).channelRead(any(), eq(produce(1, QUOTA_LIMIT_EXCEPTION)));
    verify(clientPair.hardwareClient.responseMock, atLeast(100)).channelRead(any(), eq(produce(1, HARDWARE, body.replaceAll(" ", "\0"))));

    clientPair.appClient.reset();
    clientPair.hardwareClient.reset();

    //waiting to avoid limit.
    sleep(1000);

    for (int i = 0; i < 100000 / 9; i++) {
        clientPair.appClient.send("hardware " + body, 1);
        sleep(9);
    }

    verify(clientPair.appClient.responseMock, timeout(500)).channelRead(any(), eq(produce(1, QUOTA_LIMIT_EXCEPTION)));
    verify(clientPair.hardwareClient.responseMock, atLeast(100)).channelRead(any(), eq(produce(1, HARDWARE, body.replaceAll(" ", "\0"))));
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.axis2.jaxws.attachments; import junit.framework.TestCase; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.OMNamespace; import org.apache.axiom.om.OMOutputFormat; import org.apache.axiom.om.OMText; import org.apache.axiom.soap.SOAPBody; import org.apache.axiom.soap.SOAPEnvelope; import org.apache.axiom.soap.SOAPFactory; import org.apache.axiom.soap.impl.llom.soap11.SOAP11Factory; import org.apache.axis2.jaxws.message.Block; import org.apache.axis2.jaxws.message.Message; import org.apache.axis2.jaxws.message.Protocol; import org.apache.axis2.jaxws.message.databinding.JAXBBlockContext; import org.apache.axis2.jaxws.message.factory.BlockFactory; import org.apache.axis2.jaxws.message.factory.JAXBBlockFactory; import org.apache.axis2.jaxws.message.factory.MessageFactory; import org.apache.axis2.jaxws.providerapi.DataSourceImpl; import org.apache.axis2.jaxws.registry.FactoryRegistry; import org.apache.axis2.jaxws.unitTest.TestLogger; import org.test.mtom.ImageDepot; import org.test.mtom.ObjectFactory; import org.test.mtom.SendImage; import javax.activation.DataHandler; import 
javax.activation.DataSource; import javax.imageio.ImageIO; import javax.imageio.stream.FileImageInputStream; import javax.imageio.stream.ImageInputStream; import java.awt.*; import java.io.ByteArrayOutputStream; import java.io.File; public class MTOMSerializationTests extends TestCase { private DataSource imageDS; public void setUp() throws Exception { String imageResourceDir = System.getProperty("basedir",".")+"/"+"test-resources"+File.separator+"image"; //Create a DataSource from an image File file = new File(imageResourceDir+File.separator+"test.jpg"); ImageInputStream fiis = new FileImageInputStream(file); Image image = ImageIO.read(fiis); imageDS = new DataSourceImpl("image/jpeg","test.jpg",image); } public MTOMSerializationTests(String name) { super(name); } /* * Simulate building up an OM that is sourced from JAXB and contains * binary data that should be optimized when serialized. */ public void testPlainOMSerialization() throws Exception { TestLogger.logger.debug("---------------------------------------"); TestLogger.logger.debug("test: " + getName()); OMElement payload = createPayload(); OMOutputFormat format = new OMOutputFormat(); format.setDoOptimize(true); format.setSOAP11(true); ByteArrayOutputStream baos = new ByteArrayOutputStream(); payload.serializeAndConsume(baos, format); TestLogger.logger.debug("=================================="); TestLogger.logger.debug(baos.toString()); TestLogger.logger.debug("=================================="); } /* * Simulate building up an OM SOAPEnvelope that has the contents of * the body sourced from JAXB and contains binary data that should be * optimized when serialized. 
*/ public void testSoapOMSerialization() throws Exception { TestLogger.logger.debug("---------------------------------------"); TestLogger.logger.debug("test: " + getName()); OMElement payload = createPayload(); SOAPFactory factory = new SOAP11Factory(); SOAPEnvelope env = factory.createSOAPEnvelope(); SOAPBody body = factory.createSOAPBody(env); body.addChild(payload); OMOutputFormat format = new OMOutputFormat(); format.setDoOptimize(true); format.setSOAP11(true); ByteArrayOutputStream baos = new ByteArrayOutputStream(); env.serializeAndConsume(baos, format); TestLogger.logger.debug("=================================="); TestLogger.logger.debug(baos.toString()); TestLogger.logger.debug("=================================="); } public void testMTOMAttachmentWriter() throws Exception { TestLogger.logger.debug("---------------------------------------"); TestLogger.logger.debug("test: " + getName()); //Create a DataHandler with the String DataSource object DataHandler dataHandler = new DataHandler(imageDS); //Store the data handler in ImageDepot bean ImageDepot imageDepot = new ObjectFactory().createImageDepot(); imageDepot.setImageData(dataHandler); //JAXBContext jbc = JAXBContext.newInstance("org.test.mtom"); JAXBBlockContext context = new JAXBBlockContext(SendImage.class.getPackage().getName()); //Create a request bean with imagedepot bean as value ObjectFactory factory = new ObjectFactory(); SendImage request = factory.createSendImage(); request.setInput(imageDepot); BlockFactory blkFactory = (JAXBBlockFactory) FactoryRegistry.getFactory(JAXBBlockFactory.class); Block block = blkFactory.createFrom(request, context, null); MessageFactory msgFactory = (MessageFactory) FactoryRegistry.getFactory(MessageFactory.class); Message msg = msgFactory.create(Protocol.soap11); msg.setBodyBlock(block); msg.setMTOMEnabled(true); SOAPEnvelope soapOM = (SOAPEnvelope) msg.getAsOMElement(); OMOutputFormat format = new OMOutputFormat(); format.setDoOptimize(true); 
format.setSOAP11(true); ByteArrayOutputStream baos = new ByteArrayOutputStream(); soapOM.serializeAndConsume(baos, format); String outputText = baos.toString(); // Make sure the attachment is serialized assertTrue(outputText.indexOf("Content-Type: image/jpeg") > 0); TestLogger.logger.debug("=================================="); TestLogger.logger.debug(outputText); TestLogger.logger.debug("=================================="); } public void testMTOMAttachmentWriter2() throws Exception { TestLogger.logger.debug("---------------------------------------"); TestLogger.logger.debug("test: " + getName()); //Create a DataHandler with the String DataSource object DataHandler dataHandler = new DataHandler(imageDS); //Store the data handler in ImageDepot bean ImageDepot imageDepot = new ObjectFactory().createImageDepot(); imageDepot.setImageData(dataHandler); //JAXBContext jbc = JAXBContext.newInstance("org.test.mtom"); JAXBBlockContext context = new JAXBBlockContext(SendImage.class.getPackage().getName()); //Create a request bean with imagedepot bean as value ObjectFactory factory = new ObjectFactory(); SendImage request = factory.createSendImage(); request.setInput(imageDepot); BlockFactory blkFactory = (JAXBBlockFactory) FactoryRegistry.getFactory(JAXBBlockFactory.class); Block block = blkFactory.createFrom(request, context, null); MessageFactory msgFactory = (MessageFactory) FactoryRegistry.getFactory(MessageFactory.class); Message msg = msgFactory.create(Protocol.soap11); msg.setBodyBlock(block); msg.setMTOMEnabled(true); // Convert message to SAAJ to simulate an outbound handler msg.getAsSOAPMessage(); // Now convert it back to AXIOM SOAPEnvelope soapOM = (SOAPEnvelope) msg.getAsOMElement(); OMOutputFormat format = new OMOutputFormat(); format.setDoOptimize(true); format.setSOAP11(true); ByteArrayOutputStream baos = new ByteArrayOutputStream(); soapOM.serializeAndConsume(baos, format); String outputText = baos.toString(); // Make sure the attachment is serialized 
assertTrue(outputText.indexOf("Content-Type: image/jpeg") > 0); TestLogger.logger.debug("=================================="); TestLogger.logger.debug(outputText); TestLogger.logger.debug("=================================="); } private OMElement createPayload() { //Create a DataHandler with the String DataSource object DataHandler dataHandler = new DataHandler(imageDS); OMFactory fac = OMAbstractFactory.getOMFactory(); OMNamespace omNs = fac.createOMNamespace("urn://mtom.test.org", "mtom"); OMElement sendImage = fac.createOMElement("sendImage", omNs); OMElement input = fac.createOMElement("input", omNs); sendImage.addChild(input); OMElement imageData = fac.createOMElement("imageData", omNs); input.addChild(imageData); OMText binaryData = fac.createOMText(dataHandler, true); imageData.addChild(binaryData); return sendImage; } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hobsoft.symmetry.spring; import java.io.IOException; import java.io.Writer; import java.util.Collections; import java.util.List; import org.hobsoft.symmetry.Reflector; import org.hobsoft.symmetry.ReflectorException; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.springframework.http.MediaType; import org.springframework.http.converter.HttpMessageNotReadableException; import org.springframework.http.converter.HttpMessageNotWritableException; import org.springframework.mock.http.MockHttpInputMessage; import org.springframework.mock.http.MockHttpOutputMessage; import static java.nio.charset.StandardCharsets.ISO_8859_1; import static java.nio.charset.StandardCharsets.UTF_8; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasEntry; import static org.junit.Assert.assertThat; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import static org.springframework.http.MediaType.parseMediaType; /** * Tests {@code SymmetryHttpMessageConverter}. 
*/ public class SymmetryHttpMessageConverterTest { // ---------------------------------------------------------------------------------------------------------------- // types // ---------------------------------------------------------------------------------------------------------------- private static class DummyComponent { // dummy type } private static class DummySubcomponent extends DummyComponent { // dummy type } // ---------------------------------------------------------------------------------------------------------------- // fields // ---------------------------------------------------------------------------------------------------------------- private ExpectedException thrown = ExpectedException.none(); // ---------------------------------------------------------------------------------------------------------------- // JUnit methods // ---------------------------------------------------------------------------------------------------------------- @Rule public ExpectedException getThrown() { return thrown; } // ---------------------------------------------------------------------------------------------------------------- // tests // ---------------------------------------------------------------------------------------------------------------- @Test public void canReadWithComponentAndContentTypeReturnsFalse() { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, "x/y"); boolean actual = newConverter(reflector).canRead(DummyComponent.class, parseMediaType("x/y")); assertThat(actual, is(false)); } @Test public void canWriteWithComponentAndContentTypeReturnsTrue() { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, "x/y"); boolean actual = newConverter(reflector).canWrite(DummyComponent.class, parseMediaType("x/y")); assertThat(actual, is(true)); } @Test public void canWriteWithSubtypeComponentReturnsTrue() { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, "x/y"); boolean actual = 
newConverter(reflector).canWrite(DummySubcomponent.class, parseMediaType("x/y")); assertThat(actual, is(true)); } @Test public void canWriteWithDifferentComponentReturnsFalse() { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, "x/y"); boolean actual = newConverter(reflector).canWrite(Void.class, parseMediaType("x/y")); assertThat(actual, is(false)); } @Test public void canWriteWithCompatibleContentTypeReturnsTrue() { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, "x/y"); boolean actual = newConverter(reflector).canWrite(DummyComponent.class, parseMediaType("x/*")); assertThat(actual, is(true)); } @Test public void canWriteWithDifferentContentTypeReturnsFalse() { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, "x/y"); boolean actual = newConverter(reflector).canWrite(DummyComponent.class, parseMediaType("x/z")); assertThat(actual, is(false)); } @Test public void getSupportedMediaTypesReturnsContentType() { Reflector<?> reflector = mockReflector(someComponentType(), "x/y"); List<MediaType> actuals = newConverter(reflector).getSupportedMediaTypes(); assertThat(actuals, contains(parseMediaType("x/y"))); } @Test public void readWithComponentThrowsException() throws IOException { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, someContentType()); MockHttpInputMessage inputMessage = new MockHttpInputMessage(new byte[0]); thrown.expect(HttpMessageNotReadableException.class); thrown.expectMessage("SymmetryHttpMessageConverter cannot read components"); newConverter(reflector).read(DummyComponent.class, inputMessage); } @Test public void writeSetsContentType() throws IOException { Reflector<Object> reflector = mockReflector(someComponentType(), "x/y"); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); newConverter(reflector).write(someComponent(), null, outputMessage); assertThat(outputMessage.getHeaders(), hasEntry("Content-Type", 
Collections.singletonList("x/y"))); } @Test public void writeWithComponentInvokesReflector() throws IOException, ReflectorException { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, someContentType()); DummyComponent component = new DummyComponent(); newConverter(reflector).write(component, null, new MockHttpOutputMessage()); verify(reflector).reflect(eq(component), any(Writer.class)); } @Test public void writeWithComponentWritesReflection() throws IOException, ReflectorException { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, someContentType()); doAnswer(write(1, "x")).when(reflector).reflect(any(DummyComponent.class), any(Writer.class)); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); newConverter(reflector).write(new DummyComponent(), null, outputMessage); assertThat(outputMessage.getBodyAsString(), is("x")); } @Test public void writeWithoutCharsetEncodesReflectionUsingIso88591() throws IOException, ReflectorException { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, "x/y"); doAnswer(write(1, "\u20AC")).when(reflector).reflect(any(DummyComponent.class), any(Writer.class)); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); newConverter(reflector).write(new DummyComponent(), parseMediaType("x/y"), outputMessage); assertThat(outputMessage.getBodyAsString(ISO_8859_1), is("?")); } @Test public void writeWithCharsetEncodesReflectionUsingCharset() throws IOException, ReflectorException { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, "x/y; charset=UTF-8"); doAnswer(write(1, "\u20AC")).when(reflector).reflect(any(DummyComponent.class), any(Writer.class)); MockHttpOutputMessage outputMessage = new MockHttpOutputMessage(); newConverter(reflector).write(new DummyComponent(), parseMediaType("x/y; charset=UTF-8"), outputMessage); assertThat(outputMessage.getBodyAsString(UTF_8), is("\u20AC")); } @Test public void 
writeWhenIOExceptionThrowsException() throws IOException, ReflectorException { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, someContentType()); IOException exception = new IOException(); doThrow(exception).when(reflector).reflect(any(DummyComponent.class), any(Writer.class)); thrown.expect(is(exception)); newConverter(reflector).write(new DummyComponent(), null, new MockHttpOutputMessage()); } @Test public void writeWhenReflectorExceptionThrowsSpringException() throws IOException, ReflectorException { Reflector<DummyComponent> reflector = mockReflector(DummyComponent.class, someContentType()); ReflectorException exception = new ReflectorException("x"); doThrow(exception).when(reflector).reflect(any(DummyComponent.class), any(Writer.class)); thrown.expect(HttpMessageNotWritableException.class); thrown.expectMessage("Error writing component"); thrown.expectCause(is(exception)); newConverter(reflector).write(new DummyComponent(), null, new MockHttpOutputMessage()); } // ---------------------------------------------------------------------------------------------------------------- // private methods // ---------------------------------------------------------------------------------------------------------------- private static <T> Reflector<T> mockReflector(Class<T> componentType, String contentType) { Reflector<T> reflector = mock(Reflector.class); when(reflector.getComponentType()).thenReturn(componentType); when(reflector.getContentType()).thenReturn(contentType); return reflector; } private static <T> SymmetryHttpMessageConverter<T> newConverter(Reflector<T> reflector) { return new SymmetryHttpMessageConverter<>(reflector); } private static Object someComponent() { return new Object(); } private static Class<Object> someComponentType() { return Object.class; } private static String someContentType() { return "_/_"; } private static Answer<Object> write(final int writerIndex, final String string) { return new Answer<Object>() { 
@Override public Object answer(InvocationOnMock invocation) throws IOException { Writer writer = invocation.getArgumentAt(writerIndex, Writer.class); writer.write(string); return null; } }; } }
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.jps.incremental;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.ModuleChunk;
import org.jetbrains.jps.builders.BuildRootDescriptor;
import org.jetbrains.jps.builders.BuildRootIndex;
import org.jetbrains.jps.builders.BuildTarget;
import org.jetbrains.jps.builders.FileProcessor;
import org.jetbrains.jps.builders.impl.BuildTargetChunk;
import org.jetbrains.jps.builders.java.JavaBuilderUtil;
import org.jetbrains.jps.builders.java.JavaSourceRootDescriptor;
import org.jetbrains.jps.cmdline.ProjectDescriptor;
import org.jetbrains.jps.incremental.fs.CompilationRound;
import org.jetbrains.jps.incremental.storage.Timestamps;
import org.jetbrains.jps.model.java.JpsJavaClasspathKind;
import org.jetbrains.jps.model.java.JpsJavaExtensionService;
import org.jetbrains.jps.model.module.JpsModule;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Set;

/**
 * Static helpers that manipulate the build's file-system state: marking source
 * files dirty (scheduled for recompilation) or deleted, and enumerating files
 * that need recompiling.
 *
 * @author Eugene Zhuravlev
 */
public class FSOperations {
  private static final Logger LOG = Logger.getInstance("#org.jetbrains.jps.incremental.FSOperations");

  // Global (build-wide) context keys shared across builders.
  // NOTE(review): the code that populates ALL_OUTPUTS_KEY is not visible in this region.
  public static final GlobalContextKey<Set<File>> ALL_OUTPUTS_KEY = GlobalContextKey.create("_all_project_output_dirs_");
  private static final GlobalContextKey<Set<BuildTarget<?>>> TARGETS_COMPLETELY_MARKED_DIRTY =
    GlobalContextKey.create("_targets_completely_marked_dirty_");

  /**
   * @param context the current compile context
   * @param round the compilation round to query
   * @param file the file to check
   * @return true if file is marked as "dirty" in the specified compilation round;
   *         false when the file has no java root descriptor or is not marked
   * @throws IOException on file-system state access failure
   */
  public static boolean isMarkedDirty(CompileContext context, final CompilationRound round, final File file) throws IOException {
    final JavaSourceRootDescriptor rd = context.getProjectDescriptor().getBuildRootIndex().findJavaRootDescriptor(context, file);
    if (rd != null) {
      final ProjectDescriptor pd = context.getProjectDescriptor();
      return pd.fsState.isMarkedForRecompilation(context, round, rd, file);
    }
    return false;
  }

  /**
   * @deprecated use markDirty(CompileContext context, final CompilationRound round, final File file)
   *
   * Note: the marked file will only be visible as "dirty" on the <b>next</b> compilation round!
   * @throws IOException on file-system state access failure
   */
  @Deprecated
  public static void markDirty(CompileContext context, final File file) throws IOException {
    markDirty(context, CompilationRound.NEXT, file);
  }

  // Marks a single file dirty for the given round; no-op if the file has no java root descriptor.
  public static void markDirty(CompileContext context, final CompilationRound round, final File file) throws IOException {
    final JavaSourceRootDescriptor rd = context.getProjectDescriptor().getBuildRootIndex().findJavaRootDescriptor(context, file);
    if (rd != null) {
      final ProjectDescriptor pd = context.getProjectDescriptor();
      pd.fsState.markDirty(context, round, file, rd, pd.timestamps.getStorage(), false);
    }
  }

  /**
   * @deprecated use markDirtyIfNotDeleted(CompileContext context, final CompilationRound round, final File file)
   */
  @Deprecated
  public static void markDirtyIfNotDeleted(CompileContext context, final File file) throws IOException {
    markDirtyIfNotDeleted(context, CompilationRound.NEXT, file);
  }

  public static void markDirtyIfNotDeleted(CompileContext context, final CompilationRound round, final File file) throws IOException {
    final JavaSourceRootDescriptor rd = context.getProjectDescriptor().getBuildRootIndex().findJavaRootDescriptor(context, file);
    if (rd != null) {
      final ProjectDescriptor pd = context.getProjectDescriptor();
      pd.fsState.markDirtyIfNotDeleted(context, round, file, rd, pd.timestamps.getStorage());
    }
  }

  // Registers the file as deleted in the fs state; no-op without a java root descriptor.
  public static void markDeleted(CompileContext context, File file) throws IOException {
    final JavaSourceRootDescriptor rd = context.getProjectDescriptor().getBuildRootIndex().findJavaRootDescriptor(context, file);
    if (rd != null) {
      final ProjectDescriptor pd = context.getProjectDescriptor();
      pd.fsState.registerDeleted(context, rd.target, file, pd.timestamps.getStorage());
    }
  }

  /**
   * @deprecated use markDirty(CompileContext context, final CompilationRound round, final ModuleChunk chunk, @Nullable FileFilter filter)
   */
  @Deprecated
  public static void markDirty(CompileContext context, final ModuleChunk chunk, @Nullable FileFilter filter) throws IOException {
    markDirty(context, CompilationRound.NEXT, chunk, filter);
  }

  // Marks every target of the chunk dirty (optionally restricted by filter).
  public static void markDirty(CompileContext context, final CompilationRound round, final ModuleChunk chunk, @Nullable FileFilter filter) throws IOException {
    for (ModuleBuildTarget target : chunk.getTargets()) {
      markDirty(context, round, target, filter);
    }
  }

  public static void markDirty(CompileContext context, final CompilationRound round, final ModuleBuildTarget target, @Nullable FileFilter filter) throws IOException {
    final ProjectDescriptor pd = context.getProjectDescriptor();
    markDirtyFiles(context, target, round, pd.timestamps.getStorage(), true, null, filter);
  }

  /**
   * @deprecated use markDirtyRecursively(CompileContext context, final CompilationRound round, ModuleChunk chunk, FileFilter filter)
   */
  @Deprecated
  public static void markDirtyRecursively(CompileContext context, ModuleChunk chunk) throws IOException {
    markDirtyRecursively(context, CompilationRound.NEXT, chunk);
  }

  public static void markDirtyRecursively(CompileContext context, final CompilationRound round, ModuleChunk chunk) throws IOException {
    markDirtyRecursively(context, round, chunk, null);
  }

  /**
   * Marks the chunk's targets dirty together with all module targets that
   * (transitively) depend on the chunk's modules, so dependents are rebuilt too.
   */
  public static void markDirtyRecursively(CompileContext context, final CompilationRound round, ModuleChunk chunk, @Nullable FileFilter filter) throws IOException {
    Set<JpsModule> modules = chunk.getModules();
    Set<ModuleBuildTarget> targets = chunk.getTargets();
    final Set<ModuleBuildTarget> dirtyTargets = new HashSet<>(targets);

    // now mark all modules that depend on dirty modules.
    // Only chunks that come AFTER this one in the sorted order are considered
    // (the scan starts once the current chunk is found).
    final JpsJavaClasspathKind classpathKind = JpsJavaClasspathKind.compile(chunk.containsTests());
    boolean found = false;
    for (BuildTargetChunk targetChunk : context.getProjectDescriptor().getBuildTargetIndex().getSortedTargetChunks(context)) {
      if (!found) {
        if (targetChunk.getTargets().equals(chunk.getTargets())) {
          found = true;
        }
      }
      else {
        for (final BuildTarget<?> target : targetChunk.getTargets()) {
          if (target instanceof ModuleBuildTarget) {
            final Set<JpsModule> deps = getDependentModulesRecursively(((ModuleBuildTarget)target).getModule(), classpathKind);
            if (ContainerUtil.intersects(deps, modules)) {
              // a single dependent module dirties its whole chunk
              for (BuildTarget<?> buildTarget : targetChunk.getTargets()) {
                if (buildTarget instanceof ModuleBuildTarget) {
                  dirtyTargets.add((ModuleBuildTarget)buildTarget);
                }
              }
              break;
            }
          }
        }
      }
    }

    if (JavaBuilderUtil.isCompileJavaIncrementally(context)) {
      // mark as non-incremental only the module that triggered non-incremental change
      for (ModuleBuildTarget target : targets) {
        if (!isMarkedDirty(context, target)) {
          // if the target was marked dirty already, all its files were compiled, so
          // it makes no sense to mark it non-incremental
          context.markNonIncremental(target);
        }
      }
    }

    removeTargetsAlreadyMarkedDirty(context, dirtyTargets);

    final Timestamps timestamps = context.getProjectDescriptor().timestamps.getStorage();
    for (ModuleBuildTarget target : dirtyTargets) {
      markDirtyFiles(context, target, round, timestamps, true, null, filter);
    }
  }

  // Transitive dependents of the module for the given classpath kind (exported deps only).
  private static Set<JpsModule> getDependentModulesRecursively(final JpsModule module, final JpsJavaClasspathKind kind) {
    return JpsJavaExtensionService.dependencies(module).includedIn(kind).recursivelyExportedOnly().getModules();
  }

  public static void processFilesToRecompile(CompileContext context, ModuleChunk chunk, FileProcessor<JavaSourceRootDescriptor, ? super ModuleBuildTarget> processor) throws IOException {
    for (ModuleBuildTarget target : chunk.getTargets()) {
      processFilesToRecompile(context, target, processor);
    }
  }

  public static void processFilesToRecompile(CompileContext context, @NotNull ModuleBuildTarget target, FileProcessor<JavaSourceRootDescriptor, ? super ModuleBuildTarget> processor) throws IOException {
    context.getProjectDescriptor().fsState.processFilesToRecompile(context, target, processor);
  }

  /**
   * Walks every (non-temp, existing) root of the target and marks files dirty.
   * When no filter is given the target's recompile set is cleared first.
   * If every compilable file was marked, records the target as completely dirty.
   */
  static void markDirtyFiles(CompileContext context,
                             BuildTarget<?> target,
                             final CompilationRound round,
                             Timestamps timestamps,
                             boolean forceMarkDirty,
                             @Nullable THashSet<File> currentFiles,
                             @Nullable FileFilter filter) throws IOException {
    boolean completelyMarkedDirty = true;
    for (BuildRootDescriptor rd : context.getProjectDescriptor().getBuildRootIndex().getTargetRoots(target, context)) {
      if (!rd.getRootFile().exists() ||
          //temp roots are managed by compilers themselves
          (rd instanceof JavaSourceRootDescriptor && ((JavaSourceRootDescriptor)rd).isTemp)) {
        continue;
      }
      if (filter == null) {
        context.getProjectDescriptor().fsState.clearRecompile(rd);
      }
      //final FSCache fsCache = rd.canUseFileCache() ? context.getProjectDescriptor().getFSCache() : FSCache.NO_CACHE;
      completelyMarkedDirty &= traverseRecursively(context, rd, round, rd.getRootFile(), timestamps, forceMarkDirty, currentFiles, filter);
    }
    if (completelyMarkedDirty) {
      addCompletelyMarkedDirtyTarget(context, target);
    }
  }

  /**
   * Marks changed files under {@code file} as dirty.
   * @return {@code true} if all compilable files were marked dirty and {@code false} if some of them were skipped because they weren't accepted
   * by {@code filter} or wasn't modified
   */
  private static boolean traverseRecursively(CompileContext context,
                                             final BuildRootDescriptor rd,
                                             final CompilationRound round,
                                             final File file,
                                             @NotNull final Timestamps tsStorage,
                                             final boolean forceDirty,
                                             @Nullable Set<File> currentFiles,
                                             @Nullable FileFilter filter) throws IOException {
    final BuildRootIndex rootIndex = context.getProjectDescriptor().getBuildRootIndex();
    final Ref<Boolean> allFilesMarked = Ref.create(Boolean.TRUE);

    Files.walkFileTree(file.toPath(), EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
      @Override
      public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {
        // prune subtrees the root index does not accept for this root
        return rootIndex.isDirectoryAccepted(dir.toFile(), rd) ? FileVisitResult.CONTINUE : FileVisitResult.SKIP_SUBTREE;
      }

      @Override
      public FileVisitResult visitFile(Path f, BasicFileAttributes attrs) throws IOException {
        final File _file = f.toFile();
        if (!rootIndex.isFileAccepted(_file, rd)) { // ignored file
          return FileVisitResult.CONTINUE;
        }
        if (filter != null && !filter.accept(_file)) {
          allFilesMarked.set(Boolean.FALSE);
        }
        else {
          boolean markDirty = forceDirty;
          if (!markDirty) {
            // for symlinks the attr structure reflects the symlink's timestamp and not symlink's target timestamp
            markDirty = tsStorage.getStamp(_file, rd.getTarget()) != attrs.lastModifiedTime().toMillis();
          }
          if (markDirty) {
            // if it is full project rebuild, all storages are already completely cleared;
            // so passing null because there is no need to access the storage to clear non-existing data
            final Timestamps marker = context.isProjectRebuild() ?
null : tsStorage; context.getProjectDescriptor().fsState.markDirty(context, round, _file, rd, marker, false); } if (currentFiles != null) { currentFiles.add(_file); } if (!markDirty) { allFilesMarked.set(Boolean.FALSE); } } return FileVisitResult.CONTINUE; } }); return allFilesMarked.get(); } public static void pruneEmptyDirs(CompileContext context, @Nullable final Set<File> dirsToDelete) { if (dirsToDelete == null || dirsToDelete.isEmpty()) return; Set<File> doNotDelete = ALL_OUTPUTS_KEY.get(context); if (doNotDelete == null) { doNotDelete = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY); for (BuildTarget<?> target : context.getProjectDescriptor().getBuildTargetIndex().getAllTargets()) { doNotDelete.addAll(target.getOutputRoots(context)); } ALL_OUTPUTS_KEY.set(context, doNotDelete); } Set<File> additionalDirs = null; Set<File> toDelete = dirsToDelete; while (toDelete != null) { for (File file : toDelete) { // important: do not force deletion if the directory is not empty! final boolean deleted = !doNotDelete.contains(file) && file.delete(); if (deleted) { final File parentFile = file.getParentFile(); if (parentFile != null) { if (additionalDirs == null) { additionalDirs = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY); } additionalDirs.add(parentFile); } } } toDelete = additionalDirs; additionalDirs = null; } } public static boolean isMarkedDirty(CompileContext context, ModuleChunk chunk) { synchronized (TARGETS_COMPLETELY_MARKED_DIRTY) { Set<BuildTarget<?>> marked = TARGETS_COMPLETELY_MARKED_DIRTY.get(context); return marked != null && marked.containsAll(chunk.getTargets()); } } public static long lastModified(File file) { return lastModified(file.toPath()); } private static long lastModified(Path path) { try { return Files.getLastModifiedTime(path).toMillis(); } catch (IOException e) { LOG.warn(e); } return 0L; } public static void copy(File fromFile, File toFile) throws IOException { final Path from = fromFile.toPath(); final Path to = toFile.toPath(); try { 
try { Files.copy(from, to, StandardCopyOption.REPLACE_EXISTING); } catch (AccessDeniedException e) { if (!Files.isWritable(to) && toFile.setWritable(true)) { Files.copy(from, to, StandardCopyOption.REPLACE_EXISTING); // repeat once the file seems to be writable again } else { throw e; } } catch (NoSuchFileException e) { final File parent = toFile.getParentFile(); if (parent != null && parent.mkdirs()) { Files.copy(from, to, StandardCopyOption.REPLACE_EXISTING); // repeat on successful target dir creation } else { throw e; } } } catch (IOException e) { // fallback: trying 'classic' copying via streams LOG.info("Error copying "+ fromFile.getPath() + " to " + toFile.getPath() + " with NIO API", e); FileUtil.copyContent(fromFile, toFile); } } public static boolean isMarkedDirty(CompileContext context, BuildTarget<?> target) { synchronized (TARGETS_COMPLETELY_MARKED_DIRTY) { Set<BuildTarget<?>> marked = TARGETS_COMPLETELY_MARKED_DIRTY.get(context); return marked != null && marked.contains(target); } } private static void addCompletelyMarkedDirtyTarget(CompileContext context, BuildTarget<?> target) { synchronized (TARGETS_COMPLETELY_MARKED_DIRTY) { Set<BuildTarget<?>> marked = TARGETS_COMPLETELY_MARKED_DIRTY.get(context); if (marked == null) { marked = new HashSet<>(); TARGETS_COMPLETELY_MARKED_DIRTY.set(context, marked); } marked.add(target); } } private static void removeTargetsAlreadyMarkedDirty(CompileContext context, Set<ModuleBuildTarget> targetsSetToFilter) { synchronized (TARGETS_COMPLETELY_MARKED_DIRTY) { Set<BuildTarget<?>> marked = TARGETS_COMPLETELY_MARKED_DIRTY.get(context); if (marked != null) { targetsSetToFilter.removeAll(marked); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIESOR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.aries.proxy.itests; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.ops4j.pax.exam.CoreOptions.composite; import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.CoreOptions.systemProperty; import static org.ops4j.pax.exam.CoreOptions.when; import java.lang.reflect.Method; import java.util.AbstractList; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.Callable; import javax.inject.Inject; import org.apache.aries.proxy.InvocationListener; import org.apache.aries.proxy.ProxyManager; import org.junit.Test; import org.junit.runner.RunWith; import org.ops4j.pax.exam.CoreOptions; import org.ops4j.pax.exam.Option; import org.ops4j.pax.exam.junit.PaxExam; import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; import org.ops4j.pax.exam.spi.reactors.PerClass; import org.osgi.framework.Bundle; import org.osgi.framework.BundleContext; @RunWith(PaxExam.class) @ExamReactorStrategy(PerClass.class) public abstract class AbstractProxyTest { @Inject 
BundleContext bundleContext; @Inject ProxyManager mgr; public final static class TestCallable implements Callable<Object> { private Object list = new ArrayList<Object>(); public Object call() throws Exception { return list; } public void setReturn(Object o) { list = o; } } public static class TestDelegate extends AbstractList<String> implements Callable<String> { private final String message; /** * On HotSpot VMs newer than 1.6 u33, we can only generate subclass proxies for classes * with a no-args constructor. */ protected TestDelegate() { super(); this.message = null; } public TestDelegate(String message) { super(); this.message = message; } public String call() throws Exception { return message; } public boolean equals(Object o) { if(o instanceof TestDelegate){ return message.equals(((TestDelegate)o).message); } return false; } public void throwException() { throw new RuntimeException(); } public void testInternallyCaughtException() { try { throw new RuntimeException(); } catch (RuntimeException re) { // no op } } @Override public String get(int location) { return null; } @Override public int size() { return 0; } } private class TestListener implements InvocationListener { boolean preInvoke = false; boolean postInvoke = false; boolean postInvokeExceptionalReturn = false; Object token; public Object preInvoke(Object proxy, Method m, Object[] args) throws Throwable { preInvoke = true; token = new Object(); return token; } public void postInvoke(Object token, Object proxy, Method m, Object returnValue) throws Throwable { postInvoke = this.token == token; } public void postInvokeExceptionalReturn(Object token, Object proxy, Method m, Throwable exception) throws Throwable { postInvokeExceptionalReturn = this.token == token; } public void clear() { preInvoke = false; postInvoke = false; postInvokeExceptionalReturn = false; token = null; } } @Test public void testEquals() throws Exception { Bundle b = bundleContext.getBundle(); TestCallable c = new TestCallable(); 
c.setReturn(new TestDelegate("One")); TestCallable c2 = new TestCallable(); c.setReturn(new TestDelegate("Two")); Collection<Class<?>> classes = new ArrayList<Class<?>>(); classes.add(List.class); Object proxy = mgr.createDelegatingProxy(b, classes, c, new TestDelegate("Three")); Object otherProxy = mgr.createDelegatingProxy(b, classes, c, new TestDelegate("Four")); Object totallyOtherProxy = mgr.createDelegatingProxy(b, classes, c2, new TestDelegate("Five")); assertTrue("The object is not equal to itself", proxy.equals(proxy)); assertTrue("The object is not equal to another proxy of itself", proxy.equals(otherProxy)); assertFalse("The object is equal to proxy to another object", proxy.equals(totallyOtherProxy)); } @Test public void testDelegation() throws Exception { Bundle b = bundleContext.getBundle(); TestCallable c = new TestCallable(); Collection<Class<?>> classes = new ArrayList<Class<?>>(); classes.add(TestDelegate.class); TestDelegate proxy = (TestDelegate) mgr.createDelegatingProxy(b, classes, c, new TestDelegate("")); c.setReturn(new TestDelegate("Hello")); assertEquals("Wrong message", "Hello", proxy.call()); c.setReturn(new TestDelegate("Hello again")); assertEquals("Wrong message", "Hello again", proxy.call()); } @Test public void testInterception() throws Exception { Bundle b = bundleContext.getBundle(); TestDelegate td = new TestDelegate("Hello"); Collection<Class<?>> classes = new ArrayList<Class<?>>(); classes.add(TestDelegate.class); TestListener tl = new TestListener(); TestDelegate proxy = (TestDelegate) mgr.createInterceptingProxy(b, classes, td, tl); //We need to call clear here, because the object will have had its toString() called tl.clear(); assertCalled(tl, false, false, false); assertEquals("Wrong message", "Hello", proxy.call()); assertCalled(tl, true, true, false); tl.clear(); assertCalled(tl, false, false, false); try { proxy.throwException(); fail("Should throw an exception"); } catch (RuntimeException re) { assertCalled(tl, true, 
false, true); } tl.clear(); assertCalled(tl, false, false, false); try { proxy.testInternallyCaughtException(); } finally { assertCalled(tl, true, true, false); } } @Test public void testDelegationAndInterception() throws Exception { Bundle b = bundleContext.getBundle(); TestCallable c = new TestCallable(); Collection<Class<?>> classes = new ArrayList<Class<?>>(); classes.add(TestDelegate.class); TestListener tl = new TestListener(); TestDelegate proxy = (TestDelegate) mgr.createDelegatingInterceptingProxy(b, classes, c, new TestDelegate(""), tl); c.setReturn(new TestDelegate("Hello")); //We need to call clear here, because the object will have had its toString() called tl.clear(); assertCalled(tl, false, false, false); assertEquals("Wrong message", "Hello", proxy.call()); assertCalled(tl, true, true, false); tl.clear(); assertCalled(tl, false, false, false); c.setReturn(new TestDelegate("Hello again")); assertEquals("Wrong message", "Hello again", proxy.call()); assertCalled(tl, true, true, false); tl.clear(); assertCalled(tl, false, false, false); try { proxy.throwException(); fail("Should throw an exception"); } catch (RuntimeException re) { assertCalled(tl, true, false, true); } tl.clear(); assertCalled(tl, false, false, false); try { proxy.testInternallyCaughtException(); } finally { assertCalled(tl, true, true, false); } } private void assertCalled(TestListener listener, boolean pre, boolean post, boolean ex) { assertEquals(pre, listener.preInvoke); assertEquals(post, listener.postInvoke); assertEquals(ex, listener.postInvokeExceptionalReturn); } protected Option proxyOptions() { String localRepo = System.getProperty("maven.repo.local"); if (localRepo == null) { localRepo = System.getProperty("org.ops4j.pax.url.mvn.localRepository"); } return composite( CoreOptions.junitBundles(), systemProperty("org.ops4j.pax.logging.DefaultServiceLog.level").value("INFO"), when(localRepo != null).useOptions(CoreOptions.vmOption("-Dorg.ops4j.pax.url.mvn.localRepository=" + 
localRepo)), mavenBundle("org.ow2.asm", "asm-commons").versionAsInProject(), mavenBundle("org.ow2.asm", "asm").versionAsInProject(), mavenBundle("org.ops4j.pax.logging", "pax-logging-api").versionAsInProject(), mavenBundle("org.ops4j.pax.logging", "pax-logging-service").versionAsInProject(), mavenBundle("org.apache.aries.proxy", "org.apache.aries.proxy").versionAsInProject() /* vmOption ("-Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005"), waitForFrameworkStartup(),*/ ); } }
package com.brandon3055.townbuilder.schematics; import com.brandon3055.townbuilder.utills.LogHelper; import net.minecraft.block.Block; import net.minecraft.block.state.IBlockState; import net.minecraft.nbt.CompressedStreamTools; import net.minecraft.nbt.NBTBase; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.nbt.NBTTagList; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraft.world.chunk.Chunk; import net.minecraftforge.fml.common.event.FMLPreInitializationEvent; import net.minecraftforge.fml.relauncher.ReflectionHelper; import java.io.*; import java.lang.reflect.InvocationTargetException; import java.util.HashMap; import java.util.Map; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import java.util.zip.ZipException; /** * Created by Brandon on 21/02/2015. */ public class SchematicHandler { private static String savePath; private static File saveFolder; public static void init(FMLPreInitializationEvent event) { savePath = event.getModConfigurationDirectory().getParentFile().getAbsolutePath() + "/mods/townbuilder"; } public static File getSaveFolder() { if (saveFolder == null) { saveFolder = new File(savePath); } if (!saveFolder.exists()) saveFolder.mkdir(); return saveFolder; } public static NBTTagCompound loadCompoundFromFile(String fileName) throws SchematicException { File file = getFile(fileName); if (!file.exists()) throw new SchematicException("Schematic dose not exist"); try { LogHelper.info("Reading file [" + file.length() + " bytes]"); FileInputStream fis = new FileInputStream(file.getCanonicalFile()); DataInputStream is = new DataInputStream(new GZIPInputStream(fis)); int type = is.readByte(); String name = is.readUTF(); if (!name.equals("Schematic")) throw new SchematicException("Invalid Schematic [" + file.getName() + "]"); fis.close(); is.close(); fis = new 
FileInputStream(file.getCanonicalFile()); is = new DataInputStream(new GZIPInputStream(fis)); NBTTagCompound c = CompressedStreamTools.read(is); fis.close(); is.close(); LogHelper.info("Read Complete"); return c; } catch (IOException e) { if (e instanceof ZipException) { try { NBTTagCompound c = CompressedStreamTools.read(file); c.setBoolean("UseOldLoader", true); return c; } catch (IOException e1) { e1.printStackTrace(); } } else e.printStackTrace(); } throw new SchematicException("Failed to read schematic. Unknown error"); } public static File getFile(String fileName) { File file = new File(getSaveFolder(), fileName + ".schematic"); if (!file.exists()) return null; return file; } public static void saveCompoundToFile(NBTTagCompound compound, String fileName) { if (compound == null) return; File schematicFile = new File(getSaveFolder(), fileName + ".schematic"); try { LogHelper.info("Writing schematic to file..."); DataOutputStream os = new DataOutputStream(new GZIPOutputStream(new FileOutputStream(schematicFile))); writeTag(compound, os); os.close(); LogHelper.info("Write complete"); } catch (IOException e) { e.printStackTrace(); } } private static void writeTag(NBTBase nbtBase, DataOutput dataOutput) throws IOException { dataOutput.writeByte(nbtBase.getId()); if (nbtBase.getId() != 0) { dataOutput.writeUTF("Schematic"); try { ReflectionHelper.findMethod(NBTBase.class, "write", "func_74734_a", DataOutput.class).invoke(nbtBase, dataOutput); } catch (IllegalAccessException e) { e.printStackTrace(); } catch (InvocationTargetException e) { e.printStackTrace(); } } } public static void deleteCompoundFile(String fileName) { File schematicFile = new File(getSaveFolder(), fileName + ".schematic"); if (schematicFile.exists()) schematicFile.delete(); } public static NBTTagCompound getCompoundForArea(World world, int posX, int posY, int posZ, int width, int height, int length) { NBTTagCompound compound = new NBTTagCompound(); compound.setShort("Width", (short) width); 
compound.setShort("Height", (short) height); compound.setShort("Length", (short) length); NBTTagList tileList = new NBTTagList(); NBTTagCompound idNameConversion = new NBTTagCompound(); byte[] blocks = new byte[width * height * length]; byte[] addBlocks = null; byte[] blockData = new byte[width * height * length]; Map<Integer, String> idToNameMap = new HashMap<Integer, String>(); int totalBlocks = width * height * length; LogHelper.info("Creating schematic containing " + totalBlocks + " Blocks"); int i = totalBlocks / 10; int blocksCopied = 0; for (int x = 0; x < width; ++x) { for (int y = 0; y < height; ++y) { for (int z = 0; z < length; ++z) { int index = y * width * length + z * width + x; BlockPos pos = new BlockPos(x + posX, y + posY, z + posZ); IBlockState state = world.getBlockState(pos); Block block = state.getBlock(); // Save 4096 IDs in an AddBlocks section if (Block.getIdFromBlock(block) > 255) { if (addBlocks == null) { // Lazily create section addBlocks = new byte[(blocks.length >> 1) + 1]; } addBlocks[index >> 1] = (byte) (((index & 1) != 0) ? 
addBlocks[index >> 1] & 0xF0 | (Block.getIdFromBlock(block) >> 8) & 0xF : addBlocks[index >> 1] & 0xF | ((Block.getIdFromBlock(block) >> 8) & 0xF) << 4); } blocks[index] = (byte) Block.getIdFromBlock(block); blockData[index] = (byte) block.getMetaFromState(state);//world.getBlockMetadata(x + posX, y + posY, z + posZ); if (world.getTileEntity(pos) != null) { NBTTagCompound tileCompound = new NBTTagCompound(); world.getTileEntity(pos).writeToNBT(tileCompound); tileCompound.setInteger("x", x); tileCompound.setInteger("y", y); tileCompound.setInteger("z", z); tileList.appendTag(tileCompound); } if (!idToNameMap.containsKey(Block.getIdFromBlock(block))) { idToNameMap.put(Block.getIdFromBlock(block), Block.REGISTRY.getNameForObject(block).toString());//GameData.getBlockRegistry().getNameForObject(world.getBlock(x + posX, y + posY, z + posZ))); idNameConversion.setString(String.valueOf(Block.getIdFromBlock(block)), Block.REGISTRY.getNameForObject(block).toString());//GameData.getBlockRegistry().getNameForObject(world.getBlock(x + posX, y + posY, z + posZ))); } blocksCopied++; if (width > 1 && height > 1 && length > 1 && blocksCopied % i == 0) LogHelper.info("Progress: " + (((double) blocksCopied / (double) totalBlocks) * 100D) + "%%"); } } } compound.setByteArray("Blocks", blocks); compound.setByteArray("Data", blockData); if (addBlocks != null) compound.setByteArray("AddBlocks", addBlocks); compound.setTag("TileEntities", tileList); compound.setTag("idConversions", idNameConversion); return compound; } public static void loadAreaFromCompound(NBTTagCompound compound, World world, int posX, int posY, int posZ, boolean copyAir) throws SchematicException { if (compound != null && compound.hasKey("UseOldLoader")) { // loadAreaFromCompoundOld(compound, world, posX, posY, posZ, copyAir); LogHelper.error("The old schematic loader is nolonger supported!"); return; } if (!compound.hasKey("Blocks")) throw new SchematicException("Schematic file is missing a \"Blocks\" tag"); short 
width = compound.getShort("Width"); short height = compound.getShort("Height"); short length = compound.getShort("Length"); byte[] blockId = compound.getByteArray("Blocks"); byte[] blockData = compound.getByteArray("Data"); byte[] addId = new byte[0]; short[] blocks = new short[blockId.length]; if (compound.hasKey("AddBlocks")) addId = compound.getByteArray("AddBlocks"); for (int index = 0; index < blockId.length; index++) { if ((index >> 1) >= addId.length) { // No corresponding AddBlocks index blocks[index] = (short) (blockId[index] & 0xFF); } else { if ((index & 1) != 0) { blocks[index] = (short) (((addId[index >> 1] & 0x0F) << 8) + (short) (blockId[index] & 0xFF)); } else { blocks[index] = (short) (((addId[index >> 1] & 0xF0) << 4) + (short) (blockId[index] & 0xFF)); } } } NBTTagCompound idNameConversion = null; if (compound.hasKey("idConversions")) idNameConversion = compound.getCompoundTag("idConversions"); NBTTagList tileList = compound.getTagList("TileEntities", 10); Map<Integer, Block> blockMap = new HashMap<Integer, Block>(); int totalBlocks = width * height * length; LogHelper.info("Pasting schematic containing " + totalBlocks + " Blocks"); int ii = totalBlocks / 10; int blocksCopied = 0; for (int x = 0; x < width; x++) { for (int y = 0; y < height; y++) { for (int z = 0; z < length; z++) { BlockPos pos = new BlockPos(posX + x, posY + y, posZ + z); int index = y * width * length + z * width + x; int id = blocks[index]; if (!blockMap.containsKey(id)) { Block block = Block.REGISTRY.getObject(new ResourceLocation(idNameConversion.getString(String.valueOf(id)))); if (idNameConversion != null && block != null)//GameData.getBlockRegistry().getObject(idNameConversion.getString(String.valueOf(id))) != null) { blockMap.put(id, block);//GameData.getBlockRegistry().getObject(idNameConversion.getString(String.valueOf(id)))); } else blockMap.put(id, Block.getBlockById(id)); } if (!copyAir && id == 0) continue; Chunk chunk = world.getChunkFromBlockCoords(pos); 
BlockPos pos2 = new BlockPos((posX + x) & 15, (posY + y), (posZ + z) & 15); if (chunk.getTileEntity(pos2, Chunk.EnumCreateEntityType.CHECK) != null) { chunk.getTileEntity(pos2, Chunk.EnumCreateEntityType.CHECK).invalidate(); } chunk.removeInvalidTileEntity(pos2); IBlockState oldState = world.getBlockState(pos); IBlockState newState = blockMap.get(id).getStateFromMeta(blockData[(y * length + z) * width + x]); chunk.setBlockState(pos, newState); // chunk.setBlockMetadata((posX + x) & 15, (posY + y), (posZ + z) & 15, blockData[(y * length + z) * width + x]); // world.markBlockForUpdate(posX + x, posY + y, posZ + z);TODO This probably needs to happen world.notifyBlockUpdate(pos, oldState, newState, 3); if (world.getTileEntity(pos) != null) { TileEntity tile = world.getTileEntity(pos); if (tileList != null) for (int i = 0; i < tileList.tagCount(); i++) { if (tileList.getCompoundTagAt(i).getInteger("x") == x && tileList.getCompoundTagAt(i).getInteger("y") == y && tileList.getCompoundTagAt(i).getInteger("z") == z) { tile.readFromNBT(tileList.getCompoundTagAt(i)); } } tile.setPos(pos); // tile.xCoord = posX + x; // tile.yCoord = posY + y; // tile.zCoord = posZ + z; } blocksCopied++; if (width > 1 && height > 1 && length > 1 && blocksCopied % ii == 0) LogHelper.info("Progress: " + (((double) blocksCopied / (double) totalBlocks) * 100D) + "%%"); } } } } public static String[] getSchematics() { String[] s = SchematicHandler.getSaveFolder().list(); for (int i = 0; i < s.length; i++) { if (s[i].contains(".schematic")) s[i] = s[i].substring(0, s[i].lastIndexOf(".schematic")); } return s; } public static class SchematicException extends Exception { private String msg; public SchematicException(String msg) { this.msg = msg; } @Override public String getMessage() { return msg; } } }
package com.nike.wingtips.springboot.zipkin2; import com.nike.wingtips.Tracer; import com.nike.wingtips.lifecyclelistener.SpanLifecycleListener; import com.nike.wingtips.springboot.WingtipsSpringBootConfiguration; import com.nike.wingtips.springboot.WingtipsSpringBootProperties; import com.nike.wingtips.springboot.zipkin2.WingtipsWithZipkinSpringBootConfiguration.DefaultOverrides; import com.nike.wingtips.springboot.zipkin2.componenttest.componentscanonly.ComponentTestMainWithComponentScanOnly; import com.nike.wingtips.springboot.zipkin2.componenttest.componenttestoverridebothreporterandconverter.ComponentTestMainWithReporterAndConverterOverrides; import com.nike.wingtips.springboot.zipkin2.componenttest.componenttestoverridedefaultconverter.ComponentTestMainWithConverterOverride; import com.nike.wingtips.springboot.zipkin2.componenttest.componenttestoverridedefaultreporter.ComponentTestMainWithReporterOverride; import com.nike.wingtips.springboot.zipkin2.componenttest.manualimportandcomponentscan.ComponentTestMainWithBothManualImportAndComponentScan; import com.nike.wingtips.springboot.zipkin2.componenttest.manualimportonly.ComponentTestMainManualImportOnly; import com.nike.wingtips.testutil.Whitebox; import com.nike.wingtips.zipkin2.WingtipsToZipkinLifecycleListener; import com.nike.wingtips.zipkin2.util.WingtipsToZipkinSpanConverter; import com.nike.wingtips.zipkin2.util.WingtipsToZipkinSpanConverterDefaultImpl; import com.tngtech.java.junit.dataprovider.DataProvider; import com.tngtech.java.junit.dataprovider.DataProviderRunner; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.boot.SpringApplication; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.stereotype.Component; import java.io.IOException; import java.net.MalformedURLException; import java.net.ServerSocket; import java.net.URL; import java.util.List; import java.util.UUID; 
import zipkin2.Endpoint; import zipkin2.Span; import zipkin2.reporter.AsyncReporter; import zipkin2.reporter.Reporter; import zipkin2.reporter.urlconnection.URLConnectionSender; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; /** * Tests the functionality of {@link WingtipsWithZipkinSpringBootConfiguration}. * * @author Nic Munroe */ @RunWith(DataProviderRunner.class) public class WingtipsWithZipkinSpringBootConfigurationTest { @Before public void beforeMethod() { clearTracerSpanLifecycleListeners(); } @After public void afterMethod() { clearTracerSpanLifecycleListeners(); } private void clearTracerSpanLifecycleListeners() { Tracer.getInstance().removeAllSpanLifecycleListeners(); } @SuppressWarnings("SameParameterValue") private WingtipsZipkinProperties generateProps(boolean disabled, String baseUrl, String serviceName) { WingtipsZipkinProperties props = new WingtipsZipkinProperties(); props.setZipkinDisabled(String.valueOf(disabled)); props.setBaseUrl(baseUrl); props.setServiceName(serviceName); return props; } private enum DefaultOverridesScenario { NULL_DEFAULT_OVERRIDES(null), NO_OVERRIDES(new DefaultOverrides()), WITH_REPORTER_OVERRIDE(defaultOverridesWithMockReporter()), WITH_CONVERTER_OVERRIDE(defaultOverridesWithMockConverter()), WITH_REPORTER_AND_CONVERTER_OVERRIDE(defaultOverridesWithMocks()); public final DefaultOverrides defaultOverrides; DefaultOverridesScenario( DefaultOverrides defaultOverrides) { this.defaultOverrides = defaultOverrides; } private static DefaultOverrides defaultOverridesWithMockReporter() { DefaultOverrides defaultOverrides = new DefaultOverrides(); defaultOverrides.zipkinReporter = mock(Reporter.class); return defaultOverrides; } private static DefaultOverrides defaultOverridesWithMockConverter() { DefaultOverrides defaultOverrides = new 
DefaultOverrides(); defaultOverrides.zipkinSpanConverter = mock(WingtipsToZipkinSpanConverter.class); return defaultOverrides; } private static DefaultOverrides defaultOverridesWithMocks() { DefaultOverrides defaultOverrides = new DefaultOverrides(); defaultOverrides.zipkinReporter = mock(Reporter.class); defaultOverrides.zipkinSpanConverter = mock(WingtipsToZipkinSpanConverter.class); return defaultOverrides; } } @DataProvider(value = { "NULL_DEFAULT_OVERRIDES", "NO_OVERRIDES", "WITH_REPORTER_OVERRIDE", "WITH_CONVERTER_OVERRIDE", "WITH_REPORTER_AND_CONVERTER_OVERRIDE" }) @Test public void constructor_registers_WingtipsToZipkinLifecycleListener_with_expected_values( DefaultOverridesScenario scenario ) throws MalformedURLException { // given String baseUrl = "http://localhost:4242/" + UUID.randomUUID().toString(); String serviceName = UUID.randomUUID().toString(); WingtipsZipkinProperties props = generateProps(false, baseUrl, serviceName); // when new WingtipsWithZipkinSpringBootConfiguration(props, scenario.defaultOverrides); // then List<SpanLifecycleListener> listeners = Tracer.getInstance().getSpanLifecycleListeners(); assertThat(listeners).hasSize(1); assertThat(listeners.get(0)).isInstanceOf(WingtipsToZipkinLifecycleListener.class); WingtipsToZipkinLifecycleListener listener = (WingtipsToZipkinLifecycleListener) listeners.get(0); assertThat(Whitebox.getInternalState(listener, "serviceName")).isEqualTo(serviceName); assertThat(Whitebox.getInternalState(listener, "zipkinEndpoint")) .isEqualTo(Endpoint.newBuilder().serviceName(serviceName).build()); assertThat(Whitebox.getInternalState(listener, "zipkinSpanConverter")).isNotNull(); Object zipkinSpanReporter = Whitebox.getInternalState(listener, "zipkinSpanReporter"); Object zipkinSpanConverter = Whitebox.getInternalState(listener, "zipkinSpanConverter"); if (scenario.defaultOverrides != null) { if (scenario.defaultOverrides.zipkinReporter != null) { 
assertThat(zipkinSpanReporter).isSameAs(scenario.defaultOverrides.zipkinReporter); } if (scenario.defaultOverrides.zipkinSpanConverter != null) { assertThat(zipkinSpanConverter).isSameAs(scenario.defaultOverrides.zipkinSpanConverter); } } if (scenario.defaultOverrides == null || scenario.defaultOverrides.zipkinReporter == null) { assertThat(zipkinSpanReporter).isInstanceOf(AsyncReporter.class); Object spanSender = Whitebox.getInternalState(zipkinSpanReporter, "sender"); assertThat(spanSender).isInstanceOf(URLConnectionSender.class); assertThat(Whitebox.getInternalState(spanSender, "endpoint")) .isEqualTo(new URL(baseUrl + "/api/v2/spans")); } if (scenario.defaultOverrides == null || scenario.defaultOverrides.zipkinSpanConverter == null) { assertThat(zipkinSpanConverter).isInstanceOf(WingtipsToZipkinSpanConverterDefaultImpl.class); } } @Test public void constructor_does_not_register_WingtipsToZipkinLifecycleListener_when_props_shouldApplyWingtipsToZipkinLifecycleListener_returns_false() { // given WingtipsZipkinProperties props = mock(WingtipsZipkinProperties.class); doReturn(false).when(props).shouldApplyWingtipsToZipkinLifecycleListener(); // when new WingtipsWithZipkinSpringBootConfiguration(props, null); // then assertThat(Tracer.getInstance().getSpanLifecycleListeners()).isEmpty(); verify(props).shouldApplyWingtipsToZipkinLifecycleListener(); verifyNoMoreInteractions(props); } @SuppressWarnings("unused") private enum ComponentTestSetup { MANUAL_IMPORT_ONLY(ComponentTestMainManualImportOnly.class, false, null, null), COMPONENT_SCAN_ONLY(ComponentTestMainWithComponentScanOnly.class, true, null, null), BOTH_MANUAL_AND_COMPONENT_SCAN(ComponentTestMainWithBothManualImportAndComponentScan.class, true, null, null), WITH_ZIPKIN_REPORTER_OVERRIDE( ComponentTestMainWithReporterOverride.class, false, ComponentTestMainWithReporterOverride.CUSTOM_REPORTER_INSTANCE, null ), WITH_CONVERTER_OVERRIDE( ComponentTestMainWithConverterOverride.class, false, null, 
ComponentTestMainWithConverterOverride.CUSTOM_CONVERTER_INSTANCE ), WITH_BOTH_REPORTER_AND_CONVERTER_OVERRIDES( ComponentTestMainWithReporterAndConverterOverrides.class, false, ComponentTestMainWithReporterOverride.CUSTOM_REPORTER_INSTANCE, ComponentTestMainWithConverterOverride.CUSTOM_CONVERTER_INSTANCE ); final boolean expectComponentScannedObjects; final Class<?> mainClass; final Reporter<zipkin2.Span> expectedReporterOverride; final WingtipsToZipkinSpanConverter expectedConverterOverride; ComponentTestSetup(Class<?> mainClass, boolean expectComponentScannedObjects, Reporter<Span> expectedReporterOverride, WingtipsToZipkinSpanConverter expectedConverterOverride) { this.mainClass = mainClass; this.expectComponentScannedObjects = expectComponentScannedObjects; this.expectedReporterOverride = expectedReporterOverride; this.expectedConverterOverride = expectedConverterOverride; } } // This component test verifies that a Spring Boot application successfully utilizes WingtipsSpringBootConfiguration // and WingtipsSpringBootProperties when it is component scanned, imported manually, or both. Specifically // we should not get multiple bean definition errors even when WingtipsSpringBootConfiguration is *both* // component scanned *and* imported manually. // We also test that app-specific overrides of certain things are honored/used (e.g. Zipkin Reporter). 
@DataProvider(value = {
    "MANUAL_IMPORT_ONLY",
    "COMPONENT_SCAN_ONLY",
    "BOTH_MANUAL_AND_COMPONENT_SCAN",
    "WITH_ZIPKIN_REPORTER_OVERRIDE",
    "WITH_CONVERTER_OVERRIDE",
    "WITH_BOTH_REPORTER_AND_CONVERTER_OVERRIDES"
})
@Test
public void component_test(ComponentTestSetup componentTestSetup) {
    // given
    // Boot the scenario's main class on a random free port so parallel/repeated runs don't collide.
    int serverPort = findFreePort();
    Class<?> mainClass = componentTestSetup.mainClass;

    ConfigurableApplicationContext serverAppContext = SpringApplication.run(mainClass, "--server.port=" + serverPort);

    try {
        // when
        WingtipsSpringBootConfiguration baseConfig = serverAppContext.getBean(WingtipsSpringBootConfiguration.class);
        WingtipsWithZipkinSpringBootConfiguration zipkinConfig = serverAppContext.getBean(WingtipsWithZipkinSpringBootConfiguration.class);
        WingtipsSpringBootProperties baseProps = serverAppContext.getBean(WingtipsSpringBootProperties.class);
        WingtipsZipkinProperties zipkinProps = serverAppContext.getBean(WingtipsZipkinProperties.class);
        String[] someComponentScannedClassBeanNames = serverAppContext.getBeanNamesForType(SomeComponentScannedClass.class);
        List<SpanLifecycleListener> lifecycleListeners = Tracer.getInstance().getSpanLifecycleListeners();

        // then
        // Sanity check that we component scanned (or not) as appropriate.
        if (componentTestSetup.expectComponentScannedObjects) {
            assertThat(someComponentScannedClassBeanNames).isNotEmpty();
        } else {
            assertThat(someComponentScannedClassBeanNames).isEmpty();
        }

        // WingtipsSpringBootConfiguration, WingtipsWithZipkinSpringBootConfiguration,
        // WingtipsSpringBootProperties, and WingtipsZipkinProperties should be available as beans, and
        // the base config should use the same props we received.
        assertThat(baseConfig).isNotNull();
        assertThat(baseProps).isNotNull();
        assertThat(baseConfig).extracting("wingtipsProperties")
            .usingRecursiveComparison()
            .isEqualTo(baseProps);

        assertThat(zipkinConfig).isNotNull();
        assertThat(zipkinProps).isNotNull();

        // Verify that a WingtipsToZipkinLifecycleListener was registered with Tracer.
        assertThat(lifecycleListeners).hasSize(1);
        SpanLifecycleListener listener = lifecycleListeners.get(0);
        assertThat(listener).isInstanceOf(WingtipsToZipkinLifecycleListener.class);

        // Verify the Zipkin Reporter override if expected.
        // NOTE(review): this asserts against the shared CUSTOM_REPORTER_INSTANCE constant rather than
        // componentTestSetup.expectedReporterOverride — equivalent only as long as every scenario's
        // expected override IS that constant (true for the current enum values).
        if (componentTestSetup.expectedReporterOverride != null) {
            assertThat(Whitebox.getInternalState(listener, "zipkinSpanReporter"))
                .isSameAs(ComponentTestMainWithReporterOverride.CUSTOM_REPORTER_INSTANCE);
        }

        // Verify the Wingtips-to-Zipkin converter override if expected.
        if (componentTestSetup.expectedConverterOverride != null) {
            assertThat(Whitebox.getInternalState(listener, "zipkinSpanConverter"))
                .isSameAs(ComponentTestMainWithConverterOverride.CUSTOM_CONVERTER_INSTANCE);
        }
    } finally {
        // Always tear the Spring context down, even on assertion failure, so later tests
        // (and the Tracer's listener list) don't see leftover state.
        SpringApplication.exit(serverAppContext);
    }
}

/**
 * Returns a currently-free TCP port by briefly binding an ephemeral-port socket.
 * Note there is an inherent race: the port could be taken again before the server binds it.
 */
private static int findFreePort() {
    try (ServerSocket serverSocket = new ServerSocket(0)) {
        return serverSocket.getLocalPort();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

/** Marker bean — its presence/absence in the context proves whether component scanning ran. */
@Component
private static class SomeComponentScannedClass {
}

}
package org.fiolino.common.util;

import javax.annotation.Nullable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;

/**
 * This class can be used to hold values that usually don't change,
 * but should be frequently updated to accept changes if there are some.
 * <p>
 * This is best used for tasks that need some resources, but not that many
 * that frequent updates would hurt. An example is some data read from the
 * file system.
 * <p>
 * Values are calculated by either a {@link Supplier} or a {@link UnaryOperator}, which gets the old
 * cached value as the parameter. This can be useful for costly operations which can check whether
 * an update is necessary at all.
 * <p>
 * Operators are only called once even in concurrent access. They don't need to be thread safe even in concurrent
 * environments.
 * <p>
 * Example:
 * <code>
 * Cached&lt;DataObject&gt; valueHolder = Cached.updateEvery(5).hours().with(() -&gt; new DataObject(...));
 * </code>
 *
 * @author kuli
 */
public abstract class Cached<T> implements Supplier<T> {

    /**
     * This is the starting factory method, followed by a method for the time unit.
     *
     * @param value How many time units shall the cache keep its value.
     */
    public static ExpectUnit updateEvery(long value) {
        return new ExpectUnit(value);
    }

    /**
     * This is the factory method which assigns a duration in text form, including the delay and the time unit.
     *
     * Examples:
     * 1 Day
     * 5 sec
     * 900 millis
     *
     * The time unit must be a unique start of some {@link TimeUnit} name. If none is given, seconds are assumed.
     */
    public static ExpectEvaluator updateEvery(String value) {
        // Parsing is delegated to the project-local Strings helper; result is in milliseconds.
        long duration = Strings.parseLongDuration(value, TimeUnit.MILLISECONDS);
        return new ExpectEvaluator(duration);
    }

    /**
     * Use this factory to create a cached instance which calls its operator in every call.
     * The computed value must never be null.
     */
    public static <T> Cached<T> updateAlways(T initialValue, UnaryOperator<T> evaluator) {
        return new ImmediateUpdater<>(initialValue, evaluator, true);
    }

    /**
     * Use this factory to create a cached instance which calls its operator in every call.
     * Calculated value may be null.
     */
    public static <T> Cached<T> updateAlwaysNullable(T initialValue, UnaryOperator<T> evaluator) {
        return new ImmediateUpdater<>(initialValue, evaluator, false);
    }

    /**
     * Creates a Cached instance that gets initialized by some supplier and then always returns
     * that value.
     *
     * @param evaluator Computes the initial value
     * @param <T> The type
     * @return A Cached instance
     */
    public static <T> Cached<T> with(Supplier<T> evaluator) {
        return new OneTimeInitializer<>(evaluator, true);
    }

    /**
     * Creates a Cached instance that gets initialized by some supplier and then always returns
     * that value.
     * Calculated value may be null.
     *
     * @param evaluator Computes the initial value
     * @param <T> The type
     * @return A Cached instance
     */
    public static <T> Cached<T> withNullable(Supplier<T> evaluator) {
        return new OneTimeInitializer<>(evaluator, false);
    }

    /**
     * First step of the fluent builder: holds the raw count until a time unit is chosen.
     */
    public static final class ExpectUnit {
        private final long value;

        private ExpectUnit(long value) {
            this.value = value;
        }

        /**
         * Sets the expiration timeout unit to nano seconds.
         * <p>
         * This method follows a with().
         */
        // NOTE(review): the internal resolution is milliseconds, so anything below 1 ms
        // truncates to 0 here — confirm that a 0-ms refresh rate is the intended behavior.
        public ExpectEvaluator nanoseconds() {
            return new ExpectEvaluator(TimeUnit.NANOSECONDS.toMillis(value));
        }

        /**
         * Sets the expiration timeout unit to micro seconds.
         * <p>
         * This method follows a with().
         */
        public ExpectEvaluator microseconds() {
            return new ExpectEvaluator(TimeUnit.MICROSECONDS.toMillis(value));
        }

        /**
         * Sets the expiration timeout unit to milli seconds.
         * <p>
         * This method follows a with().
         */
        public ExpectEvaluator milliseconds() {
            return new ExpectEvaluator(value);
        }

        /**
         * Sets the expiration timeout unit to seconds.
         * <p>
         * This method follows a with().
         */
        public ExpectEvaluator seconds() {
            return new ExpectEvaluator(TimeUnit.SECONDS.toMillis(value));
        }

        /**
         * Sets the expiration timeout unit to minutes.
         * <p>
         * This method follows a with().
         */
        public ExpectEvaluator minutes() {
            return new ExpectEvaluator(TimeUnit.MINUTES.toMillis(value));
        }

        /**
         * Sets the expiration timeout unit to hours.
         * <p>
         * This method follows a with().
         */
        public ExpectEvaluator hours() {
            return new ExpectEvaluator(TimeUnit.HOURS.toMillis(value));
        }

        /**
         * Sets the expiration timeout unit to days.
         * <p>
         * This method follows a with().
         */
        public ExpectEvaluator days() {
            return new ExpectEvaluator(TimeUnit.DAYS.toMillis(value));
        }
    }

    /**
     * Second step of the fluent builder: holds the resolved timeout (in milliseconds)
     * and accepts the evaluator that computes/refreshes the cached value.
     */
    public static final class ExpectEvaluator {
        // Refresh timeout in milliseconds.
        final long milliseconds;

        private ExpectEvaluator(long milliseconds) {
            this.milliseconds = milliseconds;
        }

        /**
         * Assigns an initial value and an operator that updates any existing value initially and after expiry.
         *
         * @param initialValue This is used for the first call to the operator.
         * @param eval This gets evaluated first and after each timeout
         * @param <T> The cached type
         * @return The cache instance. This can be used now.
         */
        public <T> Cached<T> with(@Nullable T initialValue, UnaryOperator<T> eval) {
            return new TimedCache<T>(milliseconds, initialValue, eval, true);
        }

        /**
         * Initially and after each expiry, a new value is calculated via this Callable instance.
         * Expired values will be discarded completely.
         *
         * @param eval This gets evaluated first and after each timeout
         * @param <T> The cached type
         * @return The cache instance. This can be used now.
         */
        public <T> Cached<T> with(Supplier<T> eval) {
            // Old value is ignored: the supplier recomputes from scratch every time.
            return with(null, v -> eval.get());
        }

        /**
         * Assigns an initial value and an operator that updates any existing value initially and after expiry.
         * Calculated value may be null.
         *
         * @param initialValue This is used for the first call to the operator.
         * @param eval This gets evaluated first and after each timeout
         * @param <T> The cached type
         * @return The cache instance. This can be used now.
         */
        public <T> Cached<T> withNullable(@Nullable T initialValue, UnaryOperator<T> eval) {
            return new TimedCache<T>(milliseconds, initialValue, eval, false);
        }

        /**
         * Initially and after each expiry, a new value is calculated via this Callable instance.
         * Expired values will be discarded completely.
         * Calculated value may be null.
         *
         * @param eval This gets evaluated first and after each timeout
         * @param <T> The cached type
         * @return The cache instance. This can be used now.
         */
        public <T> Cached<T> withNullable(Supplier<T> eval) {
            return withNullable(null, v -> eval.get());
        }
    }

    // True once a refresh has completed successfully at least once.
    private volatile boolean isInitialized;
    // The currently cached value; may be null before initialization or when nullable.
    private volatile T instance;
    // Computes the new value from the old one; never invoked concurrently (guarded by updateResource).
    private final UnaryOperator<T> evaluator;
    // When true, a null result from the evaluator is an error.
    private final boolean mandatory;
    // Single-permit semaphore: holder is the one thread allowed to run the evaluator.
    private final Semaphore updateResource = new Semaphore(1);

    Cached(Supplier<T> eval, boolean mandatory) {
        this.evaluator = x -> eval.get();
        this.mandatory = mandatory;
    }

    Cached(T initialValue, UnaryOperator<T> evaluator, boolean mandatory) {
        this.evaluator = evaluator;
        this.mandatory = mandatory;
        instance = initialValue;
    }

    /**
     * Gets the cached value.
     * <p>
     * Update the cached value, if refresh rate has expired.
     */
    @Override
    public final T get() {
        // Snapshot first: if no refresh was needed, return the value we saw before any
        // concurrent update could swap it out; after a refresh, re-read the fresh value.
        T value = instance;
        if (neededRefresh()) {
            return instance;
        }
        return value;
    }

    // Subclasses decide whether the current value is still fresh.
    abstract boolean isValid();

    // Returns true when a refresh was attempted (i.e. the caller should re-read instance).
    private boolean neededRefresh() {
        if (isInitialized && isValid()) {
            return false;
        }
        tryRefresh();
        return true;
    }

    /**
     * Refreshes the value to an updated instance.
     * This either starts the refresh process immediately, or it waits until another updating thread has finished.
     */
    public final void refresh() {
        // Invalidate first so concurrent readers also fall into the refresh path.
        isInitialized = false;
        if (!tryRefresh()) {
            // Another thread holds the update permit; block until it releases.
            spinWait();
        }
    }

    // Attempts to run the evaluator under the update permit.
    // Returns true if this thread performed (or deliberately skipped) the refresh,
    // false if another thread was already refreshing.
    private boolean tryRefresh() {
        if (updateResource.tryAcquire()) {
            try {
                T value;
                try {
                    value = evaluator.apply(instance);
                } catch (RefreshNotPossibleException ex) {
                    // Keep the old value; only fail hard if there is no value at all and one is required.
                    if (instance == null && mandatory) {
                        throw new IllegalStateException("Evaluator " + evaluator + " could not refresh on initial run", ex);
                    }
                    return true;
                }
                if (value == null && mandatory) {
                    throw new NullPointerException("Evaluator " + evaluator + " returned null value");
                }
                // Order matters for waitIfUninitialized(): mark initialized, then publish the value.
                isInitialized = true;
                instance = value;
                postRefresh();
            } finally {
                updateResource.release();
            }
            return true;
        } else {
            // Another thread is updating - only wait for it if no old value is available or refresh is explicitly requested
            waitIfUninitialized();
        }
        return false;
    }

    // Hook for subclasses; runs after a successful refresh while still holding the permit.
    void postRefresh() {}

    // NOTE(review): this loops acquire/release until isInitialized flips — effectively a
    // busy-wait when the updater repeatedly fails to initialize; confirm that is acceptable.
    private void waitIfUninitialized() {
        while (!isInitialized) {
            spinWait();
        }
    }

    // Blocks until the current updater releases the permit, then immediately releases it again.
    private void spinWait() {
        try {
            updateResource.acquire();
        } catch (InterruptedException ex) {
            // Restore the interrupt flag before bailing out.
            Thread.currentThread().interrupt();
            throw new CancellationException("Thread is interrupted, " + evaluator + " may return uninitialized null value.");
        }
        updateResource.release();
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        if (!isInitialized) sb.append("Uninitialized ");
        sb.append(getClass().getSimpleName());
        if (isInitialized) {
            sb.append(" (").append(instance).append(')');
        }
        postToString(sb);
        return sb.toString();
    }

    // Hook for subclasses to append extra state to toString().
    void postToString(StringBuilder sb) {}

    /**
     * Assigned Operators can throw this to indicate that a refresh is not possible yet,
     * and the cached value shall remain until it's possible again.
     *
     * If this is thrown on initial calculation, then either an {@link IllegalStateException} is thrown if the
     * result is mandatory, or null is returned if the result is nullable.
     */
    public static class RefreshNotPossibleException extends RuntimeException {
        private static final long serialVersionUID = 5734137134481666718L;
    }
}

/**
 * Cached variant whose value expires a fixed number of milliseconds after the last refresh.
 */
final class TimedCache<T> extends Cached<T> {
    // Wall-clock time (ms) of the last successful refresh; 0 until the first one.
    private volatile long lastUpdate;
    // How long (ms) a refreshed value stays valid.
    private final long refreshRate;

    TimedCache(long refreshRate, T initialValue, UnaryOperator<T> evaluator, boolean mandatory) {
        super(initialValue, evaluator, mandatory);
        this.refreshRate = refreshRate;
    }

    @Override
    boolean isValid() {
        return System.currentTimeMillis() - lastUpdate <= refreshRate;
    }

    @Override
    void postRefresh() {
        super.postRefresh();
        lastUpdate = System.currentTimeMillis();
    }

    @Override
    void postToString(StringBuilder sb) {
        super.postToString(sb);
        if (isValid()) {
            sb.append("; expires in ");
            Strings.appendLongDuration(sb, refreshRate + lastUpdate - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
        } else {
            sb.append("; expired since ");
            Strings.appendLongDuration(sb, System.currentTimeMillis() - refreshRate - lastUpdate, TimeUnit.MILLISECONDS);
        }
    }
}

/**
 * Cached variant that computes its value once and never expires it.
 */
final class OneTimeInitializer<T> extends Cached<T> {
    OneTimeInitializer(Supplier<T> evaluator, boolean mandatory) {
        super(evaluator, mandatory);
    }

    @Override
    boolean isValid() {
        // Never expires: once initialized, the value is final.
        return true;
    }
}

/**
 * Cached variant that re-runs its operator on every single get() call.
 */
final class ImmediateUpdater<T> extends Cached<T> {
    ImmediateUpdater(T initialValue, UnaryOperator<T> evaluator, boolean mandatory) {
        super(initialValue, evaluator, mandatory);
    }

    @Override
    boolean isValid() {
        // Never valid: forces a refresh attempt on each access.
        return false;
    }
}
/** * Copyright (c) 2010 Ignasi Barrera * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.sjmvc.util; import static org.sjmvc.util.ReflectionUtils.fromString; import static org.sjmvc.util.ReflectionUtils.getFieldArrayType; import static org.sjmvc.util.ReflectionUtils.getFieldCollectionType; import static org.sjmvc.util.ReflectionUtils.getFieldType; import static org.sjmvc.util.ReflectionUtils.getProperty; import static org.sjmvc.util.ReflectionUtils.setValue; import static org.sjmvc.util.ReflectionUtils.transformAndSet; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertNotNull; import static org.testng.Assert.assertNull; import static org.testng.Assert.fail; import org.sjmvc.NestedTestPojo; import org.sjmvc.TestPojo; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; /** * Unit tests for the {@link ReflectionUtils} class. 
* * @author Ignasi Barrera */ public class ReflectionUtilsTest { /** The target objetc used in tests. */ private TestPojo target; @BeforeMethod public void setUp() { target = new TestPojo(); target.setRequiredFields(); } @Test public void testGetProperty() throws Exception { String notNullField = (String) getProperty(target, "stringProperty"); assertNotNull(notNullField); assertEquals(notNullField, target.getStringProperty()); String[] nullField = (String[]) getProperty(target, "stringArray"); assertNull(nullField); } @Test(expectedExceptions = NoSuchFieldException.class) public void testInvalidGetProperty() throws Exception { getProperty(target, "unexistingProperty"); } @Test public void testSetValue() throws Exception { setValue(target, "stringProperty", "test"); setValue(target, "integerProperty", 5); setValue(target, "integerArray", null); assertEquals(target.getStringProperty(), "test"); assertEquals(target.getIntegerProperty(), new Integer(5)); assertEquals(target.getIntegerArray(), null); } @Test(expectedExceptions = IllegalArgumentException.class) public void testInvalidSetValue() throws Exception { // Unexisting field exception is already tested in the // testInvalidGetProperty method setValue(target, "integerProperty", "13"); } @Test public void testFromString() throws Exception { assertEquals(fromString(Object.class, null), null); assertEquals(fromString(String.class, "test"), "test"); assertEquals(fromString(Integer.class, "5"), new Integer(5)); assertEquals(fromString(Double.class, "5.6"), new Double(5.6)); assertEquals(fromString(Float.class, "5.7"), new Float(5.7F)); assertEquals(fromString(Long.class, "12345"), new Long(12345L)); assertEquals(fromString(Short.class, "12"), new Short((short) 12)); assertEquals(fromString(Boolean.class, "true"), Boolean.TRUE); assertEquals(fromString(Byte.class, "1"), new Byte((byte) 1)); assertEquals(fromString(TestEnum.class, "VALUE"), TestEnum.VALUE); } @Test public void testInvalidFromString() { 
checkInvalidFromString(TestPojo.class, "value"); checkInvalidFromString(Integer.class, "value"); checkInvalidFromString(TestEnum.class, "value"); } @Test public void testTransformAndSet() throws Exception { transformAndSet(target, "stringProperty", "test"); transformAndSet(target, "integerProperty", "5"); transformAndSet(target, "integerArray", null); assertEquals(target.getStringProperty(), "test"); assertEquals(target.getIntegerProperty(), new Integer(5)); assertEquals(target.getIntegerArray(), null); } @Test public void testInvalidTransformAndSet() { checkInvalidTransformAndSet(target, "unexistingProperty", "test"); checkInvalidTransformAndSet(target, "integerProperty", "test"); checkInvalidTransformAndSet(target, "nestedProperty", "test"); } @Test public void testGetFieldType() throws Exception { assertEquals(getFieldType("stringProperty", TestPojo.class), String.class); assertEquals(getFieldType("integerProperty", TestPojo.class), Integer.class); assertEquals(getFieldType("nestedProperty", TestPojo.class), NestedTestPojo.class); // Unexisting field exception is already tested in the // testInvalidGetProperty method } @Test public void testGetFieldArrayType() throws Exception { assertEquals(getFieldArrayType("stringArray", TestPojo.class), String.class); assertEquals(getFieldArrayType("integerArray", TestPojo.class), Integer.class); // Not array fields assertEquals(getFieldArrayType("stringProperty", TestPojo.class), null); assertEquals(getFieldArrayType("integerProperty", TestPojo.class), null); } @Test public void testGetFieldCollectionType() throws Exception { assertEquals(getFieldCollectionType("stringList", TestPojo.class), String.class); assertEquals(getFieldCollectionType("integerList", TestPojo.class), Integer.class); } @Test public void testInvalidGetFieldCollectionType() throws Exception { checkInvalidGetFieldCollectionType("stringProperty", TestPojo.class); checkInvalidGetFieldCollectionType("integerProperty", TestPojo.class); } public void 
checkInvalidFromString(final Class< ? > clazz, final String value) { try { fromString(clazz, value); fail("fromString method should have failed"); } catch (Exception ex) { // Do nothing. Test success. } } public void checkInvalidTransformAndSet(final Object target, final String name, final String value) { try { transformAndSet(target, name, value); fail("transformAndSet method should have failed"); } catch (Exception ex) { // Do nothing. Test success. } } public void checkInvalidGetFieldCollectionType(final String name, final Class< ? > clazz) throws Exception { try { getFieldCollectionType(name, clazz); fail("getFieldCollectionType method should throw a ClassCastException"); } catch (ClassCastException ex) { // Do nothing. Test success. } } private enum TestEnum { VALUE } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.personalize.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * NOTE: This file is produced by the AWS SDK code generator (see the {@code @Generated} annotation below).
 * Do not hand-edit; changes will be lost on the next generation run.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/ListRecipes" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListRecipesResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * The list of available recipes.
     * </p>
     */
    private java.util.List<RecipeSummary> recipes;
    /**
     * <p>
     * A token for getting the next set of recipes.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * The list of available recipes.
     * </p>
     *
     * @return The list of available recipes.
     */
    public java.util.List<RecipeSummary> getRecipes() {
        return recipes;
    }

    /**
     * <p>
     * The list of available recipes.
     * </p>
     *
     * @param recipes
     *        The list of available recipes.
     */
    public void setRecipes(java.util.Collection<RecipeSummary> recipes) {
        if (recipes == null) {
            this.recipes = null;
            return;
        }

        // Defensive copy so later mutation of the caller's collection cannot affect this result object.
        this.recipes = new java.util.ArrayList<RecipeSummary>(recipes);
    }

    /**
     * <p>
     * The list of available recipes.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setRecipes(java.util.Collection)} or {@link #withRecipes(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param recipes
     *        The list of available recipes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListRecipesResult withRecipes(RecipeSummary... recipes) {
        if (this.recipes == null) {
            // Presize to the varargs length; elements are appended below.
            setRecipes(new java.util.ArrayList<RecipeSummary>(recipes.length));
        }
        for (RecipeSummary ele : recipes) {
            this.recipes.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The list of available recipes.
     * </p>
     *
     * @param recipes
     *        The list of available recipes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListRecipesResult withRecipes(java.util.Collection<RecipeSummary> recipes) {
        setRecipes(recipes);
        return this;
    }

    /**
     * <p>
     * A token for getting the next set of recipes.
     * </p>
     *
     * @param nextToken
     *        A token for getting the next set of recipes.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * A token for getting the next set of recipes.
     * </p>
     *
     * @return A token for getting the next set of recipes.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * A token for getting the next set of recipes.
     * </p>
     *
     * @param nextToken
     *        A token for getting the next set of recipes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListRecipesResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getRecipes() != null)
            sb.append("Recipes: ").append(getRecipes()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ListRecipesResult == false)
            return false;
        ListRecipesResult other = (ListRecipesResult) obj;
        // Generated idiom: XOR detects exactly-one-side-null, then equals() compares non-null values.
        if (other.getRecipes() == null ^ this.getRecipes() == null)
            return false;
        if (other.getRecipes() != null && other.getRecipes().equals(this.getRecipes()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getRecipes() == null) ? 0 : getRecipes().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    @Override
    public ListRecipesResult clone() {
        try {
            // Shallow clone is sufficient for the generated model; fields are replaced, not mutated.
            return (ListRecipesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.lucene; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFormatTooNewException; import org.apache.lucene.index.IndexFormatTooOldException; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.search.Collector; import org.apache.lucene.search.DocIdSetIterator; import 
org.apache.lucene.search.Explanation; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TimeLimitingCollector; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.Lock; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Counter; import org.apache.lucene.util.Version; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; import java.io.IOException; import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; public class Lucene { public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54"; public static final String LATEST_POSTINGS_FORMAT = "Lucene50"; public static final String LATEST_CODEC = "Lucene62"; static { 
Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class); assert annotation == null : "PostingsFromat " + LATEST_POSTINGS_FORMAT + " is deprecated" ; annotation = DocValuesFormat.forName(LATEST_DOC_VALUES_FORMAT).getClass().getAnnotation(Deprecated.class); assert annotation == null : "DocValuesFormat " + LATEST_DOC_VALUES_FORMAT + " is deprecated" ; } public static final NamedAnalyzer STANDARD_ANALYZER = new NamedAnalyzer("_standard", AnalyzerScope.GLOBAL, new StandardAnalyzer()); public static final NamedAnalyzer KEYWORD_ANALYZER = new NamedAnalyzer("_keyword", AnalyzerScope.GLOBAL, new KeywordAnalyzer()); public static final ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0]; public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(0, EMPTY_SCORE_DOCS, 0.0f); public static Version parseVersion(@Nullable String version, Version defaultVersion, Logger logger) { if (version == null) { return defaultVersion; } try { return Version.parse(version); } catch (ParseException e) { logger.warn((Supplier<?>) () -> new ParameterizedMessage("no version match {}, default to {}", version, defaultVersion), e); return defaultVersion; } } /** * Reads the segments infos, failing if it fails to load */ public static SegmentInfos readSegmentInfos(Directory directory) throws IOException { return SegmentInfos.readLatestCommit(directory); } /** * Returns an iterable that allows to iterate over all files in this segments info */ public static Iterable<String> files(SegmentInfos infos) throws IOException { final List<Collection<String>> list = new ArrayList<>(); list.add(Collections.singleton(infos.getSegmentsFileName())); for (SegmentCommitInfo info : infos) { list.add(info.files()); } return Iterables.flatten(list); } /** * Returns the number of documents in the index referenced by this {@link SegmentInfos} */ public static int getNumDocs(SegmentInfos info) { int numDocs = 0; for (SegmentCommitInfo si : info) { numDocs += 
si.info.maxDoc() - si.getDelCount(); } return numDocs; } /** * Reads the segments infos from the given commit, failing if it fails to load */ public static SegmentInfos readSegmentInfos(IndexCommit commit) throws IOException { // Using commit.getSegmentsFileName() does NOT work here, have to // manually create the segment filename String filename = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", commit.getGeneration()); return SegmentInfos.readCommit(commit.getDirectory(), filename); } /** * Reads the segments infos from the given segments file name, failing if it fails to load */ private static SegmentInfos readSegmentInfos(String segmentsFileName, Directory directory) throws IOException { return SegmentInfos.readCommit(directory, segmentsFileName); } /** * This method removes all files from the given directory that are not referenced by the given segments file. * This method will open an IndexWriter and relies on index file deleter to remove all unreferenced files. Segment files * that are newer than the given segments file are removed forcefully to prevent problems with IndexWriter opening a potentially * broken commit point / leftover. * <b>Note:</b> this method will fail if there is another IndexWriter open on the given directory. This method will also acquire * a write lock from the directory while pruning unused files. This method expects an existing index in the given directory that has * the given segments file. */ public static SegmentInfos pruneUnreferencedFiles(String segmentsFileName, Directory directory) throws IOException { final SegmentInfos si = readSegmentInfos(segmentsFileName, directory); try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { int foundSegmentFiles = 0; for (final String file : directory.listAll()) { /** * we could also use a deletion policy here but in the case of snapshot and restore * sometimes we restore an index and override files that were referenced by a "future" * commit. 
If such a commit is opened by the IW it would likely throw a corrupted index exception * since checksums don's match anymore. that's why we prune the name here directly. * We also want the caller to know if we were not able to remove a segments_N file. */ if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) { foundSegmentFiles++; if (file.equals(si.getSegmentsFileName()) == false) { directory.deleteFile(file); // remove all segment_N files except of the one we wanna keep } } } assert SegmentInfos.getLastCommitSegmentsFileName(directory).equals(segmentsFileName); if (foundSegmentFiles == 0) { throw new IllegalStateException("no commit found in the directory"); } } final CommitPoint cp = new CommitPoint(si, directory); try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER) .setIndexCommit(cp) .setCommitOnClose(false) .setMergePolicy(NoMergePolicy.INSTANCE) .setOpenMode(IndexWriterConfig.OpenMode.APPEND))) { // do nothing and close this will kick of IndexFileDeleter which will remove all pending files } return si; } /** * This method removes all lucene files from the given directory. It will first try to delete all commit points / segments * files to ensure broken commits or corrupted indices will not be opened in the future. If any of the segment files can't be deleted * this operation fails. 
*/ public static void cleanLuceneIndex(Directory directory) throws IOException { try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) { for (final String file : directory.listAll()) { if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) { directory.deleteFile(file); // remove all segment_N files } } } try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER) .setMergePolicy(NoMergePolicy.INSTANCE) // no merges .setCommitOnClose(false) // no commits .setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append... { // do nothing and close this will kick of IndexFileDeleter which will remove all pending files } } public static void checkSegmentInfoIntegrity(final Directory directory) throws IOException { new SegmentInfos.FindSegmentsFile(directory) { @Override protected Object doBody(String segmentFileName) throws IOException { try (IndexInput input = directory.openInput(segmentFileName, IOContext.READ)) { CodecUtil.checksumEntireFile(input); } return null; } }.run(); } /** * Wraps <code>delegate</code> with count based early termination collector with a threshold of <code>maxCountHits</code> */ public static final EarlyTerminatingCollector wrapCountBasedEarlyTerminatingCollector(final Collector delegate, int maxCountHits) { return new EarlyTerminatingCollector(delegate, maxCountHits); } /** * Wraps <code>delegate</code> with a time limited collector with a timeout of <code>timeoutInMillis</code> */ public static final TimeLimitingCollector wrapTimeLimitingCollector(final Collector delegate, final Counter counter, long timeoutInMillis) { return new TimeLimitingCollector(delegate, counter, timeoutInMillis); } /** * Check whether there is one or more documents matching the provided query. 
*/ public static boolean exists(IndexSearcher searcher, Query query) throws IOException { final Weight weight = searcher.createNormalizedWeight(query, false); // the scorer API should be more efficient at stopping after the first // match than the bulk scorer API for (LeafReaderContext context : searcher.getIndexReader().leaves()) { final Scorer scorer = weight.scorer(context); if (scorer == null) { continue; } final Bits liveDocs = context.reader().getLiveDocs(); final DocIdSetIterator iterator = scorer.iterator(); for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) { if (liveDocs == null || liveDocs.get(doc)) { return true; } } } return false; } public static TopDocs readTopDocs(StreamInput in) throws IOException { byte type = in.readByte(); if (type == 0) { int totalHits = in.readVInt(); float maxScore = in.readFloat(); ScoreDoc[] scoreDocs = new ScoreDoc[in.readVInt()]; for (int i = 0; i < scoreDocs.length; i++) { scoreDocs[i] = new ScoreDoc(in.readVInt(), in.readFloat()); } return new TopDocs(totalHits, scoreDocs, maxScore); } else if (type == 1) { int totalHits = in.readVInt(); float maxScore = in.readFloat(); SortField[] fields = new SortField[in.readVInt()]; for (int i = 0; i < fields.length; i++) { fields[i] = readSortField(in); } FieldDoc[] fieldDocs = new FieldDoc[in.readVInt()]; for (int i = 0; i < fieldDocs.length; i++) { fieldDocs[i] = readFieldDoc(in); } return new TopFieldDocs(totalHits, fieldDocs, fields, maxScore); } else if (type == 2) { int totalHits = in.readVInt(); float maxScore = in.readFloat(); String field = in.readString(); SortField[] fields = new SortField[in.readVInt()]; for (int i = 0; i < fields.length; i++) { fields[i] = readSortField(in); } int size = in.readVInt(); Object[] collapseValues = new Object[size]; FieldDoc[] fieldDocs = new FieldDoc[size]; for (int i = 0; i < fieldDocs.length; i++) { fieldDocs[i] = readFieldDoc(in); collapseValues[i] = readSortValue(in); } return new 
CollapseTopFieldDocs(field, totalHits, fieldDocs, fields, collapseValues, maxScore); } else { throw new IllegalStateException("Unknown type " + type); } } public static FieldDoc readFieldDoc(StreamInput in) throws IOException { Comparable[] cFields = new Comparable[in.readVInt()]; for (int j = 0; j < cFields.length; j++) { byte type = in.readByte(); if (type == 0) { cFields[j] = null; } else if (type == 1) { cFields[j] = in.readString(); } else if (type == 2) { cFields[j] = in.readInt(); } else if (type == 3) { cFields[j] = in.readLong(); } else if (type == 4) { cFields[j] = in.readFloat(); } else if (type == 5) { cFields[j] = in.readDouble(); } else if (type == 6) { cFields[j] = in.readByte(); } else if (type == 7) { cFields[j] = in.readShort(); } else if (type == 8) { cFields[j] = in.readBoolean(); } else if (type == 9) { cFields[j] = in.readBytesRef(); } else { throw new IOException("Can't match type [" + type + "]"); } } return new FieldDoc(in.readVInt(), in.readFloat(), cFields); } private static Comparable readSortValue(StreamInput in) throws IOException { byte type = in.readByte(); if (type == 0) { return null; } else if (type == 1) { return in.readString(); } else if (type == 2) { return in.readInt(); } else if (type == 3) { return in.readLong(); } else if (type == 4) { return in.readFloat(); } else if (type == 5) { return in.readDouble(); } else if (type == 6) { return in.readByte(); } else if (type == 7) { return in.readShort(); } else if (type == 8) { return in.readBoolean(); } else if (type == 9) { return in.readBytesRef(); } else { throw new IOException("Can't match type [" + type + "]"); } } public static ScoreDoc readScoreDoc(StreamInput in) throws IOException { return new ScoreDoc(in.readVInt(), in.readFloat()); } private static final Class<?> GEO_DISTANCE_SORT_TYPE_CLASS = LatLonDocValuesField.newDistanceSort("some_geo_field", 0, 0).getClass(); public static void writeTopDocs(StreamOutput out, TopDocs topDocs) throws IOException { if (topDocs 
instanceof CollapseTopFieldDocs) { out.writeByte((byte) 2); CollapseTopFieldDocs collapseDocs = (CollapseTopFieldDocs) topDocs; out.writeVInt(topDocs.totalHits); out.writeFloat(topDocs.getMaxScore()); out.writeString(collapseDocs.field); out.writeVInt(collapseDocs.fields.length); for (SortField sortField : collapseDocs.fields) { writeSortField(out, sortField); } out.writeVInt(topDocs.scoreDocs.length); for (int i = 0; i < topDocs.scoreDocs.length; i++) { ScoreDoc doc = collapseDocs.scoreDocs[i]; writeFieldDoc(out, (FieldDoc) doc); writeSortValue(out, collapseDocs.collapseValues[i]); } } else if (topDocs instanceof TopFieldDocs) { out.writeByte((byte) 1); TopFieldDocs topFieldDocs = (TopFieldDocs) topDocs; out.writeVInt(topDocs.totalHits); out.writeFloat(topDocs.getMaxScore()); out.writeVInt(topFieldDocs.fields.length); for (SortField sortField : topFieldDocs.fields) { writeSortField(out, sortField); } out.writeVInt(topDocs.scoreDocs.length); for (ScoreDoc doc : topFieldDocs.scoreDocs) { writeFieldDoc(out, (FieldDoc) doc); } } else { out.writeByte((byte) 0); out.writeVInt(topDocs.totalHits); out.writeFloat(topDocs.getMaxScore()); out.writeVInt(topDocs.scoreDocs.length); for (ScoreDoc doc : topDocs.scoreDocs) { writeScoreDoc(out, doc); } } } private static void writeMissingValue(StreamOutput out, Object missingValue) throws IOException { if (missingValue == SortField.STRING_FIRST) { out.writeByte((byte) 1); } else if (missingValue == SortField.STRING_LAST) { out.writeByte((byte) 2); } else { out.writeByte((byte) 0); out.writeGenericValue(missingValue); } } private static Object readMissingValue(StreamInput in) throws IOException { final byte id = in.readByte(); switch (id) { case 0: return in.readGenericValue(); case 1: return SortField.STRING_FIRST; case 2: return SortField.STRING_LAST; default: throw new IOException("Unknown missing value id: " + id); } } private static void writeSortValue(StreamOutput out, Object field) throws IOException { if (field == null) { 
out.writeByte((byte) 0); } else { Class type = field.getClass(); if (type == String.class) { out.writeByte((byte) 1); out.writeString((String) field); } else if (type == Integer.class) { out.writeByte((byte) 2); out.writeInt((Integer) field); } else if (type == Long.class) { out.writeByte((byte) 3); out.writeLong((Long) field); } else if (type == Float.class) { out.writeByte((byte) 4); out.writeFloat((Float) field); } else if (type == Double.class) { out.writeByte((byte) 5); out.writeDouble((Double) field); } else if (type == Byte.class) { out.writeByte((byte) 6); out.writeByte((Byte) field); } else if (type == Short.class) { out.writeByte((byte) 7); out.writeShort((Short) field); } else if (type == Boolean.class) { out.writeByte((byte) 8); out.writeBoolean((Boolean) field); } else if (type == BytesRef.class) { out.writeByte((byte) 9); out.writeBytesRef((BytesRef) field); } else { throw new IOException("Can't handle sort field value of type [" + type + "]"); } } } public static void writeFieldDoc(StreamOutput out, FieldDoc fieldDoc) throws IOException { out.writeVInt(fieldDoc.fields.length); for (Object field : fieldDoc.fields) { writeSortValue(out, field); } out.writeVInt(fieldDoc.doc); out.writeFloat(fieldDoc.score); } public static void writeScoreDoc(StreamOutput out, ScoreDoc scoreDoc) throws IOException { if (!scoreDoc.getClass().equals(ScoreDoc.class)) { throw new IllegalArgumentException("This method can only be used to serialize a ScoreDoc, not a " + scoreDoc.getClass()); } out.writeVInt(scoreDoc.doc); out.writeFloat(scoreDoc.score); } // LUCENE 4 UPGRADE: We might want to maintain our own ordinal, instead of Lucene's ordinal public static SortField.Type readSortType(StreamInput in) throws IOException { return SortField.Type.values()[in.readVInt()]; } public static SortField readSortField(StreamInput in) throws IOException { String field = null; if (in.readBoolean()) { field = in.readString(); } SortField.Type sortType = readSortType(in); Object 
missingValue = readMissingValue(in); boolean reverse = in.readBoolean(); SortField sortField = new SortField(field, sortType, reverse); if (missingValue != null) { sortField.setMissingValue(missingValue); } return sortField; } public static void writeSortType(StreamOutput out, SortField.Type sortType) throws IOException { out.writeVInt(sortType.ordinal()); } public static void writeSortField(StreamOutput out, SortField sortField) throws IOException { if (sortField.getClass() == GEO_DISTANCE_SORT_TYPE_CLASS) { // for geo sorting, we replace the SortField with a SortField that assumes a double field. // this works since the SortField is only used for merging top docs SortField newSortField = new SortField(sortField.getField(), SortField.Type.DOUBLE); newSortField.setMissingValue(sortField.getMissingValue()); sortField = newSortField; } if (sortField.getClass() != SortField.class) { throw new IllegalArgumentException("Cannot serialize SortField impl [" + sortField + "]"); } if (sortField.getField() == null) { out.writeBoolean(false); } else { out.writeBoolean(true); out.writeString(sortField.getField()); } if (sortField.getComparatorSource() != null) { IndexFieldData.XFieldComparatorSource comparatorSource = (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource(); writeSortType(out, comparatorSource.reducedType()); writeMissingValue(out, comparatorSource.missingValue(sortField.getReverse())); } else { writeSortType(out, sortField.getType()); writeMissingValue(out, sortField.getMissingValue()); } out.writeBoolean(sortField.getReverse()); } public static Explanation readExplanation(StreamInput in) throws IOException { boolean match = in.readBoolean(); String description = in.readString(); final Explanation[] subExplanations = new Explanation[in.readVInt()]; for (int i = 0; i < subExplanations.length; ++i) { subExplanations[i] = readExplanation(in); } if (match) { return Explanation.match(in.readFloat(), description, subExplanations); } else { return 
Explanation.noMatch(description, subExplanations); } } public static void writeExplanation(StreamOutput out, Explanation explanation) throws IOException { out.writeBoolean(explanation.isMatch()); out.writeString(explanation.getDescription()); Explanation[] subExplanations = explanation.getDetails(); out.writeVInt(subExplanations.length); for (Explanation subExp : subExplanations) { writeExplanation(out, subExp); } if (explanation.isMatch()) { out.writeFloat(explanation.getValue()); } } /** * This exception is thrown when {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector} * reaches early termination * */ public static final class EarlyTerminationException extends ElasticsearchException { public EarlyTerminationException(String msg) { super(msg); } public EarlyTerminationException(StreamInput in) throws IOException{ super(in); } } /** * A collector that terminates early by throwing {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminationException} * when count of matched documents has reached <code>maxCountHits</code> */ public static final class EarlyTerminatingCollector extends SimpleCollector { private final int maxCountHits; private final Collector delegate; private int count = 0; private LeafCollector leafCollector; EarlyTerminatingCollector(final Collector delegate, int maxCountHits) { this.maxCountHits = maxCountHits; this.delegate = Objects.requireNonNull(delegate); } public int count() { return count; } public boolean exists() { return count > 0; } @Override public void setScorer(Scorer scorer) throws IOException { leafCollector.setScorer(scorer); } @Override public void collect(int doc) throws IOException { leafCollector.collect(doc); if (++count >= maxCountHits) { throw new EarlyTerminationException("early termination [CountBased]"); } } @Override public void doSetNextReader(LeafReaderContext atomicReaderContext) throws IOException { leafCollector = delegate.getLeafCollector(atomicReaderContext); } @Override public boolean 
needsScores() { return delegate.needsScores(); } } private Lucene() { } public static final boolean indexExists(final Directory directory) throws IOException { return DirectoryReader.indexExists(directory); } /** * Wait for an index to exist for up to {@code timeLimitMillis}. Returns * true if the index eventually exists, false if not. * * Will retry the directory every second for at least {@code timeLimitMillis} */ public static final boolean waitForIndex(final Directory directory, final long timeLimitMillis) throws IOException { final long DELAY = 1000; long waited = 0; try { while (true) { if (waited >= timeLimitMillis) { break; } if (indexExists(directory)) { return true; } Thread.sleep(DELAY); waited += DELAY; } } catch (InterruptedException e) { Thread.currentThread().interrupt(); return false; } // one more try after all retries return indexExists(directory); } /** * Returns <tt>true</tt> iff the given exception or * one of it's causes is an instance of {@link CorruptIndexException}, * {@link IndexFormatTooOldException}, or {@link IndexFormatTooNewException} otherwise <tt>false</tt>. */ public static boolean isCorruptionException(Throwable t) { return ExceptionsHelper.unwrapCorruption(t) != null; } /** * Parses the version string lenient and returns the default value if the given string is null or empty */ public static Version parseVersionLenient(String toParse, Version defaultValue) { return LenientParser.parse(toParse, defaultValue); } @SuppressForbidden(reason = "Version#parseLeniently() used in a central place") private static final class LenientParser { public static Version parse(String toParse, Version defaultValue) { if (Strings.hasLength(toParse)) { try { return Version.parseLeniently(toParse); } catch (ParseException e) { // pass to default } } return defaultValue; } } /** * Return a Scorer that throws an ElasticsearchIllegalStateException * on all operations with the given message. 
*/ public static Scorer illegalScorer(final String message) { return new Scorer(null) { @Override public float score() throws IOException { throw new IllegalStateException(message); } @Override public int freq() throws IOException { throw new IllegalStateException(message); } @Override public int docID() { throw new IllegalStateException(message); } @Override public DocIdSetIterator iterator() { throw new IllegalStateException(message); } }; } private static final class CommitPoint extends IndexCommit { private String segmentsFileName; private final Collection<String> files; private final Directory dir; private final long generation; private final Map<String,String> userData; private final int segmentCount; private CommitPoint(SegmentInfos infos, Directory dir) throws IOException { segmentsFileName = infos.getSegmentsFileName(); this.dir = dir; userData = infos.getUserData(); files = Collections.unmodifiableCollection(infos.files(true)); generation = infos.getGeneration(); segmentCount = infos.size(); } @Override public String toString() { return "DirectoryReader.ReaderCommit(" + segmentsFileName + ")"; } @Override public int getSegmentCount() { return segmentCount; } @Override public String getSegmentsFileName() { return segmentsFileName; } @Override public Collection<String> getFileNames() { return files; } @Override public Directory getDirectory() { return dir; } @Override public long getGeneration() { return generation; } @Override public boolean isDeleted() { return false; } @Override public Map<String,String> getUserData() { return userData; } @Override public void delete() { throw new UnsupportedOperationException("This IndexCommit does not support deletions"); } } /** * Given a {@link Scorer}, return a {@link Bits} instance that will match * all documents contained in the set. Note that the returned {@link Bits} * instance MUST be consumed in order. 
*/ public static Bits asSequentialAccessBits(final int maxDoc, @Nullable Scorer scorer) throws IOException { if (scorer == null) { return new Bits.MatchNoBits(maxDoc); } final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator(); final DocIdSetIterator iterator; if (twoPhase == null) { iterator = scorer.iterator(); } else { iterator = twoPhase.approximation(); } return new Bits() { int previous = -1; boolean previousMatched = false; @Override public boolean get(int index) { if (index < 0 || index >= maxDoc) { throw new IndexOutOfBoundsException(index + " is out of bounds: [" + 0 + "-" + maxDoc + "["); } if (index < previous) { throw new IllegalArgumentException("This Bits instance can only be consumed in order. " + "Got called on [" + index + "] while previously called on [" + previous + "]"); } if (index == previous) { // we cache whether it matched because it is illegal to call // twoPhase.matches() twice return previousMatched; } previous = index; int doc = iterator.docID(); if (doc < index) { try { doc = iterator.advance(index); } catch (IOException e) { throw new IllegalStateException("Cannot advance iterator", e); } } if (index == doc) { try { return previousMatched = twoPhase == null || twoPhase.matches(); } catch (IOException e) { throw new IllegalStateException("Cannot validate match", e); } } return previousMatched = false; } @Override public int length() { return maxDoc; } }; } }
package plotting;

import java.awt.BasicStroke;
import java.awt.Color;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.LegendItemCollection;
import org.jfree.chart.axis.AxisLocation;
import org.jfree.chart.plot.CategoryPlot;
import org.jfree.chart.plot.DatasetRenderingOrder;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.renderer.category.CategoryItemRenderer;
import org.jfree.chart.renderer.category.LevelRenderer;
import org.jfree.data.category.DefaultCategoryDataset;
import org.jfree.ui.RectangleEdge;

/**
 * Reads State2Vec / Cluster1 result files, groups runs that share every parameter
 * except clusterK, renders one bar chart (average with min/max level markers) per
 * group under {@code Plots/clusterK/S2V1/}, and prints the parameters of the run
 * with the highest total average.
 */
public class MakeS2V1Plots {

    public static void main(String[] args) throws FileNotFoundException, IOException {
        // Collect every State2Vec / Cluster1 result file from the results directory.
        List<S2V1> S2V1List = new ArrayList<S2V1>();
        String directory = "/media/milan/Data/Thesis/Results/Approaches";
        File dir = new File(directory);
        File[] directoryListing = dir.listFiles();
        if (directoryListing != null) {
            for (File child : directoryListing) {
                String fileName = child.getName();
                if (fileName.contains("State2Vec") && fileName.contains("Cluster1")) {
                    S2V1 toAdd = new S2V1();
                    toAdd.parse(child);
                    S2V1List.add(toAdd);
                }
            }
        }

        // Group runs that represent the same parameter combination. NOTE(review):
        // this relies on S2V1#equals/hashCode treating runs that differ only in
        // clusterK as equal -- confirm against the S2V1 class.
        Map<S2V1, List<S2V1>> combos = new HashMap<S2V1, List<S2V1>>();
        for (S2V1 toCheck : S2V1List) {
            if (!combos.containsKey(toCheck)) {
                combos.put(toCheck, new ArrayList<S2V1>());
            }
            combos.get(toCheck).add(toCheck);
        }

        // ===PLOTTING===
        S2V1 best = null;
        Iterator<List<S2V1>> iter = combos.values().iterator();
        int chartIndex = 0; // used to number the output files
        while (iter.hasNext()) {
            List<S2V1> listToPlot = iter.next();

            final DefaultCategoryDataset averageDS = new DefaultCategoryDataset();
            final DefaultCategoryDataset minDS = new DefaultCategoryDataset();
            final DefaultCategoryDataset maxDS = new DefaultCategoryDataset();
            // Single shared column key: every bar sits in the same category; the
            // row key (clusterK) is what distinguishes the bars.
            final String filler = "";

            List<Double> parameters = new ArrayList<Double>();
            Map<Double, List<Double>> link = new HashMap<Double, List<Double>>();
            int windowSize = 0;
            double learningRate = 0.0;
            int minWordFreq = 0;
            int vectorLength = 0;
            for (S2V1 toPlot : listToPlot) {
                // All runs in this group share these settings; remember them for the title.
                windowSize = toPlot.windowSize;
                learningRate = toPlot.learningRate;
                minWordFreq = toPlot.minWordFreq;
                vectorLength = toPlot.vectorLength;

                double average = toPlot.getTotalAverage();
                double parameter = toPlot.k; // x-axis: clusterK
                double min = toPlot.getMin();
                double max = toPlot.getMax();
                parameters.add(parameter);
                // NOTE(review): a duplicate k within one group silently overwrites
                // the earlier entry here, matching the original behavior.
                link.put(parameter, new ArrayList<Double>());
                link.get(parameter).add(average);
                link.get(parameter).add(min);
                link.get(parameter).add(max);
                // Track the overall best run across all groups by total average.
                if (best == null || best.getTotalAverage() < average) {
                    best = toPlot;
                }
            }

            // Feed the datasets in ascending clusterK order.
            Collections.sort(parameters);
            for (int j = 0; j < parameters.size(); j++) {
                List<Double> toAdds = link.get(parameters.get(j));
                averageDS.addValue(toAdds.get(0), parameters.get(j), filler);
                minDS.addValue(toAdds.get(1), parameters.get(j), filler);
                maxDS.addValue(toAdds.get(2), parameters.get(j), filler);
            }

            JFreeChart barChart = ChartFactory.createBarChart(
                    "vectorlength = " + vectorLength + "\n"
                            + "windowSize = " + windowSize + "\n"
                            + "learningRate = " + learningRate + "\n"
                            + "minWordFreq = " + minWordFreq,
                    "ClusterK",
                    "Matching Percentage",
                    averageDS, PlotOrientation.VERTICAL,
                    true, true, false);

            CategoryPlot plot = barChart.getCategoryPlot();
            plot.setRangeAxisLocation(AxisLocation.BOTTOM_OR_LEFT);
            barChart.getLegend().setPosition(RectangleEdge.BOTTOM);

            // Overlay min and max as thick black level markers on top of the average bars.
            CategoryItemRenderer renderer2 = new LevelRenderer();
            plot.setDataset(1, minDS);
            plot.setRenderer(1, renderer2);
            plot.setDatasetRenderingOrder(DatasetRenderingOrder.FORWARD);
            CategoryItemRenderer renderer3 = new LevelRenderer();
            plot.setDataset(2, maxDS);
            plot.setRenderer(2, renderer3);
            plot.setDatasetRenderingOrder(DatasetRenderingOrder.FORWARD);
            for (int series = 0; series < 3; series++) {
                renderer2.setSeriesPaint(series, Color.BLACK);
                renderer2.setSeriesStroke(series, new BasicStroke(5.0f));
                renderer3.setSeriesPaint(series, Color.BLACK);
                renderer3.setSeriesStroke(series, new BasicStroke(5.0f));
            }

            // Keep only the first three legend entries (one per dataset).
            LegendItemCollection legendItemsOld = plot.getLegendItems();
            final LegendItemCollection legendItemsNew = new LegendItemCollection();
            for (int i = 0; i < legendItemsOld.getItemCount(); i++) {
                if (i < 3) {
                    legendItemsNew.add(legendItemsOld.get(i));
                }
            }
            plot.setFixedLegendItems(legendItemsNew);

            int width = 640;  /* Width of the image */
            int height = 480; /* Height of the image */
            File chartFile = new File("Plots/clusterK/S2V1/S2V1 - " + chartIndex + ".jpeg");
            chartFile.getParentFile().mkdirs();
            ChartUtilities.saveChartAsJPEG(chartFile, barChart, width, height);
            chartIndex++;
        }

        // BUG FIX: the original dereferenced 'best' unconditionally and threw a
        // NullPointerException when no matching result files were found.
        if (best == null) {
            System.out.println("No State2Vec/Cluster1 result files found in " + directory);
            return;
        }
        System.out.println(best.getTotalAverage());
        System.out.println(best.getMax());
        System.out.println("vectorlength: " + best.vectorLength);
        System.out.println("windowsize: " + best.windowSize);
        System.out.println("learningrate: " + best.learningRate);
        System.out.println("minWorFreq: " + best.minWordFreq);
        System.out.println("clusterK: " + best.k);
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.search.spell.*; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.automaton.LevenshteinAutomata; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.FastCharArrayReader; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.analysis.CustomAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import java.io.IOException; import java.util.Comparator; import java.util.Locale; public final class SuggestUtils { public static final Comparator<SuggestWord> LUCENE_FREQUENCY = new SuggestWordFrequencyComparator(); public static final Comparator<SuggestWord> SCORE_COMPARATOR = SuggestWordQueue.DEFAULT_COMPARATOR; private SuggestUtils() { // utils!! } public static DirectSpellChecker getDirectSpellChecker(DirectSpellcheckerSettings suggestion) { DirectSpellChecker directSpellChecker = new DirectSpellChecker(); directSpellChecker.setAccuracy(suggestion.accuracy()); Comparator<SuggestWord> comparator; switch (suggestion.sort()) { case SCORE: comparator = SCORE_COMPARATOR; break; case FREQUENCY: comparator = LUCENE_FREQUENCY; break; default: throw new IllegalArgumentException("Illegal suggest sort: " + suggestion.sort()); } directSpellChecker.setComparator(comparator); directSpellChecker.setDistance(suggestion.stringDistance()); directSpellChecker.setMaxEdits(suggestion.maxEdits()); directSpellChecker.setMaxInspections(suggestion.maxInspections()); directSpellChecker.setMaxQueryFrequency(suggestion.maxTermFreq()); directSpellChecker.setMinPrefix(suggestion.prefixLength()); directSpellChecker.setMinQueryLength(suggestion.minWordLength()); directSpellChecker.setThresholdFrequency(suggestion.minDocFreq()); directSpellChecker.setLowerCaseTerms(false); return directSpellChecker; } public static BytesRef join(BytesRef separator, BytesRefBuilder result, BytesRef... 
toJoin) { result.clear(); for (int i = 0; i < toJoin.length - 1; i++) { result.append(toJoin[i]); result.append(separator); } result.append(toJoin[toJoin.length-1]); return result.get(); } public static abstract class TokenConsumer { protected CharTermAttribute charTermAttr; protected PositionIncrementAttribute posIncAttr; protected OffsetAttribute offsetAttr; public void reset(TokenStream stream) { charTermAttr = stream.addAttribute(CharTermAttribute.class); posIncAttr = stream.addAttribute(PositionIncrementAttribute.class); offsetAttr = stream.addAttribute(OffsetAttribute.class); } protected BytesRef fillBytesRef(BytesRefBuilder spare) { spare.copyChars(charTermAttr); return spare.get(); } public abstract void nextToken() throws IOException; public void end() {} } public static int analyze(Analyzer analyzer, BytesRef toAnalyze, String field, TokenConsumer consumer, CharsRefBuilder spare) throws IOException { spare.copyUTF8Bytes(toAnalyze); return analyze(analyzer, spare.get(), field, consumer); } public static int analyze(Analyzer analyzer, CharsRef toAnalyze, String field, TokenConsumer consumer) throws IOException { try (TokenStream ts = analyzer.tokenStream( field, new FastCharArrayReader(toAnalyze.chars, toAnalyze.offset, toAnalyze.length))) { return analyze(ts, consumer); } } /** NOTE: this method closes the TokenStream, even on exception, which is awkward * because really the caller who called {@link Analyzer#tokenStream} should close it, * but when trying that there are recursion issues when we try to use the same * TokenStrem twice in the same recursion... 
*/ public static int analyze(TokenStream stream, TokenConsumer consumer) throws IOException { int numTokens = 0; boolean success = false; try { stream.reset(); consumer.reset(stream); while (stream.incrementToken()) { consumer.nextToken(); numTokens++; } consumer.end(); } finally { if (success) { stream.close(); } else { IOUtils.closeWhileHandlingException(stream); } } return numTokens; } public static SuggestMode resolveSuggestMode(String suggestMode) { suggestMode = suggestMode.toLowerCase(Locale.US); if ("missing".equals(suggestMode)) { return SuggestMode.SUGGEST_WHEN_NOT_IN_INDEX; } else if ("popular".equals(suggestMode)) { return SuggestMode.SUGGEST_MORE_POPULAR; } else if ("always".equals(suggestMode)) { return SuggestMode.SUGGEST_ALWAYS; } else { throw new IllegalArgumentException("Illegal suggest mode " + suggestMode); } } public static Suggest.Suggestion.Sort resolveSort(String sortVal) { if ("score".equals(sortVal)) { return Suggest.Suggestion.Sort.SCORE; } else if ("frequency".equals(sortVal)) { return Suggest.Suggestion.Sort.FREQUENCY; } else { throw new IllegalArgumentException("Illegal suggest sort " + sortVal); } } public static StringDistance resolveDistance(String distanceVal) { if ("internal".equals(distanceVal)) { return DirectSpellChecker.INTERNAL_LEVENSHTEIN; } else if ("damerau_levenshtein".equals(distanceVal) || "damerauLevenshtein".equals(distanceVal)) { return new LuceneLevenshteinDistance(); } else if ("levenstein".equals(distanceVal)) { return new LevensteinDistance(); //TODO Jaro and Winkler are 2 people - so apply same naming logic as damerau_levenshtein } else if ("jarowinkler".equals(distanceVal)) { return new JaroWinklerDistance(); } else if ("ngram".equals(distanceVal)) { return new NGramDistance(); } else { throw new IllegalArgumentException("Illegal distance option " + distanceVal); } } public static class Fields { public static final ParseField STRING_DISTANCE = new ParseField("string_distance"); public static final ParseField 
SUGGEST_MODE = new ParseField("suggest_mode"); public static final ParseField MAX_EDITS = new ParseField("max_edits"); public static final ParseField MAX_INSPECTIONS = new ParseField("max_inspections"); // TODO some of these constants are the same as MLT constants and // could be moved to a shared class for maintaining consistency across // the platform public static final ParseField MAX_TERM_FREQ = new ParseField("max_term_freq"); public static final ParseField PREFIX_LENGTH = new ParseField("prefix_length", "prefix_len"); public static final ParseField MIN_WORD_LENGTH = new ParseField("min_word_length", "min_word_len"); public static final ParseField MIN_DOC_FREQ = new ParseField("min_doc_freq"); public static final ParseField SHARD_SIZE = new ParseField("shard_size"); public static final ParseField EXACT_MATCH = new ParseField("exact_match"); } public static boolean parseDirectSpellcheckerSettings(XContentParser parser, String fieldName, DirectSpellcheckerSettings suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException { if ("accuracy".equals(fieldName)) { suggestion.accuracy(parser.floatValue()); } else if (parseFieldMatcher.match(fieldName, Fields.SUGGEST_MODE)) { suggestion.suggestMode(SuggestUtils.resolveSuggestMode(parser.text())); } else if ("sort".equals(fieldName)) { suggestion.sort(SuggestUtils.resolveSort(parser.text())); } else if (parseFieldMatcher.match(fieldName, Fields.STRING_DISTANCE)) { suggestion.stringDistance(SuggestUtils.resolveDistance(parser.text())); } else if (parseFieldMatcher.match(fieldName, Fields.MAX_EDITS)) { suggestion.maxEdits(parser.intValue()); if (suggestion.maxEdits() < 1 || suggestion.maxEdits() > LevenshteinAutomata.MAXIMUM_SUPPORTED_DISTANCE) { throw new IllegalArgumentException("Illegal max_edits value " + suggestion.maxEdits()); } } else if (parseFieldMatcher.match(fieldName, Fields.MAX_INSPECTIONS)) { suggestion.maxInspections(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, 
Fields.MAX_TERM_FREQ)) { suggestion.maxTermFreq(parser.floatValue()); } else if (parseFieldMatcher.match(fieldName, Fields.PREFIX_LENGTH)) { suggestion.prefixLength(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.MIN_WORD_LENGTH)) { suggestion.minQueryLength(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.MIN_DOC_FREQ)) { suggestion.minDocFreq(parser.floatValue()); } else if (parseFieldMatcher.match(fieldName, Fields.EXACT_MATCH)) { suggestion.exactMatch(parser.booleanValue()); } else { return false; } return true; } public static boolean parseSuggestContext(XContentParser parser, MapperService mapperService, String fieldName, SuggestionSearchContext.SuggestionContext suggestion, ParseFieldMatcher parseFieldMatcher) throws IOException { if ("analyzer".equals(fieldName)) { String analyzerName = parser.text(); Analyzer analyzer = mapperService.analysisService().analyzer(analyzerName); if (analyzer == null) { throw new IllegalArgumentException("Analyzer [" + analyzerName + "] doesn't exists"); } suggestion.setAnalyzer(analyzer); } else if ("field".equals(fieldName)) { suggestion.setField(parser.text()); } else if ("size".equals(fieldName)) { suggestion.setSize(parser.intValue()); } else if (parseFieldMatcher.match(fieldName, Fields.SHARD_SIZE)) { suggestion.setShardSize(parser.intValue()); } else { return false; } return true; } public static void verifySuggestion(MapperService mapperService, BytesRef globalText, SuggestionContext suggestion) { // Verify options and set defaults if (suggestion.getField() == null) { throw new IllegalArgumentException("The required field option is missing"); } if (suggestion.getText() == null) { if (globalText == null) { throw new IllegalArgumentException("The required text option is missing"); } suggestion.setText(globalText); } if (suggestion.getAnalyzer() == null) { suggestion.setAnalyzer(mapperService.searchAnalyzer()); } if (suggestion.getShardSize() == -1) { 
suggestion.setShardSize(Math.max(suggestion.getSize(), 5)); } } public static ShingleTokenFilterFactory.Factory getShingleFilterFactory(Analyzer analyzer) { if (analyzer instanceof NamedAnalyzer) { analyzer = ((NamedAnalyzer)analyzer).analyzer(); } if (analyzer instanceof CustomAnalyzer) { final CustomAnalyzer a = (CustomAnalyzer) analyzer; final TokenFilterFactory[] tokenFilters = a.tokenFilters(); for (TokenFilterFactory tokenFilterFactory : tokenFilters) { if (tokenFilterFactory instanceof ShingleTokenFilterFactory) { return ((ShingleTokenFilterFactory)tokenFilterFactory).getInnerFactory(); } else if (tokenFilterFactory instanceof ShingleTokenFilterFactory.Factory) { return (ShingleTokenFilterFactory.Factory) tokenFilterFactory; } } } return null; } }
/* Copyright 2013 Semantic Discovery, Inc. (www.semanticdiscovery.com) This file is part of the Semantic Discovery Toolkit. The Semantic Discovery Toolkit is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The Semantic Discovery Toolkit is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with The Semantic Discovery Toolkit. If not, see <http://www.gnu.org/licenses/>. */ package org.sd.atn; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.sd.util.tree.NodePath; import org.sd.util.tree.Tree; import org.sd.xml.DomElement; import org.sd.xml.XmlLite; /** * Container for a single parse as accessed through a GenericParseHelper. 
* <p>
* @author Spence Koehler
*/
public class GenericParse {

  private AtnParse atnParse;
  private ParseInterpretation interp;
  private GenericParseHelper genericParseHelper;

  // Lazily-built caches: identifier -> concatenated parse text, xpath -> interp text.
  private Map<String, String> id2text;
  private Map<String, String> xpath2text;

  // Lazily-computed values; null means "not computed yet".
  private String _ruleId;
  private String _parsedText;
  private String _inputText;
  private String _remainingText;

  GenericParse(ParseInterpretation interp, GenericParseHelper genericParseHelper) {
    this.atnParse = interp.getSourceParse();
    this.interp = interp;
    this.genericParseHelper = genericParseHelper;
    this.id2text = null;
    this.xpath2text = null;
    this._ruleId = null;
    this._parsedText = null;
    this._inputText = null;
    this._remainingText = null;
  }

  public AtnParse getAtnParse() {
    return atnParse;
  }

  public ParseInterpretation getInterp() {
    return interp;
  }

  /** Get the id of the start rule for this parse (never null; empty when unset). */
  public String getRuleId() {
    if (_ruleId == null) {
      _ruleId = atnParse.getStartRule().getRuleId();
      if (_ruleId == null) _ruleId = "";
    }
    return _ruleId;
  }

  /** Get the text covered by this parse (cached). */
  public String getParsedText() {
    if (_parsedText == null) {
      _parsedText = atnParse.getParsedText();
    }
    return _parsedText;
  }

  /** Get the input text remaining after this parse (cached). */
  public String getRemainingText() {
    if (_remainingText == null) {
      _remainingText = atnParse.getRemainingText();
    }
    return _remainingText;
  }

  /** True when there is non-empty text remaining after this parse. */
  public boolean hasRemainingText() {
    final String remainingText = getRemainingText();
    return remainingText != null && !"".equals(remainingText);
  }

  /** @return [start, end) of parsed text. */
  public int[] getParsedTextPosition() {
    return new int[] {
      atnParse.getStartIndex(),
      atnParse.getEndIndex(),
    };
  }

  /**
   * Get the full input text submitted for parsing.
   */
  public String getInputText() {
    // CONSISTENCY FIX: _inputText was declared and initialized but never used;
    // cache here like the other lazy accessors (getRuleId, getParsedText, ...).
    if (_inputText == null) {
      _inputText = atnParse.getFullText();
    }
    return _inputText;
  }

  /**
   * Find the parse tree nodes indicated by the identifier.
   * <p>
   * Note that the identifier is turned into a nodePath with "**."
   * automatically prepended.
   *
   * @return the found nodes or null if no such nodes exist.
   */
  public List<Tree<String>> findParseNodes(String constituentIdentifier) {
    final NodePath<String> nodePath = genericParseHelper.getNodePath(constituentIdentifier);
    final List<Tree<String>> nodes = nodePath.apply(atnParse.getParseTree());
    return nodes;
  }

  /**
   * Get the parsed text indicated by the identifier.
   * <p>
   * Note that the identifier is turned into a nodePath with "**."
   * automatically prepended.
   * <p>
   * Text from all identified nodes is concatenated with a single space
   * between consecutive nodes.
   * <p>
   * If no text is selected, the result will be the empty string.
   */
  public String getParsedText(String constituentIdentifier) {
    String result = null;

    if (id2text != null) {
      result = id2text.get(constituentIdentifier);
    }

    if (result == null) {
      result = buildParsedText(constituentIdentifier);
      if (id2text == null) id2text = new HashMap<String, String>();
      id2text.put(constituentIdentifier, result);
    }

    return result;
  }

  // Concatenates the leaf text of every node matched by the identifier, space-separated.
  private final String buildParsedText(String constituentIdentifier) {
    final StringBuilder result = new StringBuilder();

    final List<Tree<String>> nodes = findParseNodes(constituentIdentifier);
    if (nodes != null && nodes.size() > 0) {
      for (Tree<String> node : nodes) {
        if (result.length() > 0) result.append(' ');
        result.append(node.getLeafText());
      }
    }

    return result.toString();
  }

  /**
   * Find the interp tree nodes indicated by the xpath.
   * <p>
   * Note that the xpath is applied to the xml interpretation tree.
   *
   * @return the found nodes or null if no such nodes exist.
   */
  public List<Tree<XmlLite.Data>> findInterpNodes(String xpath) {
    final Tree<XmlLite.Data> interpTree = interp.getInterpTree();
    final List<Tree<XmlLite.Data>> nodes = genericParseHelper.getNodes(xpath, interpTree);
    return nodes;
  }

  /**
   * Get the parsed text indicated by the xpath.
   * <p>
   * Note that the xpath is applied to the xml interpretation tree.
   * <p>
   * Text from all identified nodes is concatenated with delimiters
   * included between consecutive nodes. Non-white delimiters immediately
   * following the last node are added iff any non-white delimiter
   * was present between consecutive nodes.
   * <p>
   * If no text is selected, the result will be the empty string.
   */
  public String getInterpText(String xpath) {
    String result = null;

    if (xpath2text != null) {
      result = xpath2text.get(xpath);
    }

    if (result == null) {
      result = buildInterpText(xpath);
      if (xpath2text == null) xpath2text = new HashMap<String, String>();
      xpath2text.put(xpath, result);
    }

    return result;
  }

  // Collects interp text for every node matched by the xpath.
  private final String buildInterpText(String xpath) {
    final StringBuilder result = new StringBuilder();

    final List<Tree<XmlLite.Data>> nodes = findInterpNodes(xpath);
    if (nodes != null && nodes.size() > 0) {
      for (Tree<XmlLite.Data> node : nodes) {
        ParseInterpretationUtil.getInterpText(result, node);
      }
    }

    return result.toString();
  }

  /**
   * Get the first interpretation attribute (named attName) from the given
   * named node (or from any node if nodeName == null).
   */
  public String getInterpAttribute(String nodeName, String attName) {
    return ParseInterpretationUtil.getInterpAttribute(interp.getInterpTree(), nodeName, attName);
  }

  /**
   * Get all interpretation attributes (named attName) from the given
   * named node(s) (or from any node if nodeName == null).
   */
  public List<String> getInterpAttributes(String nodeName, String attName) {
    return ParseInterpretationUtil.getInterpAttributes(interp.getInterpTree(), nodeName, attName);
  }

  /**
   * Get all interpretation attributes (named attName) from the given
   * named node(s) (or from any node if nodeName == null) starting with
   * (only at or below) the interp node selected by the given xpath.
   *
   * @return the collected attributes, or null if none were found.
   */
  public List<String> getInterpAttributes(String xpath, String nodeName, String attName) {
    List<String> result = null;

    final List<Tree<XmlLite.Data>> interpNodes = findInterpNodes(xpath);
    if (interpNodes != null) {
      for (Tree<XmlLite.Data> interpNode : interpNodes) {
        final List<String> atts = ParseInterpretationUtil.getInterpAttributes(interpNode, nodeName, attName);
        if (atts != null && atts.size() > 0) {
          if (result == null) result = new ArrayList<String>();
          result.addAll(atts);
        }
      }
    }

    return result;
  }

  /** Render as: "parsedText" == parseTree [ruleId] plus pretty-printed interp xml. */
  @Override
  public String toString() {
    final StringBuilder result = new StringBuilder();

    result.append('"').append(getParsedText()).append("\" == ");
    final Tree<String> parseTree = atnParse.getParseTree();
    if (parseTree != null) {
      result.append(parseTree.toString());
    }
    result.append(" [").append(getRuleId()).append(']');

    if (interp != null) {
      final Tree<XmlLite.Data> xmlTree = interp.getInterpTree();
      if (xmlTree != null) {
        final DomElement xml = xmlTree.getData().asDomNode().asDomElement();
        if (xml != null) {
          result.append('\n');
          xml.asPrettyString(result, 2, 2);
        }
      }
    }

    return result.toString();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.runtime.aggregate; import org.apache.flink.api.common.state.ValueState; import org.apache.flink.api.common.state.ValueStateDescriptor; import org.apache.flink.configuration.Configuration; import org.apache.flink.table.dataformat.BaseRow; import org.apache.flink.table.dataformat.JoinedRow; import org.apache.flink.table.dataview.PerKeyStateDataViewStore; import org.apache.flink.table.generated.AggsHandleFunction; import org.apache.flink.table.generated.GeneratedAggsHandleFunction; import org.apache.flink.table.generated.GeneratedRecordEqualiser; import org.apache.flink.table.generated.RecordEqualiser; import org.apache.flink.table.runtime.functions.KeyedProcessFunctionWithCleanupState; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.typeutils.BaseRowTypeInfo; import org.apache.flink.util.Collector; import static org.apache.flink.table.dataformat.util.BaseRowUtil.ACCUMULATE_MSG; import static org.apache.flink.table.dataformat.util.BaseRowUtil.RETRACT_MSG; import static org.apache.flink.table.dataformat.util.BaseRowUtil.isAccumulateMsg; /** * Aggregate Function used for the groupby (without window) aggregate. 
*/
public class GroupAggFunction extends KeyedProcessFunctionWithCleanupState<BaseRow, BaseRow, BaseRow> {

	private static final long serialVersionUID = -4767158666069797704L;

	/**
	 * The code generated function used to handle aggregates.
	 */
	private final GeneratedAggsHandleFunction genAggsHandler;

	/**
	 * The code generated equaliser used to equal BaseRow.
	 */
	private final GeneratedRecordEqualiser genRecordEqualiser;

	/**
	 * The accumulator types.
	 */
	private final LogicalType[] accTypes;

	/**
	 * Used to count the number of added and retracted input records.
	 */
	private final RecordCounter recordCounter;

	/**
	 * Whether this operator will generate retraction.
	 */
	private final boolean generateRetraction;

	/**
	 * Reused output row.
	 */
	private transient JoinedRow resultRow = null;

	// function used to handle all aggregates
	private transient AggsHandleFunction function = null;

	// function used to equal BaseRow
	private transient RecordEqualiser equaliser = null;

	// stores the accumulators
	private transient ValueState<BaseRow> accState = null;

	/**
	 * Creates a {@link GroupAggFunction}.
	 *
	 * @param minRetentionTime minimal state idle retention time.
	 * @param maxRetentionTime maximal state idle retention time.
	 * @param genAggsHandler The code generated function used to handle aggregates.
	 * @param genRecordEqualiser The code generated equaliser used to equal BaseRow.
	 * @param accTypes The accumulator types.
	 * @param indexOfCountStar The index of COUNT(*) in the aggregates.
	 *                         -1 when the input doesn't contain COUNT(*), i.e. doesn't contain retraction messages.
	 *                         We make sure there is a COUNT(*) if input stream contains retraction.
	 * @param generateRetraction Whether this operator will generate retraction.
	 */
	public GroupAggFunction(
			long minRetentionTime,
			long maxRetentionTime,
			GeneratedAggsHandleFunction genAggsHandler,
			GeneratedRecordEqualiser genRecordEqualiser,
			LogicalType[] accTypes,
			int indexOfCountStar,
			boolean generateRetraction) {
		super(minRetentionTime, maxRetentionTime);
		this.genAggsHandler = genAggsHandler;
		this.genRecordEqualiser = genRecordEqualiser;
		this.accTypes = accTypes;
		this.recordCounter = RecordCounter.of(indexOfCountStar);
		this.generateRetraction = generateRetraction;
	}

	@Override
	public void open(Configuration parameters) throws Exception {
		super.open(parameters);
		// instantiate function (compiled from generated code with the user classloader)
		function = genAggsHandler.newInstance(getRuntimeContext().getUserCodeClassLoader());
		function.open(new PerKeyStateDataViewStore(getRuntimeContext()));
		// instantiate equaliser
		equaliser = genRecordEqualiser.newInstance(getRuntimeContext().getUserCodeClassLoader());

		// per-key value state holding the accumulator row
		BaseRowTypeInfo accTypeInfo = new BaseRowTypeInfo(accTypes);
		ValueStateDescriptor<BaseRow> accDesc = new ValueStateDescriptor<>("accState", accTypeInfo);
		accState = getRuntimeContext().getState(accDesc);

		initCleanupTimeState("GroupAggregateCleanupTime");

		resultRow = new JoinedRow();
	}

	@Override
	public void processElement(BaseRow input, Context ctx, Collector<BaseRow> out) throws Exception {
		long currentTime = ctx.timerService().currentProcessingTime();
		// register state-cleanup timer
		registerProcessingCleanupTimer(ctx, currentTime);

		BaseRow currentKey = ctx.getCurrentKey();
		boolean firstRow;
		BaseRow accumulators = accState.value();
		if (null == accumulators) {
			// no state for this key yet: start with fresh accumulators
			firstRow = true;
			accumulators = function.createAccumulators();
		} else {
			firstRow = false;
		}

		// set accumulators to handler first
		function.setAccumulators(accumulators);
		// get previous aggregate result (read BEFORE applying the input — used for retraction)
		BaseRow prevAggValue = function.getValue();

		// update aggregate result and set to the newRow
		if (isAccumulateMsg(input)) {
			// accumulate input
			function.accumulate(input);
		} else {
			// retract input
			function.retract(input);
		}

		// get current aggregate result (read AFTER applying the input)
		BaseRow newAggValue = function.getValue();

		// get accumulator
		accumulators = function.getAccumulators();

		if (!recordCounter.recordCountIsZero(accumulators)) {
			// we aggregated at least one record for this key

			// update the state
			accState.update(accumulators);

			// if this was not the first row and we have to emit retractions
			if (!firstRow) {
				if (!stateCleaningEnabled && equaliser.equalsWithoutHeader(prevAggValue, newAggValue)) {
					// newRow is the same as before and state cleaning is not enabled.
					// We do not emit retraction and acc message.
					// If state cleaning is enabled, we have to emit messages to prevent too early
					// state eviction of downstream operators.
					return;
				} else {
					// retract previous result
					if (generateRetraction) {
						// prepare retraction message for previous row
						resultRow.replace(currentKey, prevAggValue).setHeader(RETRACT_MSG);
						out.collect(resultRow);
					}
				}
			}
			// emit the new result
			resultRow.replace(currentKey, newAggValue).setHeader(ACCUMULATE_MSG);
			out.collect(resultRow);

		} else {
			// we retracted the last record for this key
			// sent out a delete message
			if (!firstRow) {
				// prepare delete message for previous row
				resultRow.replace(currentKey, prevAggValue).setHeader(RETRACT_MSG);
				out.collect(resultRow);
			}
			// and clear all state
			accState.clear();
			// cleanup dataview under current key
			function.cleanup();
		}
	}

	@Override
	public void onTimer(long timestamp, OnTimerContext ctx, Collector<BaseRow> out) throws Exception {
		// idle-state retention fired: drop this key's accumulator and data views
		if (stateCleaningEnabled) {
			cleanupState(accState);
			function.cleanup();
		}
	}

	@Override
	public void close() throws Exception {
		if (function != null) {
			function.close();
		}
	}
}
package meshwork; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.Socket; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.TimerTask; import java.util.UUID; import meshwork.MainActivity.SectionsPagerAdapter; import android.R; import android.app.AlertDialog; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.app.Service; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.bluetooth.BluetoothServerSocket; import android.bluetooth.BluetoothSocket; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Binder; import android.os.Bundle; import android.os.Handler; import android.os.HandlerThread; import android.os.IBinder; import android.os.Looper; import android.os.Message; import android.view.View; import android.widget.ArrayAdapter; import android.widget.Toast; public class BluetoothService extends Service { BluetoothAdapter mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); String stringUUID = "37b8b70f-50d9-4974-bdc6-58da71421d2b"; UUID uuid = UUID.fromString(stringUUID); ArrayList<String> messages = new ArrayList<String>(); boolean connected = false; boolean connectedToMainActivity = false; ConnectedThread connectedThread; HashMap<BluetoothSocket,ConnectedThread> threads = new HashMap<BluetoothSocket,ConnectedThread>(); MessageHandler mHandler = new MessageHandler(); AlertHandler aHandler = new AlertHandler(); private final IBinder mBinder = new LocalBinder(); ArrayAdapter<String> nearbyDevicesAdapter; 
MainActivity mainActivity; ConversationManager conversationManager; final int MESSAGE_NEW_CONVERSATION = 1; final int MESSAGE_NEW_NAME = 2; final int MESSAGE_NEW_MESSAGE = 3; final int MESSAGE_NEW_IMAGE = 4; final int MESSAGE_DISCONNECT = 5; final byte DATA_TEXT = (byte)1; final byte DATA_NAME = (byte)2; final byte DATA_PROFILE_IMAGE = (byte)3; final byte DATA_DISCONNECT = (byte)4; //Create a BroadcastReceiver for ACTION_FOUND //Alerts the app when a new device is found final BroadcastReceiver mReceiver = new BroadcastReceiver() { public void onReceive(Context context, Intent intent) { String action = intent.getAction(); // When discovery finds a device if (BluetoothDevice.ACTION_FOUND.equals(action)) { // Get the BluetoothDevice object from the Intent BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE); //when the service is connected to main activity, the array adapters are synced //so updating this adapter affects its use in WhoIsNearbyFragment if(connectedToMainActivity) { if((nearbyDevicesAdapter != null) && nearbyDevicesAdapter.getPosition(device.getName()) == -1) { nearbyDevicesAdapter.add(device.getName()); nearbyDevicesAdapter.notifyDataSetChanged(); } connectDevice(device); } } } }; /** * Class for MainActivity to access * @author ryaneshleman * */ public class LocalBinder extends Binder { BluetoothService getService() { return BluetoothService.this; } } public final class MessageHandler extends Handler { public void handleMessage(Message msg) { BluetoothSocket socket; switch(msg.what) { case MESSAGE_NEW_CONVERSATION: conversationManager.newConversation((BluetoothSocket)msg.obj); //alert("new conv in service"); return; case MESSAGE_NEW_NAME: String name = msg.getData().getString("name"); conversationManager.setName((BluetoothSocket)msg.obj,name); //alert("new name:" + name); return; case MESSAGE_NEW_MESSAGE: String message = msg.getData().getString("message"); socket = (BluetoothSocket)msg.obj; System.out.println("SERVICE 
MESSAGE RECEIVED: " + message + socket.toString()); conversationManager.addMessage(socket,message); //alert("New Message from: " + conversationManager.conversations.get(socket).name); newMessageNotification(conversationManager.conversations.get(socket).name); return; case MESSAGE_NEW_IMAGE: Bitmap img = msg.getData().getParcelable("image"); socket = (BluetoothSocket)msg.obj; conversationManager.newConversationImage(socket,img); break; case MESSAGE_DISCONNECT: socket = (BluetoothSocket)msg.obj; conversationManager.endConversation(socket); threads.remove(threads.get(socket)); break; } } } public final class AlertHandler extends Handler { public void handleMessage(Message msg) { alert((String)msg.obj); } } @Override public void onCreate() { // Start up the thread running the service. Note that we create a // separate thread because the service normally runs in the process's // main thread, which we don't want to block. We also make it // background priority so CPU-intensive work will not disrupt our UI. 
HandlerThread thread = new HandlerThread("ServiceStartArguments"); thread.start(); AcceptThread acceptThread = new AcceptThread(); acceptThread.start(); //register the broadcast receiver that is notified when new device is present IntentFilter filter = new IntentFilter(BluetoothDevice.ACTION_FOUND); this.registerReceiver(mReceiver, filter); System.out.println("SERVICE CREATED"); } @Override public int onStartCommand(Intent intent, int flags, int startId) { // If we get killed, after returning from here, restart System.out.println("SERVICE STARTED"); return START_STICKY; } public void alert(String str) { Toast.makeText(this, str, Toast.LENGTH_SHORT).show(); } @Override public void onDestroy() { super.onDestroy(); Toast.makeText(this, "service done", Toast.LENGTH_SHORT).show(); disconnectFromRemoteDevices(); unregisterReceiver(mReceiver); } private void disconnectFromRemoteDevices() { for(BluetoothSocket s : threads.keySet()) threads.get(s).write(DATA_DISCONNECT, new byte[0]); } @Override public IBinder onBind(Intent intent) { return mBinder; } public void refreshDiscovery() { if(!mBluetoothAdapter.startDiscovery()) { //Handle this error } alert("starting discovery"); } public void connectDevice(BluetoothDevice device) { Thread thr = new ConnectThread(device); thr.start(); } private class AcceptThread extends Thread { private BluetoothServerSocket mmServerSocket; public AcceptThread() { init(); } private void init() { // Use a temporary object that is later assigned to mmServerSocket, // because mmServerSocket is final BluetoothServerSocket tmp = null; try { // MY_UUID is the app's UUID string, also used by the client code tmp = mBluetoothAdapter.listenUsingRfcommWithServiceRecord("MeshWork", uuid); } catch (IOException e) { } mmServerSocket = tmp; } public void run() { BluetoothSocket socket = null; // Keep listening until exception occurs or a socket is returned while (true) { try { socket = mmServerSocket.accept(); } catch (IOException e) { break; } // If a 
connection was accepted if (socket != null) { manageConnectedSocket(socket); //mmServerSocket.close(); try { mmServerSocket.close(); sleep(1200); mmServerSocket = mBluetoothAdapter.listenUsingRfcommWithServiceRecord("MeshWork", uuid); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } socket = null; break; } } } /** Will cancel the listening socket, and cause the thread to finish */ public void cancel() { try { mmServerSocket.close(); } catch (IOException e) { } } } /** * THis thread is used to establish connections between threads * @author ryaneshleman * */ private class ConnectThread extends Thread { private final BluetoothSocket mmSocket; private final BluetoothDevice mmDevice; public ConnectThread(BluetoothDevice device) { // Use a temporary object that is later assigned to mmSocket, // because mmSocket is final BluetoothSocket tmp = null; mmDevice = device; // Get a BluetoothSocket to connect with the given BluetoothDevice try { // uuid is the app's UUID string, also used by the server code tmp = device.createRfcommSocketToServiceRecord(uuid); } catch (IOException e) { } mmSocket = tmp; } public void run() { // Cancel discovery because it will slow down the connection mBluetoothAdapter.cancelDiscovery(); try { // Connect the device through the socket. 
// (continuation of ConnectThread.run(); the start of this method is above this chunk)
            // This will block
            // until it succeeds or throws an exception
            mmSocket.connect();
        } catch (IOException connectException) {
            // Unable to connect; close the socket and get out
            try {
                mmSocket.close();
            } catch (IOException closeException) {
                // NOTE(review): swallowed close failure — intentional best-effort cleanup
            }
            return;
        }
        // Do work to manage the connection (in a separate thread)
        manageConnectedSocket(mmSocket);
        mBluetoothAdapter.startDiscovery();
    }

    /** Will cancel an in-progress connection, and close the socket */
    public void cancel() {
        try {
            mmSocket.close();
        } catch (IOException e) {
            // best-effort close; nothing to do on failure
        }
    }
}

/**
 * This method is called when a connection is achieved and a socket established.
 * Registers a ConnectedThread for the socket (unless the remote device is already
 * connected) and notifies both handlers that a new conversation exists.
 * @param mmSocket2 the freshly connected socket
 */
private void manageConnectedSocket(BluetoothSocket mmSocket2) {
    System.out.println("SOCKET CREATED: " + mmSocket2);
    // Ignore duplicate connections to the same remote device.
    if(alreadyConnected(mmSocket2))
        return;
    connectedThread = new ConnectedThread(mmSocket2);
    connectedThread.start();
    threads.put(mmSocket2,connectedThread);
    connected = true;
    aHandler.obtainMessage(1, "SocketCreated").sendToTarget();
    Message msg = mHandler.obtainMessage(MESSAGE_NEW_CONVERSATION, mmSocket2);
    msg.obj = mmSocket2;
    msg.sendToTarget();
}

/**
 * Returns true when a thread already exists for the remote device behind
 * the given socket (i.e. we are already connected to that device).
 */
private boolean alreadyConnected(BluetoothSocket socket) {
    for(BluetoothSocket s : threads.keySet())
        if(s.getRemoteDevice().equals(socket.getRemoteDevice()))
            return true;
    return false;
}

/**
 * This thread tries to read from the socket and when it does, dispatches the
 * received payload to the UI handler according to the one-byte type flag that
 * prefixes every frame (name / text / profile image / disconnect).
 * @author ryaneshleman
 */
private class ConnectedThread extends Thread {
    private final BluetoothSocket mmSocket;
    private final InputStream mmInStream;
    private final OutputStream mmOutStream;
    // set while an image transfer is in progress (currently unused in this chunk)
    private boolean receivingImage = false;

    public ConnectedThread(BluetoothSocket socket) {
        mmSocket = socket;
        InputStream tmpIn = null;
        OutputStream tmpOut = null;
        // Get the input and output streams, using temp objects because
        // member streams are final
        try {
            tmpIn = socket.getInputStream();
            tmpOut = socket.getOutputStream();
        } catch (IOException e) {
            // NOTE(review): swallowed — on failure the streams stay null and
            // run()/write() will NPE later; consider aborting the thread instead
        }
        mmInStream = tmpIn;
        mmOutStream = tmpOut;
    }

    public void run() {
        int BUFF_SIZE = 128;
        byte[] buffer = new byte[BUFF_SIZE]; // buffer store for the stream
        int bytes; // bytes returned from read()
        // Announce our conversation name to the peer first.
        write(DATA_NAME,conversationManager.name.getBytes());
        // Keep listening to the InputStream until an exception occurs
        while (true) {
            try {
                // Read from the InputStream.
                // NOTE(review): read() returns -1 at EOF, so `bytes` becomes -2
                // there and is NOT caught by the check below — TODO confirm EOF
                // is expected to surface as an IOException instead.
                bytes = mmInStream.read(buffer) - 1;
                if(bytes == -1) continue;
                // First byte of each frame is the payload-type flag.
                byte flag = buffer[0];
                buffer = removePrependedByte(buffer);
                String receivedMessage;
                Message msg;
                Bundle data;
                switch(flag) {
                    // a conversation name was received
                    case DATA_NAME:
                        receivedMessage = new String(buffer);
                        System.out.println("READING BYTES NAME: " + receivedMessage);
                        msg = mHandler.obtainMessage(MESSAGE_NEW_NAME,mmSocket);
                        data = new Bundle();
                        data.putString("name", receivedMessage);
                        msg.setData(data);
                        msg.sendToTarget();
                        buffer = new byte[BUFF_SIZE];
                        // Reply with our profile image once the peer introduced itself.
                        sendProfileImage(this);
                        buffer = new byte[128];
                        break;
                    //text was received
                    case DATA_TEXT:
                        receivedMessage = new String(buffer);
                        System.out.println("READING BYTES TEXT: " + receivedMessage);
                        msg = mHandler.obtainMessage(MESSAGE_NEW_MESSAGE,mmSocket);
                        data = new Bundle();
                        data.putString("message", receivedMessage);
                        msg.setData(data);
                        msg.arg1 = 1;
                        msg.sendToTarget();
                        buffer = new byte[128];
                        break;
                    //a profile image was received
                    case DATA_PROFILE_IMAGE:
                        System.out.println("receiving image");
                        Bitmap img = receiveImage(mmInStream, buffer);
                        buffer = new byte[BUFF_SIZE];
                        System.out.println("image received");
                        msg = mHandler.obtainMessage(MESSAGE_NEW_IMAGE,mmSocket);
                        data = new Bundle();
                        data.putParcelable("image", img);
                        msg.setData(data);
                        msg.arg1 = 1;
                        msg.sendToTarget();
                        break;
                    // the peer is going away: notify the UI and stop this thread
                    case DATA_DISCONNECT:
                        System.out.println("DISCONNECT RECEIVED");
                        msg = mHandler.obtainMessage(MESSAGE_DISCONNECT,mmSocket);
                        msg.sendToTarget();
                        this.cancel();
                        return;
                }
            } catch (IOException e) {
                // stream broke — leave the read loop and let the thread die
                break;
            }
        }
    }

    /**
     * This is called by the run() method when it receives a flag indicating that
     * a profile image is being transferred. It collects the bytes and turns them
     * into a Bitmap to be returned. The first 4 bytes of the payload encode the
     * total image length (big-endian int); the rest is WEBP data.
     * @param mmInStream2 stream to keep reading image chunks from
     * @param buffer the already-read first chunk (length prefix + initial data)
     * @return the decoded Bitmap
     */
    private Bitmap receiveImage(InputStream mmInStream2, byte[] buffer){
        byte[] intBytes = new byte[] {buffer[0],buffer[1],buffer[2],buffer[3]};
        int numBytes = (ByteBuffer.wrap(intBytes)).getInt();
        System.out.println("numBytes:" + numBytes);
        byte[] imgBytes = new byte[numBytes];
        int count = 0;
        try {
            sleep(1000); //for some reason without this, the image received from the galaxy s is scrambled
            //possibly due to slow data transfer rates?
        } catch (InterruptedException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
        //copy initial buffer data (skipping the 4-byte length prefix)
        // NOTE(review): loops to buffer.length without checking count < numBytes —
        // TODO confirm small images (< buffer.length - 4 bytes) cannot overflow imgBytes
        for(int i = 4; i < buffer.length ; i++) {
            imgBytes[count] = buffer[i];
            count++;
        }
        buffer = new byte[numBytes];
        // Keep reading until the advertised number of bytes has arrived.
        while(count < numBytes) {
            try {
                // NOTE(review): reads from the field mmInStream, not the mmInStream2
                // parameter — same object today, but the parameter is effectively unused
                mmInStream.read(buffer);
                for(int i = 0; i< buffer.length && count < numBytes; i++) {
                    imgBytes[count] = buffer[i];
                    count++;
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        //return BitmapFactory.decodeByteArray(imgBytes , 0, numBytes);
        System.out.println("Bytes Expected" + buffer.length + " Bytes Received " + count);
        return BitmapFactory.decodeByteArray(imgBytes, 0, count);
    }

    /**
     * writes bytes to the stream, prepended with the provided datatype flag
     * @param type one of the DATA_* frame-type flags
     * @param bytes payload to send
     */
    public void write(byte type,byte[] bytes) {
        //mmSocket.isConnected
        try {
            mmOutStream.write(prependByte(type,bytes));
            System.out.println("WRITING BYTES");
        } catch (IOException e) {
            // NOTE(review): write failures are silently dropped
        }
    }

    /* Call this from the main activity to shutdown the connection */
    public void cancel() {
        try {
            mmSocket.close();
        } catch (IOException e) {
            // best-effort close
        }
    }
}

/**
 * finds the thread that corresponds to the socket and calls write on it
 * @param messageInBytes text payload, already encoded to bytes
 * @param socket identifies which connection to send on
 */
public void writeMessage(byte[] messageInBytes, BluetoothSocket socket) {
    threads.get(socket).write(DATA_TEXT,messageInBytes);
}

// Convenience overload: send text on the most recently established connection.
public void write(String txt) {
    connectedThread.write(DATA_TEXT,txt.getBytes());
}

/**
 * Returns a new array consisting of b1 followed by all of b2
 * (used to stamp the frame-type flag onto outgoing payloads).
 */
public byte[] prependByte(byte b1, byte[] b2) {
    byte[] out = new byte[1 + b2.length];
    out[0] = b1;
    for(int i = 1; i <= b2.length; i++)
        out[i] = b2[i-1];
    return out;
}

/**
 * Returns a copy of b without its first byte (the frame-type flag).
 * There are probably better ways to handle this.
 * @param b received frame including the leading flag byte
 * @return the payload without the flag
 */
public byte[] removePrependedByte(byte[] b) {
    byte[] out = new byte[b.length -1];
    for(int i = 1; i < b.length; i++)
        out[i-1] = b[i];
    return out;
}

//the first 4 bytes of bitMapBytes is the number of bytes of image data to follow
// Compresses the local profile image to WEBP and sends it, length-prefixed,
// on the given connection with the DATA_PROFILE_IMAGE flag.
public void sendProfileImage(ConnectedThread thread) {
    Bitmap bitmap = conversationManager.profileImage;
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    bitmap.compress(Bitmap.CompressFormat.WEBP, 75, stream);
    byte[] streamBytes = stream.toByteArray();
    byte[] bitMapBytes = new byte[4 + streamBytes.length];
    byte[] len = ByteBuffer.allocate(4).putInt(streamBytes.length).array();
    System.out.println("sent image bytes:" + streamBytes.length);
    for(int i = 0; i< 4; i++)
        bitMapBytes[i] = len[i];
    for(int i = 0; i< streamBytes.length; i++)
        bitMapBytes[i+4] = streamBytes[i];
    thread.write(DATA_PROFILE_IMAGE, bitMapBytes);
}

// Posts a status-bar notification announcing a new message from `name`.
// NOTE(review): uses long-deprecated Notification APIs (constructor +
// setLatestEventInfo) — fine only on very old Android API levels.
private void newMessageNotification(String name) {
    NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
    Notification notification = new Notification(R.drawable.bottom_bar, "New Message from " + name, System.currentTimeMillis());
    notification.flags |= Notification.FLAG_AUTO_CANCEL;
    notification.defaults |= Notification.DEFAULT_SOUND;
    Context context = getApplicationContext();
    CharSequence contentTitle = "Meshwork";
    CharSequence contentText = "New Message from " + name;
    Intent notificationIntent = new Intent(this, meshwork.MainActivity.class);
    notificationIntent.setAction(Intent.ACTION_MAIN);
    notificationIntent.addCategory(Intent.CATEGORY_LAUNCHER);
    notificationIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_SINGLE_TOP);
    /*
     * The notificationIntent is wrapped by a contentIntent. The latter is
     * used to create the notification that will be displayed in the status
     * bar. When the user clicks on the notification, Android will send
     * notificationIntent that in turn will re-launch the application.
     */
    PendingIntent contentIntent = PendingIntent.getActivity(this, 0, notificationIntent, PendingIntent.FLAG_CANCEL_CURRENT);
    notification.setLatestEventInfo(context, contentTitle, contentText, contentIntent);
    notificationManager.notify(1, notification);
}
}
package hudson.plugins.scm_sync_configuration.util;

import hudson.Plugin;
import hudson.PluginWrapper;
import hudson.model.Hudson;
import hudson.model.User;
import hudson.plugins.scm_sync_configuration.SCMManagerFactory;
import hudson.plugins.scm_sync_configuration.SCMManipulator;
import hudson.plugins.scm_sync_configuration.ScmSyncConfigurationBusiness;
import hudson.plugins.scm_sync_configuration.ScmSyncConfigurationPlugin;
import hudson.plugins.scm_sync_configuration.model.ScmContext;
import hudson.plugins.scm_sync_configuration.scms.SCM;
import hudson.plugins.scm_sync_configuration.scms.SCMCredentialConfiguration;
import hudson.plugins.scm_sync_configuration.scms.ScmSyncSubversionSCM;
import hudson.plugins.scm_sync_configuration.xstream.migration.DefaultSSCPOJO;
import hudson.plugins.scm_sync_configuration.xstream.migration.ScmSyncConfigurationPOJO;
import hudson.plugins.test.utils.DirectoryUtils;
import hudson.plugins.test.utils.scms.ScmUnderTest;
import org.codehaus.plexus.PlexusContainerException;
import org.codehaus.plexus.component.repository.exception.ComponentLookupException;
import org.codehaus.plexus.util.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.objenesis.ObjenesisStd;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.springframework.core.io.ClassPathResource;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;

/**
 * Base class for scm-sync-configuration plugin tests.
 *
 * <p>Each test gets a throw-away sandbox containing a fake Hudson root directory
 * (copied from a classpath template) and a fresh local SCM repository, with the
 * {@link Hudson} singleton, the current {@link User} and the plugin instance
 * replaced by PowerMock/Objenesis-built mocks. Subclasses pick the concrete SCM
 * flavour via the {@link ScmUnderTest} constructor argument.
 */
@RunWith(PowerMockRunner.class)
@PowerMockIgnore({ "org.tmatesoft.svn.*" })
@PrepareForTest({Hudson.class, SCM.class, ScmSyncSubversionSCM.class, PluginWrapper.class})
public abstract class ScmSyncConfigurationBaseTest {

    // Exposes the currently running test method name (used to label temp checkout dirs).
    @Rule protected TestName testName = new TestName();

    // Per-test sandbox root; everything below it is deleted in teardown().
    private File currentTestDirectory = null;
    // Local SCM repository backing the test (note: "curent" typo preserved from original API).
    private File curentLocalRepository = null;
    // Fake HUDSON_HOME populated from the template returned by getHudsonRootBaseTemplate().
    private File currentHudsonRootDirectory = null;
    // Business object extracted (via reflection) from the plugin instance under test.
    protected ScmSyncConfigurationBusiness sscBusiness = null;
    // SCM context created by createSCMMock(); null until a mock is created.
    protected ScmContext scmContext = null;
    // Strategy object describing which SCM implementation this test run exercises.
    private ScmUnderTest scmUnderTest;

    protected ScmSyncConfigurationBaseTest(ScmUnderTest scmUnderTest) {
        this.scmUnderTest = scmUnderTest;
        this.scmContext = null;
    }

    /**
     * Builds the whole mocked environment before every test:
     * plugin instance (synchronous commits), mocked PluginWrapper, sandbox
     * directories, initialized local repository, mocked user and Hudson singleton.
     */
    @Before
    public void setup() throws Throwable {
        // Instantiating ScmSyncConfigurationPlugin instance for unit tests by using
        // synchronous transactions (instead of an asynchronous ones)
        // => this way, every commit will be processed synchronously !
        ScmSyncConfigurationPlugin scmSyncConfigPluginInstance = new ScmSyncConfigurationPlugin(true);

        // Mocking PluginWrapper attached to current ScmSyncConfigurationPlugin instance
        PluginWrapper pluginWrapper = PowerMockito.mock(PluginWrapper.class);
        when(pluginWrapper.getShortName()).thenReturn("scm-sync-configuration");

        // Setting field on current plugin instance (private field — injected via reflection,
        // restoring the original accessibility flag afterwards)
        Field wrapperField = Plugin.class.getDeclaredField("wrapper");
        boolean wrapperFieldAccessibility = wrapperField.isAccessible();
        wrapperField.setAccessible(true);
        wrapperField.set(scmSyncConfigPluginInstance, pluginWrapper);
        wrapperField.setAccessible(wrapperFieldAccessibility);

        // Grab the plugin's private business object so tests can drive it directly.
        Field businessField = ScmSyncConfigurationPlugin.class.getDeclaredField("business");
        businessField.setAccessible(true);
        sscBusiness = (ScmSyncConfigurationBusiness) businessField.get(scmSyncConfigPluginInstance);

        // Mocking Hudson root directory
        currentTestDirectory = createTmpDirectory("SCMSyncConfigTestsRoot");
        currentHudsonRootDirectory = new File(currentTestDirectory.getAbsolutePath()+"/hudsonRootDir/");
        if(!(currentHudsonRootDirectory.mkdir())) {
            throw new IOException("Could not create hudson root directory: " + currentHudsonRootDirectory.getAbsolutePath());
        }
        FileUtils.copyDirectoryStructure(new ClassPathResource(getHudsonRootBaseTemplate()).getFile(), currentHudsonRootDirectory);

        //EnvVars env = Computer.currentComputer().getEnvironment();
        //env.put("HUDSON_HOME", tmpHudsonRoot.getPath() );

        // Creating local repository...
        curentLocalRepository = new File(currentTestDirectory.getAbsolutePath()+"/localRepo/");
        if(!(curentLocalRepository.mkdir())) {
            throw new IOException("Could not create local repo directory: " + curentLocalRepository.getAbsolutePath());
        }
        scmUnderTest.initRepo(curentLocalRepository);

        // Mocking user
        User mockedUser = Mockito.mock(User.class);
        when(mockedUser.getId()).thenReturn("fcamblor");

        // Mocking Hudson singleton instance ...
        // Warning : this line will only work on Objenesis supported VMs :
        // http://code.google.com/p/objenesis/wiki/ListOfCurrentlySupportedVMs
        Hudson hudsonMockedInstance = spy((Hudson) new ObjenesisStd().getInstantiatorOf(Hudson.class).newInstance());
        PowerMockito.doReturn(currentHudsonRootDirectory).when(hudsonMockedInstance).getRootDir();
        PowerMockito.doReturn(mockedUser).when(hudsonMockedInstance).getMe();
        PowerMockito.doReturn(scmSyncConfigPluginInstance).when(hudsonMockedInstance).getPlugin(ScmSyncConfigurationPlugin.class);

        // Redirect the static Hudson.getInstance() to the spy built above.
        PowerMockito.mockStatic(Hudson.class);
        PowerMockito.doReturn(hudsonMockedInstance).when(Hudson.class);
        Hudson.getInstance();
        //when(Hudson.getInstance()).thenReturn(hudsonMockedInstance);
    }

    /** Removes the whole per-test sandbox. */
    @After
    public void teardown() throws Throwable {
        // Deleting current test directory
        FileUtils.deleteDirectory(currentTestDirectory);
    }

    // Overridable: classpath location of the HUDSON_HOME template copied into the sandbox.
    protected String getHudsonRootBaseTemplate(){
        return "hudsonRootBaseTemplate/";
    }

    /**
     * Creates a unique temporary directory by creating a temp file, deleting it,
     * and re-creating the same name as a directory.
     * @param directoryPrefix prefix for the generated directory name
     * @return the created directory
     * @throws IOException if the delete/mkdir dance fails
     */
    protected static File createTmpDirectory(String directoryPrefix) throws IOException {
        final File temp = File.createTempFile(directoryPrefix, Long.toString(System.nanoTime()));
        if(!(temp.delete())) {
            throw new IOException("Could not delete temp file: " + temp.getAbsolutePath());
        }
        if(!(temp.mkdir())) {
            throw new IOException("Could not create temp directory: " + temp.getAbsolutePath());
        }
        return (temp);
    }

    /** Creates a mocked SCM pointing at the default repository URL for the SCM under test. */
    protected SCM createSCMMock(){
        return createSCMMock(getSCMRepositoryURL());
    }

    /**
     * Creates a spied SCM wired to the given repository url, stubs credential
     * extraction when the SCM flavour needs it, and loads the resulting
     * configuration into the plugin instance.
     */
    protected SCM createSCMMock(String url){
        SCM mockedSCM = spy(SCM.valueOf(getSCMClass().getName()));
        if(scmUnderTest.useCredentials()){
            SCMCredentialConfiguration mockedCredential = new SCMCredentialConfiguration("toto");
            PowerMockito.doReturn(mockedCredential).when(mockedSCM).extractScmCredentials((String)Mockito.notNull());
        }
        scmContext = new ScmContext(mockedSCM, url);
        ScmSyncConfigurationPOJO config = new DefaultSSCPOJO();
        config.setScm(scmContext.getScm());
        config.setScmRepositoryUrl(scmContext.getScmRepositoryUrl());
        ScmSyncConfigurationPlugin.getInstance().loadData(config);
        ScmSyncConfigurationPlugin.getInstance().init();
        return mockedSCM;
    }

    /** Builds an SCMManipulator settled on the current scmContext (asserts the setup succeeded). */
    protected SCMManipulator createMockedScmManipulator() throws ComponentLookupException, PlexusContainerException{
        // Settling up scm context
        SCMManipulator scmManipulator = new SCMManipulator(SCMManagerFactory.getInstance().createScmManager());
        boolean configSettledUp = scmManipulator.scmConfigurationSettledUp(scmContext, true);
        assertThat(configSettledUp, is(true));
        return scmManipulator;
    }

    protected void verifyCurrentScmContentMatchesCurrentHudsonDir(boolean match) throws ComponentLookupException, PlexusContainerException, IOException{
        verifyCurrentScmContentMatchesHierarchy(getCurrentHudsonRootDirectory(), match);
    }

    protected void verifyCurrentScmContentMatchesHierarchy(String hierarchyPath, boolean match) throws ComponentLookupException, PlexusContainerException, IOException{
        verifyCurrentScmContentMatchesHierarchy(new ClassPathResource(hierarchyPath).getFile(), match);
    }

    /**
     * Checks out the current SCM content into a temp directory and asserts that it
     * does (or does not, per {@code match}) equal the given expected hierarchy,
     * ignoring SCM bookkeeping files.
     */
    protected void verifyCurrentScmContentMatchesHierarchy(File hierarchy, boolean match) throws ComponentLookupException, PlexusContainerException, IOException{
        SCMManipulator scmManipulator = createMockedScmManipulator();

        // Checkouting scm in temp directory
        File checkoutDirectoryForVerifications = createTmpDirectory(this.getClass().getSimpleName()+"_"+testName.getMethodName()+"__verifyCurrentScmContentMatchesHierarchy");
        scmManipulator.checkout(checkoutDirectoryForVerifications);
        List<String> diffs = DirectoryUtils.diffDirectories(checkoutDirectoryForVerifications, hierarchy, getSpecialSCMDirectoryExcludePattern(), true);

        FileUtils.deleteDirectory(checkoutDirectoryForVerifications);

        if(match){
            assertTrue("Directories doesn't match : "+diffs, diffs.isEmpty());
        } else {
            assertFalse("Directories should _not_ match !", diffs.isEmpty());
        }
    }

    protected void verifyCurrentScmContentMatchesHierarchy(String hierarchyPath) throws ComponentLookupException, PlexusContainerException, IOException{
        verifyCurrentScmContentMatchesHierarchy(hierarchyPath, true);
    }

    // Overridable in a near future (when dealing with multiple scms ...)
    protected String getSCMRepositoryURL(){
        return scmUnderTest.createUrl(this.getCurentLocalRepository().getAbsolutePath());
    }

    // Patterns for SCM bookkeeping files to ignore when diffing checkouts against expectations.
    protected static List<Pattern> getSpecialSCMDirectoryExcludePattern(){
        return new ArrayList<Pattern>(){{
            add(Pattern.compile("\\.svn"));
            add(Pattern.compile("\\.git.*"));
            add(Pattern.compile("scm-sync-configuration\\..*\\.log"));
            add(Pattern.compile("scm-sync-configuration"));
        }};
    }

    protected String getSuffixForTestFiles() {
        return scmUnderTest.getSuffixForTestFiles();
    }

    // Overridable in a near future (when dealing with multiple scms ...)
    protected Class<? extends SCM> getSCMClass(){
        return scmUnderTest.getClazz();
    }

    protected File getCurrentTestDirectory() {
        return currentTestDirectory;
    }

    protected File getCurentLocalRepository() {
        return curentLocalRepository;
    }

    public File getCurrentHudsonRootDirectory() {
        return currentHudsonRootDirectory;
    }

    public File getCurrentScmSyncConfigurationCheckoutDirectory(){
        return new File(currentHudsonRootDirectory.getAbsolutePath()+"/scm-sync-configuration/checkoutConfiguration/");
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs.sftp;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Vector;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.classification.VisibleForTesting;

import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.ChannelSftp.LsEntry;
import com.jcraft.jsch.SftpATTRS;
import com.jcraft.jsch.SftpException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * SFTP FileSystem.
 *
 * <p>Stateless Hadoop FileSystem over JSch SFTP channels: every public
 * operation borrows a channel from a {@link SFTPConnectionPool}, performs its
 * work, and returns the channel via {@code disconnect}. The working directory
 * is never tracked; the remote home directory is used throughout.
 */
public class SFTPFileSystem extends FileSystem {

  public static final Logger LOG = LoggerFactory.getLogger(SFTPFileSystem.class);

  // Pool of SFTP channels; created in setConfigurationFromURI, shut down in close().
  private SFTPConnectionPool connectionPool;
  private URI uri;
  // Flips to true exactly once in close(); guards connect() via checkNotClosed().
  private final AtomicBoolean closed = new AtomicBoolean(false);

  private static final int DEFAULT_SFTP_PORT = 22;
  private static final int DEFAULT_MAX_CONNECTION = 5;
  public static final int DEFAULT_BUFFER_SIZE = 1024 * 1024;
  public static final int DEFAULT_BLOCK_SIZE = 4 * 1024;
  public static final String FS_SFTP_USER_PREFIX = "fs.sftp.user.";
  public static final String FS_SFTP_PASSWORD_PREFIX = "fs.sftp.password.";
  public static final String FS_SFTP_HOST = "fs.sftp.host";
  public static final String FS_SFTP_HOST_PORT = "fs.sftp.host.port";
  public static final String FS_SFTP_KEYFILE = "fs.sftp.keyfile";
  public static final String FS_SFTP_CONNECTION_MAX = "fs.sftp.connection.max";
  public static final String E_SAME_DIRECTORY_ONLY =
      "only same directory renames are supported";
  public static final String E_HOST_NULL = "Invalid host specified";
  public static final String E_USER_NULL =
      "No user specified for sftp connection. Expand URI or credential file.";
  public static final String E_PATH_DIR = "Path %s is a directory.";
  public static final String E_FILE_STATUS = "Failed to get file status";
  public static final String E_FILE_NOTFOUND = "File %s does not exist.";
  public static final String E_FILE_EXIST = "File already exists: %s";
  public static final String E_CREATE_DIR =
      "create(): Mkdirs failed to create: %s";
  public static final String E_DIR_CREATE_FROMFILE =
      "Can't make directory for path %s since it is a file.";
  public static final String E_MAKE_DIR_FORPATH =
      "Can't make directory for path \"%s\" under \"%s\".";
  public static final String E_DIR_NOTEMPTY = "Directory: %s is not empty.";
  public static final String E_FILE_CHECK_FAILED = "File check failed";
  public static final String E_SPATH_NOTEXIST = "Source path %s does not exist";
  public static final String E_DPATH_EXIST =
      "Destination path %s already exist, cannot rename!";
  public static final String E_FAILED_GETHOME = "Failed to get home directory";
  public static final String E_FAILED_DISCONNECT = "Failed to disconnect";
  public static final String E_FS_CLOSED = "FileSystem is closed!";

  /**
   * Set configuration from URI: host/port and user/password parsed from
   * {@code uriInfo} (URI values win over existing config), then the connection
   * pool is created.
   *
   * @param uriInfo filesystem URI, possibly carrying user:password@host:port
   * @param conf configuration to populate
   * @throws IOException if no host can be determined
   */
  private void setConfigurationFromURI(URI uriInfo, Configuration conf)
      throws IOException {
    // get host information from URI
    String host = uriInfo.getHost();
    host = (host == null) ? conf.get(FS_SFTP_HOST, null) : host;
    if (host == null) {
      throw new IOException(E_HOST_NULL);
    }
    conf.set(FS_SFTP_HOST, host);

    int port = uriInfo.getPort();
    port = (port == -1) ? conf.getInt(FS_SFTP_HOST_PORT, DEFAULT_SFTP_PORT) : port;
    conf.setInt(FS_SFTP_HOST_PORT, port);

    // get user/password information from URI
    String userAndPwdFromUri = uriInfo.getUserInfo();
    if (userAndPwdFromUri != null) {
      String[] userPasswdInfo = userAndPwdFromUri.split(":");
      String user = userPasswdInfo[0];
      user = URLDecoder.decode(user, "UTF-8");
      conf.set(FS_SFTP_USER_PREFIX + host, user);
      if (userPasswdInfo.length > 1) {
        // NOTE(review): password is stored un-decoded, unlike the user name
        conf.set(FS_SFTP_PASSWORD_PREFIX + host + "." + user, userPasswdInfo[1]);
      }
    }

    String user = conf.get(FS_SFTP_USER_PREFIX + host);
    if (user == null || user.equals("")) {
      throw new IllegalStateException(E_USER_NULL);
    }

    int connectionMax =
        conf.getInt(FS_SFTP_CONNECTION_MAX, DEFAULT_MAX_CONNECTION);
    connectionPool = new SFTPConnectionPool(connectionMax);
  }

  /**
   * Connecting by using configuration parameters.
   *
   * @return a borrowed ChannelSftp from the pool
   * @throws IOException if the filesystem is closed or connection fails
   */
  private ChannelSftp connect() throws IOException {
    checkNotClosed();
    Configuration conf = getConf();

    String host = conf.get(FS_SFTP_HOST, null);
    int port = conf.getInt(FS_SFTP_HOST_PORT, DEFAULT_SFTP_PORT);
    String user = conf.get(FS_SFTP_USER_PREFIX + host, null);
    String pwd = conf.get(FS_SFTP_PASSWORD_PREFIX + host + "." + user, null);
    String keyFile = conf.get(FS_SFTP_KEYFILE, null);

    ChannelSftp channel =
        connectionPool.connect(host, port, user, pwd, keyFile);

    return channel;
  }

  /**
   * Logout and disconnect the given channel (returns it to the pool).
   *
   * @param channel channel previously obtained from {@link #connect()}
   * @throws IOException on disconnect failure
   */
  private void disconnect(ChannelSftp channel) throws IOException {
    connectionPool.disconnect(channel);
  }

  /**
   * Resolve against given working directory.
   *
   * @param workDir remote working directory to resolve relative paths against
   * @param path path to resolve
   * @return absolute path
   */
  private Path makeAbsolute(Path workDir, Path path) {
    if (path.isAbsolute()) {
      return path;
    }
    return new Path(workDir, path);
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   * @throws IOException if the status check fails for a reason other than absence
   */
  private boolean exists(ChannelSftp channel, Path file) throws IOException {
    try {
      getFileStatus(channel, file);
      return true;
    } catch (FileNotFoundException fnfe) {
      return false;
    } catch (IOException ioe) {
      throw new IOException(E_FILE_STATUS, ioe);
    }
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   * Looks the file up in its parent directory's listing; the root directory
   * gets a synthetic status since its length/mtime are unknown over SFTP.
   */
  @SuppressWarnings("unchecked")
  private FileStatus getFileStatus(ChannelSftp client, Path file)
      throws IOException {
    FileStatus fileStat = null;
    Path workDir;
    try {
      workDir = new Path(client.pwd());
    } catch (SftpException e) {
      throw new IOException(e);
    }
    Path absolute = makeAbsolute(workDir, file);
    Path parentPath = absolute.getParent();
    if (parentPath == null) { // root directory
      long length = -1; // Length of root directory on server not known
      boolean isDir = true;
      int blockReplication = 1;
      long blockSize = DEFAULT_BLOCK_SIZE; // Block Size not known.
      long modTime = -1; // Modification time of root directory not known.
      Path root = new Path("/");
      return new FileStatus(length, isDir, blockReplication, blockSize,
          modTime,
          root.makeQualified(this.getUri(), this.getWorkingDirectory(client)));
    }
    String pathName = parentPath.toUri().getPath();
    Vector<LsEntry> sftpFiles;
    try {
      sftpFiles = (Vector<LsEntry>) client.ls(pathName);
    } catch (SftpException e) {
      throw new FileNotFoundException(String.format(E_FILE_NOTFOUND, file));
    }
    if (sftpFiles != null) {
      for (LsEntry sftpFile : sftpFiles) {
        if (sftpFile.getFilename().equals(file.getName())) {
          // file found in directory
          fileStat = getFileStatus(client, sftpFile, parentPath);
          break;
        }
      }
      if (fileStat == null) {
        throw new FileNotFoundException(String.format(E_FILE_NOTFOUND, file));
      }
    } else {
      throw new FileNotFoundException(String.format(E_FILE_NOTFOUND, file));
    }
    return fileStat;
  }

  /**
   * Convert the file information in LsEntry to a {@link FileStatus} object.
   * Symlinks are resolved via realpath so that their target's type/length are
   * reported; uid/gid are reported numerically since SFTP gives no names.
   *
   * @param channel channel to resolve symlinks on
   * @param sftpFile directory entry to convert
   * @param parentPath directory containing the entry
   * @return file status
   * @throws IOException on symlink resolution failure
   */
  private FileStatus getFileStatus(ChannelSftp channel, LsEntry sftpFile,
      Path parentPath) throws IOException {

    SftpATTRS attr = sftpFile.getAttrs();
    long length = attr.getSize();
    boolean isDir = attr.isDir();
    boolean isLink = attr.isLink();
    if (isLink) {
      String link = parentPath.toUri().getPath() + "/" + sftpFile.getFilename();
      try {
        link = channel.realpath(link);

        Path linkParent = new Path("/", link);

        FileStatus fstat = getFileStatus(channel, linkParent);
        isDir = fstat.isDirectory();
        length = fstat.getLen();
      } catch (Exception e) {
        throw new IOException(e);
      }
    }
    int blockReplication = 1;
    // Using default block size since there is no way in SFTP channel to know of
    // block sizes on server. The assumption could be less than ideal.
    long blockSize = DEFAULT_BLOCK_SIZE;
    long modTime = attr.getMTime() * 1000L; // convert to milliseconds
    long accessTime = attr.getATime() * 1000L;
    FsPermission permission = getPermissions(sftpFile);
    // not be able to get the real user group name, just use the user and group
    // id
    String user = Integer.toString(attr.getUId());
    String group = Integer.toString(attr.getGId());
    Path filePath = new Path(parentPath, sftpFile.getFilename());

    return new FileStatus(length, isDir, blockReplication, blockSize, modTime,
        accessTime, permission, user, group, filePath.makeQualified(
            this.getUri(), this.getWorkingDirectory(channel)));
  }

  /**
   * Return file permission.
   *
   * @param sftpFile directory entry
   * @return file permission
   */
  private FsPermission getPermissions(LsEntry sftpFile) {
    return new FsPermission((short) sftpFile.getAttrs().getPermissions());
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   * Creates missing parents recursively (with default permission; the
   * {@code permission} argument is not applied to the created directory).
   */
  private boolean mkdirs(ChannelSftp client, Path file, FsPermission permission)
      throws IOException {
    boolean created = true;
    Path workDir;
    try {
      workDir = new Path(client.pwd());
    } catch (SftpException e) {
      throw new IOException(e);
    }
    Path absolute = makeAbsolute(workDir, file);
    String pathName = absolute.getName();
    if (!exists(client, absolute)) {
      Path parent = absolute.getParent();
      created =
          (parent == null || mkdirs(client, parent, FsPermission.getDefault()));
      if (created) {
        // NOTE(review): if parent == null (root path) the line below NPEs;
        // unreachable in practice only because "/" always exists above.
        String parentDir = parent.toUri().getPath();
        boolean succeeded = true; // NOTE(review): never set to false — dead flag
        try {
          final String previousCwd = client.pwd();
          client.cd(parentDir);
          client.mkdir(pathName);
          client.cd(previousCwd);
        } catch (SftpException e) {
          throw new IOException(String.format(E_MAKE_DIR_FORPATH, pathName,
              parentDir));
        }
        created = created & succeeded;
      }
    } else if (isFile(client, absolute)) {
      throw new IOException(String.format(E_DIR_CREATE_FROMFILE, absolute));
    }
    return created;
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   * @throws IOException wrapping any failure other than plain absence
   */
  private boolean isFile(ChannelSftp channel, Path file) throws IOException {
    try {
      return !getFileStatus(channel, file).isDirectory();
    } catch (FileNotFoundException e) {
      return false; // file does not exist
    } catch (IOException ioe) {
      throw new IOException(E_FILE_CHECK_FAILED, ioe);
    }
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   * Recursively deletes directories when {@code recursive} is set; refuses to
   * delete a non-empty directory otherwise.
   */
  private boolean delete(ChannelSftp channel, Path file, boolean recursive)
      throws IOException {
    Path workDir;
    try {
      workDir = new Path(channel.pwd());
    } catch (SftpException e) {
      throw new IOException(e);
    }
    Path absolute = makeAbsolute(workDir, file);
    String pathName = absolute.toUri().getPath();
    FileStatus fileStat = null;
    try {
      fileStat = getFileStatus(channel, absolute);
    } catch (FileNotFoundException e) {
      // file not found — nothing to delete, report failure
      // NOTE(review): the original comment claimed "return true" but the code
      // returns false; callers see delete-of-missing-path as unsuccessful.
      return false;
    }
    if (!fileStat.isDirectory()) {
      boolean status = true;
      try {
        channel.rm(pathName);
      } catch (SftpException e) {
        status = false;
      }
      return status;
    } else {
      boolean status = true;
      FileStatus[] dirEntries = listStatus(channel, absolute);
      if (dirEntries != null && dirEntries.length > 0) {
        if (!recursive) {
          throw new IOException(String.format(E_DIR_NOTEMPTY, file));
        }
        for (int i = 0; i < dirEntries.length; ++i) {
          delete(channel, new Path(absolute, dirEntries[i].getPath()),
              recursive);
        }
      }
      try {
        channel.rmdir(pathName);
      } catch (SftpException e) {
        status = false;
      }
      return status;
    }
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   * Lists a directory (skipping "." and ".."); for a plain file returns a
   * single-element array with that file's status.
   */
  @SuppressWarnings("unchecked")
  private FileStatus[] listStatus(ChannelSftp client, Path file)
      throws IOException {
    Path workDir;
    try {
      workDir = new Path(client.pwd());
    } catch (SftpException e) {
      throw new IOException(e);
    }
    Path absolute = makeAbsolute(workDir, file);
    FileStatus fileStat = getFileStatus(client, absolute);
    if (!fileStat.isDirectory()) {
      return new FileStatus[] {fileStat};
    }
    Vector<LsEntry> sftpFiles;
    try {
      sftpFiles = (Vector<LsEntry>) client.ls(absolute.toUri().getPath());
    } catch (SftpException e) {
      throw new IOException(e);
    }
    ArrayList<FileStatus> fileStats = new ArrayList<FileStatus>();
    for (int i = 0; i < sftpFiles.size(); i++) {
      LsEntry entry = sftpFiles.get(i);
      String fname = entry.getFilename();
      // skip current and parent directory, ie. "." and ".."
      if (!".".equalsIgnoreCase(fname) && !"..".equalsIgnoreCase(fname)) {
        fileStats.add(getFileStatus(client, entry, absolute));
      }
    }
    return fileStats.toArray(new FileStatus[fileStats.size()]);
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   *
   * @param channel channel to perform the rename on
   * @param src source path (must exist)
   * @param dst destination path (must not exist)
   * @return rename successful?
   * @throws IOException if src is missing or dst already exists
   */
  private boolean rename(ChannelSftp channel, Path src, Path dst)
      throws IOException {
    Path workDir;
    try {
      workDir = new Path(channel.pwd());
    } catch (SftpException e) {
      throw new IOException(e);
    }
    Path absoluteSrc = makeAbsolute(workDir, src);
    Path absoluteDst = makeAbsolute(workDir, dst);

    if (!exists(channel, absoluteSrc)) {
      throw new IOException(String.format(E_SPATH_NOTEXIST, src));
    }
    if (exists(channel, absoluteDst)) {
      throw new IOException(String.format(E_DPATH_EXIST, dst));
    }
    boolean renamed = true;
    try {
      final String previousCwd = channel.pwd();
      channel.cd("/");
      // NOTE(review): renames the caller-supplied src/dst paths, not the
      // absoluteSrc/absoluteDst computed above — after cd("/") a relative
      // input would resolve against "/" instead of workDir. TODO confirm
      // callers always pass absolute paths.
      channel.rename(src.toUri().getPath(), dst.toUri().getPath());
      channel.cd(previousCwd);
    } catch (SftpException e) {
      renamed = false;
    }
    return renamed;
  }

  @Override
  public void initialize(URI uriInfo, Configuration conf) throws IOException {
    super.initialize(uriInfo, conf);

    setConfigurationFromURI(uriInfo, conf);
    setConf(conf);
    this.uri = uriInfo;
  }

  @Override
  public URI getUri() {
    return uri;
  }

  /**
   * Opens the file for reading. The borrowed channel is held for the lifetime
   * of the returned stream and released when the stream is closed.
   */
  @Override
  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
    ChannelSftp channel = connect();
    Path workDir;
    try {
      workDir = new Path(channel.pwd());
    } catch (SftpException e) {
      throw new IOException(e);
    }
    Path absolute = makeAbsolute(workDir, f);
    FileStatus fileStat = getFileStatus(channel, absolute);
    if (fileStat.isDirectory()) {
      disconnect(channel);
      throw new IOException(String.format(E_PATH_DIR, f));
    }
    try {
      // the path could be a symbolic link, so get the real path
      // NOTE(review): failures here (and in getFileStatus above) propagate
      // without disconnecting the channel — potential pool leak.
      absolute = new Path("/", channel.realpath(absolute.toUri().getPath()));
    } catch (SftpException e) {
      throw new IOException(e);
    }

    return new FSDataInputStream(
        new SFTPInputStream(channel, absolute, statistics)){
      @Override
      public void close() throws IOException {
        try {
          super.close();
        } finally {
          disconnect(channel);
        }
      }
    };
  }

  /**
   * A stream obtained via this call must be closed before using other APIs of
   * this class or else the invocation will block.
   */
  @Override
  public FSDataOutputStream create(Path f, FsPermission permission,
      boolean overwrite, int bufferSize, short replication, long blockSize,
      Progressable progress) throws IOException {
    final ChannelSftp client = connect();
    Path workDir;
    try {
      workDir = new Path(client.pwd());
    } catch (SftpException e) {
      throw new IOException(e);
    }
    Path absolute = makeAbsolute(workDir, f);
    if (exists(client, f)) {
      if (overwrite) {
        delete(client, f, false);
      } else {
        disconnect(client);
        throw new IOException(String.format(E_FILE_EXIST, f));
      }
    }
    Path parent = absolute.getParent();
    if (parent == null || !mkdirs(client, parent, FsPermission.getDefault())) {
      parent = (parent == null) ? new Path("/") : parent;
      disconnect(client);
      throw new IOException(String.format(E_CREATE_DIR, parent));
    }
    OutputStream os;
    try {
      final String previousCwd = client.pwd();
      client.cd(parent.toUri().getPath());
      os = client.put(f.getName());
      client.cd(previousCwd);
    } catch (SftpException e) {
      throw new IOException(e);
    }
    FSDataOutputStream fos = new FSDataOutputStream(os, statistics) {
      @Override
      public void close() throws IOException {
        super.close();
        disconnect(client);
      }
    };

    return fos;
  }

  @Override
  public FSDataOutputStream append(Path f, int bufferSize,
      Progressable progress)
      throws IOException {
    throw new UnsupportedOperationException("Append is not supported "
        + "by SFTPFileSystem");
  }

  /*
   * The parent of source and destination can be different. It is suppose to
   * work like 'move'
   */
  @Override
  public boolean rename(Path src, Path dst) throws IOException {
    ChannelSftp channel = connect();
    try {
      boolean success = rename(channel, src, dst);
      return success;
    } finally {
      disconnect(channel);
    }
  }

  @Override
  public boolean delete(Path f, boolean recursive) throws IOException {
    ChannelSftp channel = connect();
    try {
      boolean success = delete(channel, f, recursive);
      return success;
    } finally {
      disconnect(channel);
    }
  }

  @Override
  public FileStatus[] listStatus(Path f) throws IOException {
    ChannelSftp client = connect();
    try {
      FileStatus[] stats = listStatus(client, f);
      return stats;
    } finally {
      disconnect(client);
    }
  }

  @Override
  public void setWorkingDirectory(Path newDir) {
    // we do not maintain the working directory state
  }

  @Override
  public Path getWorkingDirectory() {
    // Return home directory always since we do not maintain state.
    return getHomeDirectory();
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   */
  private Path getWorkingDirectory(ChannelSftp client) {
    // Return home directory always since we do not maintain state.
    return getHomeDirectory(client);
  }

  @Override
  public Path getHomeDirectory() {
    ChannelSftp channel = null;
    try {
      channel = connect();
      Path homeDir = new Path(channel.pwd());
      return homeDir;
    } catch (Exception ioe) {
      // best-effort: any failure yields null rather than an exception
      return null;
    } finally {
      try {
        disconnect(channel);
      } catch (IOException ioe) {
        return null;
      }
    }
  }

  /**
   * Convenience method, so that we don't open a new connection when using this
   * method from within another method. Otherwise every API invocation incurs
   * the overhead of opening/closing a TCP connection.
   */
  private Path getHomeDirectory(ChannelSftp channel) {
    try {
      return new Path(channel.pwd());
    } catch (Exception ioe) {
      return null;
    }
  }

  @Override
  public boolean mkdirs(Path f, FsPermission permission) throws IOException {
    ChannelSftp client = connect();
    try {
      boolean success = mkdirs(client, f, permission);
      return success;
    } finally {
      disconnect(client);
    }
  }

  @Override
  public FileStatus getFileStatus(Path f) throws IOException {
    ChannelSftp channel = connect();
    try {
      FileStatus status = getFileStatus(channel, f);
      return status;
    } finally {
      disconnect(channel);
    }
  }

  @Override
  public void close() throws IOException {
    if (closed.getAndSet(true)) {
      // already closed — idempotent
      return;
    }
    try {
      super.close();
    } finally {
      if (connectionPool != null) {
        connectionPool.shutdown();
      }
    }
  }

  /**
   * Verify that the input stream is open. Non blocking; this gives
   * the last state of the volatile {@link #closed} field.
   * @throws IOException if the connection is closed.
   */
  private void checkNotClosed() throws IOException {
    if (closed.get()) {
      throw new IOException(uri + ": " + E_FS_CLOSED);
    }
  }

  @VisibleForTesting
  SFTPConnectionPool getConnectionPool() {
    return connectionPool;
  }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.connectwisdom.model;

import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request for listing the associations of a Wisdom assistant.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/wisdom-2020-10-19/ListAssistantAssociations" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListAssistantAssociationsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The identifier of the Wisdom assistant. Can be either the ID or the ARN. URLs cannot contain the ARN. */
    private String assistantId;

    /** The maximum number of results to return per page. */
    private Integer maxResults;

    /**
     * The token for the next set of results. Use the value returned in the previous response in the next request to
     * retrieve the next set of results.
     */
    private String nextToken;

    /**
     * Sets the identifier of the Wisdom assistant.
     *
     * @param assistantId
     *        The identifier of the Wisdom assistant. Can be either the ID or the ARN. URLs cannot contain the ARN.
     */
    public void setAssistantId(String assistantId) {
        this.assistantId = assistantId;
    }

    /**
     * Returns the identifier of the Wisdom assistant.
     *
     * @return The identifier of the Wisdom assistant. Can be either the ID or the ARN. URLs cannot contain the ARN.
     */
    public String getAssistantId() {
        return this.assistantId;
    }

    /**
     * Fluent variant of {@link #setAssistantId(String)}.
     *
     * @param assistantId
     *        The identifier of the Wisdom assistant. Can be either the ID or the ARN. URLs cannot contain the ARN.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAssistantAssociationsRequest withAssistantId(String assistantId) {
        setAssistantId(assistantId);
        return this;
    }

    /**
     * Sets the maximum number of results to return per page.
     *
     * @param maxResults
     *        The maximum number of results to return per page.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * Returns the maximum number of results to return per page.
     *
     * @return The maximum number of results to return per page.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * Fluent variant of {@link #setMaxResults(Integer)}.
     *
     * @param maxResults
     *        The maximum number of results to return per page.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAssistantAssociationsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * Sets the pagination token.
     *
     * @param nextToken
     *        The token for the next set of results. Use the value returned in the previous response in the next request
     *        to retrieve the next set of results.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token.
     *
     * @return The token for the next set of results. Use the value returned in the previous response in the next
     *         request to retrieve the next set of results.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        The token for the next set of results. Use the value returned in the previous response in the next request
     *        to retrieve the next set of results.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAssistantAssociationsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format matches the SDK's generated toString exactly.
        StringBuilder sb = new StringBuilder("{");
        if (getAssistantId() != null) {
            sb.append("AssistantId: ").append(getAssistantId()).append(",");
        }
        if (getMaxResults() != null) {
            sb.append("MaxResults: ").append(getMaxResults()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListAssistantAssociationsRequest)) {
            return false;
        }
        ListAssistantAssociationsRequest that = (ListAssistantAssociationsRequest) obj;
        return Objects.equals(getAssistantId(), that.getAssistantId())
                && Objects.equals(getMaxResults(), that.getMaxResults())
                && Objects.equals(getNextToken(), that.getNextToken());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the generated field-by-field implementation, so values are identical.
        return Objects.hash(getAssistantId(), getMaxResults(), getNextToken());
    }

    @Override
    public ListAssistantAssociationsRequest clone() {
        return (ListAssistantAssociationsRequest) super.clone();
    }

}
/* * Copyright 2009 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.TreeMap; import org.apache.hadoop.io.Writable; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.util.Bytes; /** * Used to perform Delete operations on a single row. * <p> * To delete an entire row, instantiate a Delete object with the row * to delete. To further define the scope of what to delete, perform * additional methods as outlined below. * <p> * To delete specific families, execute {@link #deleteFamily(byte []) deleteFamily} * for each family to delete. * <p> * To delete multiple versions of specific columns, execute * {@link #deleteColumns(byte [],byte []) deleteColumns} * for each column to delete. * <p> * To delete specific versions of specific columns, execute * {@link #deleteColumn(byte [],byte [],long) deleteColumn} * for each column version to delete. 
* <p> * Specifying timestamps calling constructor, deleteFamily, and deleteColumns * will delete all versions with a timestamp less than or equal to that * specified. Specifying a timestamp to deleteColumn will delete versions * only with a timestamp equal to that specified. * <p>The timestamp passed to the constructor is only used ONLY for delete of * rows. For anything less -- a deleteColumn, deleteColumns or * deleteFamily -- then you need to use the method overrides that take a * timestamp. The constructor timestamp is not referenced. */ public class Delete implements Writable { private byte [] row = null; // This ts is only used when doing a deleteRow. Anything less, private long ts; private long lockId = -1L; private final Map<byte [], List<KeyValue>> familyMap = new TreeMap<byte [], List<KeyValue>>(Bytes.BYTES_COMPARATOR); /** Constructor for Writable. DO NOT USE */ public Delete() { this(null); } /** * Create a Delete operation for the specified row. * <p> * If no further operations are done, this will delete everything * associated with the specified row (all versions of all columns in all * families). * @param row row key */ public Delete(byte [] row) { this(row, HConstants.LATEST_TIMESTAMP, null); } /** * Create a Delete operation for the specified row and timestamp, using * an optional row lock.<p> * * If no further operations are done, this will delete all columns in all * families of the specified row with a timestamp less than or equal to the * specified timestamp.<p> * * This timestamp is ONLY used for a delete row operation. If specifying * families or columns, you must specify each timestamp individually. 
* @param row row key * @param timestamp maximum version timestamp (only for delete row) * @param rowLock previously acquired row lock, or null */ public Delete(byte [] row, long timestamp, RowLock rowLock) { this.row = row; this.ts = timestamp; if (rowLock != null) { this.lockId = rowLock.getLockId(); } } /** * Method to check if the familyMap is empty * @return true if empty, false otherwise */ public boolean isEmpty() { return familyMap.isEmpty(); } /** * Delete all versions of all columns of the specified family. * <p> * Overrides previous calls to deleteColumn and deleteColumns for the * specified family. * @param family family name */ public void deleteFamily(byte [] family) { this.deleteFamily(family, HConstants.LATEST_TIMESTAMP); } /** * Delete all columns of the specified family with a timestamp less than * or equal to the specified timestamp. * <p> * Overrides previous calls to deleteColumn and deleteColumns for the * specified family. * @param family family name * @param timestamp maximum version timestamp */ public void deleteFamily(byte [] family, long timestamp) { List<KeyValue> list = familyMap.get(family); if(list == null) { list = new ArrayList<KeyValue>(); } else if(!list.isEmpty()) { list.clear(); } list.add(new KeyValue(row, family, null, timestamp, KeyValue.Type.DeleteFamily)); familyMap.put(family, list); } /** * Delete all versions of the specified column. * @param family family name * @param qualifier column qualifier */ public void deleteColumns(byte [] family, byte [] qualifier) { this.deleteColumns(family, qualifier, HConstants.LATEST_TIMESTAMP); } /** * Delete all versions of the specified column with a timestamp less than * or equal to the specified timestamp. 
* @param family family name * @param qualifier column qualifier * @param timestamp maximum version timestamp */ public void deleteColumns(byte [] family, byte [] qualifier, long timestamp) { List<KeyValue> list = familyMap.get(family); if (list == null) { list = new ArrayList<KeyValue>(); } list.add(new KeyValue(this.row, family, qualifier, timestamp, KeyValue.Type.DeleteColumn)); familyMap.put(family, list); } /** * Delete all versions of the specified column, given in * <code>family:qualifier</code> notation, and with a timestamp less than * or equal to the specified timestamp. * @param column colon-delimited family and qualifier * @param timestamp maximum version timestamp */ public void deleteColumns(byte [] column, long timestamp) { byte [][] parts = KeyValue.parseColumn(column); this.deleteColumns(parts[0], parts[1], timestamp); } /** * Delete the latest version of the specified column. * This is an expensive call in that on the server-side, it first does a * get to find the latest versions timestamp. Then it adds a delete using * the fetched cells timestamp. * @param family family name * @param qualifier column qualifier */ public void deleteColumn(byte [] family, byte [] qualifier) { this.deleteColumn(family, qualifier, HConstants.LATEST_TIMESTAMP); } /** * Delete the specified version of the specified column. * @param family family name * @param qualifier column qualifier * @param timestamp version timestamp */ public void deleteColumn(byte [] family, byte [] qualifier, long timestamp) { List<KeyValue> list = familyMap.get(family); if(list == null) { list = new ArrayList<KeyValue>(); } list.add(new KeyValue( this.row, family, qualifier, timestamp, KeyValue.Type.Delete)); familyMap.put(family, list); } /** * Delete the latest version of the specified column, given in * <code>family:qualifier</code> notation. 
* @param column colon-delimited family and qualifier */ public void deleteColumn(byte [] column) { byte [][] parts = KeyValue.parseColumn(column); this.deleteColumn(parts[0], parts[1], HConstants.LATEST_TIMESTAMP); } /** * Method for retrieving the delete's familyMap * @return familyMap */ public Map<byte [], List<KeyValue>> getFamilyMap() { return this.familyMap; } /** * Method for retrieving the delete's row * @return row */ public byte [] getRow() { return this.row; } /** * Method for retrieving the delete's RowLock * @return RowLock */ public RowLock getRowLock() { return new RowLock(this.row, this.lockId); } /** * Method for retrieving the delete's lock ID. * * @return The lock ID. */ public long getLockId() { return this.lockId; } /** * Method for retrieving the delete's timestamp * @return timestamp */ public long getTimeStamp() { return this.ts; } /** * @return string */ @Override public String toString() { StringBuffer sb = new StringBuffer(); sb.append("row="); sb.append(Bytes.toString(this.row)); sb.append(", ts="); sb.append(this.ts); sb.append(", families={"); boolean moreThanOne = false; for(Map.Entry<byte [], List<KeyValue>> entry : this.familyMap.entrySet()) { if(moreThanOne) { sb.append(", "); } else { moreThanOne = true; } sb.append("(family="); sb.append(Bytes.toString(entry.getKey())); sb.append(", keyvalues=("); boolean moreThanOneB = false; for(KeyValue kv : entry.getValue()) { if(moreThanOneB) { sb.append(", "); } else { moreThanOneB = true; } sb.append(kv.toString()); } sb.append(")"); } sb.append("}"); return sb.toString(); } //Writable public void readFields(final DataInput in) throws IOException { this.row = Bytes.readByteArray(in); this.ts = in.readLong(); this.lockId = in.readLong(); this.familyMap.clear(); int numFamilies = in.readInt(); for(int i=0;i<numFamilies;i++) { byte [] family = Bytes.readByteArray(in); int numColumns = in.readInt(); List<KeyValue> list = new ArrayList<KeyValue>(numColumns); for(int j=0;j<numColumns;j++) { 
KeyValue kv = new KeyValue(); kv.readFields(in); list.add(kv); } this.familyMap.put(family, list); } } public void write(final DataOutput out) throws IOException { Bytes.writeByteArray(out, this.row); out.writeLong(this.ts); out.writeLong(this.lockId); out.writeInt(familyMap.size()); for(Map.Entry<byte [], List<KeyValue>> entry : familyMap.entrySet()) { Bytes.writeByteArray(out, entry.getKey()); List<KeyValue> list = entry.getValue(); out.writeInt(list.size()); for(KeyValue kv : list) { kv.write(out); } } } }
/* * CPAchecker is a tool for configurable software verification. * This file is part of CPAchecker. * * Copyright (C) 2007-2013 Dirk Beyer * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * CPAchecker web page: * http://cpachecker.sosy-lab.org */ package org.sosy_lab.cpachecker.util.predicates.z3; import static org.sosy_lab.cpachecker.util.predicates.z3.Z3NativeApi.*; import static org.sosy_lab.cpachecker.util.predicates.z3.Z3NativeApiConstants.*; import java.math.BigInteger; import org.sosy_lab.cpachecker.util.predicates.interfaces.basicimpl.AbstractBitvectorFormulaManager; import com.google.common.base.Preconditions; class Z3BitvectorFormulaManager extends AbstractBitvectorFormulaManager<Long> { private final long z3context; private final Z3FormulaCreator creator; protected Z3BitvectorFormulaManager(Z3FormulaCreator creator) { super(creator); this.creator = creator; this.z3context = creator.getEnv(); } @Override public Long concat(Long pFirst, Long pSecond) { return mk_concat(z3context, pFirst, pSecond); } @Override public Long extract(Long pFirst, int pMsb, int pLsb) { return mk_extract(z3context, pMsb, pLsb, pFirst); } @Override public Long extend(Long pNumber, int pExtensionBits, boolean pSigned) { if (pSigned) { return mk_sign_ext(z3context, pExtensionBits, pNumber); } else { return mk_zero_ext(z3context, pExtensionBits, pNumber); } } @Override public Long makeBitvectorImpl(int pLength, long pI) { long sort = mk_bv_sort(z3context, pLength); return 
mk_int64(z3context, pI, sort); } @Override protected Long makeBitvectorImpl(int pLength, BigInteger pI) { return makeBitvectorImpl(pLength, pI.toString()); } @Override public Long makeBitvectorImpl(int pLength, String pI) { long sort = mk_bv_sort(z3context, pLength); return mk_numeral(z3context, pI, sort); } @Override public Long makeVariableImpl(int length, String varName) { long type = creator.getBittype(length); return creator.makeVariable(type, varName); } /** * Returns a term representing the (arithmetic if signed is true) right shift of number by toShift. */ @Override public Long shiftRight(Long number, Long toShift, boolean signed) { if (signed) { return mk_bvashr(z3context, number, toShift); } else { return mk_bvlshr(z3context, number, toShift); } } @Override public Long shiftLeft(Long number, Long toShift) { return mk_bvshl(z3context, number, toShift); } @Override public Long not(Long pBits) { return mk_bvnot(z3context, pBits); } @Override public Long and(Long pBits1, Long pBits2) { return mk_bvand(z3context, pBits1, pBits2); } @Override public Long or(Long pBits1, Long pBits2) { return mk_bvor(z3context, pBits1, pBits2); } @Override public Long xor(Long pBits1, Long pBits2) { return mk_bvxor(z3context, pBits1, pBits2); } @Override public boolean isNot(Long pBits) { return isOP(z3context, pBits, Z3_OP_BNOT); } @Override public boolean isAnd(Long pBits) { return isOP(z3context, pBits, Z3_OP_BAND); } @Override public boolean isOr(Long pBits) { return isOP(z3context, pBits, Z3_OP_BOR); } @Override public boolean isXor(Long pBits) { return isOP(z3context, pBits, Z3_OP_BXOR); } @Override public Long negate(Long pNumber) { return mk_bvneg(z3context, pNumber); } @Override public Long add(Long pNumber1, Long pNumber2) { return mk_bvadd(z3context, pNumber1, pNumber2); } @Override public Long subtract(Long pNumber1, Long pNumber2) { return mk_bvsub(z3context, pNumber1, pNumber2); } @Override public Long divide(Long pNumber1, Long pNumber2, boolean signed) { if 
(signed) { return mk_bvsdiv(z3context, pNumber1, pNumber2); } else { return mk_bvudiv(z3context, pNumber1, pNumber2); } } @Override public Long modulo(Long pNumber1, Long pNumber2, boolean signed) { if (signed) { return mk_bvsrem(z3context, pNumber1, pNumber2); } else { return mk_bvurem(z3context, pNumber1, pNumber2); } } @Override public Long multiply(Long pNumber1, Long pNumber2) { return mk_bvmul(z3context, pNumber1, pNumber2); } @Override public Long equal(Long pNumber1, Long pNumber2) { return mk_eq(z3context, pNumber1, pNumber2); } @Override public Long lessThan(Long pNumber1, Long pNumber2, boolean signed) { if (signed) { return mk_bvslt(z3context, pNumber1, pNumber2); } else { return mk_bvult(z3context, pNumber1, pNumber2); } } @Override public Long lessOrEquals(Long pNumber1, Long pNumber2, boolean signed) { if (signed) { return mk_bvsle(z3context, pNumber1, pNumber2); } else { return mk_bvule(z3context, pNumber1, pNumber2); } } @Override public Long greaterThan(Long pNumber1, Long pNumber2, boolean signed) { return lessThan(pNumber2, pNumber1, signed); } @Override public Long greaterOrEquals(Long pNumber1, Long pNumber2, boolean signed) { return lessOrEquals(pNumber2, pNumber1, signed); } @Override public boolean isNegate(Long pNumber) { return isOP(z3context, pNumber, Z3_OP_BNOT); } @Override public boolean isAdd(Long pNumber) { return isOP(z3context, pNumber, Z3_OP_BADD); } @Override public boolean isSubtract(Long pNumber) { return isOP(z3context, pNumber, Z3_OP_BSUB); } @Override public boolean isDivide(Long pNumber, boolean signed) { if (signed) { return isOP(z3context, pNumber, Z3_OP_BSDIV); } else { return isOP(z3context, pNumber, Z3_OP_BUDIV); } } @Override public boolean isModulo(Long pNumber, boolean signed) { if (signed) { return isOP(z3context, pNumber, Z3_OP_BSREM); } else { return isOP(z3context, pNumber, Z3_OP_BUREM); } } @Override public boolean isMultiply(Long pNumber) { return isOP(z3context, pNumber, Z3_OP_BMUL); } @Override public 
boolean isEqual(Long pNumber) { return isOP(z3context, pNumber, Z3_OP_EQ); } @Override public boolean isGreaterThan(Long pNumber, boolean signed) { return isLessThan(pNumber, signed); } @Override public boolean isGreaterOrEquals(Long pNumber, boolean signed) { return isLessOrEquals(pNumber, signed); } @Override public boolean isLessThan(Long pNumber, boolean signed) { if (signed) { return isOP(z3context, pNumber, Z3_OP_SLT); } else { return isOP(z3context, pNumber, Z3_OP_ULT); } } @Override public boolean isLessOrEquals(Long pNumber, boolean signed) { if (signed) { return isOP(z3context, pNumber, Z3_OP_SLEQ); } else { return isOP(z3context, pNumber, Z3_OP_ULEQ); } } @Override public int getLength(Long pParam) { long sort = get_sort(z3context, pParam); Preconditions.checkArgument(get_sort_kind(z3context, sort) == Z3_BV_SORT); return get_bv_sort_size(z3context, sort); } }
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.keyboard_accessory.bar_component; import static androidx.test.espresso.Espresso.onView; import static androidx.test.espresso.action.ViewActions.click; import static androidx.test.espresso.action.ViewActions.longClick; import static androidx.test.espresso.assertion.ViewAssertions.matches; import static androidx.test.espresso.matcher.ViewMatchers.assertThat; import static androidx.test.espresso.matcher.ViewMatchers.isRoot; import static androidx.test.espresso.matcher.ViewMatchers.isSelected; import static androidx.test.espresso.matcher.ViewMatchers.withChild; import static androidx.test.espresso.matcher.ViewMatchers.withText; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.chromium.chrome.browser.keyboard_accessory.AccessoryAction.AUTOFILL_SUGGESTION; import static org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.BAR_ITEMS; import static org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.DISABLE_ANIMATIONS_FOR_TESTING; import static org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.KEYBOARD_TOGGLE_VISIBLE; import static org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.OBFUSCATED_CHILD_AT_CALLBACK; import static org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.SHEET_TITLE; import static 
org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.SHOW_SWIPING_IPH; import static org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.TAB_LAYOUT_ITEM; import static org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.VISIBLE; import static org.chromium.ui.test.util.ViewUtils.onViewWaiting; import static org.chromium.ui.test.util.ViewUtils.waitForView; import android.content.pm.ActivityInfo; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Rect; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.view.View; import android.view.ViewStub; import androidx.annotation.Nullable; import androidx.test.espresso.ViewInteraction; import androidx.test.espresso.matcher.RootMatchers; import androidx.test.filters.MediumTest; import com.google.android.material.tabs.TabLayout; import org.hamcrest.Matcher; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.chromium.base.Callback; import org.chromium.base.test.util.CommandLineFlags; import org.chromium.base.test.util.Criteria; import org.chromium.base.test.util.CriteriaHelper; import org.chromium.base.test.util.CriteriaNotSatisfiedException; import org.chromium.chrome.browser.autofill.PersonalDataManager; import org.chromium.chrome.browser.feature_engagement.TrackerFactory; import org.chromium.chrome.browser.flags.ChromeFeatureList; import org.chromium.chrome.browser.flags.ChromeSwitches; import org.chromium.chrome.browser.keyboard_accessory.R; import org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.AutofillBarItem; import org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.BarItem; import 
org.chromium.chrome.browser.keyboard_accessory.bar_component.KeyboardAccessoryProperties.TabLayoutBarItem; import org.chromium.chrome.browser.keyboard_accessory.data.KeyboardAccessoryData; import org.chromium.chrome.browser.keyboard_accessory.data.KeyboardAccessoryData.Action; import org.chromium.chrome.browser.keyboard_accessory.tab_layout_component.KeyboardAccessoryTabLayoutCoordinator; import org.chromium.chrome.test.ChromeJUnit4ClassRunner; import org.chromium.chrome.test.ChromeTabbedActivityTestRule; import org.chromium.chrome.test.util.browser.Features.EnableFeatures; import org.chromium.components.autofill.AutofillSuggestion; import org.chromium.components.feature_engagement.EventConstants; import org.chromium.components.feature_engagement.FeatureConstants; import org.chromium.components.feature_engagement.Tracker; import org.chromium.components.feature_engagement.TriggerDetails; import org.chromium.components.feature_engagement.TriggerState; import org.chromium.content_public.browser.test.util.JavaScriptUtils; import org.chromium.content_public.browser.test.util.TestThreadUtils; import org.chromium.ui.DeferredViewStubInflationProvider; import org.chromium.ui.DropdownItem; import org.chromium.ui.ViewProvider; import org.chromium.ui.modelutil.LazyConstructionPropertyMcp; import org.chromium.ui.modelutil.PropertyModel; import org.chromium.ui.widget.ChipView; import org.chromium.ui.widget.ChromeImageView; import org.chromium.url.GURL; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; /** * View tests for the keyboard accessory component. 
 */
@RunWith(ChromeJUnit4ClassRunner.class)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
@EnableFeatures(ChromeFeatureList.AUTOFILL_KEYBOARD_ACCESSORY)
public class KeyboardAccessoryModernViewTest {
    private static final String CUSTOM_ICON_URL = "https://www.example.com/image.png";
    private static final Bitmap TEST_CARD_ART_IMAGE =
            Bitmap.createBitmap(100, 200, Bitmap.Config.ARGB_8888);

    // Model driving the accessory bar; bound lazily to the inflated view in setUp().
    private PropertyModel mModel;
    // Receives the view once the deferred ViewStub inflation completes (capacity 1).
    private BlockingQueue<KeyboardAccessoryModernView> mKeyboardAccessoryView;

    @Rule
    public ChromeTabbedActivityTestRule mActivityTestRule = new ChromeTabbedActivityTestRule();

    @Mock
    PersonalDataManager mMockPersonalDataManager;

    // Minimal in-memory Tracker stub: always allows IPH triggering and records the last
    // emitted event and whether dismissed()/dismissedWithSnooze() was called.
    private static class TestTracker implements Tracker {
        private boolean mWasDismissed;
        private @Nullable String mEmittedEvent;

        @Override
        public void notifyEvent(String event) {
            mEmittedEvent = event;
        }

        public @Nullable String getLastEmittedEvent() {
            return mEmittedEvent;
        }

        @Override
        public boolean shouldTriggerHelpUI(String feature) {
            return true;
        }

        @Override
        public TriggerDetails shouldTriggerHelpUIWithSnooze(String feature) {
            return null;
        }

        @Override
        public boolean wouldTriggerHelpUI(String feature) {
            return true;
        }

        @Override
        public boolean hasEverTriggered(String feature, boolean fromWindow) {
            return true;
        }

        @Override
        public int getTriggerState(String feature) {
            return TriggerState.HAS_NOT_BEEN_DISPLAYED;
        }

        @Override
        public void dismissed(String feature) {
            mWasDismissed = true;
        }

        @Override
        public void dismissedWithSnooze(String feature, int snoozeAction) {
            mWasDismissed = true;
        }

        public boolean wasDismissed() {
            return mWasDismissed;
        }

        @Nullable
        @Override
        public DisplayLockHandle acquireDisplayLock() {
            return () -> {};
        }

        @Override
        public boolean isInitialized() {
            return true;
        }

        @Override
        public void addOnInitializedCallback(Callback<Boolean> callback) {
            assert false : "Implement addOnInitializedCallback if you need it.";
        }
    }

    @Before
    public void setUp() throws InterruptedException {
        MockitoAnnotations.initMocks(this);
        mActivityTestRule.startMainActivityOnBlankPage();
        PersonalDataManager.setInstanceForTesting(mMockPersonalDataManager);
        // Build the model and lazily bind it to the accessory ViewStub on the UI thread.
        // The inflated view is handed to mKeyboardAccessoryView once VISIBLE turns true.
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel = KeyboardAccessoryProperties.defaultModelBuilder()
                             .with(TAB_LAYOUT_ITEM,
                                     new TabLayoutBarItem(new KeyboardAccessoryTabLayoutCoordinator
                                                                  .TabLayoutCallbacks() {
                                         @Override
                                         public void onTabLayoutBound(TabLayout tabs) {}

                                         @Override
                                         public void onTabLayoutUnbound(TabLayout tabs) {}
                                     }))
                             .with(DISABLE_ANIMATIONS_FOR_TESTING, true)
                             .with(OBFUSCATED_CHILD_AT_CALLBACK, unused -> {})
                             .with(SHOW_SWIPING_IPH, false)
                             .build();
            ViewStub viewStub =
                    mActivityTestRule.getActivity().findViewById(R.id.keyboard_accessory_stub);
            mKeyboardAccessoryView = new ArrayBlockingQueue<>(1);
            ViewProvider<KeyboardAccessoryModernView> provider =
                    new DeferredViewStubInflationProvider<>(viewStub);
            LazyConstructionPropertyMcp.create(
                    mModel, VISIBLE, provider, KeyboardAccessoryModernViewBinder::bind);
            provider.whenLoaded(mKeyboardAccessoryView::add);
        });
    }

    @Test
    @MediumTest
    public void testAccessoryVisibilityChangedByModel() throws InterruptedException {
        // Initially, there shouldn't be a view yet.
        assertNull(mKeyboardAccessoryView.poll());

        // After setting the visibility to true, the view should exist and be visible.
        TestThreadUtils.runOnUiThreadBlocking(() -> { mModel.set(VISIBLE, true); });
        KeyboardAccessoryModernView view = mKeyboardAccessoryView.take();
        assertEquals(view.getVisibility(), View.VISIBLE);

        // After hiding the view, the view should still exist but be invisible.
        TestThreadUtils.runOnUiThreadBlocking(() -> { mModel.set(VISIBLE, false); });
        assertNotEquals(view.getVisibility(), View.VISIBLE);
    }

    @Test
    @MediumTest
    public void testAddsClickableAutofillSuggestions() {
        AtomicReference<Boolean> clickRecorded = new AtomicReference<>();
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(
                    createAutofillChipAndTab("Johnathan", result -> clickRecorded.set(true)));
        });
        onViewWaiting(withText("Johnathan")).perform(click());
        assertTrue(clickRecorded.get());
    }

    @Test
    @MediumTest
    public void testAddsLongClickableAutofillSuggestions() {
        AtomicReference<Boolean> clickRecorded = new AtomicReference<>();
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            // The second Action callback is the long-press handler under test.
            mModel.get(BAR_ITEMS).set(new BarItem[] {
                    new AutofillBarItem(
                            new AutofillSuggestion("Johnathan", "Smith", "", DropdownItem.NO_ICON,
                                    false, 1, false, false, false, /* featureForIPH= */ ""),
                            new Action("Unused", AUTOFILL_SUGGESTION, result -> {},
                                    result -> clickRecorded.set(true))),
                    createTabs()});
        });
        onViewWaiting(withText("Johnathan")).perform(longClick());
        assertTrue(clickRecorded.get());
    }

    @Test
    @MediumTest
    public void testUpdatesKeyPaddingAfterRotation() throws InterruptedException {
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.set(KEYBOARD_TOGGLE_VISIBLE, false);
            mModel.set(SHEET_TITLE, "Sheet title");
            mModel.get(BAR_ITEMS).set(createAutofillChipAndTab("John", null));
        });
        KeyboardAccessoryModernView view = mKeyboardAccessoryView.take();
        CriteriaHelper.pollUiThread(view.mBarItemsView::isShown);
        CriteriaHelper.pollUiThread(viewsAreRightAligned(view, view.mBarItemsView.getChildAt(1)));
        // Toggle the bar away, rotate, and bring it back; alignment must be restored.
        TestThreadUtils.runOnUiThreadBlocking(() -> mModel.set(KEYBOARD_TOGGLE_VISIBLE, true));
        CriteriaHelper.pollUiThread(() -> !view.mBarItemsView.isShown());
        rotateActivityToLandscape();
        TestThreadUtils.runOnUiThreadBlocking(() -> mModel.set(KEYBOARD_TOGGLE_VISIBLE, false));
        CriteriaHelper.pollUiThread(view.mBarItemsView::isShown);
        CriteriaHelper.pollUiThread(viewsAreRightAligned(view, view.mBarItemsView.getChildAt(1)));

        // Reset device orientation.
        mActivityTestRule.getActivity().setRequestedOrientation(
                ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
    }

    @Test
    @MediumTest
    public void testDismissesPasswordEducationBubbleOnFilling() {
        AutofillBarItem itemWithIPH = new AutofillBarItem(
                new AutofillSuggestion("Johnathan", "Smith", /*itemTag=*/"", DropdownItem.NO_ICON,
                        false, -2, false, false, false, /* featureForIPH= */ ""),
                new KeyboardAccessoryData.Action("", AUTOFILL_SUGGESTION, unused -> {}));
        itemWithIPH.setFeatureForIPH(FeatureConstants.KEYBOARD_ACCESSORY_PASSWORD_FILLING_FEATURE);
        TestTracker tracker = new TestTracker();
        TrackerFactory.setTrackerForTests(tracker);
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(new BarItem[] {itemWithIPH, createTabs()});
        });
        onViewWaiting(withText("Johnathan"));
        waitForHelpBubble(withText(R.string.iph_keyboard_accessory_fill_with_chrome));
        // The chip is highlighted while the bubble shows, and deselected after filling.
        onView(withChild(withText("Johnathan"))).check(matches(isSelected()));
        onView(withText("Johnathan")).perform(click());
        assertThat(tracker.wasDismissed(), is(true));
        assertThat(tracker.getLastEmittedEvent(),
                is(EventConstants.KEYBOARD_ACCESSORY_PASSWORD_AUTOFILLED));
        onView(withChild(withText("Johnathan"))).check(matches(not(isSelected())));
    }

    @Test
    @MediumTest
    public void testDismissesAddressEducationBubbleOnFilling() {
        AutofillBarItem itemWithIPH = new AutofillBarItem(
                new AutofillSuggestion("Johnathan", "Smith", /*itemTag=*/"", DropdownItem.NO_ICON,
                        false, 1, false, false, false, /* featureForIPH= */ ""),
                new KeyboardAccessoryData.Action("", AUTOFILL_SUGGESTION, unused -> {}));
        itemWithIPH.setFeatureForIPH(FeatureConstants.KEYBOARD_ACCESSORY_ADDRESS_FILL_FEATURE);
        TestTracker tracker = new TestTracker();
        TrackerFactory.setTrackerForTests(tracker);
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(new BarItem[] {itemWithIPH, createTabs()});
        });
        onViewWaiting(withText("Johnathan"));
        waitForHelpBubble(withText(R.string.iph_keyboard_accessory_fill_with_chrome));
        onView(withText("Johnathan")).perform(click());
        assertThat(tracker.wasDismissed(), is(true));
        assertThat(tracker.getLastEmittedEvent(),
                is(EventConstants.KEYBOARD_ACCESSORY_ADDRESS_AUTOFILLED));
    }

    @Test
    @MediumTest
    public void testDismissesPaymentEducationBubbleOnFilling() {
        AutofillBarItem itemWithIPH = new AutofillBarItem(
                new AutofillSuggestion("Johnathan", "Smith", /*itemTag=*/"", DropdownItem.NO_ICON,
                        false, 70000, false, false, false, /* featureForIPH= */ ""),
                new KeyboardAccessoryData.Action("", AUTOFILL_SUGGESTION, unused -> {}));
        itemWithIPH.setFeatureForIPH(FeatureConstants.KEYBOARD_ACCESSORY_PAYMENT_FILLING_FEATURE);
        TestTracker tracker = new TestTracker();
        TrackerFactory.setTrackerForTests(tracker);
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(new BarItem[] {itemWithIPH, createTabs()});
        });
        onViewWaiting(withText("Johnathan"));
        waitForHelpBubble(withText(R.string.iph_keyboard_accessory_fill_with_chrome));
        onView(withText("Johnathan")).perform(click());
        assertThat(tracker.wasDismissed(), is(true));
        assertThat(tracker.getLastEmittedEvent(),
                is(EventConstants.KEYBOARD_ACCESSORY_PAYMENT_AUTOFILLED));
    }

    @Test
    @MediumTest
    public void testDismissesSwipingEducationBubbleOnTap() {
        TestTracker tracker = new TestTracker() {
            @Override
            public int getTriggerState(String feature) {
                // Pretend that an autofill IPH was shown already.
                return feature.equals(FeatureConstants.KEYBOARD_ACCESSORY_PASSWORD_FILLING_FEATURE)
                        ? TriggerState.HAS_BEEN_DISPLAYED
                        : TriggerState.HAS_NOT_BEEN_DISPLAYED;
            }
        };
        TrackerFactory.setTrackerForTests(tracker);

        // Render a keyboard accessory bar and wait for completion.
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(createAutofillChipAndTab("Johnathan", null));
        });
        onViewWaiting(withText("Johnathan"));

        // Pretend an item is offscreen, so swiping is possible and an IPH could be shown.
        TestThreadUtils.runOnUiThreadBlocking(() -> mModel.set(SHOW_SWIPING_IPH, true));

        // Wait until the bubble appears, then dismiss is by tapping it.
        waitForHelpBubble(withText(R.string.iph_keyboard_accessory_swipe_for_more))
                .perform(click());
        assertThat(tracker.wasDismissed(), is(true));
    }

    @Test
    @MediumTest
    public void testDismissesPaymentOfferEducationBubbleOnFilling() {
        String itemTag = "Cashback linked";
        AutofillBarItem itemWithIPH = new AutofillBarItem(
                new AutofillSuggestion("Johnathan", "Smith", itemTag, R.drawable.ic_offer_tag,
                        false, 70000, false, false, false, /* featureForIPH= */ ""),
                new KeyboardAccessoryData.Action("", AUTOFILL_SUGGESTION, unused -> {}));
        itemWithIPH.setFeatureForIPH(FeatureConstants.KEYBOARD_ACCESSORY_PAYMENT_OFFER_FEATURE);
        TestTracker tracker = new TestTracker();
        TrackerFactory.setTrackerForTests(tracker);
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(new BarItem[] {itemWithIPH, createTabs()});
        });
        onViewWaiting(withText("Johnathan"));
        // The offer bubble shows the item tag text rather than a generic IPH string.
        waitForHelpBubble(withText(itemTag));
        onView(withText("Johnathan")).perform(click());
        assertThat(tracker.wasDismissed(), is(true));
        assertThat(tracker.getLastEmittedEvent(),
                is(EventConstants.KEYBOARD_ACCESSORY_PAYMENT_AUTOFILLED));
    }

    @Test
    @MediumTest
    public void testNotifiesAboutPartiallyVisibleSuggestions() throws InterruptedException {
        // Ensure that the callback isn't triggered while all items are visible:
        AtomicInteger obfuscatedChildAt = new AtomicInteger(-1);
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(OBFUSCATED_CHILD_AT_CALLBACK, obfuscatedChildAt::set);
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(createAutofillChipAndTab("John", null));
        });
        KeyboardAccessoryModernView view = mKeyboardAccessoryView.take();
        CriteriaHelper.pollUiThread(() -> view.mBarItemsView.getChildCount() > 0);
        assertThat(obfuscatedChildAt.get(), is(-1));

        // As soon as at least one item can't be displayed in full, trigger the swiping callback.
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.get(BAR_ITEMS).set(new BarItem[] {createAutofillBarItem("JohnathanSmith", null),
                    createAutofillBarItem("TroyMcSpartanGregor", null),
                    createAutofillBarItem("SomeOtherRandomLongName", null),
                    createAutofillBarItem("ToddTester", null),
                    createAutofillBarItem("MayaPark", null),
                    createAutofillBarItem("ThisChipIsProbablyHiddenNow", null), createTabs()});
        });
        onViewWaiting(withText("JohnathanSmith"));
        CriteriaHelper.pollUiThread(() -> obfuscatedChildAt.get() > -1);
    }

    @Test
    @MediumTest
    public void testCustomIconUrlSet_imageReturnedByPersonalDataManager_customIconSetOnChipView()
            throws InterruptedException {
        GURL customIconUrl = mock(GURL.class);
        when(customIconUrl.isValid()).thenReturn(true);
        when(customIconUrl.getSpec()).thenReturn(CUSTOM_ICON_URL);
        // Return the cached image when
        // PersonalDataManager.getCustomImageForAutofillSuggestionIfAvailable is called for the
        // above url.
        when(mMockPersonalDataManager.getCustomImageForAutofillSuggestionIfAvailable(any()))
                .thenReturn(TEST_CARD_ART_IMAGE);

        // Create an autofill suggestion and set the `customIconUrl`.
        AutofillBarItem customIconItem = new AutofillBarItem(
                getDefaultAutofillSuggestionBuilder().setCustomIconUrl(customIconUrl).build(),
                new KeyboardAccessoryData.Action("", AUTOFILL_SUGGESTION, unused -> {}));
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(new BarItem[] {customIconItem, createTabs()});
        });
        KeyboardAccessoryModernView view = mKeyboardAccessoryView.take();
        CriteriaHelper.pollUiThread(() -> view.mBarItemsView.getChildCount() > 0);
        CriteriaHelper.pollUiThread(() -> {
            ChipView chipView = (ChipView) view.mBarItemsView.getChildAt(0);
            ChromeImageView iconImageView = (ChromeImageView) chipView.getChildAt(0);
            return ((BitmapDrawable) iconImageView.getDrawable())
                    .getBitmap()
                    .equals(TEST_CARD_ART_IMAGE);
        });
    }

    @Test
    @MediumTest
    public void testCustomIconUrlSet_imageNotCachedInPersonalDataManager_defaultIconSetOnChipView()
            throws InterruptedException {
        GURL customIconUrl = mock(GURL.class);
        when(customIconUrl.isValid()).thenReturn(true);
        when(customIconUrl.getSpec()).thenReturn(CUSTOM_ICON_URL);
        // Return the response of PersonalDataManager.getCustomImageForAutofillSuggestionIfAvailable
        // to null to indicate that the image is not present in the cache.
        when(mMockPersonalDataManager.getCustomImageForAutofillSuggestionIfAvailable(any()))
                .thenReturn(null);

        AutofillBarItem customIconItem = new AutofillBarItem(
                getDefaultAutofillSuggestionBuilder().setCustomIconUrl(customIconUrl).build(),
                new KeyboardAccessoryData.Action("", AUTOFILL_SUGGESTION, unused -> {}));
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(new BarItem[] {customIconItem, createTabs()});
        });
        KeyboardAccessoryModernView view = mKeyboardAccessoryView.take();
        CriteriaHelper.pollUiThread(() -> view.mBarItemsView.getChildCount() > 0);
        CriteriaHelper.pollUiThread(() -> {
            ChipView chipView = (ChipView) view.mBarItemsView.getChildAt(0);
            ChromeImageView iconImageView = (ChromeImageView) chipView.getChildAt(0);
            Drawable expectedIcon =
                    mActivityTestRule.getActivity().getDrawable(R.drawable.visa_card);
            return getBitmap(expectedIcon).sameAs(getBitmap(iconImageView.getDrawable()));
        });
    }

    @Test
    @MediumTest
    public void testCustomIconUrlNotSet_defaultIconSetOnChipView() throws InterruptedException {
        // Create an autofill suggestion without setting the `customIconUrl`.
        AutofillBarItem itemWithoutCustomIconUrl =
                new AutofillBarItem(getDefaultAutofillSuggestionBuilder().build(),
                        new KeyboardAccessoryData.Action("", AUTOFILL_SUGGESTION, unused -> {}));
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mModel.set(VISIBLE, true);
            mModel.get(BAR_ITEMS).set(new BarItem[] {itemWithoutCustomIconUrl, createTabs()});
        });
        KeyboardAccessoryModernView view = mKeyboardAccessoryView.take();
        CriteriaHelper.pollUiThread(() -> view.mBarItemsView.getChildCount() > 0);
        CriteriaHelper.pollUiThread(() -> {
            ChipView chipView = (ChipView) view.mBarItemsView.getChildAt(0);
            ChromeImageView iconImageView = (ChromeImageView) chipView.getChildAt(0);
            Drawable expectedIcon =
                    mActivityTestRule.getActivity().getDrawable(R.drawable.visa_card);
            return getBitmap(expectedIcon).sameAs(getBitmap(iconImageView.getDrawable()));
        });
    }

    private static AutofillSuggestion.Builder getDefaultAutofillSuggestionBuilder() {
        return new AutofillSuggestion.Builder()
                .setLabel("Johnathan")
                .setSubLabel("Smith")
                .setIconId(R.drawable.visa_card)
                .setSuggestionId(70000);
    }

    // Convert a drawable to a Bitmap for comparison.
    private static Bitmap getBitmap(Drawable drawable) {
        Bitmap bitmap = Bitmap.createBitmap(drawable.getIntrinsicWidth(),
                drawable.getIntrinsicHeight(), Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
        drawable.draw(canvas);
        return bitmap;
    }

    // Waits for a help bubble (which lives in its own window, i.e. not under the main
    // decor view) whose content matches `matcher`.
    private ViewInteraction waitForHelpBubble(Matcher<View> matcher) {
        View mainDecorView = mActivityTestRule.getActivity().getWindow().getDecorView();
        return onView(isRoot())
                .inRoot(RootMatchers.withDecorView(not(is(mainDecorView))))
                .check(waitForView(matcher));
    }

    // Requests landscape orientation and blocks until the web contents report it.
    private void rotateActivityToLandscape() {
        mActivityTestRule.getActivity().setRequestedOrientation(
                ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        CriteriaHelper.pollInstrumentationThread(() -> {
            try {
                String result = JavaScriptUtils.executeJavaScriptAndWaitForResult(
                        mActivityTestRule.getWebContents(),
                        "screen.orientation.type.split('-')[0]");
                Criteria.checkThat(result, is("\"landscape\""));
            } catch (TimeoutException ex) {
                throw new CriteriaNotSatisfiedException(ex);
            }
        });
    }

    // Captures staticView's right edge now; the returned Runnable asserts changingView's
    // right edge matches it when polled.
    private Runnable viewsAreRightAligned(View staticView, View changingView) {
        Rect accessoryViewRect = new Rect();
        staticView.getGlobalVisibleRect(accessoryViewRect);
        return () -> {
            Rect keyItemRect = new Rect();
            changingView.getGlobalVisibleRect(keyItemRect);
            Criteria.checkThat(keyItemRect.right, is(accessoryViewRect.right));
        };
    }

    private BarItem[] createAutofillChipAndTab(String label, Callback<Action> chipCallback) {
        return new BarItem[] {createAutofillBarItem(label, chipCallback), createTabs()};
    }

    private AutofillBarItem createAutofillBarItem(String label, Callback<Action> chipCallback) {
        return new AutofillBarItem(
                new AutofillSuggestion(label, "Smith", /*itemTag=*/"", DropdownItem.NO_ICON, false,
                        1, false, false, false, /* featureForIPH= */ ""),
                new KeyboardAccessoryData.Action("Unused", AUTOFILL_SUGGESTION, chipCallback));
    }

    // Builds the trailing tab-layout item with a single key-icon tab.
    private TabLayoutBarItem createTabs() {
        return new TabLayoutBarItem(new KeyboardAccessoryTabLayoutCoordinator.TabLayoutCallbacks() {
            @Override
            public void onTabLayoutBound(TabLayout tabs) {
                if (tabs.getTabCount() > 0) return;
                tabs.addTab(tabs.newTab()
                                    .setIcon(R.drawable.ic_vpn_key_grey)
                                    .setContentDescription("Key Icon"));
            }

            @Override
            public void onTabLayoutUnbound(TabLayout tabs) {}
        });
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver11;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// GENERATED CODE: OpenFlow 1.1 "set datalink destination" action (type 4, 16 bytes on the
// wire: 2B type + 2B length + 6B MAC + 6B pad). Regenerate via LoxiGen; do not hand-edit.
class OFActionSetDlDstVer11 implements OFActionSetDlDst {
    private static final Logger logger = LoggerFactory.getLogger(OFActionSetDlDstVer11.class);
    // version: 1.1
    final static byte WIRE_VERSION = 2;
    final static int LENGTH = 16;

        private final static MacAddress DEFAULT_DL_ADDR = MacAddress.NONE;

    // OF message fields
    private final MacAddress dlAddr;
//
    // Immutable default instance
    final static OFActionSetDlDstVer11 DEFAULT = new OFActionSetDlDstVer11(
        DEFAULT_DL_ADDR
    );

    // package private constructor - used by readers, builders, and factory
    OFActionSetDlDstVer11(MacAddress dlAddr) {
        if(dlAddr == null) {
            throw new NullPointerException("OFActionSetDlDstVer11: property dlAddr cannot be null");
        }
        this.dlAddr = dlAddr;
    }

    // Accessors for OF message fields
    @Override
    public OFActionType getType() {
        return OFActionType.SET_DL_DST;
    }

    @Override
    public MacAddress getDlAddr() {
        return dlAddr;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_11;
    }

    public OFActionSetDlDst.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to the parent message's field values for anything unset.
    static class BuilderWithParent implements OFActionSetDlDst.Builder {
        final OFActionSetDlDstVer11 parentMessage;

        // OF message fields
        private boolean dlAddrSet;
        private MacAddress dlAddr;

        BuilderWithParent(OFActionSetDlDstVer11 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFActionType getType() {
            return OFActionType.SET_DL_DST;
        }

        @Override
        public MacAddress getDlAddr() {
            return dlAddr;
        }

        @Override
        public OFActionSetDlDst.Builder setDlAddr(MacAddress dlAddr) {
            this.dlAddr = dlAddr;
            this.dlAddrSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_11;
        }

        @Override
        public OFActionSetDlDst build() {
            MacAddress dlAddr = this.dlAddrSet ? this.dlAddr : parentMessage.dlAddr;
            if(dlAddr == null)
                throw new NullPointerException("Property dlAddr must not be null");
//
            return new OFActionSetDlDstVer11(
                dlAddr
            );
        }
    }

    // Stand-alone builder that falls back to the class defaults for anything unset.
    static class Builder implements OFActionSetDlDst.Builder {
        // OF message fields
        private boolean dlAddrSet;
        private MacAddress dlAddr;

        @Override
        public OFActionType getType() {
            return OFActionType.SET_DL_DST;
        }

        @Override
        public MacAddress getDlAddr() {
            return dlAddr;
        }

        @Override
        public OFActionSetDlDst.Builder setDlAddr(MacAddress dlAddr) {
            this.dlAddr = dlAddr;
            this.dlAddrSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_11;
        }

//
        @Override
        public OFActionSetDlDst build() {
            MacAddress dlAddr = this.dlAddrSet ? this.dlAddr : DEFAULT_DL_ADDR;
            if(dlAddr == null)
                throw new NullPointerException("Property dlAddr must not be null");
            return new OFActionSetDlDstVer11(
                dlAddr
            );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes the 16-byte wire form; returns null if the buffer is incomplete.
    static class Reader implements OFMessageReader<OFActionSetDlDst> {
        @Override
        public OFActionSetDlDst readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 4
            short type = bb.readShort();
            if(type != (short) 0x4)
                throw new OFParseError("Wrong type: Expected=OFActionType.SET_DL_DST(4), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 16)
                throw new OFParseError("Wrong length: Expected=16(16), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            MacAddress dlAddr = MacAddress.read6Bytes(bb);
            // pad: 6 bytes
            bb.skipBytes(6);
            OFActionSetDlDstVer11 actionSetDlDstVer11 = new OFActionSetDlDstVer11(
                dlAddr
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", actionSetDlDstVer11);
            return actionSetDlDstVer11;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFActionSetDlDstVer11Funnel FUNNEL = new OFActionSetDlDstVer11Funnel();

    static class OFActionSetDlDstVer11Funnel implements Funnel<OFActionSetDlDstVer11> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFActionSetDlDstVer11 message, PrimitiveSink sink) {
            // fixed value property type = 4
            sink.putShort((short) 0x4);
            // fixed value property length = 16
            sink.putShort((short) 0x10);
            message.dlAddr.putTo(sink);
            // skip pad (6 bytes)
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    static class Writer implements OFMessageWriter<OFActionSetDlDstVer11> {
        @Override
        public void write(ByteBuf bb, OFActionSetDlDstVer11 message) {
            // fixed value property type = 4
            bb.writeShort((short) 0x4);
            // fixed value property length = 16
            bb.writeShort((short) 0x10);
            message.dlAddr.write6Bytes(bb);
            // pad: 6 bytes
            bb.writeZero(6);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFActionSetDlDstVer11(");
        b.append("dlAddr=").append(dlAddr);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFActionSetDlDstVer11 other = (OFActionSetDlDstVer11) obj;
        if (dlAddr == null) {
            if (other.dlAddr != null)
                return false;
        } else if (!dlAddr.equals(other.dlAddr))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((dlAddr == null) ? 0 : dlAddr.hashCode());
        return result;
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver13;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// GENERATED CODE: BSN "port speed in Gbps" TLV (type 0x9c, 8 bytes on the wire:
// 2B type + 2B length + 4B unsigned value). Regenerate via LoxiGen; do not hand-edit.
class OFBsnTlvPortSpeedGbpsVer13 implements OFBsnTlvPortSpeedGbps {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnTlvPortSpeedGbpsVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    final static int LENGTH = 8;

        private final static long DEFAULT_VALUE = 0x0L;

    // OF message fields
    private final long value;
//
    // Immutable default instance
    final static OFBsnTlvPortSpeedGbpsVer13 DEFAULT = new OFBsnTlvPortSpeedGbpsVer13(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnTlvPortSpeedGbpsVer13(long value) {
        // value is an unsigned 32-bit field; normalize into the 0..2^32-1 range.
        this.value = U32.normalize(value);
    }

    // Accessors for OF message fields
    @Override
    public int getType() {
        return 0x9c;
    }

    @Override
    public long getValue() {
        return value;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }

    public OFBsnTlvPortSpeedGbps.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that falls back to the parent message's field values for anything unset.
    static class BuilderWithParent implements OFBsnTlvPortSpeedGbps.Builder {
        final OFBsnTlvPortSpeedGbpsVer13 parentMessage;

        // OF message fields
        private boolean valueSet;
        private long value;

        BuilderWithParent(OFBsnTlvPortSpeedGbpsVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public int getType() {
            return 0x9c;
        }

        @Override
        public long getValue() {
            return value;
        }

        @Override
        public OFBsnTlvPortSpeedGbps.Builder setValue(long value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

        @Override
        public OFBsnTlvPortSpeedGbps build() {
            long value = this.valueSet ? this.value : parentMessage.value;
//
            return new OFBsnTlvPortSpeedGbpsVer13(
                value
            );
        }
    }

    // Stand-alone builder that falls back to the class defaults for anything unset.
    static class Builder implements OFBsnTlvPortSpeedGbps.Builder {
        // OF message fields
        private boolean valueSet;
        private long value;

        @Override
        public int getType() {
            return 0x9c;
        }

        @Override
        public long getValue() {
            return value;
        }

        @Override
        public OFBsnTlvPortSpeedGbps.Builder setValue(long value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }

//
        @Override
        public OFBsnTlvPortSpeedGbps build() {
            long value = this.valueSet ? this.value : DEFAULT_VALUE;
            return new OFBsnTlvPortSpeedGbpsVer13(
                value
            );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes the 8-byte wire form; returns null if the buffer is incomplete.
    static class Reader implements OFMessageReader<OFBsnTlvPortSpeedGbps> {
        @Override
        public OFBsnTlvPortSpeedGbps readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property type == 0x9c
            short type = bb.readShort();
            if(type != (short) 0x9c)
                throw new OFParseError("Wrong type: Expected=0x9c(0x9c), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 8)
                throw new OFParseError("Wrong length: Expected=8(8), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long value = U32.f(bb.readInt());
            OFBsnTlvPortSpeedGbpsVer13 bsnTlvPortSpeedGbpsVer13 = new OFBsnTlvPortSpeedGbpsVer13(
                value
            );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnTlvPortSpeedGbpsVer13);
            return bsnTlvPortSpeedGbpsVer13;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnTlvPortSpeedGbpsVer13Funnel FUNNEL = new OFBsnTlvPortSpeedGbpsVer13Funnel();

    static class OFBsnTlvPortSpeedGbpsVer13Funnel implements Funnel<OFBsnTlvPortSpeedGbpsVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnTlvPortSpeedGbpsVer13 message, PrimitiveSink sink) {
            // fixed value property type = 0x9c
            sink.putShort((short) 0x9c);
            // fixed value property length = 8
            sink.putShort((short) 0x8);
            sink.putLong(message.value);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    static class Writer implements OFMessageWriter<OFBsnTlvPortSpeedGbpsVer13> {
        @Override
        public void write(ByteBuf bb, OFBsnTlvPortSpeedGbpsVer13 message) {
            // fixed value property type = 0x9c
            bb.writeShort((short) 0x9c);
            // fixed value property length = 8
            bb.writeShort((short) 0x8);
            // value is unsigned 32-bit; U32.t truncates the long back to an int.
            bb.writeInt(U32.t(message.value));
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnTlvPortSpeedGbpsVer13(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnTlvPortSpeedGbpsVer13 other = (OFBsnTlvPortSpeedGbpsVer13) obj;
        if( value != other.value)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // NOTE(review): this discards `result` (sibling generated classes use
        // `prime * result + ...`); still a valid hashCode contract-wise, and it matches
        // the LoxiGen template output for long fields — fix in the generator, not here.
        result = prime * (int) (value ^ (value >>> 32));
        return result;
    }
}
package com.caseystella.analytics.outlier.batch.rpca;

import com.caseystella.analytics.DataPoint;
import com.caseystella.analytics.distribution.GlobalStatistics;
import com.caseystella.analytics.distribution.scaling.ScalingFunctions;
import com.caseystella.analytics.outlier.Outlier;
import com.caseystella.analytics.outlier.OutlierMetadataConstants;
import com.caseystella.analytics.outlier.Severity;
import com.caseystella.analytics.outlier.batch.OutlierAlgorithm;
import com.caseystella.analytics.outlier.streaming.OutlierConfig;
import com.caseystella.analytics.util.ConfigUtil;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

import java.util.HashMap;
import java.util.List;

/**
 * Batch outlier detection via Robust Principal Component Analysis (RPCA).
 *
 * <p>The time series (context points plus the candidate point) is optionally
 * scaled, optionally differenced (based on an Augmented Dickey-Fuller
 * stationarity test or the {@code forceDiff} flag), z-score normalized, and
 * decomposed with RPCA. The entry of the sparse component {@code S}
 * corresponding to the candidate point is used as the outlier score: a large
 * absolute value means the point is not explained by the low-rank structure.
 */
public class RPCAOutlierAlgorithm implements OutlierAlgorithm {
    private static final double EPSILON = 1e-12;
    private static final String THRESHOLD_CONF = "rpca.threshold";
    // Defaults applied in outlierScore() when no penalties were configured.
    private final double LPENALTY_DEFAULT = 1;
    private final double SPENALTY_DEFAULT = 1.4;
    // Config keys read in configure().
    private final String LPENALTY_CONFIG = "lpenalty";
    private final String SPENALTY_CONFIG = "spenalty";
    private final String FORCE_DIFF_CONFIG = "forceDiff";
    private final String MIN_RECORDS_CONFIG = "minRecords";
    private Double lpenalty;          // low-rank penalty; null until configured or defaulted
    private Double spenalty;          // sparse penalty; null until configured or defaulted
    private Boolean isForceDiff = false;
    private int minRecords = 0;       // minimum number of non-zero points required to score
    private double threshold = EPSILON;
    private ScalingFunctions scaling = ScalingFunctions.NONE;

    public RPCAOutlierAlgorithm() {
    }

    /** Sets the low-rank (L) penalty. Returns {@code this} for chaining. */
    public RPCAOutlierAlgorithm withLPenalty(double lPenalty) {
        this.lpenalty = lPenalty;
        return this;
    }

    /** Sets the sparse (S) penalty. Returns {@code this} for chaining. */
    public RPCAOutlierAlgorithm withSPenalty(double sPenalty) {
        this.spenalty = sPenalty;
        return this;
    }

    /** Forces differencing of the series regardless of the stationarity test. */
    public RPCAOutlierAlgorithm withForceDiff(boolean forceDiff) {
        this.isForceDiff = forceDiff;
        return this;
    }

    /** Minimum count of non-zero points required before a score is computed. */
    public RPCAOutlierAlgorithm withMinRecords(int minRecords) {
        this.minRecords = minRecords;
        return this;
    }

    /** Scaling function applied to the series before decomposition. */
    public RPCAOutlierAlgorithm withScalingFunction(ScalingFunctions scaling) {
        this.scaling = scaling;
        return this;
    }

    /**
     * Reshapes a vector into a {@code rows x cols} matrix in column-major order
     * (element {@code n} lands at {@code [n % rows][n / rows]}).
     *
     * @param x    the vector to reshape; must have at most {@code rows * cols} elements
     * @param rows number of rows in the result
     * @param cols number of columns in the result
     * @return the reshaped matrix
     */
    public double[][] VectorToMatrix(double[] x, int rows, int cols) {
        double[][] input2DArray = new double[rows][cols];
        for (int n = 0; n < x.length; n++) {
            int i = n % rows;
            // Integer division already floors for non-negative operands;
            // the original Math.floor(n / rows) was a no-op.
            int j = n / rows;
            input2DArray[i][j] = x[n];
        }
        return input2DArray;
    }

    /**
     * Computes the RPCA outlier score of {@code value} against the context
     * {@code dataPoints}.
     *
     * @param dataPoints historical context points
     * @param value      the candidate point (scored last in the series)
     * @return the sparse-component magnitude of the candidate point, or
     *         {@link Double#NaN} when fewer than {@code minRecords} non-zero
     *         points are available
     */
    public double outlierScore(List<DataPoint> dataPoints, DataPoint value) {
        double[] inputData = new double[dataPoints.size() + 1];
        int numNonZero = 0;
        if (scaling != ScalingFunctions.NONE) {
            int i = 0;
            final DescriptiveStatistics stats = new DescriptiveStatistics();
            for (DataPoint dp : dataPoints) {
                inputData[i++] = dp.getValue();
                stats.addValue(dp.getValue());
                numNonZero += dp.getValue() > EPSILON ? 1 : 0;
            }
            inputData[i] = value.getValue();
            GlobalStatistics globalStats = new GlobalStatistics() {{
                setMax(stats.getMax());
                setMin(stats.getMin());
                // BUG FIX: was setMax(stats.getMean()), which clobbered the max
                // with the mean and left the mean unset.
                setMean(stats.getMean());
                setStddev(stats.getStandardDeviation());
            }};
            for (i = 0; i < inputData.length; ++i) {
                inputData[i] = scaling.scale(inputData[i], globalStats);
            }
        } else {
            int i = 0;
            for (DataPoint dp : dataPoints) {
                inputData[i++] = dp.getValue();
                numNonZero += dp.getValue() > EPSILON ? 1 : 0;
            }
            inputData[i] = value.getValue();
        }
        int nCols = 1;
        int nRows = inputData.length;
        if (numNonZero > minRecords) {
            AugmentedDickeyFuller dickeyFullerTest = new AugmentedDickeyFuller(inputData);
            double[] inputArrayTransformed = inputData;
            // Difference the series when forced, or when the ADF test says it
            // is non-stationary. (The original's two branches did the same thing.)
            if (this.isForceDiff || dickeyFullerTest.isNeedsDiff()) {
                inputArrayTransformed = dickeyFullerTest.getZeroPaddedDiff();
            }
            if (this.spenalty == null) {
                // NOTE(review): lpenalty is also reset here even if it was set
                // explicitly via withLPenalty — preserved from the original.
                this.lpenalty = this.LPENALTY_DEFAULT;
                this.spenalty = this.SPENALTY_DEFAULT / Math.sqrt(Math.max(nCols, nRows));
            }
            // Mean of the (possibly differenced) series.
            double mean = 0;
            for (int n = 0; n < inputArrayTransformed.length; n++) {
                mean += inputArrayTransformed[n];
            }
            mean /= inputArrayTransformed.length;
            // Sample standard deviation (n - 1 denominator).
            double stdev = 0;
            for (int n = 0; n < inputArrayTransformed.length; n++) {
                stdev += Math.pow(inputArrayTransformed[n] - mean, 2);
            }
            stdev = Math.sqrt(stdev / (inputArrayTransformed.length - 1));
            // Z-score normalize in place: zero mean, unit variance.
            for (int n = 0; n < inputArrayTransformed.length; n++) {
                inputArrayTransformed[n] = (inputArrayTransformed[n] - mean) / stdev;
            }
            // Reshape into an nRows x 1 matrix and decompose. (The original
            // allocated a throwaway matrix that was immediately overwritten,
            // and extracted the unused E and L components; both removed.)
            double[][] input2DArray = VectorToMatrix(inputArrayTransformed, nRows, nCols);
            RPCA rSVD = new RPCA(input2DArray, this.lpenalty, this.spenalty);
            double[][] outputS = rSVD.getS().getData();
            // Score is the sparse residual of the candidate (last) point.
            return outputS[nRows - 1][0];
        } else {
            return Double.NaN;
        }
    }

    /**
     * Scores the candidate, sets its severity, and — for severe outliers —
     * records the absolute score in the data point's metadata.
     *
     * @param outlierCandidate the outlier to annotate with a severity
     * @param context          historical context points
     * @param dp               the data point being evaluated
     * @return {@code outlierCandidate}, with severity set
     */
    @Override
    public Outlier analyze(Outlier outlierCandidate, List<DataPoint> context, DataPoint dp) {
        double score = outlierScore(context, dp);
        // NaN score means there was not enough non-zero data to decompose.
        Severity severity = Severity.NOT_ENOUGH_DATA;
        if (!Double.isNaN(score)) {
            severity = Math.abs(score) > threshold ? Severity.SEVERE_OUTLIER : Severity.NORMAL;
        }
        outlierCandidate.setSeverity(severity);
        if (severity == Severity.SEVERE_OUTLIER) {
            if (dp.getMetadata() == null) {
                dp.setMetadata(new HashMap<String, String>());
            }
            dp.getMetadata().put(OutlierMetadataConstants.REAL_OUTLIER_SCORE.toString(), Math.abs(score) + "");
        }
        return outlierCandidate;
    }

    /**
     * Reads threshold, penalties, forceDiff and minRecords from the outlier
     * config; any key that is absent leaves the current value untouched.
     */
    @Override
    public void configure(OutlierConfig config) {
        {
            Object thresholdObj = config.getConfig().get(THRESHOLD_CONF);
            if (thresholdObj != null) {
                threshold = ConfigUtil.INSTANCE.coerceDouble(THRESHOLD_CONF, thresholdObj);
            }
        }
        {
            Object lPenaltyObj = config.getConfig().get(LPENALTY_CONFIG);
            if (lPenaltyObj != null) {
                withLPenalty(ConfigUtil.INSTANCE.coerceDouble(LPENALTY_CONFIG, lPenaltyObj));
            }
        }
        {
            Object sPenaltyObj = config.getConfig().get(SPENALTY_CONFIG);
            if (sPenaltyObj != null) {
                withSPenalty(ConfigUtil.INSTANCE.coerceDouble(SPENALTY_CONFIG, sPenaltyObj));
            }
        }
        {
            Object forceDiffObj = config.getConfig().get(FORCE_DIFF_CONFIG);
            if (forceDiffObj != null) {
                withForceDiff(ConfigUtil.INSTANCE.coerceBoolean(FORCE_DIFF_CONFIG, forceDiffObj));
            }
        }
        {
            Object minRecordsObj = config.getConfig().get(MIN_RECORDS_CONFIG);
            if (minRecordsObj != null) {
                withMinRecords(ConfigUtil.INSTANCE.coerceInteger(MIN_RECORDS_CONFIG, minRecordsObj));
            }
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.plugins;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.apache.lucene.analysis.util.CharFilterFactory;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.util.TokenizerFactory;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.PostingsFormat;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.threadpool.ExecutorBuilder;

import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;

/**
 * Discovers, loads and manages plugins and modules: classpath plugins passed
 * directly to the constructor, modules found under the modules directory, and
 * plugins found under the plugins directory. Also dispatches reflective
 * {@code onModule} callbacks and aggregates plugin-contributed settings.
 */
public class PluginsService extends AbstractComponent {

    /**
     * We keep around a list of plugins and modules
     */
    private final List<Tuple<PluginInfo, Plugin>> plugins;
    // Aggregated metadata about everything that was loaded, exposed via info().
    private final PluginsAndModules info;

    // Comma-separated list of plugin names that MUST be present at startup;
    // the constructor throws if any is missing.
    public static final Setting<List<String>> MANDATORY_SETTING =
        Setting.listSetting("plugin.mandatory", Collections.emptyList(), Function.identity(), Property.NodeScope);

    // Per-plugin cache of reflective onModule(SomeModule) methods, built once
    // in the constructor and consulted by processModule().
    private final Map<Plugin, List<OnModuleReference>> onModuleReferences;

    /** Returns the settings declared by every loaded plugin, flattened into one list. */
    public List<Setting<?>> getPluginSettings() {
        return plugins.stream().flatMap(p -> p.v2().getSettings().stream()).collect(Collectors.toList());
    }

    /** Returns the settings-filter patterns declared by every loaded plugin. */
    public List<String> getPluginSettingsFilter() {
        return plugins.stream().flatMap(p -> p.v2().getSettingsFilter().stream()).collect(Collectors.toList());
    }

    // Pairs a Module subtype with a plugin's reflective onModule(ThatModule) method.
    static class OnModuleReference {
        public final Class<? extends Module> moduleClass;
        public final Method onModuleMethod;

        OnModuleReference(Class<? extends Module> moduleClass, Method onModuleMethod) {
            this.moduleClass = moduleClass;
            this.onModuleMethod = onModuleMethod;
        }
    }

    /**
     * Constructs a new PluginService
     * @param settings The settings of the system
     * @param modulesDirectory The directory modules exist in, or null if modules should
     * not be loaded from the filesystem
     * @param pluginsDirectory The directory plugins exist in, or null if plugins should
     * not be loaded from the filesystem
     * @param classpathPlugins Plugins that exist in the classpath which should be loaded
     */
    public PluginsService(Settings settings, Path modulesDirectory, Path pluginsDirectory,
                          Collection<Class<? extends Plugin>> classpathPlugins) {
        super(settings);
        info = new PluginsAndModules();
        List<Tuple<PluginInfo, Plugin>> pluginsLoaded = new ArrayList<>();

        // first we load plugins that are on the classpath. this is for tests and transport clients
        for (Class<? extends Plugin> pluginClass : classpathPlugins) {
            Plugin plugin = loadPlugin(pluginClass, settings);
            PluginInfo pluginInfo = new PluginInfo(pluginClass.getName(), "classpath plugin", "NA", pluginClass.getName());
            if (logger.isTraceEnabled()) {
                logger.trace("plugin loaded from classpath [{}]", pluginInfo);
            }
            pluginsLoaded.add(new Tuple<>(pluginInfo, plugin));
            info.addPlugin(pluginInfo);
        }

        // load modules
        if (modulesDirectory != null) {
            try {
                List<Bundle> bundles = getModuleBundles(modulesDirectory);
                List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
                pluginsLoaded.addAll(loaded);
                for (Tuple<PluginInfo, Plugin> module : loaded) {
                    info.addModule(module.v1());
                }
            } catch (IOException ex) {
                throw new IllegalStateException("Unable to initialize modules", ex);
            }
        }

        // now, find all the ones that are in plugins/
        if (pluginsDirectory != null) {
            try {
                List<Bundle> bundles = getPluginBundles(pluginsDirectory);
                List<Tuple<PluginInfo, Plugin>> loaded = loadBundles(bundles);
                pluginsLoaded.addAll(loaded);
                for (Tuple<PluginInfo, Plugin> plugin : loaded) {
                    info.addPlugin(plugin.v1());
                }
            } catch (IOException ex) {
                throw new IllegalStateException("Unable to initialize plugins", ex);
            }
        }

        plugins = Collections.unmodifiableList(pluginsLoaded);

        // We need to build a List of plugins for checking mandatory plugins
        Set<String> pluginsNames = new HashSet<>();
        for (Tuple<PluginInfo, Plugin> tuple : plugins) {
            pluginsNames.add(tuple.v1().getName());
        }

        // Checking expected plugins
        List<String> mandatoryPlugins = MANDATORY_SETTING.get(settings);
        if (mandatoryPlugins.isEmpty() == false) {
            Set<String> missingPlugins = new HashSet<>();
            for (String mandatoryPlugin : mandatoryPlugins) {
                if (!pluginsNames.contains(mandatoryPlugin) && !missingPlugins.contains(mandatoryPlugin)) {
                    missingPlugins.add(mandatoryPlugin);
                }
            }
            if (!missingPlugins.isEmpty()) {
                throw new ElasticsearchException("Missing mandatory plugins ["
                    + Strings.collectionToDelimitedString(missingPlugins, ", ") + "]");
            }
        }

        // we don't log jars in lib/ we really shouldn't log modules,
        // but for now: just be transparent so we can debug any potential issues
        logPluginInfo(info.getModuleInfos(), "module", logger);
        logPluginInfo(info.getPluginInfos(), "plugin", logger);

        // Scan every plugin for legacy onModule(SomeModule) hooks and cache them.
        Map<Plugin, List<OnModuleReference>> onModuleReferences = new HashMap<>();
        for (Tuple<PluginInfo, Plugin> pluginEntry : plugins) {
            Plugin plugin = pluginEntry.v2();
            List<OnModuleReference> list = new ArrayList<>();
            for (Method method : plugin.getClass().getMethods()) {
                if (!method.getName().equals("onModule")) {
                    continue;
                }
                // this is a deprecated final method, so all Plugin subclasses have it
                if (method.getParameterTypes().length == 1 && method.getParameterTypes()[0].equals(IndexModule.class)) {
                    continue;
                }
                // only single-argument onModule methods are dispatchable
                if (method.getParameterTypes().length == 0 || method.getParameterTypes().length > 1) {
                    logger.warn("Plugin: {} implementing onModule with no parameters or more than one parameter",
                        pluginEntry.v1().getName());
                    continue;
                }
                Class moduleClass = method.getParameterTypes()[0];
                if (!Module.class.isAssignableFrom(moduleClass)) {
                    if (method.getDeclaringClass() == Plugin.class) {
                        // These are still part of the Plugin class to point the user to the new implementations
                        continue;
                    }
                    throw new RuntimeException(
                        "Plugin: [" + pluginEntry.v1().getName() + "] implements onModule taking a parameter that isn't a Module ["
                            + moduleClass.getSimpleName() + "]");
                }
                list.add(new OnModuleReference(moduleClass, method));
            }
            if (!list.isEmpty()) {
                onModuleReferences.put(plugin, list);
            }
        }
        this.onModuleReferences = Collections.unmodifiableMap(onModuleReferences);
    }

    // Logs one line per loaded module/plugin (sorted by name), or a "none loaded" line.
    private static void logPluginInfo(final List<PluginInfo> pluginInfos, final String type, final ESLogger logger) {
        assert pluginInfos != null;
        if (pluginInfos.isEmpty()) {
            logger.info("no " + type + "s loaded");
        } else {
            for (final String name : pluginInfos.stream().map(PluginInfo::getName).sorted().collect(Collectors.toList())) {
                logger.info("loaded " + type + " [" + name + "]");
            }
        }
    }

    private List<Tuple<PluginInfo, Plugin>> plugins() {
        return plugins;
    }

    /** Dispatches {@link #processModule(Module)} for every module in the iterable. */
    public void processModules(Iterable<Module> modules) {
        for (Module module : modules) {
            processModule(module);
        }
    }

    /**
     * Invokes each plugin's cached {@code onModule} method whose parameter type
     * is assignable from the given module's class.
     */
    public void processModule(Module module) {
        for (Tuple<PluginInfo, Plugin> plugin : plugins()) {
            // see if there are onModule references
            List<OnModuleReference> references = onModuleReferences.get(plugin.v2());
            if (references != null) {
                for (OnModuleReference reference : references) {
                    if (reference.moduleClass.isAssignableFrom(module.getClass())) {
                        try {
                            reference.onModuleMethod.invoke(plugin.v2(), module);
                        } catch (IllegalAccessException | InvocationTargetException e) {
                            logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v1().getName());
                            throw new ElasticsearchException("failed to invoke onModule", e);
                        } catch (Exception e) {
                            logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v1().getName());
                            throw e;
                        }
                    }
                }
            }
        }
    }

    /**
     * Merges {@code additionalSettings()} from every plugin into the node
     * settings; throws if two plugins contribute the same setting key.
     */
    public Settings updatedSettings() {
        Map<String, String> foundSettings = new HashMap<>();
        final Settings.Builder builder = Settings.builder();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            Settings settings = plugin.v2().additionalSettings();
            for (String setting : settings.getAsMap().keySet()) {
                // remember which plugin claimed each key so duplicates can be reported
                String oldPlugin = foundSettings.put(setting, plugin.v1().getName());
                if (oldPlugin != null) {
                    throw new IllegalArgumentException("Cannot have additional setting [" + setting + "] " +
                        "in plugin [" + plugin.v1().getName() + "], already added in plugin [" + oldPlugin + "]");
                }
            }
            builder.put(settings);
        }
        // node settings are applied last, so they win over plugin-provided values
        return builder.put(this.settings).build();
    }

    /** Collects the Guice modules contributed by all loaded plugins. */
    public Collection<Module> createGuiceModules() {
        List<Module> modules = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            modules.addAll(plugin.v2().createGuiceModules());
        }
        return modules;
    }

    /** Collects the thread-pool executor builders contributed by all loaded plugins. */
    public List<ExecutorBuilder<?>> getExecutorBuilders(Settings settings) {
        final ArrayList<ExecutorBuilder<?>> builders = new ArrayList<>();
        for (final Tuple<PluginInfo, Plugin> plugin : plugins) {
            builders.addAll(plugin.v2().getExecutorBuilders(settings));
        }
        return builders;
    }

    /** Returns all classes injected into guice by plugins which extend {@link LifecycleComponent}. */
    public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
        List<Class<? extends LifecycleComponent>> services = new ArrayList<>();
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            services.addAll(plugin.v2().getGuiceServiceClasses());
        }
        return services;
    }

    /** Gives every loaded plugin a chance to customize the given index module. */
    public void onIndexModule(IndexModule indexModule) {
        for (Tuple<PluginInfo, Plugin> plugin : plugins) {
            plugin.v2().onIndexModule(indexModule);
        }
    }

    /**
     * Get information about plugins and modules
     */
    public PluginsAndModules info() {
        return info;
    }

    // a "bundle" is a group of plugins in a single classloader
    // really should be 1-1, but we are not so fortunate
    static class Bundle {
        List<PluginInfo> plugins = new ArrayList<>();
        List<URL> urls = new ArrayList<>();
    }

    // similar in impl to getPluginBundles, but DO NOT try to make them share code.
    // we don't need to inherit all the leniency, and things are different enough.
    static List<Bundle> getModuleBundles(Path modulesDirectory) throws IOException {
        // damn leniency
        if (Files.notExists(modulesDirectory)) {
            return Collections.emptyList();
        }
        List<Bundle> bundles = new ArrayList<>();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(modulesDirectory)) {
            for (Path module : stream) {
                if (FileSystemUtils.isHidden(module)) {
                    continue; // skip over .DS_Store etc
                }
                PluginInfo info = PluginInfo.readFromProperties(module);
                Bundle bundle = new Bundle();
                bundle.plugins.add(info);
                // gather urls for jar files
                try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(module, "*.jar")) {
                    for (Path jar : jarStream) {
                        // normalize with toRealPath to get symlinks out of our hair
                        bundle.urls.add(jar.toRealPath().toUri().toURL());
                    }
                }
                bundles.add(bundle);
            }
        }
        return bundles;
    }

    // Builds one Bundle per plugin directory; each plugin's descriptor and jar
    // urls are collected for later classloading in loadBundles().
    static List<Bundle> getPluginBundles(Path pluginsDirectory) throws IOException {
        ESLogger logger = Loggers.getLogger(PluginsService.class);

        // TODO: remove this leniency, but tests bogusly rely on it
        if (!isAccessibleDirectory(pluginsDirectory, logger)) {
            return Collections.emptyList();
        }

        List<Bundle> bundles = new ArrayList<>();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(pluginsDirectory)) {
            for (Path plugin : stream) {
                if (FileSystemUtils.isHidden(plugin)) {
                    logger.trace("--- skip hidden plugin file[{}]", plugin.toAbsolutePath());
                    continue;
                }
                logger.trace("--- adding plugin [{}]", plugin.toAbsolutePath());
                final PluginInfo info;
                try {
                    info = PluginInfo.readFromProperties(plugin);
                } catch (IOException e) {
                    throw new IllegalStateException("Could not load plugin descriptor for existing plugin ["
                        + plugin.getFileName() + "]. Was the plugin built before 2.0?", e);
                }
                List<URL> urls = new ArrayList<>();
                try (DirectoryStream<Path> jarStream = Files.newDirectoryStream(plugin, "*.jar")) {
                    for (Path jar : jarStream) {
                        // normalize with toRealPath to get symlinks out of our hair
                        urls.add(jar.toRealPath().toUri().toURL());
                    }
                }
                final Bundle bundle = new Bundle();
                bundles.add(bundle);
                bundle.plugins.add(info);
                bundle.urls.addAll(urls);
            }
        }
        return bundles;
    }

    // Jar-hell checks each bundle against the parent classpath, creates a child
    // classloader per bundle, reloads the Lucene SPI registries against it, and
    // instantiates every plugin in the bundle.
    private List<Tuple<PluginInfo,Plugin>> loadBundles(List<Bundle> bundles) {
        List<Tuple<PluginInfo, Plugin>> plugins = new ArrayList<>();

        for (Bundle bundle : bundles) {
            // jar-hell check the bundle against the parent classloader
            // pluginmanager does it, but we do it again, in case lusers mess with jar files manually
            try {
                final List<URL> jars = new ArrayList<>();
                jars.addAll(Arrays.asList(JarHell.parseClassPath()));
                jars.addAll(bundle.urls);
                JarHell.checkJarHell(jars.toArray(new URL[0]));
            } catch (Exception e) {
                throw new IllegalStateException("failed to load bundle " + bundle.urls + " due to jar hell", e);
            }

            // create a child to load the plugins in this bundle
            ClassLoader loader = URLClassLoader.newInstance(bundle.urls.toArray(new URL[0]), getClass().getClassLoader());
            for (PluginInfo pluginInfo : bundle.plugins) {
                // reload lucene SPI with any new services from the plugin
                reloadLuceneSPI(loader);
                final Class<? extends Plugin> pluginClass = loadPluginClass(pluginInfo.getClassname(), loader);
                final Plugin plugin = loadPlugin(pluginClass, settings);
                plugins.add(new Tuple<>(pluginInfo, plugin));
            }
        }

        return Collections.unmodifiableList(plugins);
    }

    /**
     * Reloads all Lucene SPI implementations using the new classloader.
     * This method must be called after the new classloader has been created to
     * register the services for use.
     */
    static void reloadLuceneSPI(ClassLoader loader) {
        // do NOT change the order of these method calls!

        // Codecs:
        PostingsFormat.reloadPostingsFormats(loader);
        DocValuesFormat.reloadDocValuesFormats(loader);
        Codec.reloadCodecs(loader);
        // Analysis:
        CharFilterFactory.reloadCharFilters(loader);
        TokenFilterFactory.reloadTokenFilters(loader);
        TokenizerFactory.reloadTokenizers(loader);
    }

    // Resolves a plugin's main class through the bundle's classloader.
    private Class<? extends Plugin> loadPluginClass(String className, ClassLoader loader) {
        try {
            return loader.loadClass(className).asSubclass(Plugin.class);
        } catch (ClassNotFoundException e) {
            throw new ElasticsearchException("Could not find plugin class [" + className + "]", e);
        }
    }

    // Instantiates a plugin via its Settings constructor, falling back to the
    // no-arg constructor; anything else is an error.
    private Plugin loadPlugin(Class<? extends Plugin> pluginClass, Settings settings) {
        try {
            try {
                return pluginClass.getConstructor(Settings.class).newInstance(settings);
            } catch (NoSuchMethodException e) {
                try {
                    return pluginClass.getConstructor().newInstance();
                } catch (NoSuchMethodException e1) {
                    throw new ElasticsearchException("No constructor for [" + pluginClass + "]. A plugin class must " +
                        "have either an empty default constructor or a single argument constructor accepting a " +
                        "Settings instance");
                }
            }
        } catch (Exception e) {
            throw new ElasticsearchException("Failed to load plugin class [" + pluginClass.getName() + "]", e);
        }
    }

    /** Returns the loaded plugin instances that are assignable to the given type. */
    public <T> List<T> filterPlugins(Class<T> type) {
        return plugins.stream().filter(x -> type.isAssignableFrom(x.v2().getClass()))
            .map(p -> ((T)p.v2())).collect(Collectors.toList());
    }
}
/**
 * $Revision: $
 * $Date: $
 *
 * Copyright (C) 2005-2008 Jive Software. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jivesoftware.openfire.group;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.jivesoftware.database.DbConnectionManager;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.util.JiveGlobals;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmpp.packet.JID;

/**
 * The JDBC group provider allows you to use an external database to define the make up of groups.
 * It is best used with the JDBCAuthProvider to provide integration between your external system and
 * Openfire. All data is treated as read-only so any set operations will result in an exception.
 *
 * To enable this provider, set the following in the system properties:
 *
 * <ul>
 * <li><tt>provider.group.className = org.jivesoftware.openfire.group.JDBCGroupProvider</tt></li>
 * </ul>
 *
 * Then you need to set your driver, connection string and SQL statements:
 *
 * <ul>
 * <li><tt>jdbcProvider.driver = com.mysql.jdbc.Driver</tt></li>
 * <li><tt>jdbcProvider.connectionString = jdbc:mysql://localhost/dbname?user=username&amp;password=secret</tt></li>
 * <li><tt>jdbcGroupProvider.groupCountSQL = SELECT count(*) FROM myGroups</tt></li>
 * <li><tt>jdbcGroupProvider.allGroupsSQL = SELECT groupName FROM myGroups</tt></li>
 * <li><tt>jdbcGroupProvider.userGroupsSQL = SELECT groupName FROM myGroupUsers WHERE username=?</tt></li>
 * <li><tt>jdbcGroupProvider.descriptionSQL = SELECT groupDescription FROM myGroups WHERE groupName=?</tt></li>
 * <li><tt>jdbcGroupProvider.loadMembersSQL = SELECT username FROM myGroupUsers WHERE groupName=? AND isAdmin='N'</tt></li>
 * <li><tt>jdbcGroupProvider.loadAdminsSQL = SELECT username FROM myGroupUsers WHERE groupName=? AND isAdmin='Y'</tt></li>
 * </ul>
 *
 * In order to use the configured JDBC connection provider do not use a JDBC
 * connection string, set the following property
 *
 * <ul>
 * <li><tt>jdbcGroupProvider.useConnectionProvider = true</tt></li>
 * </ul>
 *
 * @author David Snopek
 */
public class JDBCGroupProvider extends AbstractGroupProvider {

    private static final Logger Log = LoggerFactory.getLogger(JDBCGroupProvider.class);

    // Raw JDBC connection string, used only when useConnectionProvider is false.
    private String connectionString;

    // SQL statements loaded from Jive properties in the constructor; any of
    // them may be null if the corresponding property is not set.
    private String groupCountSQL;
    private String descriptionSQL;
    private String allGroupsSQL;
    private String userGroupsSQL;
    private String loadMembersSQL;
    private String loadAdminsSQL;
    private boolean useConnectionProvider;

    private XMPPServer server = XMPPServer.getInstance();

    /**
     * Constructor of the JDBCGroupProvider class.
     * Migrates XML-based configuration to database properties, optionally loads
     * the JDBC driver, and reads the configured SQL statements.
     */
    public JDBCGroupProvider() {
        // Convert XML based provider setup to Database based
        JiveGlobals.migrateProperty("jdbcProvider.driver");
        JiveGlobals.migrateProperty("jdbcProvider.connectionString");
        JiveGlobals.migrateProperty("jdbcGroupProvider.groupCountSQL");
        JiveGlobals.migrateProperty("jdbcGroupProvider.allGroupsSQL");
        JiveGlobals.migrateProperty("jdbcGroupProvider.userGroupsSQL");
        JiveGlobals.migrateProperty("jdbcGroupProvider.descriptionSQL");
        JiveGlobals.migrateProperty("jdbcGroupProvider.loadMembersSQL");
        JiveGlobals.migrateProperty("jdbcGroupProvider.loadAdminsSQL");

        useConnectionProvider = JiveGlobals.getBooleanProperty("jdbcGroupProvider.useConnectionProvider");

        if (!useConnectionProvider) {
            // Load the JDBC driver and connection string.
            String jdbcDriver = JiveGlobals.getProperty("jdbcProvider.driver");
            try {
                Class.forName(jdbcDriver).newInstance();
            }
            catch (Exception e) {
                // NOTE(review): driver load failure is only logged; later
                // getConnection() calls will fail with a null/unusable driver.
                Log.error("Unable to load JDBC driver: " + jdbcDriver, e);
                return;
            }
            connectionString = JiveGlobals.getProperty("jdbcProvider.connectionString");
        }

        // Load SQL statements
        groupCountSQL = JiveGlobals.getProperty("jdbcGroupProvider.groupCountSQL");
        allGroupsSQL = JiveGlobals.getProperty("jdbcGroupProvider.allGroupsSQL");
        userGroupsSQL = JiveGlobals.getProperty("jdbcGroupProvider.userGroupsSQL");
        descriptionSQL = JiveGlobals.getProperty("jdbcGroupProvider.descriptionSQL");
        loadMembersSQL = JiveGlobals.getProperty("jdbcGroupProvider.loadMembersSQL");
        loadAdminsSQL = JiveGlobals.getProperty("jdbcGroupProvider.loadAdminsSQL");
    }

    // Returns either a pooled connection from the connection provider or a
    // direct DriverManager connection, depending on configuration.
    private Connection getConnection() throws SQLException {
        if (useConnectionProvider)
            return DbConnectionManager.getConnection();
        return DriverManager.getConnection(connectionString);
    }

    /**
     * Loads a group by name: its description plus member and administrator lists.
     *
     * @param name the group name
     * @return the loaded group
     * @throws GroupNotFoundException if the description query returns no row
     */
    public Group getGroup(String name) throws GroupNotFoundException {
        String description = null;

        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = getConnection();
            pstmt = con.prepareStatement(descriptionSQL);
            pstmt.setString(1, name);
            rs = pstmt.executeQuery();
            if (!rs.next()) {
                // thrown past the SQLException catch below, so it reaches the caller
                throw new GroupNotFoundException("Group with name "
                        + name + " not found.");
            }
            description = rs.getString(1);
        }
        catch (SQLException e) {
            // NOTE(review): on SQL error a Group with a null description is
            // still returned — preserved original behavior.
            Log.error(e.getMessage(), e);
        }
        finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        Collection<JID> members = getMembers(name, false);
        Collection<JID> administrators = getMembers(name, true);
        return new Group(name, description, members, administrators);
    }

    // Loads the member (or admin) JIDs of a group. Bare usernames from the
    // database are turned into local JIDs; values containing '@' are treated
    // as full JIDs. Returns an empty list when adminsOnly is requested but no
    // loadAdminsSQL is configured, or on SQL error (logged).
    private Collection<JID> getMembers(String groupName, boolean adminsOnly) {
        List<JID> members = new ArrayList<JID>();
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = getConnection();
            if (adminsOnly) {
                if (loadAdminsSQL == null) {
                    return members;
                }
                pstmt = con.prepareStatement(loadAdminsSQL);
            }
            else {
                pstmt = con.prepareStatement(loadMembersSQL);
            }

            pstmt.setString(1, groupName);
            rs = pstmt.executeQuery();
            while (rs.next()) {
                String user = rs.getString(1);
                if (user != null) {
                    JID userJID;
                    if (user.contains("@")) {
                        userJID = new JID(user);
                    }
                    else {
                        userJID = server.createJID(user, null);
                    }
                    members.add(userJID);
                }
            }
        }
        catch (SQLException e) {
            Log.error(e.getMessage(), e);
        }
        finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return members;
    }

    /**
     * Returns the total number of groups, or 0 on SQL error (logged).
     */
    public int getGroupCount() {
        int count = 0;
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;

        try {
            con = getConnection();
            pstmt = con.prepareStatement(groupCountSQL);
            rs = pstmt.executeQuery();
            if (rs.next()) {
                count = rs.getInt(1);
            }
        }
        catch (SQLException e) {
            Log.error(e.getMessage(), e);
        }
        finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return count;
    }

    /**
     * Returns all group names, or an empty list on SQL error (logged).
     */
    public Collection<String> getGroupNames() {
        List<String> groupNames = new ArrayList<String>();
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = getConnection();
            pstmt = con.prepareStatement(allGroupsSQL);
            rs = pstmt.executeQuery();
            while (rs.next()) {
                groupNames.add(rs.getString(1));
            }
        }
        catch (SQLException e) {
            Log.error(e.getMessage(), e);
        }
        finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return groupNames;
    }

    /**
     * Returns a page of group names starting at {@code start}, at most
     * {@code num} entries, using a scrollable result set.
     */
    public Collection<String> getGroupNames(int start, int num) {
        List<String> groupNames = new ArrayList<String>();
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = getConnection();
            pstmt = DbConnectionManager.createScrollablePreparedStatement(con, allGroupsSQL);
            rs = pstmt.executeQuery();
            // advance to the requested offset before collecting rows
            DbConnectionManager.scrollResultSet(rs, start);
            int count = 0;
            while (rs.next() && count < num) {
                groupNames.add(rs.getString(1));
                count++;
            }
        }
        catch (SQLException e) {
            Log.error(e.getMessage(), e);
        }
        finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return groupNames;
    }

    /**
     * Returns the names of the groups the given user belongs to. Local users
     * are matched by node (username); remote users by full JID string.
     */
    public Collection<String> getGroupNames(JID user) {
        List<String> groupNames = new ArrayList<String>();
        Connection con = null;
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            con = getConnection();
            pstmt = con.prepareStatement(userGroupsSQL);
            pstmt.setString(1, server.isLocal(user) ? user.getNode() : user.toString());
            rs = pstmt.executeQuery();
            while (rs.next()) {
                groupNames.add(rs.getString(1));
            }
        }
        catch (SQLException e) {
            Log.error(e.getMessage(), e);
        }
        finally {
            DbConnectionManager.closeConnection(rs, pstmt, con);
        }
        return groupNames;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.harmony.logging.tests.java.util.logging;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.security.Permission;
import java.util.Properties;
import java.util.logging.Filter;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.LogRecord;
import java.util.logging.LoggingPermission;
import java.util.logging.SocketHandler;
import java.util.logging.XMLFormatter;

import junit.framework.TestCase;

import org.apache.harmony.logging.tests.java.util.logging.HandlerTest.NullOutputStream;
import org.apache.harmony.logging.tests.java.util.logging.util.EnvironmentHelper;

import tests.util.CallVerificationStack;

/**
 * Test class java.util.logging.ConsoleHandler
 *
 * NOTE(review): despite the javadoc above, this class actually exercises
 * {@link java.util.logging.SocketHandler}. Tests spin up a local
 * {@link ServerThread} on port 6666 and assert on the bytes the handler wrote
 * over the socket. The fixed port and Thread.sleep(2000) handshakes make these
 * tests timing- and environment-sensitive.
 */
public class SocketHandlerTest extends TestCase {

    private static final LogManager LOG_MANAGER = LogManager.getLogManager();

    // A level name that Level.parse() cannot resolve; used to test fallback to ALL.
    private final static String INVALID_LEVEL = "impossible_level";

    // Original System.err, restored in tearDown().
    private final PrintStream err = System.err;

    // Sink that swallows handler error output during a test run.
    private OutputStream errSubstituteStream = null;

    // Used to build "<outer-class>$Mock*" property values for LogManager config.
    private static String className = SocketHandlerTest.class.getName();

    // Handler under test; closed (if non-null) in tearDown().
    private SocketHandler h = null;

    private Properties props;

    /*
     * @see TestCase#setUp()
     */
    protected void setUp() throws Exception {
        super.setUp();
        // Redirect System.err so handler error reports don't pollute test output.
        errSubstituteStream = new NullOutputStream();
        System.setErr(new PrintStream(errSubstituteStream));
    }

    /*
     * @see TestCase#tearDown()
     */
    protected void tearDown() throws Exception {
        // Restore a known-good LogManager configuration for subsequent tests.
        initProps();
        LOG_MANAGER.reset();
        LOG_MANAGER.readConfiguration(EnvironmentHelper
                .PropertiesToInputStream(props));
        CallVerificationStack.getInstance().clear();
        if (null != h) {
            h.close();
            h = null;
        }
        System.setErr(err);
        super.tearDown();
    }

    // Builds the baseline LogManager properties reinstalled after every test.
    private void initProps() throws Exception {
        props = new Properties();
        props.put("handlers", className + "$MockHandler " + className
                + "$MockHandler");
        props.put("java.util.logging.FileHandler.pattern", "%h/java%u.log");
        props.put("java.util.logging.FileHandler.limit", "50000");
        props.put("java.util.logging.FileHandler.count", "5");
        props.put("java.util.logging.FileHandler.formatter",
                "java.util.logging.XMLFormatter");
        props.put(".level", "FINE");
        props.put("java.util.logging.ConsoleHandler.level", "OFF");
        props.put("java.util.logging.ConsoleHandler.formatter",
                "java.util.logging.SimpleFormatter");
        props.put("foo.handlers", "java.util.logging.ConsoleHandler");
        props.put("foo.level", "WARNING");
        props.put("com.xyz.foo.level", "SEVERE");
    }

    /*
     * Test the constructor with no relevant log manager properties are set:
     * missing host/port must raise IllegalArgumentException, and defaults
     * (Level.ALL, XMLFormatter, no filter/encoding) apply once connected.
     */
    public void testConstructor_NoProperties() throws Exception {
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.level"));
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.filter"));
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.formatter"));
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.encoding"));
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.host"));
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.port"));
        try {
            h = new SocketHandler();
            fail("Should throw IllegalArgumentException!");
        } catch (IllegalArgumentException e) {
            // expected: no host/port configured
        }
        try {
            h = new SocketHandler(null, 0);
            fail("Should throw IllegalArgumentException!");
        } catch (IllegalArgumentException e) {
            // expected: null host
        }
        try {
            h = new SocketHandler("", 0);
            fail("Should throw IllegalArgumentException!");
        } catch (IllegalArgumentException e) {
            // expected: empty host
        }
        try {
            h = new SocketHandler("127.0.0.1", -1);
            fail("Should throw IllegalArgumentException!");
        } catch (IllegalArgumentException e) {
            // expected: negative port
        }
        try {
            h = new SocketHandler("127.0.0.1", Integer.MAX_VALUE);
            fail("Should throw IllegalArgumentException!");
        } catch (IllegalArgumentException e) {
            // expected: port out of range
        }
        try {
            h = new SocketHandler("127.0.0.1", 66666);
            fail("Should throw IllegalArgumentException!");
        } catch (IllegalArgumentException e) {
            // expected: port > 65535
        }
        try {
            h = new SocketHandler("127.0.0.1", 0);
            fail("Should throw IllegalArgumentException!");
        } catch (IllegalArgumentException e) {
            // expected: port 0 rejected
        }
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler("127.0.0.1", 6666);
        assertSame(h.getLevel(), Level.ALL);
        assertTrue(h.getFormatter() instanceof XMLFormatter);
        assertNull(h.getFilter());
        assertNull(h.getEncoding());
        h.close();
        // ensure the thread exits and the port becomes available again
        thread.getReadString();
    }

    /*
     * Test the constructor with no relevant log manager properties are set
     * except host and port.
     */
    public void testConstructor_NoBasicProperties() throws Exception {
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.level"));
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.filter"));
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.formatter"));
        assertNull(LOG_MANAGER.getProperty(
                "java.util.logging.SocketHandler.encoding"));
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        // Defaults apply when only host/port are configured.
        assertSame(h.getLevel(), Level.ALL);
        assertTrue(h.getFormatter() instanceof XMLFormatter);
        assertNull(h.getFilter());
        assertNull(h.getEncoding());
        h.close();
        // ensure the thread exits and the port becomes available again
        thread.getReadString();
        try {
            h = new SocketHandler("127.0.sdfcdsfsa%%&&^0.1", 6665);
            fail("Should throw IOException!");
        } catch (IOException e) {
            // expected: unresolvable host
        }
    }

    /*
     * Test the constructor with insufficient privilege for connection:
     * a SecurityManager that denies socket connects must make construction fail.
     */
    public void testConstructor_InsufficientPrivilege() throws Exception {
        SecurityManager oldMan = null;
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.level", "FINE");
        p.put("java.util.logging.SocketHandler.filter", className
                + "$MockFilter");
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.encoding", "utf-8");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        oldMan = System.getSecurityManager();
        System.setSecurityManager(new MockNoSocketSecurityManager());
        try {
            new SocketHandler();
            fail("Should throw SecurityException!");
        } catch (SecurityException e) {
            // expected
        } finally {
            System.setSecurityManager(oldMan);
        }
        System.setSecurityManager(new MockNoSocketSecurityManager());
        try {
            new SocketHandler("127.0.0.1", 6666);
            fail("Should throw SecurityException!");
        } catch (SecurityException e) {
            // expected
        } finally {
            System.setSecurityManager(oldMan);
        }
    }

    /*
     * Test the constructor with valid relevant log manager properties are set:
     * both the no-arg and (host, port) constructors must honor them.
     */
    public void testConstructor_ValidProperties() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.level", "FINE");
        p.put("java.util.logging.SocketHandler.filter", className
                + "$MockFilter");
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.encoding", "iso-8859-1");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        assertSame(h.getLevel(), Level.parse("FINE"));
        assertTrue(h.getFormatter() instanceof MockFormatter);
        assertTrue(h.getFilter() instanceof MockFilter);
        assertEquals(h.getEncoding(), "iso-8859-1");
        h.close();
        // ensure the thread exits and the port becomes available again
        thread.getReadString();
        // start the server to be ready to accept log messages
        thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler("127.0.0.1", 6666);
        assertSame(h.getLevel(), Level.parse("FINE"));
        assertTrue(h.getFormatter() instanceof MockFormatter);
        assertTrue(h.getFilter() instanceof MockFilter);
        assertEquals(h.getEncoding(), "iso-8859-1");
        h.close();
        // ensure the thread exits and the port becomes available again
        thread.getReadString();
    }

    /*
     * Test the constructor with invalid relevant log manager properties are set
     * except host and port: invalid level/filter/formatter/encoding values must
     * silently fall back to the defaults.
     */
    public void testConstructor_InvalidBasicProperties() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.level", INVALID_LEVEL);
        p.put("java.util.logging.SocketHandler.filter", className + "");
        p.put("java.util.logging.SocketHandler.formatter", className + "");
        p.put("java.util.logging.SocketHandler.encoding", "XXXX");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        assertSame(h.getLevel(), Level.ALL);
        assertTrue(h.getFormatter() instanceof XMLFormatter);
        assertNull(h.getFilter());
        assertNull(h.getEncoding());
        h.publish(new LogRecord(Level.SEVERE, "test"));
        assertNull(h.getEncoding());
        h.close();
        // ensure the thread exits and the port becomes available again
        thread.getReadString();
        // start the server to be ready to accept log messages
        thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler("127.0.0.1", 6666);
        assertSame(h.getLevel(), Level.ALL);
        assertTrue(h.getFormatter() instanceof XMLFormatter);
        assertNull(h.getFilter());
        assertNull(h.getEncoding());
        h.publish(new LogRecord(Level.SEVERE, "test"));
        assertNull(h.getEncoding());
        h.close();
        // ensure the thread exits and the port becomes available again
        thread.getReadString();
    }

    /*
     * Test the constructor with valid relevant log manager properties are set
     * except port: a non-numeric port value must be rejected.
     */
    public void testConstructor_InvalidPort() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.level", "FINE");
        p.put("java.util.logging.SocketHandler.filter", className
                + "$MockFilter");
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.encoding", "iso-8859-1");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666i");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        try {
            h = new SocketHandler();
            fail("Should throw IllegalArgumentException!");
        } catch (IllegalArgumentException e) {
            // expected: "6666i" is not a valid port number
        }
    }

    /*
     * Test the constructor with valid relevant log manager properties are set,
     * but the port is not open.
     */
    public void testConstructor_NotOpenPort() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.level", "FINE");
        p.put("java.util.logging.SocketHandler.filter", className
                + "$MockFilter");
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.encoding", "iso-8859-1");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6665");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        try {
            h = new SocketHandler();
            fail("Should throw IOException!");
        } catch (IOException e) {
            // expected: nothing is listening on 6665
        }
        try {
            h = new SocketHandler("127.0.0.1", 6665);
            fail("Should throw IOException!");
        } catch (IOException e) {
            // expected: nothing is listening on 6665
        }
    }

    /*
     * Test the constructor with valid relevant log manager properties are set
     * except host: a malformed host must produce an IOException.
     */
    public void testConstructor_InvalidHost() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.level", "FINE");
        p.put("java.util.logging.SocketHandler.filter", className
                + "$MockFilter");
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.encoding", "iso-8859-1");
        p.put("java.util.logging.SocketHandler.host", " 34345 #$#%$%$");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        try {
            h = new SocketHandler();
            fail("Should throw IOException!");
        } catch (IOException e) {
            // expected
        }
        try {
            h = new SocketHandler(" 34345 #$#%$%$", 6666);
            fail("Should throw IOException!");
        } catch (IOException e) {
            // expected
        }
    }

    /*
     * Test close() when having sufficient privilege, and a record has been
     * written to the output stream.
     */
    public void testClose_SufficientPrivilege_NormalClose() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.publish(new LogRecord(Level.SEVERE,
                "testClose_SufficientPrivilege_NormalClose msg"));
        h.close();
        // Closing must emit the formatter head + buffered record + tail.
        assertEquals("MockFormatter_Head"
                + "testClose_SufficientPrivilege_NormalClose msg"
                + "MockFormatter_Tail", thread.getReadString());
        // Second close must be a no-op.
        h.close();
    }

    /*
     * Test close() when having sufficient privilege, and no record has been
     * written to the output stream: only head and tail are emitted.
     */
    public void testClose_SufficientPrivilege_DirectClose() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.setLevel(Level.INFO);
        h.close();
        assertEquals("MockFormatter_Head" + "MockFormatter_Tail", thread
                .getReadString());
    }

    /*
     * Test close() when having insufficient privilege.
     */
    public void testClose_InsufficientPrivilege() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.setLevel(Level.INFO);
        SecurityManager oldMan = System.getSecurityManager();
        // MockSecurityManager denies LoggingPermission, so close() must fail.
        System.setSecurityManager(new MockSecurityManager());
        try {
            h.close();
            fail("Should throw SecurityException!");
        } catch (SecurityException e) {
            // expected
        } finally {
            System.setSecurityManager(oldMan);
            h.close();
            // ensure the thread exits and the port becomes available again
            thread.getReadString();
        }
    }

    /*
     * Test publish(), use no filter, having output stream, normal log record:
     * only records at or above the handler level are written.
     */
    public void testPublish_NoFilter() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.setLevel(Level.INFO);
        LogRecord r = new LogRecord(Level.INFO, "testPublish_NoFilter");
        h.setLevel(Level.INFO);
        h.publish(r);              // INFO >= INFO: written
        h.setLevel(Level.WARNING);
        h.publish(r);              // INFO < WARNING: dropped
        h.setLevel(Level.CONFIG);
        h.publish(r);              // INFO >= CONFIG: written
        r.setLevel(Level.OFF);
        h.setLevel(Level.OFF);
        h.publish(r);              // handler level OFF: dropped
        h.close();
        assertEquals("MockFormatter_Head" + "testPublish_NoFilter"
                + "testPublish_NoFilter" + "MockFormatter_Tail", thread
                .getReadString());
    }

    /*
     * Test publish(), use a filter, having output stream, normal log record:
     * MockFilter rejects everything, so only head/tail reach the socket.
     */
    public void testPublish_WithFilter() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.setLevel(Level.INFO);
        h.setFilter(new MockFilter());
        System.setErr(new PrintStream(new ByteArrayOutputStream()));
        LogRecord r = new LogRecord(Level.INFO, "testPublish_WithFilter");
        h.setLevel(Level.INFO);
        h.publish(r);
        h.close();
        assertEquals("MockFormatter_Head" + "MockFormatter_Tail", thread
                .getReadString());
    }

    /*
     * Test publish(), null log record, having output stream.
     */
    public void testPublish_Null() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.setLevel(Level.INFO);
        try {
            // Must not throw: Handler.publish reports errors via ErrorManager.
            h.publish(null);
        } finally {
            h.close();
            // ensure the thread exits and the port becomes available again
            thread.getReadString();
        }
    }

    /*
     * Test publish(), a log record with empty msg, having output stream.
     */
    public void testPublish_EmptyMsg() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.setLevel(Level.INFO);
        LogRecord r = new LogRecord(Level.INFO, "");
        h.publish(r);
        h.close();
        assertEquals("MockFormatter_Head" + "MockFormatter_Tail", thread
                .getReadString());
    }

    /*
     * Test publish(), a log record with null msg, having output stream.
     */
    public void testPublish_NullMsg() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.setLevel(Level.INFO);
        LogRecord r = new LogRecord(Level.INFO, null);
        h.publish(r);
        h.close();
        assertEquals("MockFormatter_Head" + "MockFormatter_Tail", thread
                .getReadString());
    }

    /*
     * Test publish(), after close: publishing must not throw.
     */
    public void testPublish_AfterClose() throws Exception {
        Properties p = new Properties();
        p.put("java.util.logging.SocketHandler.formatter", className
                + "$MockFormatter");
        p.put("java.util.logging.SocketHandler.host", "127.0.0.1");
        p.put("java.util.logging.SocketHandler.port", "6666");
        LOG_MANAGER.readConfiguration(
                EnvironmentHelper.PropertiesToInputStream(p));
        // start the server to be ready to accept log messages
        ServerThread thread = new ServerThread();
        thread.start();
        Thread.sleep(2000);
        h = new SocketHandler();
        h.setLevel(Level.FINE);
        assertSame(h.getLevel(), Level.FINE);
        LogRecord r = new LogRecord(Level.INFO, "testPublish_NoFormatter");
        assertTrue(h.isLoggable(r));
        h.close();
        // ensure the thread exits and the port becomes available again
        thread.getReadString();
        // assertFalse(h.isLoggable(r));
        h.publish(r);
        h.flush();
        // assertEquals("MockFormatter_Head",
        // this.errSubstituteStream.toString());
    }

    /*
     * A mock filter, always return false.
     */
    public static class MockFilter implements Filter {

        public boolean isLoggable(LogRecord record) {
            // Record the call so tests can verify the filter was consulted.
            CallVerificationStack.getInstance().push(record);
            // System.out.println("filter called...");
            return false;
        }
    }

    /*
     * A mock formatter.
     */
    public static class MockFormatter extends Formatter {

        public String format(LogRecord r) {
            // System.out.println("formatter called...");
            return super.formatMessage(r);
        }

        /*
         * (non-Javadoc)
         *
         * @see java.util.logging.Formatter#getHead(java.util.logging.Handler)
         */
        public String getHead(Handler h) {
            return "MockFormatter_Head";
        }

        /*
         * (non-Javadoc)
         *
         * @see java.util.logging.Formatter#getTail(java.util.logging.Handler)
         */
        public String getTail(Handler h) {
            return "MockFormatter_Tail";
        }
    }

    /*
     * Used to grant all permissions except logging control.
     */
    public static class MockSecurityManager extends SecurityManager {

        public MockSecurityManager() {
        }

        public void checkPermission(Permission perm) {
            // grant all permissions except logging control
            if (perm instanceof LoggingPermission) {
                throw new SecurityException();
            }
        }

        public void checkPermission(Permission perm, Object context) {
            // grant all permissions except logging control
            if (perm instanceof LoggingPermission) {
                throw new SecurityException();
            }
        }
    }

    /*
     * Used to grant all permissions except socket connects.
     */
    public static class MockNoSocketSecurityManager extends SecurityManager {

        public MockNoSocketSecurityManager() {
        }

        public void checkPermission(Permission perm) {
        }

        public void checkPermission(Permission perm, Object context) {
        }

        public void checkConnect(String host, int port) {
            throw new SecurityException();
        }
    }

    /*
     * A mock stream handler, expose setOutputStream.
     */
    public static class MockSocketHandler extends SocketHandler {

        public MockSocketHandler() throws Exception {
            super();
        }

        public void setOutputStream(OutputStream out) {
            super.setOutputStream(out);
        }

        public boolean isLoggable(LogRecord r) {
            CallVerificationStack.getInstance().push(r);
            return super.isLoggable(r);
        }
    }

    /*
     * A server thread that accepts an incoming connection request and reads any
     * incoming data into an byte array.
     */
    public static class ServerThread extends Thread {

        // Accumulates everything read from the accepted socket.
        private volatile StringBuffer sb = new StringBuffer();

        // Flipped to true once run() has finished (socket drained and closed).
        private volatile boolean finished = false;

        public boolean finished() {
            return this.finished;
        }

        /**
         * Blocks until run() completes and returns everything that was read.
         * After ~10s of waiting it repeatedly connects to port 6666 itself to
         * unblock a still-listening accept() so the thread can exit.
         */
        public String getReadString() throws Exception {
            int i = 0;
            while (!this.finished) {
                sleep(100);
                if (++i > 100) {
                    // connect to port 6666 to stop the listening.
                    try {
                        Socket s = new Socket("127.0.0.1", 6666);
                        OutputStream os = s.getOutputStream();
                        os.write(1);
                        os.close();
                        s.close();
                    } catch (Exception e) {
                        // ignore
                    }
                }
            }
            return this.sb.toString();
        }

        public void run() {
            ServerSocket ss = null;
            Socket s = null;
            InputStreamReader reader = null;
            try {
                char[] buffer = new char[32];
                ss = new ServerSocket(6666);
                s = ss.accept();
                reader = new InputStreamReader(s.getInputStream());
                // Drain the connection until EOF.
                while (true) {
                    int length = reader.read(buffer);
                    if (-1 == length) {
                        break;
                    }
                    this.sb.append(buffer, 0, length);
                }
            } catch (Exception e) {
                e.printStackTrace(System.err);
            } finally {
                try {
                    if (null != reader) {
                        reader.close();
                        s.close();
                        ss.close();
                    }
                } catch (Exception e) {
                    e.printStackTrace(System.err);
                }
                this.finished = true;
            }
        }
    }
}
/*
 * Copyright 2016 West Coast Informatics, LLC
 */
package com.wci.tt.mojo;

import java.io.File;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;

import javax.persistence.Query;

import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;

import com.wci.tt.jpa.services.handlers.NdcSourceDataHandler;
import com.wci.umls.server.SourceData;
import com.wci.umls.server.SourceDataFile;
import com.wci.umls.server.jpa.SourceDataFileJpa;
import com.wci.umls.server.jpa.SourceDataJpa;
import com.wci.umls.server.jpa.services.ContentServiceJpa;
import com.wci.umls.server.jpa.services.SourceDataServiceJpa;
import com.wci.umls.server.model.content.Atom;
import com.wci.umls.server.model.content.Attribute;
import com.wci.umls.server.services.RootService;
import com.wci.umls.server.services.SourceDataService;
import com.wci.umls.server.services.handlers.ExceptionHandler;

/**
 * Used for data load to get NDC into a database and bound to a source data
 * object.
 *
 * See admin/pom.xml for a sample execution.
 *
 * @goal ndc-rxnorm-update
 * @phase package
 */
public class NdcUpdateSourceDataHandlerMojo extends SourceDataMojo {

  /**
   * Name of terminology to be loaded.
   * @parameter
   * @required
   */
  private String terminology;

  /**
   * Input directory.
   * @parameter
   * @required
   */
  private String inputDir;

  /**
   * Executes the plugin: strips attributes from the currently-loaded version
   * of the terminology, then loads attributes for the newest yyyyMMdd release
   * directory found under {@code inputDir} via {@link NdcSourceDataHandler}.
   *
   * @throws MojoExecutionException the mojo execution exception
   * @throws MojoFailureException the mojo failure exception
   */
  @SuppressWarnings("unchecked")
  @Override
  public void execute() throws MojoExecutionException, MojoFailureException {
    getLog().info("Starting NDC update data load");
    getLog().info(" terminology = " + terminology);
    getLog().info(" inputDir = " + inputDir);

    SourceDataService sdService = null;
    ContentServiceJpa contentService = null;
    try {
      sdService = new SourceDataServiceJpa();
      contentService = new ContentServiceJpa();

      // Check preconditions
      if (inputDir == null) {
        throw new Exception("Input directory not specified");
      }
      final File dir = new File(inputDir);
      if (!dir.exists()) {
        throw new Exception("Input directory does not exist");
      }
      if (!dir.isDirectory()) {
        throw new Exception("Input directory must be a directory");
      }

      //
      // Identify the max version currently in the database and remove the
      // attributes
      //
      String version =
          contentService.getTerminologyLatestVersion(terminology).getVersion();

      // Remove attributes from current version
      getLog().info("Remove attributes from current version");
      // Batch multiple operations per transaction for throughput.
      contentService.setTransactionPerOperation(false);
      contentService.beginTransaction();
      final Query query = contentService.getEntityManager().createQuery(
          "SELECT a.id FROM AtomJpa a WHERE terminology = :terminology "
              + " AND version = :version");
      query.setParameter("terminology", terminology);
      query.setParameter("version", version);
      int ct = 0;
      for (final Long id : (List<Long>) query.getResultList()) {
        final Atom atom = contentService.getAtom(id);
        final Set<Attribute> attributes = new HashSet<>();
        // Clear attributes from atoms and save attribute list
        // (iterate over a copy so removal doesn't disturb iteration)
        for (final Attribute attribute : new HashSet<>(atom.getAttributes())) {
          attributes.add(attribute);
          atom.getAttributes().remove(attribute);
        }
        // Update atom
        contentService.updateAtom(atom);
        // Remove attributes
        for (final Attribute attribute : attributes) {
          contentService.removeAttribute(attribute.getId());
        }
        // Periodic log/commit per the service's configured batch sizes.
        contentService.logAndCommit(++ct, RootService.logCt,
            RootService.commitCt);
      }
      contentService.commitClearBegin();

      getLog().info("Insert latest version (with attributes)");

      // Find the highest version to process
      // Only load attributes for that version
      String maxVersion = "00000000";
      for (File versionDir : dir.listFiles()) {
        // Skip if not an 8 digit yyyyMMdd directory
        if (!versionDir.getName().matches("\\d{8}")) {
          continue;
        } else {
          version = versionDir.getName();
          // Lexicographic compare works because names are fixed-width yyyyMMdd.
          if (version.compareTo(maxVersion) > 0) {
            maxVersion = version;
          }
        }
      }
      getLog().info(" maxVersion = " + maxVersion);

      // Get the version directory to process (the max version)
      File versionDir = new File(dir, maxVersion);

      // Verify presence of an "rrf" directory
      File[] versionDirContents = versionDir.listFiles();
      File rrfDir = null;
      for (File f : versionDirContents) {
        if (f.getName().equals("rrf")) {
          rrfDir = f;
        }
      }
      if (rrfDir == null) {
        throw new Exception(
            "No rrf directory in the release: " + versionDir.getCanonicalPath());
      }

      // Create source data file
      final SourceDataFile sdFile = new SourceDataFileJpa();
      sdFile.setDirectory(true);
      sdFile.setLastModifiedBy("loader");
      sdFile.setName(rrfDir.getName());
      sdFile.setPath(rrfDir.getAbsolutePath());
      // NOTE(review): size is a hard-coded placeholder, not the real dir size.
      sdFile.setSize(1000000L);
      sdFile.setTimestamp(new Date());
      sdService.addSourceDataFile(sdFile);
      getLog().info(" file = " + sdFile);

      // Create loader
      final NdcSourceDataHandler loader = new NdcSourceDataHandler();

      // Create and add the source data
      final SourceData sourceData = new SourceDataJpa();
      sourceData.setName(getName(terminology, versionDir.getName()));
      sourceData.setDescription(
          "Set of RXNORM-NDC files loaded from " + versionDir.getName());
      sourceData.setLastModifiedBy("loader");
      sourceData.setHandler(loader.getName());
      sourceData.getSourceDataFiles().add(sdFile);
      sourceData.setVersion(versionDir.getName());
      sourceData.setTerminology(terminology);
      sdService.addSourceData(sourceData);
      getLog().info(" source data = " + sourceData);

      // Back-link the file to its source data record.
      sdFile.setSourceData(sourceData);
      sdService.updateSourceDataFile(sdFile);
      getLog().info(" file (with reference) = " + sdFile);

      // Now, invoke the loader
      final Properties p = new Properties();
      loader.setSourceData(sourceData);
      loader.setProperties(p);
      loader.setAttributesFlag(true);
      loader.compute();
      loader.close();
      getLog().info("Done loading " + versionDir.getCanonicalPath());

      getLog().info("Done ...");
    } catch (Exception e) {
      // Send email if something went wrong
      // NOTE(review): if handleException succeeds, the original failure is
      // swallowed and the build proceeds as success; the mojo only fails when
      // the notification itself throws. Confirm this best-effort behavior is
      // intended. The rethrown message also drops the cause chain.
      try {
        ExceptionHandler.handleException(e, "Error loading sample source data");
      } catch (Exception e1) {
        e1.printStackTrace();
        throw new MojoFailureException(e.getMessage());
      }
    } finally {
      // Close service(s)
      if (sdService != null) {
        try {
          sdService.close();
        } catch (Exception e) {
          // n/a
        }
      }
      if (contentService != null) {
        try {
          contentService.close();
        } catch (Exception e) {
          // n/a
        }
      }
    }
  }
}
/*
 * Copyright 2010-2014 Daniel Volk <mail@volkarts.com>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.va.jscp.util;

import com.va.jscp.session.AppSession.ConnectionType;
import com.va.util.Properties;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.InvalidPropertiesFormatException;
import java.util.Map.Entry;
import java.util.Set;

import static com.va.jscp.Jscp.LOCAL_SESSION_NAME;
import static com.va.jscp.ui.i18n.Msg.getString;
import static com.va.jscp.util.Functions.checkEmptyString;

/**
 * Application configuration: one global property set plus one property set per
 * saved session, persisted as XML under {@code ~/.jscp}.
 */
public class Configuration implements Serializable {

    private static final long serialVersionUID = 404757572586430951L;

    // On-disk layout: ~/.jscp/config.xml plus one XML file per session
    // under ~/.jscp/sessions.
    public static final String CONFIG_BASE_PATH = System.getProperty("user.home") + "/.jscp";
    public static final String CONFIG_STORE = CONFIG_BASE_PATH + "/config.xml";
    public static final String SESSION_STORE = CONFIG_BASE_PATH + "/sessions";

    // common
    public static final String SESSION_DISPLAY_NAME = "DisplayName";
    public static final String SESSION_HOST_NAME = "HostName";
    public static final String SESSION_PORT_NUMBER = "PortNumber";
    public static final String SESSION_USERNAME = "UserName";
    public static final String SESSION_PASSWORD = "Password";
    public static final String SESSION_CONNECTION_TYPE = "ConnectionType";

    // extended
    public static final String SESSION_PRE_START_SCRIPT = "PreStartScript";
    public static final String SESSION_INITIAL_LOCATION = "InitialLocation";
    public static final String SESSION_SPECIFIC_INITIAL_LOCATION = "SpecificInitialLocation";

    // ssh
    public static final String SESSION_SSH_PUBLIC_KEY_FILE = "SSH_PublicKeyFile";
    public static final String SESSION_SSH_COMPRESSION = "SSH_Compression";

    // dynamic
    public static final String SESSION_DYN_RANDOM_SEED = "RandomSeed";
    public static final String SESSION_DYN_COLUMN_CONFIG = "ColumnConfig";
    public static final String SESSION_DYN_MAINWINDOW_SPLIT_WIDTH = "MainWindowSplitWidth";
    public static final String SESSION_DYN_LAST_PATH_EDITOR_PATH = "LastPathEditorPath";
    public static final String SESSION_DYN_LAST_SEARCH_PATTERNS = "LastSearchPatterns";
    public static final String SESSION_DYN_LAST_LOCAL_COPY_PATH = "LastLocalCopyPath";

    // Global (application-wide) properties.
    private final Properties globalConfig;

    // Per-session properties, keyed by session name.
    private final HashMap<String, Properties> sessionConfigs;

    public Configuration() {
        globalConfig = new Properties();
        sessionConfigs = new HashMap<>();
        testDirs();
    }

    // Ensures the config and session directories exist; a plain file squatting
    // on either path is deleted and replaced by a directory.
    private void testDirs() {
        File f;
        f = new File(CONFIG_BASE_PATH);
        if (!f.isDirectory()) {
            if (f.exists()) {
                f.delete();
            }
            f.mkdirs();
        }
        f = new File(SESSION_STORE);
        if (!f.isDirectory()) {
            if (f.exists()) {
                f.delete();
            }
            f.mkdirs();
        }
    }

    //<editor-fold defaultstate="collapsed" desc="Delegating methods">
    // Thin delegates to globalConfig for typed property access.
    public int getPropertyAsInt(String key) { return globalConfig.getPropertyAsInt(key); }

    public int getPropertyAsInt(String key, int di) { return globalConfig.getPropertyAsInt(key, di); }

    public long getPropertyAsLong(String key) { return globalConfig.getPropertyAsLong(key); }

    public long getPropertyAsLong(String key, long dl) { return globalConfig.getPropertyAsLong(key, dl); }

    public boolean getPropertyAsBool(String key) { return globalConfig.getPropertyAsBool(key); }

    public boolean getPropertyAsBool(String key, boolean db) { return globalConfig.getPropertyAsBool(key, db); }

    public double getPropertyAsDouble(String key) { return globalConfig.getPropertyAsDouble(key); }

    public double getPropertyAsDouble(String key, double dd) { return globalConfig.getPropertyAsDouble(key, dd); }

    public byte[] getPropertyAsBytes(String key) { return globalConfig.getPropertyAsBytes(key); }

    public void setProperty(String key, int val) { globalConfig.setProperty(key, val); }

    public void setProperty(String key, long val) { globalConfig.setProperty(key, val); }

    public void setProperty(String key, boolean val) { globalConfig.setProperty(key, val); }

    public void setProperty(String key, double val) { globalConfig.setProperty(key, val); }

    public void setProperty(String key, byte[] val) { globalConfig.setProperty(key, val); }

    public synchronized Object setProperty(String key, String value) { return globalConfig.setProperty(key, value); }

    public String getProperty(String key) { return globalConfig.getProperty(key); }

    public String getProperty(String key, String defaultValue) { return globalConfig.getProperty(key, defaultValue); }
    //</editor-fold>

    public Set<String> getSessionNames() {
        return sessionConfigs.keySet();
    }

    public Properties getSessionConfig(String sessionName) {
        return ensureSession(sessionName);
    }

    /**
     * Loads the global config and every session XML file found in the session
     * store directory. Unparseable session files are skipped with a message.
     */
    public synchronized void load() throws IOException {
        loadJscpConfig();

        // load sessions
        File sessionsDir = new File(SESSION_STORE);
        File[] sessionFiles = sessionsDir.listFiles();
        for (File sessionFile : sessionFiles) {
            // Session name = file name minus the 4-char extension (".xml").
            String sessionName = sessionFile.getName().substring(0, sessionFile.getName().length() - 4);
            try {
                Properties sessionConfig = new Properties();
                sessionConfig.loadFromXML(new FileInputStream(sessionFile));
                sessionConfigs.put(sessionName, sessionConfig);
            } catch (InvalidPropertiesFormatException e) {
                System.err.println("Could not load session config `" + sessionFile.getName() + "`");
            }
        }
        // (removed: long commented-out prototype for importing WinSCP .ini
        // session definitions; see VCS history if it is ever needed again)
    }

    /**
     * Normalizes loaded sessions: guarantees the local session exists, fills in
     * a missing display name, forces the local session's connection type to
     * LOCAL, and persists any session it changed.
     */
    public void checkConfig() {
        ensureSession(LOCAL_SESSION_NAME);

        for (Entry<String, Properties> entry : sessionConfigs.entrySet()) {
            boolean changed = false;

            if (checkEmptyString(entry.getValue().getProperty(SESSION_DISPLAY_NAME))) {
                if (entry.getKey().equals(LOCAL_SESSION_NAME)) {
                    entry.getValue().setProperty(SESSION_DISPLAY_NAME, getString("LOCAL"));
                } else {
                    // Fallback display name: "user@host".
                    entry.getValue().setProperty(SESSION_DISPLAY_NAME,
                            entry.getValue().getProperty(SESSION_USERNAME) + "@"
                            + entry.getValue().getProperty(SESSION_HOST_NAME));
                }
                changed = true;
            }

            if (entry.getKey().equals(LOCAL_SESSION_NAME)
                    && !ConnectionType.LOCAL.toString().equals(entry.getValue().getProperty(SESSION_CONNECTION_TYPE))) {
                entry.getValue().setProperty(SESSION_CONNECTION_TYPE, ConnectionType.LOCAL.toString());
                changed = true;
            }

            if (changed) {
                try {
                    storeSession(entry.getKey());
                } catch (IOException ex) {
                    System.err.println("ERROR: " + ex.toString());
                }
            }
        }
    }

    public void store() throws IOException {
        storeJscpConfig();
    }

    public void storeSession(String sessionName) throws IOException {
        Properties sessionConfig =
sessionConfigs.get(sessionName); if (sessionConfig == null) { return; } sessionConfig.storeToXML(new FileOutputStream(SESSION_STORE + "/" + sessionName + ".xml"), null, "UTF-8" ); } private Properties ensureSession(String sessionName) { Properties sp = sessionConfigs.get(sessionName); if (sp == null) { sp = new Properties(); try { createSession(sessionName, sp); } catch (IOException ex) { System.err.println("ERROR: " + ex.toString()); } } return sp; } public void createSession(String sessionName, Properties properties) throws IOException { sessionConfigs.put(sessionName, properties); storeSession(sessionName); } public Properties deleteSession(String sessionName) { Properties props = sessionConfigs.remove(sessionName); new File(SESSION_STORE + "/" + sessionName + ".xml").delete(); return props; } private void loadJscpConfig() throws IOException { File configFile = new File(CONFIG_STORE); if (configFile.exists()) { globalConfig.clear(); try (InputStream in = new FileInputStream(configFile)) { globalConfig.loadFromXML(in); } catch (InvalidPropertiesFormatException e) { } } } private void storeJscpConfig() throws IOException { try (OutputStream out = new FileOutputStream(CONFIG_STORE)) { globalConfig.storeToXML(out, null); } } // // // private String resolveQuotes(String value) // { // int searchPos = 0; // StringBuffer newValue = new StringBuffer(); // // Pattern pattern = Pattern.compile("%([0-9A-F]{2})", Pattern.CASE_INSENSITIVE); // Matcher matcher = pattern.matcher(value); // while ( matcher.find(searchPos) ) // { // newValue.append(value.substring(searchPos, matcher.start())); // newValue.append((char)Integer.parseInt(matcher.group(1), 16)); // // searchPos = matcher.end(); // } // newValue.append(value.substring(searchPos, value.length())); // // return newValue.toString(); // } }
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */
package poi.hslf.examples;

import org.apache.poi.hslf.model.*;
import org.apache.poi.hslf.record.TextHeaderAtom;
import org.apache.poi.hslf.usermodel.*;

import java.awt.*;
import java.io.FileOutputStream;
import java.io.IOException;

/**
 * Presentation for Fast Feather Track on ApacheconEU 2008.
 * Builds the whole deck programmatically with HSLF and writes it to
 * {@code apachecon_eu_08.ppt}.
 *
 * @author Yegor Kozlov
 */
public final class ApacheconEU08 {

    /** Assembles all twelve slides and writes the .ppt file. */
    public static void main(String[] args) throws IOException {
        SlideShow ppt = new SlideShow();
        ppt.setPageSize(new Dimension(720, 540));

        slide1(ppt);
        slide2(ppt);
        slide3(ppt);
        slide4(ppt);
        slide5(ppt);
        slide6(ppt);
        slide7(ppt);
        slide8(ppt);
        slide9(ppt);
        slide10(ppt);
        slide11(ppt);
        slide12(ppt);

        // Close the stream even when ppt.write() throws (was leaked before).
        FileOutputStream out = new FileOutputStream("apachecon_eu_08.ppt");
        try {
            ppt.write(out);
        } finally {
            out.close();
        }
    }

    /** Title slide: project name, subtitle and author. */
    public static void slide1(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.CENTER_TITLE_TYPE);
        tr1.setText("POI-HSLF");
        box1.setAnchor(new Rectangle(54, 78, 612, 115));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.setRunType(TextHeaderAtom.CENTRE_BODY_TYPE);
        tr2.setText("Java API To Access Microsoft PowerPoint Format Files");
        box2.setAnchor(new Rectangle(108, 204, 504, 138));
        slide.addShape(box2);

        TextBox box3 = new TextBox();
        TextRun tr3 = box3.getTextRun();
        tr3.getRichTextRuns()[0].setFontSize(32);
        box3.setHorizontalAlignment(TextBox.AlignCenter);
        tr3.setText(
                "Yegor Kozlov\r" +
                "yegor - apache - org");
        box3.setAnchor(new Rectangle(206, 348, 310, 84));
        slide.addShape(box3);
    }

    /** "What is HSLF?" overview slide. */
    public static void slide2(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.TITLE_TYPE);
        tr1.setText("What is HSLF?");
        box1.setAnchor(new Rectangle(36, 21, 648, 90));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.setRunType(TextHeaderAtom.BODY_TYPE);
        tr2.setText("HorribleSLideshowFormat is the POI Project's pure Java implementation " +
                "of the Powerpoint binary file format. \r" +
                "POI sub-project since 2005\r" +
                "Started by Nick Birch, Yegor Kozlov joined soon after");
        box2.setAnchor(new Rectangle(36, 126, 648, 356));
        slide.addShape(box2);
    }

    /** Feature summary slide with two bullet levels. */
    public static void slide3(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.TITLE_TYPE);
        tr1.setText("HSLF in a Nutshell");
        box1.setAnchor(new Rectangle(36, 15, 648, 65));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.setRunType(TextHeaderAtom.BODY_TYPE);
        tr2.setText(
                "HSLF provides a way to read, create and modify MS PowerPoint presentations\r" +
                "Pure Java API - you don't need PowerPoint to read and write *.ppt files\r" +
                "Comprehensive support of PowerPoint objects");
        tr2.getRichTextRuns()[0].setFontSize(28);
        box2.setAnchor(new Rectangle(36, 80, 648, 200));
        slide.addShape(box2);

        TextBox box3 = new TextBox();
        TextRun tr3 = box3.getTextRun();
        tr3.setRunType(TextHeaderAtom.BODY_TYPE);
        tr3.setText(
                "Rich text\r" +
                "Tables\r" +
                "Shapes\r" +
                "Pictures\r" +
                "Master slides");
        tr3.getRichTextRuns()[0].setFontSize(24);
        tr3.getRichTextRuns()[0].setIndentLevel(1); // second-level bullets
        box3.setAnchor(new Rectangle(36, 265, 648, 150));
        slide.addShape(box3);

        TextBox box4 = new TextBox();
        TextRun tr4 = box4.getTextRun();
        tr4.setRunType(TextHeaderAtom.BODY_TYPE);
        tr4.setText("Access to low level data structures");
        box4.setAnchor(new Rectangle(36, 430, 648, 50));
        slide.addShape(box4);
    }

    /** Table demo slide: a styled 2x1 table plus a source-code pointer. */
    public static void slide4(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        String[][] txt1 = {
            {"Note"},
            {"This presentation was created programmatically using POI HSLF"}
        };
        Table table1 = new Table(2, 1);
        for (int i = 0; i < txt1.length; i++) {
            for (int j = 0; j < txt1[i].length; j++) {
                TableCell cell = table1.getCell(i, j);
                cell.setText(txt1[i][j]);
                cell.getTextRun().getRichTextRuns()[0].setFontSize(10);
                RichTextRun rt = cell.getTextRun().getRichTextRuns()[0];
                rt.setFontName("Arial");
                rt.setBold(true);
                if (i == 0) {
                    // header row: white on blue
                    rt.setFontSize(32);
                    rt.setFontColor(Color.white);
                    cell.getFill().setForegroundColor(new Color(0, 153, 204));
                } else {
                    rt.setFontSize(28);
                    cell.getFill().setForegroundColor(new Color(235, 239, 241));
                }
                cell.setVerticalAlignment(TextBox.AnchorMiddle);
            }
        }

        Line border1 = table1.createBorder();
        border1.setLineColor(Color.black);
        border1.setLineWidth(1.0);
        table1.setAllBorders(border1);

        Line border2 = table1.createBorder();
        border2.setLineColor(Color.black);
        border2.setLineWidth(2.0);
        table1.setOutsideBorders(border2);

        table1.setColumnWidth(0, 510);
        table1.setRowHeight(0, 60);
        table1.setRowHeight(1, 100);
        slide.addShape(table1);
        table1.moveTo(100, 100);

        TextBox box1 = new TextBox();
        box1.setHorizontalAlignment(TextBox.AlignCenter);
        TextRun tr1 = box1.getTextRun();
        tr1.setText("The source code is available at\r" +
                "http://people.apache.org/~yegor/apachecon_eu08/");
        RichTextRun rt = tr1.getRichTextRuns()[0];
        rt.setFontSize(24);
        box1.setAnchor(new Rectangle(80, 356, 553, 65));
        slide.addShape(box1);
    }

    /** "HSLF in Action - 1": data extraction capabilities. */
    public static void slide5(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.TITLE_TYPE);
        tr1.setText("HSLF in Action - 1\rData Extraction");
        box1.setAnchor(new Rectangle(36, 21, 648, 100));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.setRunType(TextHeaderAtom.BODY_TYPE);
        tr2.setText(
                "Text from slides and notes\r" +
                "Images\r" +
                "Shapes and their properties (type, position in the slide, color, font, etc.)");
        box2.setAnchor(new Rectangle(36, 150, 648, 300));
        slide.addShape(box2);
    }

    /** "HSLF in Action - 2": shows the source code rendered by slide7. */
    public static void slide6(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.TITLE_TYPE);
        tr1.setText("HSLF in Action - 2");
        box1.setAnchor(new Rectangle(36, 20, 648, 90));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.getRichTextRuns()[0].setFontSize(18);
        tr2.setText("Creating a simple presentation from scratch");
        box2.setAnchor(new Rectangle(170, 100, 364, 30));
        slide.addShape(box2);

        TextBox box3 = new TextBox();
        TextRun tr3 = box3.getTextRun();
        RichTextRun rt3 = tr3.getRichTextRuns()[0];
        rt3.setFontName("Courier New");
        rt3.setFontSize(8);
        tr3.setText(
                " SlideShow ppt = new SlideShow();\r" +
                " Slide slide = ppt.createSlide();\r" +
                "\r" +
                " TextBox box2 = new TextBox();\r" +
                " box2.setHorizontalAlignment(TextBox.AlignCenter);\r" +
                " box2.setVerticalAlignment(TextBox.AnchorMiddle);\r" +
                " box2.getTextRun().setText(\"Java Code\");\r" +
                " box2.getFill().setForegroundColor(new Color(187, 224, 227));\r" +
                " box2.setLineColor(Color.black);\r" +
                " box2.setLineWidth(0.75);\r" +
                " box2.setAnchor(new Rectangle(66, 243, 170, 170));\r" +
                " slide.addShape(box2);\r" +
                "\r" +
                " TextBox box3 = new TextBox();\r" +
                " box3.setHorizontalAlignment(TextBox.AlignCenter);\r" +
                " box3.setVerticalAlignment(TextBox.AnchorMiddle);\r" +
                " box3.getTextRun().setText(\"*.ppt file\");\r" +
                " box3.setLineWidth(0.75);\r" +
                " box3.setLineColor(Color.black);\r" +
                " box3.getFill().setForegroundColor(new Color(187, 224, 227));\r" +
                " box3.setAnchor(new Rectangle(473, 243, 170, 170));\r" +
                " slide.addShape(box3);\r" +
                "\r" +
                " AutoShape box4 = new AutoShape(ShapeTypes.Arrow);\r" +
                " box4.getFill().setForegroundColor(new Color(187, 224, 227));\r" +
                " box4.setLineWidth(0.75);\r" +
                " box4.setLineColor(Color.black);\r" +
                " box4.setAnchor(new Rectangle(253, 288, 198, 85));\r" +
                " slide.addShape(box4);\r" +
                "\r" +
                " FileOutputStream out = new FileOutputStream(\"hslf-demo.ppt\");\r" +
                " ppt.write(out);\r" +
                " out.close();");
        box3.setAnchor(new Rectangle(30, 150, 618, 411));
        slide.addShape(box3);
    }

    /** Runs the code shown on slide6: two boxes connected by an arrow. */
    public static void slide7(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box2 = new TextBox();
        box2.setHorizontalAlignment(TextBox.AlignCenter);
        box2.setVerticalAlignment(TextBox.AnchorMiddle);
        box2.getTextRun().setText("Java Code");
        box2.getFill().setForegroundColor(new Color(187, 224, 227));
        box2.setLineColor(Color.black);
        box2.setLineWidth(0.75);
        box2.setAnchor(new Rectangle(66, 243, 170, 170));
        slide.addShape(box2);

        TextBox box3 = new TextBox();
        box3.setHorizontalAlignment(TextBox.AlignCenter);
        box3.setVerticalAlignment(TextBox.AnchorMiddle);
        box3.getTextRun().setText("*.ppt file");
        box3.setLineWidth(0.75);
        box3.setLineColor(Color.black);
        box3.getFill().setForegroundColor(new Color(187, 224, 227));
        box3.setAnchor(new Rectangle(473, 243, 170, 170));
        slide.addShape(box3);

        AutoShape box4 = new AutoShape(ShapeTypes.Arrow);
        box4.getFill().setForegroundColor(new Color(187, 224, 227));
        box4.setLineWidth(0.75);
        box4.setLineColor(Color.black);
        box4.setAnchor(new Rectangle(253, 288, 198, 85));
        slide.addShape(box4);
    }

    /** "Wait, there is more!": additional features bullet list. */
    public static void slide8(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.TITLE_TYPE);
        tr1.setText("Wait, there is more!");
        box1.setAnchor(new Rectangle(36, 21, 648, 90));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.setRunType(TextHeaderAtom.BODY_TYPE);
        tr2.setText(
                "Rich text\r" +
                "Tables\r" +
                "Pictures (JPEG, PNG, BMP, WMF, PICT)\r" +
                "Comprehensive formatting features");
        box2.setAnchor(new Rectangle(36, 126, 648, 356));
        slide.addShape(box2);
    }

    /** "HSLF in Action - 3": shows the PPGraphics2D code rendered by slide10. */
    public static void slide9(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.TITLE_TYPE);
        tr1.setText("HSLF in Action - 3");
        box1.setAnchor(new Rectangle(36, 20, 648, 50));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.getRichTextRuns()[0].setFontSize(18);
        tr2.setText("PPGraphics2D: PowerPoint Graphics2D driver");
        box2.setAnchor(new Rectangle(178, 70, 387, 30));
        slide.addShape(box2);

        TextBox box3 = new TextBox();
        TextRun tr3 = box3.getTextRun();
        RichTextRun rt3 = tr3.getRichTextRuns()[0];
        rt3.setFontName("Courier New");
        rt3.setFontSize(8);
        tr3.setText(
                " //bar chart data. The first value is the bar color, the second is the width\r" +
                " Object[] def = new Object[]{\r" +
                "     Color.yellow, new Integer(100),\r" +
                "     Color.green, new Integer(150),\r" +
                "     Color.gray, new Integer(75),\r" +
                "     Color.red, new Integer(200),\r" +
                " };\r" +
                "\r" +
                " SlideShow ppt = new SlideShow();\r" +
                " Slide slide = ppt.createSlide();\r" +
                "\r" +
                " ShapeGroup group = new ShapeGroup();\r" +
                " //define position of the drawing in the slide\r" +
                " Rectangle bounds = new java.awt.Rectangle(200, 100, 350, 300);\r" +
                " group.setAnchor(bounds);\r" +
                " slide.addShape(group);\r" +
                " Graphics2D graphics = new PPGraphics2D(group);\r" +
                "\r" +
                " //draw a simple bar graph\r" +
                " int x = bounds.x + 50, y = bounds.y + 50;\r" +
                " graphics.setFont(new Font(\"Arial\", Font.BOLD, 10));\r" +
                " for (int i = 0, idx = 1; i < def.length; i+=2, idx++) {\r" +
                "     graphics.setColor(Color.black);\r" +
                "     int width = ((Integer)def[i+1]).intValue();\r" +
                "     graphics.drawString(\"Q\" + idx, x-20, y+20);\r" +
                "     graphics.drawString(width + \"%\", x + width + 10, y + 20);\r" +
                "     graphics.setColor((Color)def[i]);\r" +
                "     graphics.fill(new Rectangle(x, y, width, 30));\r" +
                "     y += 40;\r" +
                " }\r" +
                " graphics.setColor(Color.black);\r" +
                " graphics.setFont(new Font(\"Arial\", Font.BOLD, 14));\r" +
                " graphics.draw(bounds);\r" +
                " graphics.drawString(\"Performance\", x + 70, y + 40);\r" +
                "\r" +
                " FileOutputStream out = new FileOutputStream(\"hslf-demo.ppt\");\r" +
                " ppt.write(out);\r" +
                " out.close();");
        box3.setAnchor(new Rectangle(96, 110, 499, 378));
        slide.addShape(box3);
    }

    /** Runs the code shown on slide9: draws a bar chart via PPGraphics2D. */
    public static void slide10(SlideShow ppt) throws IOException {
        //bar chart data. The first value is the bar color, the second is the width
        Object[] def = new Object[]{
            Color.yellow, new Integer(100),
            Color.green, new Integer(150),
            Color.gray, new Integer(75),
            Color.red, new Integer(200),
        };

        Slide slide = ppt.createSlide();

        ShapeGroup group = new ShapeGroup();
        //define position of the drawing in the slide
        Rectangle bounds = new Rectangle(200, 100, 350, 300);
        group.setAnchor(bounds);
        slide.addShape(group);
        Graphics2D graphics = new PPGraphics2D(group);

        //draw a simple bar graph
        int x = bounds.x + 50, y = bounds.y + 50;
        graphics.setFont(new Font("Arial", Font.BOLD, 10));
        for (int i = 0, idx = 1; i < def.length; i+=2, idx++) {
            graphics.setColor(Color.black);
            int width = ((Integer)def[i+1]).intValue();
            graphics.drawString("Q" + idx, x-20, y+20);
            graphics.drawString(width + "%", x + width + 10, y + 20);
            graphics.setColor((Color)def[i]);
            graphics.fill(new Rectangle(x, y, width, 30));
            y += 40;
        }
        graphics.setColor(Color.black);
        graphics.setFont(new Font("Arial", Font.BOLD, 14));
        graphics.draw(bounds);
        graphics.drawString("Performance", x + 70, y + 40);
    }

    /** Development-plans slide with alternating indent levels. */
    public static void slide11(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.TITLE_TYPE);
        tr1.setText("HSLF Development Plans");
        box1.setAnchor(new Rectangle(36, 21, 648, 90));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.setRunType(TextHeaderAtom.BODY_TYPE);
        tr2.getRichTextRuns()[0].setFontSize(32);
        tr2.setText(
                "Support for more PowerPoint functionality\r" +
                "Rendering slides into java.awt.Graphics2D");
        box2.setAnchor(new Rectangle(36, 126, 648, 100));
        slide.addShape(box2);

        TextBox box3 = new TextBox();
        TextRun tr3 = box3.getTextRun();
        tr3.setRunType(TextHeaderAtom.BODY_TYPE);
        tr3.getRichTextRuns()[0].setIndentLevel(1);
        tr3.setText(
                "A way to export slides into images or other formats");
        box3.setAnchor(new Rectangle(36, 220, 648, 70));
        slide.addShape(box3);

        TextBox box4 = new TextBox();
        TextRun tr4 = box4.getTextRun();
        tr4.setRunType(TextHeaderAtom.BODY_TYPE);
        tr4.getRichTextRuns()[0].setFontSize(32);
        tr4.setText(
                "Integration with Apache FOP - Formatting Objects Processor");
        box4.setAnchor(new Rectangle(36, 290, 648, 90));
        slide.addShape(box4);

        TextBox box5 = new TextBox();
        TextRun tr5 = box5.getTextRun();
        tr5.setRunType(TextHeaderAtom.BODY_TYPE);
        tr5.getRichTextRuns()[0].setIndentLevel(1);
        tr5.setText(
                "Transformation of XSL-FO into PPT\r" +
                "PPT2PDF transcoder");
        box5.setAnchor(new Rectangle(36, 380, 648, 100));
        slide.addShape(box5);
    }

    /** Closing "Questions?" slide with project links. */
    public static void slide12(SlideShow ppt) throws IOException {
        Slide slide = ppt.createSlide();

        TextBox box1 = new TextBox();
        TextRun tr1 = box1.getTextRun();
        tr1.setRunType(TextHeaderAtom.CENTER_TITLE_TYPE);
        tr1.setText("Questions?");
        box1.setAnchor(new Rectangle(54, 167, 612, 115));
        slide.addShape(box1);

        TextBox box2 = new TextBox();
        TextRun tr2 = box2.getTextRun();
        tr2.setRunType(TextHeaderAtom.CENTRE_BODY_TYPE);
        tr2.setText(
                "http://poi.apache.org/hslf/\r" +
                "http://people.apache.org/~yegor");
        box2.setAnchor(new Rectangle(108, 306, 504, 138));
        slide.addShape(box2);
    }
}
package org.ovirt.engine.core.bll;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;

import org.junit.Test;
import org.mockito.Mock;
import org.ovirt.engine.core.bll.interfaces.BackendInternal;
import org.ovirt.engine.core.common.action.SetVmTicketParameters;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.businessentities.GraphicsInfo;
import org.ovirt.engine.core.common.businessentities.GraphicsType;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.common.console.ConsoleOptions;
import org.ovirt.engine.core.common.queries.ConfigureConsoleOptionsParams;
import org.ovirt.engine.core.common.queries.VdcQueryParametersBase;
import org.ovirt.engine.core.common.queries.VdcQueryReturnValue;
import org.ovirt.engine.core.common.queries.VdcQueryType;
import org.ovirt.engine.core.compat.Guid;

/**
 * Unit tests for {@code ConfigureConsoleOptionsQuery}: input validation,
 * set-ticket backend interaction, CA-certificate enforcement, and
 * remote-viewer URL placeholder substitution.
 */
public class ConfigureConsoleOptionsQueryTest extends BaseCommandTest {

    @Mock
    BackendInternal backend;

    // Options without a VM id must fail input validation.
    @Test
    public void shouldFailtWhenNoId() {
        ConsoleOptions options = new ConsoleOptions(GraphicsType.SPICE);
        ConfigureConsoleOptionsQuery query =
                new ConfigureConsoleOptionsQuery(new ConfigureConsoleOptionsParams(options, false));
        assertFalse(query.validateInputs());
    }

    // Options without a graphics type must fail input validation.
    @Test
    public void shouldFailtWhenNoGraphicsType() {
        ConsoleOptions options = new ConsoleOptions();
        ConfigureConsoleOptionsQuery query =
                new ConfigureConsoleOptionsQuery(new ConfigureConsoleOptionsParams(options, false));
        assertFalse(query.validateInputs());
    }

    // Valid options + running SPICE VM pass validation.
    @Test
    public void testInputDataOk() {
        ConfigureConsoleOptionsQuery query =
                spy(new ConfigureConsoleOptionsQuery(new ConfigureConsoleOptionsParams(getValidOptions(GraphicsType.SPICE), false)));
        doReturn(mockVm(GraphicsType.SPICE)).when(query).getCachedVm();
        assertTrue(query.validateInputs());
    }

    // A VM that is Down cannot be configured for console access.
    @Test
    public void failOnStoppedVm() {
        ConfigureConsoleOptionsParams params =
                new ConfigureConsoleOptionsParams(getValidOptions(GraphicsType.SPICE), false);
        ConfigureConsoleOptionsQuery query = spy(new ConfigureConsoleOptionsQuery(params));
        VM mockVm = mockVm(GraphicsType.SPICE);
        mockVm.setStatus(VMStatus.Down);
        doReturn(mockVm).when(query).getCachedVm();
        query.validateInputs();
        assertFalse(query.getQueryReturnValue().getSucceeded());
    }

    // Requesting SPICE options for a VNC-only VM must fail.
    @Test
    public void failGetSpiceOnVncVm() {
        ConfigureConsoleOptionsParams params =
                new ConfigureConsoleOptionsParams(getValidOptions(GraphicsType.SPICE), false);
        ConfigureConsoleOptionsQuery query = spy(new ConfigureConsoleOptionsQuery(params));
        VM mockVm = mockVm(GraphicsType.VNC);
        doReturn(mockVm).when(query).getCachedVm();
        query.validateInputs();
        assertFalse(query.getQueryReturnValue().getSucceeded());
    }

    // Minimal options that pass validateInputs(): graphics type + VM id set.
    private ConsoleOptions getValidOptions(GraphicsType graphicsType) {
        ConsoleOptions options = new ConsoleOptions(graphicsType);
        options.setVmId(Guid.Empty);
        return options;
    }

    // When the params ask for a ticket, SetVmTicket must be invoked exactly once.
    @Test
    public void shouldCallSetTicket() {
        ConfigureConsoleOptionsParams params =
                new ConfigureConsoleOptionsParams(getValidOptions(GraphicsType.VNC), true);
        ConfigureConsoleOptionsQuery query = spy(new ConfigureConsoleOptionsQuery(params));
        doReturn(mockVm(GraphicsType.VNC)).when(query).getCachedVm();
        // Default config lookups return null; RemapCtrlAltDelDefault stubbed explicitly.
        doReturn(null).when(query).getConfigValue(any(ConfigValues.class));
        doReturn(true).when(query).getConfigValue(ConfigValues.RemapCtrlAltDelDefault);

        VdcReturnValueBase result = new VdcReturnValueBase();
        result.setSucceeded(true);
        result.setActionReturnValue("nbusr123");
        doReturn(result).when(backend).runInternalAction(eq(VdcActionType.SetVmTicket), any(SetVmTicketParameters.class));
        doReturn(backend).when(query).getBackend();

        query.getQueryReturnValue().setSucceeded(true);
        query.executeQueryCommand();
        verify(backend, times(1)).runInternalAction(eq(VdcActionType.SetVmTicket), any(SetVmTicketParameters.class));
    }

    // Certificate validation enforced + no CA certificate available => failure.
    @Test
    public void failWhenCertEnforcedAndCANotFound() {
        ConfigureConsoleOptionsParams params =
                new ConfigureConsoleOptionsParams(getValidOptions(GraphicsType.SPICE), false);
        ConfigureConsoleOptionsQuery query = spy(new ConfigureConsoleOptionsQuery(params));
        doReturn(mockVm(GraphicsType.SPICE)).when(query).getCachedVm();
        mockSpiceRelatedConfig(query);
        doReturn(true).when(query).getConfigValue(ConfigValues.EnableSpiceRootCertificateValidation);
        doReturn(true).when(query).getConfigValue(ConfigValues.RemapCtrlAltDelDefault);

        VdcQueryReturnValue caResult = new VdcQueryReturnValue();
        caResult.setSucceeded(false);
        doReturn(caResult).when(backend).runInternalQuery(eq(VdcQueryType.GetCACertificate), any(VdcQueryParametersBase.class));
        doReturn(backend).when(query).getBackend();

        query.getQueryReturnValue().setSucceeded(true);
        query.executeQueryCommand();
        assertFalse(query.getQueryReturnValue().getSucceeded());
    }

    // Certificate validation NOT enforced: missing CA must not fail the query.
    @Test
    public void passWhenCertNotEnforcedAndCANotFound() {
        ConfigureConsoleOptionsParams params =
                new ConfigureConsoleOptionsParams(getValidOptions(GraphicsType.SPICE), false);
        ConfigureConsoleOptionsQuery query = spy(new ConfigureConsoleOptionsQuery(params));
        doReturn(mockVm(GraphicsType.SPICE)).when(query).getCachedVm();
        mockSpiceRelatedConfig(query);
        doReturn(false).when(query).getConfigValue(ConfigValues.EnableSpiceRootCertificateValidation);
        doReturn(true).when(query).getConfigValue(ConfigValues.RemapCtrlAltDelDefault);

        doReturn(null).when(backend).runInternalQuery(eq(VdcQueryType.GetCACertificate), any(VdcQueryParametersBase.class));
        doReturn(backend).when(query).getBackend();

        query.getQueryReturnValue().setSucceeded(true);
        query.executeQueryCommand();
        assertTrue(query.getQueryReturnValue().getSucceeded());
    }

    // No placeholder in the template: output equals input.
    @Test
    public void fillRemoteViewerUrl_nothingToReplace() {
        testFillRemoteViewerUrl(
                "some",
                "s",
                "o",
                "some"
        );
    }

    // Invalid base URL: the ENGINE_BASE_URL placeholder is left untouched.
    @Test
    public void fillRemoteViewerUrl_replaceBaseUrl_invalidUrlNotReplaced() {
        testFillRemoteViewerUrl(
                "some" + ConfigureConsoleOptionsQuery.ENGINE_BASE_URL + "other",
                " replaced ",
                "o",
                "some" + ConfigureConsoleOptionsQuery.ENGINE_BASE_URL + "other"
        );
    }

    // Valid base URL substitutes the ENGINE_BASE_URL placeholder.
    @Test
    public void fillRemoteViewerUrl_replaceBaseUrl() {
        testFillRemoteViewerUrl(
                "some" + ConfigureConsoleOptionsQuery.ENGINE_BASE_URL + "other",
                "http://www.ovirt.org/a/b",
                "",
                "some" + "http://www.ovirt.org/a/b" + "other"
        );
    }

    // Absolute resource URL is used as-is for CONSOLE_CLIENT_RESOURCES_URL.
    @Test
    public void fillRemoteViewerUrl_absoluteUrl() {
        testFillRemoteViewerUrl(
                "some" + ConfigureConsoleOptionsQuery.CONSOLE_CLIENT_RESOURCES_URL + "other",
                "o",
                "http://www.ovirt.org/a/b",
                "some" + "http://www.ovirt.org/a/b" + "other"
        );
    }

    // Relative resource URL + malformed base URL: relative URL used verbatim.
    @Test
    public void fillRemoteViewerUrl_relativeUrl_baseUrlMalformed() {
        testFillRemoteViewerUrl(
                "some" + ConfigureConsoleOptionsQuery.CONSOLE_CLIENT_RESOURCES_URL + "other",
                "o s w",
                "b/c",
                "some" + "b/c" + "other"
        );
    }

    // Relative resource URL is resolved against a well-formed base URL.
    @Test
    public void fillRemoteViewerUrl_relativeUrl_baseUrlCorrect() {
        testFillRemoteViewerUrl(
                "some" + ConfigureConsoleOptionsQuery.CONSOLE_CLIENT_RESOURCES_URL + "other",
                "http://www.ovirt.org",
                "b/c",
                "some" + "http://www.ovirt.org/b/c" + "other"
        );
    }

    // Trailing/leading slashes around the join must not double up.
    @Test
    public void fillRemoteViewerUrl_relativeUrl_baseUrlCorrect_slashsAround() {
        testFillRemoteViewerUrl(
                "some" + ConfigureConsoleOptionsQuery.CONSOLE_CLIENT_RESOURCES_URL + "other",
                "http://www.ovirt.org/",
                "/b/c",
                "some" + "http://www.ovirt.org/b/c" + "other"
        );
    }

    // Shared driver: runs fillRemoteViewerUrl and checks the resulting URL.
    private void testFillRemoteViewerUrl(String toRepalce, String baseUrl, String resourceUrl, String expected) {
        ConsoleOptions options = new ConsoleOptions();
        ConfigureConsoleOptionsParams params =
                new ConfigureConsoleOptionsParams(getValidOptions(GraphicsType.SPICE), false);
        ConfigureConsoleOptionsQuery query = new ConfigureConsoleOptionsQuery(params);
        query.fillRemoteViewerUrl(
                options,
                toRepalce,
                baseUrl,
                resourceUrl
        );
        assertEquals(expected, options.getRemoteViewerNewerVersionUrl());
    }

    // Stubs the SPICE-related config values executeQueryCommand reads.
    private void mockSpiceRelatedConfig(ConfigureConsoleOptionsQuery query) {
        doReturn(null).when(query).getConfigValue(any(ConfigValues.class));
        doReturn(false).when(query).getConfigValue(ConfigValues.SSLEnabled);
        doReturn(false).when(query).getConfigValue(ConfigValues.EnableUSBAsDefault);
    }

    // Builds an Up VM exposing the given graphics type on host:5901.
    private VM mockVm(GraphicsType graphicsType) {
        VM vm = new VM();
        vm.setId(Guid.Empty);
        vm.getGraphicsInfos().put(graphicsType, new GraphicsInfo().setIp("host").setPort(5901));
        vm.setStatus(VMStatus.Up);
        return vm;
    }
}
package com.ragnarok.jparseutil.visitor;

import com.github.javaparser.ast.body.*;
import com.github.javaparser.ast.expr.AnnotationExpr;
import com.github.javaparser.ast.type.ClassOrInterfaceType;
import com.ragnarok.jparseutil.dataobject.*;
import com.ragnarok.jparseutil.memberparser.*;
import com.ragnarok.jparseutil.util.Log;
import com.ragnarok.jparseutil.util.Util;

import java.util.List;

/**
 * Created by ragnarok on 15/5/24.
 * the ClassTreeVisitor, inspect {@link com.sun.source.tree.ClassTree} and
 * extract info from a class
 */
public class ClassTreeVisitor {

    public static final String TAG = "JParserUtil.ClassTreeVisitor";

    // Source file being populated; assigned on every inspectTypeDeclaration call.
    private SourceInfo sourceInfo;
    // Info object for the type currently being visited; may be lazily re-fetched
    // from sourceInfo by the inspect* methods when null.
    private ClassInfo currentClassInfo;

    private String outerClassName = null; // fully qualified

    private String currentClassName = null; // fully qualified

    public ClassTreeVisitor() {

    }

    /**
     * Entry point: inspects one type declaration (class, interface, enum or
     * annotation) and records its info into {@code sourceInfo}.
     *
     * @param sourceInfo      accumulator for everything extracted from this source file
     * @param typeDeclaration the javaparser AST node to inspect; no-op when null
     * @param outerClassName  fully qualified name of the enclosing class, or null
     *                        for a top-level type
     * @param ignoreSelf      when true, the type itself is assumed to be already
     *                        registered (see inspectInnerClass) and only its members
     *                        are visited
     */
    public void inspectTypeDeclaration(SourceInfo sourceInfo, TypeDeclaration typeDeclaration, String outerClassName, boolean ignoreSelf) {
        if (typeDeclaration == null) {
            return;
        }
        this.sourceInfo = sourceInfo;
        this.outerClassName = outerClassName;
        Log.d(TAG, "inspectClassTree, name: %s, kind: %s, outerClassName: %s", typeDeclaration.getName(),
                typeDeclaration.getClass().getName(), outerClassName);
        // Only class/interface, enum and annotation declarations are handled;
        // any other TypeDeclaration subtype is ignored.
        if (typeDeclaration instanceof ClassOrInterfaceDeclaration ||
                typeDeclaration instanceof EnumDeclaration ||
                typeDeclaration instanceof AnnotationDeclaration) {
            if (!ignoreSelf) {
                // Register this type and set currentClassName/currentClassInfo.
                addClassInfo(typeDeclaration);
            } else if (outerClassName != null) {
                // Type was registered by the caller (inner-class recursion);
                // just resolve the already-stored ClassInfo.
                currentClassName = Util.buildClassName(outerClassName, typeDeclaration.getName());
                currentClassInfo = sourceInfo.getClassInfoByQualifiedName(currentClassName);
            }
            if (typeDeclaration.getMembers() != null) {
                inspectAllClassMembers(typeDeclaration.getMembers());
            }
        }
    }

    /**
     * Builds a {@link ClassInfo} (or {@link AnnotationInfo}) for the given type,
     * registers it in {@code sourceInfo}, and makes it the current class.
     * Also walks enum constants immediately when the type is an enum.
     */
    private void addClassInfo(TypeDeclaration typeDeclaration) {
        if (typeDeclaration != null) {
            String simpleName = typeDeclaration.getName();
            ClassInfo classInfo = null;
            if (typeDeclaration instanceof AnnotationDeclaration) {
                classInfo = new AnnotationInfo();
            } else {
                classInfo = new ClassInfo();
            }
            classInfo.setPackageName(this.sourceInfo.getPackageName());
            classInfo.setSimpleName(simpleName);
            classInfo = setClassInfoAttribute(classInfo, typeDeclaration);
            String qualifiedName;
            if (outerClassName != null) {
                // Nested type: qualify against the enclosing class name.
                qualifiedName = Util.buildClassName(outerClassName, simpleName);
            } else {
                // Top-level type: qualify against the package name.
                qualifiedName = Util.buildClassName(sourceInfo.getPackageName(), simpleName);
            }
            classInfo.setQualifiedName(qualifiedName);
            ReferenceSourceMap.getInstance().addClassNameToSourceMap(classInfo.getQualifiedName());
            currentClassName = qualifiedName;
            Log.d(TAG, "addClassInfo, simpleName: %s, qualifiedName: %s", simpleName, qualifiedName);
            sourceInfo.addClassInfo(classInfo);
            // NOTE: "isAnnotaiton"/"putAnnotaiotn" are misspelled in the dataobject
            // API itself; the names must match those declarations.
            if (classInfo.isAnnotaiton() && classInfo instanceof AnnotationInfo) {
                sourceInfo.putAnnotaiotn((AnnotationInfo) classInfo);
            }
            currentClassInfo = classInfo;
            if (classInfo.isEnum()) {
                EnumDeclaration enumDeclaration = (EnumDeclaration) typeDeclaration;
                if (enumDeclaration.getEntries() != null) {
                    for (EnumConstantDeclaration enumConstantDeclaration : enumDeclaration.getEntries()) {
                        inspectEnumConstant(enumConstantDeclaration);
                    }
                }
            }
        }
    }

    /**
     * Fills in the kind flags (interface/enum/annotation), modifiers, superclass,
     * implemented interfaces and class-level annotations on {@code classInfo}.
     *
     * @return the same {@code classInfo}, after mutation
     */
    private ClassInfo setClassInfoAttribute(ClassInfo classInfo, TypeDeclaration typeDeclaration) {
        if (typeDeclaration instanceof ClassOrInterfaceDeclaration) {
            ClassOrInterfaceDeclaration classOrInterfaceDeclaration = (ClassOrInterfaceDeclaration) typeDeclaration;
            if (classOrInterfaceDeclaration.isInterface()) {
                classInfo.setIsInterface(true);
                classInfo.setIsEnum(false);
                classInfo.setIsAnnotation(false);
            }
            // NOTE(review): a plain (non-interface) class leaves the three flags
            // at their defaults here — presumably false; confirm in ClassInfo.
        } else if (typeDeclaration instanceof EnumDeclaration) {
            classInfo.setIsInterface(false);
            classInfo.setIsEnum(true);
            classInfo.setIsAnnotation(false);
        } else if (typeDeclaration instanceof AnnotationDeclaration) {
            classInfo.setIsEnum(false);
            classInfo.setIsInterface(false);
            classInfo.setIsAnnotation(true);
        } else {
            classInfo.setIsEnum(false);
            classInfo.setIsInterface(false);
            classInfo.setIsAnnotation(false);
        }
        // Modifiers are encoded as javaparser bit flags; 0 means "none".
        if (typeDeclaration.getModifiers() != 0) {
            classInfo.addAllModifiers(Modifier.parseModifiersFromFlags(typeDeclaration.getModifiers()));
        }
        if (typeDeclaration instanceof ClassOrInterfaceDeclaration) {
            ClassOrInterfaceDeclaration classDeclaration = (ClassOrInterfaceDeclaration) typeDeclaration;
            // Only the first "extends" entry is used as the superclass.
            if (classDeclaration.getExtends() != null && classDeclaration.getExtends().size() > 0) {
                Type superClass = TypeParser.parseType(sourceInfo, classDeclaration.getExtends().get(0),
                        classDeclaration.getExtends().get(0).getName());
                classInfo.setSuperClass(superClass);
            }
            if (classDeclaration.getImplements() != null && classDeclaration.getImplements().size() > 0) {
                for (ClassOrInterfaceType type : classDeclaration.getImplements()) {
                    Type implementType = TypeParser.parseType(sourceInfo, type, type.getName());
                    classInfo.addImplements(implementType);
                }
            }
        }
        classInfo = parseClassAnnotation(typeDeclaration, classInfo);
        return classInfo;
    }

    /**
     * Parses the annotations placed on the type declaration itself
     * (e.g. {@code @Deprecated}) into {@code classInfo}.
     */
    private ClassInfo parseClassAnnotation(TypeDeclaration typeDeclaration, ClassInfo classInfo) {
        if (typeDeclaration.getAnnotations() != null && typeDeclaration.getAnnotations().size() > 0) {
            for (AnnotationExpr annotation : typeDeclaration.getAnnotations()) {
                AnnotationModifier annotationModifier = AnnotationModifierParser.parseAnnotation(sourceInfo, annotation);
                classInfo.putAnnotation(annotationModifier);
            }
        }
        return classInfo;
    }

    /**
     * Dispatches each class member to the matching inspect* method.
     * Members of unrecognized kinds (e.g. initializer blocks) are skipped.
     */
    private void inspectAllClassMembers(List<BodyDeclaration> classMembers) {
        for (BodyDeclaration member : classMembers) {
//            Log.d(TAG, "member.class: %s", member.getClass().getSimpleName());
            if (member instanceof FieldDeclaration) {
                inspectVariable((FieldDeclaration) member);
            } else if (member instanceof MethodDeclaration) {
                inspectMethod((MethodDeclaration) member);
            } else if (member instanceof TypeDeclaration) {
                inspectInnerClass((TypeDeclaration) member);
            } else if (member instanceof AnnotationMemberDeclaration) {
                inspectAnnotationMember((AnnotationMemberDeclaration) member);
            }
        }
    }

    /**
     * Records a field declaration on the current class and pushes the updated
     * ClassInfo back into {@code sourceInfo}.
     */
    private void inspectVariable(FieldDeclaration variableDecl) {
        Log.d(TAG, "inspectVariable, class: %s", currentClassName);
        VariableInfo variableInfo = VariableParser.parseVariable(sourceInfo, variableDecl);
        if (currentClassInfo == null) {
            currentClassInfo = sourceInfo.getClassInfoByQualifiedName(currentClassName);
        }
        if (currentClassInfo != null) {
            variableInfo.setContainedClass(currentClassInfo);
            currentClassInfo.addVariable(variableInfo);
            sourceInfo.updateClassInfoByQualifiedName(currentClassName, currentClassInfo);
            if (currentClassInfo.isAnnotaiton()) {
                sourceInfo.updateAnnotationByQualifiedName(currentClassName, (AnnotationInfo) currentClassInfo);
            }
        }
    }

    /**
     * Records a method declaration on the current class.
     * NOTE(review): unlike inspectVariable, this dereferences currentClassInfo
     * without a null guard after the lookup — if the qualified-name lookup
     * misses, putMethod will NPE; confirm whether that lookup can fail here.
     */
    private void inspectMethod(MethodDeclaration methodDecl) {
        if (currentClassInfo == null) {
            currentClassInfo = sourceInfo.getClassInfoByQualifiedName(currentClassName);
        }
        MethodInfo methodInfo = MethodParser.parseMethodInfo(currentClassInfo, sourceInfo, methodDecl);
        if (methodInfo != null) {
            currentClassInfo.putMethod(methodInfo);
            sourceInfo.updateClassInfoByQualifiedName(currentClassName, currentClassInfo);
            if (currentClassInfo.isAnnotaiton()) {
                sourceInfo.updateAnnotationByQualifiedName(currentClassName, (AnnotationInfo) currentClassInfo);
            }
        }
    }

    /**
     * Registers an inner type (qualified against the current class name) and
     * recurses into it with a fresh visitor, passing ignoreSelf=true because
     * the inner type is registered here before recursion.
     */
    private void inspectInnerClass(TypeDeclaration classDecl) {
        String simpleName = classDecl.getName();
        String qualifiedName = null;
        qualifiedName = Util.buildClassName(this.currentClassName, simpleName);
        Log.d(TAG, "inspectInnerClass, qualifiedName: %s, outerClassName: %s, currentClassName: %s",
                qualifiedName, outerClassName, currentClassName);
        ClassInfo classInfo = null;
        if (classDecl instanceof AnnotationDeclaration) {
            classInfo = new AnnotationInfo();
        } else {
            classInfo = new ClassInfo();
        }
        classInfo.setSimpleName(simpleName);
        classInfo.setQualifiedName(qualifiedName);
        classInfo.setPackageName(sourceInfo.getPackageName());
        setClassInfoAttribute(classInfo, classDecl);
        sourceInfo.addClassInfo(classInfo);
        if (classInfo.isAnnotaiton()) {
            sourceInfo.putAnnotaiotn((AnnotationInfo) classInfo);
        }
        ReferenceSourceMap.getInstance().addClassNameToSourceMap(classInfo.getQualifiedName());

        // recursive parse
        // NOTE(review): uses currentClassInfo.getQualifiedName() rather than
        // currentClassName — NPEs if currentClassInfo was never resolved.
        new ClassTreeVisitor().inspectTypeDeclaration(sourceInfo, classDecl, currentClassInfo.getQualifiedName(), true);
    }

    /**
     * Records one {@code @interface} member (name, type, optional default value)
     * on the current annotation's parameter list. No-op unless the current class
     * is an annotation.
     */
    private void inspectAnnotationMember(AnnotationMemberDeclaration annotationMember) {
        if (currentClassInfo == null) {
            currentClassInfo = sourceInfo.getClassInfoByQualifiedName(currentClassName);
        }
        if (currentClassInfo.isAnnotaiton()) {
            AnnotationInfo annotationInfo = (AnnotationInfo) currentClassInfo;
            Type type = TypeParser.parseType(sourceInfo, annotationMember.getType(), annotationMember.getType().toString());
            String name = annotationMember.getName();
            Log.d(TAG, "inspectAnnotationMember, paramName: %s, paramType: %s", name, type);
            if (annotationMember.getDefaultValue() != null) {
                String defaultValueLiteral = annotationMember.getDefaultValue().toString();
                Log.d(TAG, "defaultValueLiteral: %s, defaultValueClass: %s", defaultValueLiteral,
                        annotationMember.getDefaultValue().getClass().getSimpleName());
                Object defaultValue = VariableInitParser.parseVariableInit(sourceInfo, annotationMember.getDefaultValue());
                annotationInfo.putParams(type, name, defaultValue);
            } else {
                // No default: record the parameter with a null default value.
                annotationInfo.putParams(type, name, null);
            }
            sourceInfo.updateClassInfoByQualifiedName(currentClassName, annotationInfo);
            sourceInfo.updateAnnotationByQualifiedName(currentClassName, annotationInfo);
        }
    }

    /**
     * Records a single enum constant on the current class and pushes the
     * updated ClassInfo back into {@code sourceInfo}.
     */
    private void inspectEnumConstant(EnumConstantDeclaration enumConstantDeclaration) {
        Log.d(TAG, "inspectEnumConstant, name: %s", enumConstantDeclaration.getName());
        if (currentClassInfo != null) {
            EnumConstant enumConstant = new EnumConstant();
            enumConstant.setName(enumConstantDeclaration.getName());
            currentClassInfo.addEnumConstant(enumConstant);
            sourceInfo.updateClassInfoByQualifiedName(currentClassName, currentClassInfo);
            if (currentClassInfo.isAnnotaiton()) {
                sourceInfo.updateAnnotationByQualifiedName(currentClassName, (AnnotationInfo) currentClassInfo);
            }
        }
    }
}
package org.openqa.selenium.server;

import org.openqa.selenium.Capabilities;
import org.openqa.selenium.browserlaunchers.BrowserLauncher;
import org.openqa.selenium.server.browserlaunchers.BrowserLauncherFactory;
import org.openqa.selenium.server.browserlaunchers.InvalidBrowserExecutableException;
import org.openqa.selenium.server.log.LoggingManager;

import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Manages browser sessions, their creation, and their closure.
 * <p/>
 * Maintains a cache of unused and available browser sessions in case
 * the server is reusing sessions. Also manages the creation and
 * finalization of all browser sessions.
 *
 * @author jbevan@google.com (Jennifer Bevan)
 */
public class BrowserSessionFactory {

    private static final long DEFAULT_CLEANUP_INTERVAL = 300000; // 5 minutes.
    private static final long DEFAULT_MAX_IDLE_SESSION_TIME = 600000; // 10 minutes

    private static Logger log = Logger.getLogger(BrowserSessionFactory.class.getName());

    // cached, unused, already-launched browser sessions.
    protected final Set<BrowserSessionInfo> availableSessions =
            Collections.synchronizedSet(new HashSet<BrowserSessionInfo>());

    // active browser sessions.
    protected final Set<BrowserSessionInfo> activeSessions =
            Collections.synchronizedSet(new HashSet<BrowserSessionInfo>());

    private final BrowserLauncherFactory browserLauncherFactory;
    private final Timer cleanupTimer;
    private final long maxIdleSessionTime;
    private final boolean doCleanup;

    public BrowserSessionFactory(BrowserLauncherFactory blf) {
        this(blf, DEFAULT_CLEANUP_INTERVAL, DEFAULT_MAX_IDLE_SESSION_TIME, true);
    }

    /**
     * Constructor for testing purposes.
     *
     * @param blf                an injected BrowserLauncherFactory.
     * @param cleanupInterval    the time between idle available session cleaning sweeps.
     * @param maxIdleSessionTime the max time in ms for an available session to be idle.
     * @param doCleanup          whether or not the idle session cleanup thread should run.
     */
    protected BrowserSessionFactory(BrowserLauncherFactory blf,
                                    long cleanupInterval,
                                    long maxIdleSessionTime,
                                    boolean doCleanup) {
        browserLauncherFactory = blf;
        this.maxIdleSessionTime = maxIdleSessionTime;
        this.doCleanup = doCleanup;
        cleanupTimer = new Timer(/* daemon= */true);
        if (doCleanup) {
            cleanupTimer.schedule(new CleanupTask(), 0, cleanupInterval);
        }
    }

    /**
     * Gets a new browser session, using the SeleniumServer static fields
     * to populate parameters.
     *
     * @param browserString
     * @param startURL
     * @param extensionJs   per-session user extension Javascript
     * @param configuration Remote Control configuration. Cannot be null.
     * @return the BrowserSessionInfo for the new browser session.
     * @throws RemoteCommandException
     */
    public BrowserSessionInfo getNewBrowserSession(String browserString, String startURL,
                                                   String extensionJs, Capabilities browserConfigurations,
                                                   RemoteControlConfiguration configuration)
            throws RemoteCommandException {
        return getNewBrowserSession(browserString, startURL, extensionJs, browserConfigurations,
                configuration.reuseBrowserSessions(),
                configuration.isEnsureCleanSession(), configuration);
    }

    /**
     * Gets a new browser session
     *
     * @param browserString
     * @param startURL
     * @param extensionJs   per-session user extension Javascript
     * @param configuration Remote Control configuration. Cannot be null.
     * @param useCached     if a cached session should be used if one is available
     * @param ensureClean   if a clean session (e.g. no previous cookies) is required.
     * @return the BrowserSessionInfo for the new browser session.
     * @throws RemoteCommandException
     */
    protected BrowserSessionInfo getNewBrowserSession(String browserString, String startURL,
                                                      String extensionJs, Capabilities browserConfigurations,
                                                      boolean useCached, boolean ensureClean,
                                                      RemoteControlConfiguration configuration)
            throws RemoteCommandException {

        BrowserSessionInfo sessionInfo = null;
        browserString = validateBrowserString(browserString, configuration);

        if (configuration.getProxyInjectionModeArg()) {
            InjectionHelper.setBrowserSideLogEnabled(configuration.isBrowserSideLogEnabled());
            InjectionHelper.init();
        }

        if (useCached) {
            log.info("grabbing available session...");
            sessionInfo = grabAvailableSession(browserString, startURL);
        }

        // couldn't find one in the cache, or not reusing sessions.
        if (null == sessionInfo) {
            log.info("creating new remote session");
            sessionInfo = createNewRemoteSession(browserString, startURL, extensionJs,
                    browserConfigurations, ensureClean, configuration);
        }

        assert null != sessionInfo;
        if (false/*ensureClean*/) {
            // need to add this to the launcher API.
            // sessionInfo.launcher.hideCurrentSessionData();
        }
        return sessionInfo;
    }

    /**
     * Ends all browser sessions.
     * <p/>
     * Active and available but inactive sessions are ended.
     */
    protected void endAllBrowserSessions(RemoteControlConfiguration configuration) {
        boolean done = false;
        Set<BrowserSessionInfo> allSessions = new HashSet<BrowserSessionInfo>();
        while (!done) {
            // to avoid concurrent modification exceptions...
            synchronized (activeSessions) {
                for (BrowserSessionInfo sessionInfo : activeSessions) {
                    allSessions.add(sessionInfo);
                }
            }
            synchronized (availableSessions) {
                for (BrowserSessionInfo sessionInfo : availableSessions) {
                    allSessions.add(sessionInfo);
                }
            }
            for (BrowserSessionInfo sessionInfo : allSessions) {
                endBrowserSession(true, sessionInfo.sessionId, configuration);
            }
            done = (0 == activeSessions.size() && 0 == availableSessions.size());
            allSessions.clear();
        }
        // Cancel once we're finished, rather than on every pass of the retry
        // loop (Timer.cancel is idempotent, so this is behavior-compatible).
        if (doCleanup) {
            cleanupTimer.cancel();
        }
    }

    /**
     * Ends a browser session, using SeleniumServer static fields to populate
     * parameters.
     *
     * @param sessionId     the id of the session to be ended
     * @param configuration Remote Control configuration. Cannot be null.
     */
    public void endBrowserSession(String sessionId, RemoteControlConfiguration configuration) {
        endBrowserSession(false, sessionId, configuration, configuration.isEnsureCleanSession());
    }

    /**
     * Ends a browser session, using SeleniumServer static fields to populate
     * parameters.
     *
     * @param sessionId     the id of the session to be ended
     * @param configuration Remote Control configuration. Cannot be null.
     */
    public void endBrowserSession(boolean forceClose, String sessionId,
                                  RemoteControlConfiguration configuration) {
        endBrowserSession(forceClose, sessionId, configuration, configuration.isEnsureCleanSession());
    }

    /**
     * Ends a browser session.
     *
     * @param sessionId     the id of the session to be ended
     * @param configuration Remote Control configuration. Cannot be null.
     * @param ensureClean   if clean sessions (e.g. no leftover cookies) are required.
     */
    protected void endBrowserSession(boolean forceClose, String sessionId,
                                     RemoteControlConfiguration configuration, boolean ensureClean) {
        BrowserSessionInfo sessionInfo = lookupInfoBySessionId(sessionId, activeSessions);
        if (null != sessionInfo) {
            activeSessions.remove(sessionInfo);
            try {
                if (forceClose || !configuration.reuseBrowserSessions()) {
                    shutdownBrowserAndClearSessionData(sessionInfo);
                } else {
                    if (null != sessionInfo.session) { // optional field
                        sessionInfo.session.reset(sessionInfo.baseUrl);
                    }
                    // mark what time this session was ended
                    sessionInfo.lastClosedAt = System.currentTimeMillis();
                    availableSessions.add(sessionInfo);
                }
            } finally {
                try {
                    LoggingManager.perSessionLogHandler().clearSessionLogRecords(sessionId);
                } catch (IOException ex) {
                    // ignore: log-record cleanup failure must not mask session shutdown.
                }
                if (ensureClean) {
                    // need to add this to the launcher API.
                    // sessionInfo.launcher.restoreOriginalSessionData();
                }
            }
        } else {
            // look for it in the available sessions.
            sessionInfo = lookupInfoBySessionId(sessionId, availableSessions);
            if (null != sessionInfo &&
                    (forceClose || !configuration.reuseBrowserSessions())) {
                try {
                    availableSessions.remove(sessionInfo);
                    shutdownBrowserAndClearSessionData(sessionInfo);
                } finally {
                    try {
                        LoggingManager.perSessionLogHandler().clearSessionLogRecords(sessionId);
                    } catch (IOException e) {
                        // ignore: log-record cleanup failure must not mask session shutdown.
                    }
                    if (ensureClean) {
                        // sessionInfo.launcher.restoreOriginalSessionData();
                    }
                }
            }
        }
    }

    /**
     * Shuts down this browser session's launcher and clears out its session
     * data (if session is not null).
     *
     * @param sessionInfo the browser session to end.
     */
    protected void shutdownBrowserAndClearSessionData(BrowserSessionInfo sessionInfo) {
        try {
            sessionInfo.launcher.close(); // can throw RuntimeException
        } finally {
            if (null != sessionInfo.session) {
                FrameGroupCommandQueueSet.clearQueueSet(sessionInfo.sessionId);
            }
        }
    }

    /**
     * Rewrites the given browser string based on server settings.
     *
     * @param inputString the input browser string
     * @return a possibly-modified browser string.
     * @throws IllegalArgumentException if inputString is null.
     */
    private String validateBrowserString(String inputString, RemoteControlConfiguration configuration)
            throws IllegalArgumentException {
        String browserString = inputString;
        if (configuration.getForcedBrowserMode() != null) {
            browserString = configuration.getForcedBrowserMode();
            log.info("overriding browser mode w/ forced browser mode setting: " + browserString);
        }
        // FIX: check for null before dereferencing. Previously this check sat
        // at the end of the method, after browserString.equals(...) calls, so a
        // null input threw NullPointerException instead of the documented
        // IllegalArgumentException.
        if (null == browserString) {
            throw new IllegalArgumentException("browser string may not be null");
        }
        if (configuration.getProxyInjectionModeArg() && browserString.equals("*iexplore")) {
            log.warning("running in proxy injection mode, but you used a *iexplore browser string; this is " +
                    "almost surely inappropriate, so I'm changing it to *piiexplore...");
            browserString = "*piiexplore";
        } else if (configuration.getProxyInjectionModeArg()
                && (browserString.equals("*firefox")
                || browserString.equals("*firefox2")
                || browserString.equals("*firefox3"))) {
            log.warning("running in proxy injection mode, but you used a " + browserString + " browser string; this is " +
                    "almost surely inappropriate, so I'm changing it to *pifirefox...");
            browserString = "*pifirefox";
        }
        return browserString;
    }

    /**
     * Retrieves an available, unused session from the cache.
     *
     * @param browserString the necessary browser for a suitable session
     * @param baseUrl       the necessary baseUrl for a suitable session
     * @return the session info of the cached session, null if none found.
     */
    protected BrowserSessionInfo grabAvailableSession(String browserString, String baseUrl) {
        BrowserSessionInfo sessionInfo = null;
        synchronized (availableSessions) {
            sessionInfo = lookupInfoByBrowserAndUrl(browserString, baseUrl, availableSessions);
            if (null != sessionInfo) {
                availableSessions.remove(sessionInfo);
            }
        }
        if (null != sessionInfo) {
            activeSessions.add(sessionInfo);
        }
        return sessionInfo;
    }

    /**
     * Isolated dependency
     *
     * @param sessionId
     * @param port
     * @param configuration
     * @return a new FrameGroupCommandQueueSet instance
     */
    protected FrameGroupCommandQueueSet makeQueueSet(String sessionId, int port,
                                                     RemoteControlConfiguration configuration) {
        // NOTE(review): the 'port' parameter is ignored; the queue set is built
        // from configuration.getPortDriversShouldContact() instead. Preserved
        // as-is since callers may rely on this — confirm intent before changing.
        return FrameGroupCommandQueueSet.makeQueueSet(sessionId,
                configuration.getPortDriversShouldContact(), configuration);
    }

    /**
     * Isolated dependency
     *
     * @param sessionId
     * @return an existing FrameGroupCommandQueueSet instance
     */
    protected FrameGroupCommandQueueSet getQueueSet(String sessionId) {
        return FrameGroupCommandQueueSet.getQueueSet(sessionId);
    }

    /**
     * Creates and tries to open a new session.
     *
     * @param browserString
     * @param startURL
     * @param extensionJs
     * @param configuration Remote Control configuration. Cannot be null.
     * @param ensureClean   if a clean session is required
     * @return the BrowserSessionInfo of the new session.
     * @throws RemoteCommandException if the browser failed to launch and
     *                                request work in the required amount of time.
     */
    protected BrowserSessionInfo createNewRemoteSession(String browserString, String startURL,
                                                        String extensionJs, Capabilities browserConfiguration,
                                                        boolean ensureClean, RemoteControlConfiguration configuration)
            throws RemoteCommandException {

        final FrameGroupCommandQueueSet queueSet;
        final BrowserSessionInfo sessionInfo;
        final BrowserLauncher launcher;
        final String sessionId;

        sessionId = UUID.randomUUID().toString().replace("-", "");
        queueSet = makeQueueSet(sessionId, configuration.getPortDriversShouldContact(), configuration);
        queueSet.setExtensionJs(extensionJs);

        try {
            launcher = browserLauncherFactory.getBrowserLauncher(
                    browserString, sessionId, configuration, browserConfiguration);
        } catch (InvalidBrowserExecutableException e) {
            // FIX: preserve the original exception as the cause (previously only
            // the message was kept, losing the stack trace).
            throw new RemoteCommandException(e.getMessage(), "", e);
        }

        sessionInfo = new BrowserSessionInfo(sessionId, browserString, startURL, launcher, queueSet);
        SeleniumDriverResourceHandler.setLastSessionId(sessionId);
        log.info("Allocated session " + sessionId + " for " + startURL + ", launching...");
        LoggingManager.perSessionLogHandler().setThreadToSessionMapping(
                Thread.currentThread().getId(), sessionId);
        LoggingManager.perSessionLogHandler().copyThreadTempLogsToSessionLogs(
                sessionId, Thread.currentThread().getId());
        try {
            launcher.launchRemoteSession(startURL);
            queueSet.waitForLoad(configuration.getTimeoutInSeconds() * 1000L);

            // TODO DGF log4j only
            // NDC.push("sessionId="+sessionId);
            FrameGroupCommandQueueSet queue = getQueueSet(sessionId);
            queue.doCommand("setContext", sessionId, "");

            activeSessions.add(sessionInfo);
            return sessionInfo;
        } catch (Exception e) {
            /*
             * At this point the session might not have been added to neither available nor active sessions.
             * This session is unlikely to be of any practical use so we need to make sure we close the browser
             * and clear all session data.
             */
            log.log(Level.SEVERE, "Failed to start new browser session, shutdown browser and clear all session data", e);
            shutdownBrowserAndClearSessionData(sessionInfo);
            throw new RemoteCommandException("Error while launching browser", "", e);
        }
    }

    /**
     * Adds a browser session that was not created by this factory to the
     * set of active sessions.
     * <p/>
     * Allows for creation of unmanaged sessions (i.e. no FrameGroupCommandQueueSet)
     * for task such as running the HTML tests (see HTMLLauncher.java). All
     * fields other than session are required to be non-null.
     *
     * @param sessionInfo the session info to register.
     */
    protected boolean registerExternalSession(BrowserSessionInfo sessionInfo) {
        boolean result = false;
        if (BrowserSessionInfo.isValid(sessionInfo)) {
            activeSessions.add(sessionInfo);
            result = true;
        }
        return result;
    }

    /**
     * Removes a previously registered external browser session from the
     * list of active sessions.
     *
     * @param sessionInfo the session to remove.
     */
    protected void deregisterExternalSession(BrowserSessionInfo sessionInfo) {
        activeSessions.remove(sessionInfo);
    }

    /**
     * Looks up a session in the named set by session id
     *
     * @param sessionId the session id to find
     * @param set       the Set to inspect
     * @return the matching BrowserSessionInfo or null if not found.
     */
    protected BrowserSessionInfo lookupInfoBySessionId(String sessionId, Set<BrowserSessionInfo> set) {
        BrowserSessionInfo result = null;
        synchronized (set) {
            for (BrowserSessionInfo info : set) {
                if (info.sessionId.equals(sessionId)) {
                    result = info;
                    break;
                }
            }
        }
        return result;
    }

    /**
     * Looks up a session in the named set by browser string and base URL
     *
     * @param browserString the browser string to match
     * @param baseUrl       the base URL to match.
     * @param set           the Set to inspect
     * @return the matching BrowserSessionInfo or null if not found.
     */
    protected BrowserSessionInfo lookupInfoByBrowserAndUrl(String browserString, String baseUrl,
                                                           Set<BrowserSessionInfo> set) {
        BrowserSessionInfo result = null;
        synchronized (set) {
            for (BrowserSessionInfo info : set) {
                if (info.browserString.equals(browserString)
                        && info.baseUrl.equals(baseUrl)) {
                    result = info;
                    break;
                }
            }
        }
        return result;
    }

    /**
     * Evicts cached available sessions that have been idle longer than
     * maxIdleSessionTime. Called periodically by CleanupTask.
     */
    protected void removeIdleAvailableSessions() {
        long now = System.currentTimeMillis();
        synchronized (availableSessions) {
            Iterator<BrowserSessionInfo> iter = availableSessions.iterator();
            while (iter.hasNext()) {
                BrowserSessionInfo info = iter.next();
                if (now - info.lastClosedAt > maxIdleSessionTime) {
                    iter.remove();
                }
            }
        }
    }

    /**
     * for testing only
     */
    protected boolean hasActiveSession(String sessionId) {
        BrowserSessionInfo info = lookupInfoBySessionId(sessionId, activeSessions);
        return (null != info);
    }

    /**
     * for testing only
     */
    protected boolean hasAvailableSession(String sessionId) {
        BrowserSessionInfo info = lookupInfoBySessionId(sessionId, availableSessions);
        return (null != info);
    }

    /**
     * for testing only
     */
    protected void addToAvailableSessions(BrowserSessionInfo sessionInfo) {
        availableSessions.add(sessionInfo);
    }

    /**
     * Collection class to hold the objects associated with a browser session.
     *
     * @author jbevan@google.com (Jennifer Bevan)
     */
    public static class BrowserSessionInfo {

        public BrowserSessionInfo(String sessionId, String browserString,
                                  String baseUrl, BrowserLauncher launcher,
                                  FrameGroupCommandQueueSet session) {
            this.sessionId = sessionId;
            this.browserString = browserString;
            this.baseUrl = baseUrl;
            this.launcher = launcher;
            this.session = session; // optional field; may be null.
            lastClosedAt = 0;
        }

        public final String sessionId;
        public final String browserString;
        public final String baseUrl;
        public final BrowserLauncher launcher;
        public final FrameGroupCommandQueueSet session;
        public long lastClosedAt;

        /**
         * Browser sessions require the session id, the browser, the base URL,
         * and the launcher. They don't actually require the session to be set
         * up as a FrameGroupCommandQueueSet.
         *
         * @param sessionInfo the sessionInfo to validate.
         * @return true if all fields excepting session are non-null.
         */
        protected static boolean isValid(BrowserSessionInfo sessionInfo) {
            boolean result = (null != sessionInfo.sessionId
                    && null != sessionInfo.browserString
                    && null != sessionInfo.baseUrl
                    && null != sessionInfo.launcher);
            return result;
        }
    }

    /**
     * TimerTask that looks for unused sessions in the availableSessions collection.
     *
     * @author jbevan@google.com (Jennifer Bevan)
     */
    protected class CleanupTask extends TimerTask {
        @Override
        public void run() {
            removeIdleAvailableSessions();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tomcat.dbcp.dbcp2.cpdsadapter; import java.io.PrintWriter; import java.io.Serializable; import java.sql.DriverManager; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.Hashtable; import java.util.Properties; import java.util.logging.Logger; import javax.naming.Context; import javax.naming.Name; import javax.naming.NamingException; import javax.naming.RefAddr; import javax.naming.Reference; import javax.naming.Referenceable; import javax.naming.StringRefAddr; import javax.naming.spi.ObjectFactory; import javax.sql.ConnectionPoolDataSource; import javax.sql.PooledConnection; import org.apache.tomcat.dbcp.dbcp2.PoolablePreparedStatement; import org.apache.tomcat.dbcp.pool2.KeyedObjectPool; import org.apache.tomcat.dbcp.pool2.impl.BaseObjectPoolConfig; import org.apache.tomcat.dbcp.pool2.impl.GenericKeyedObjectPool; import org.apache.tomcat.dbcp.pool2.impl.GenericKeyedObjectPoolConfig; /** * <p> * An adapter for JDBC drivers that do not include an implementation * of {@link javax.sql.ConnectionPoolDataSource}, but still include a * {@link java.sql.DriverManager} implementation. 
* <code>ConnectionPoolDataSource</code>s are not used within general * applications. They are used by <code>DataSource</code> implementations * that pool <code>Connection</code>s, such as * {@link org.apache.tomcat.dbcp.dbcp2.datasources.SharedPoolDataSource}. A J2EE * container will normally provide some method of initializing the * <code>ConnectionPoolDataSource</code> whose attributes are presented * as bean getters/setters and then deploying it via JNDI. It is then * available as a source of physical connections to the database, when * the pooling <code>DataSource</code> needs to create a new * physical connection. * </p> * * <p> * Although normally used within a JNDI environment, the DriverAdapterCPDS * can be instantiated and initialized as any bean and then attached * directly to a pooling <code>DataSource</code>. * <code>Jdbc2PoolDataSource</code> can use the * <code>ConnectionPoolDataSource</code> with or without the use of JNDI. * </p> * * <p> * The DriverAdapterCPDS also provides <code>PreparedStatement</code> pooling * which is not generally available in jbdc2 * <code>ConnectionPoolDataSource</code> implementation, but is * addressed within the jdbc3 specification. The <code>PreparedStatement</code> * pool in DriverAdapterCPDS has been in the dbcp package for some time, but * it has not undergone extensive testing in the configuration used here. * It should be considered experimental and can be toggled with the * poolPreparedStatements attribute. * </p> * * <p> * The <a href="package-summary.html">package documentation</a> contains an * example using catalina and JNDI. The <a * href="../datasources/package-summary.html">datasources package documentation</a> * shows how to use <code>DriverAdapterCPDS</code> as a source for * <code>Jdbc2PoolDataSource</code> without the use of JNDI. * </p> * * @author John D. 
McNally * @since 2.0 */ public class DriverAdapterCPDS implements ConnectionPoolDataSource, Referenceable, Serializable, ObjectFactory { private static final long serialVersionUID = -4820523787212147844L; private static final String GET_CONNECTION_CALLED = "A PooledConnection was already requested from this source, " + "further initialization is not allowed."; /** Description */ private String description; /** Password */ private String password; /** Url name */ private String url; /** User name */ private String user; /** Driver class name */ private String driver; /** Login TimeOut in seconds */ private int loginTimeout; /** Log stream. NOT USED */ private transient PrintWriter logWriter = null; // PreparedStatement pool properties private boolean poolPreparedStatements; private int maxIdle = 10; private long _timeBetweenEvictionRunsMillis = BaseObjectPoolConfig.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS; private int _numTestsPerEvictionRun = -1; private int _minEvictableIdleTimeMillis = -1; private int _maxPreparedStatements = -1; /** Whether or not getConnection has been called */ private volatile boolean getConnectionCalled = false; /** Connection properties passed to JDBC Driver */ private Properties connectionProperties = null; static { // Attempt to prevent deadlocks - see DBCP - 272 DriverManager.getDrivers(); } /** * Controls access to the underlying connection */ private boolean accessToUnderlyingConnectionAllowed = false; /** * Default no-arg constructor for Serialization */ public DriverAdapterCPDS() { } /** * Attempt to establish a database connection using the default * user and password. */ @Override public PooledConnection getPooledConnection() throws SQLException { return getPooledConnection(getUser(), getPassword()); } /** * Attempt to establish a database connection. 
* @param username name to be used for the connection * @param pass password to be used fur the connection */ @Override public PooledConnection getPooledConnection(final String username, final String pass) throws SQLException { getConnectionCalled = true; PooledConnectionImpl pci = null; // Workaround for buggy WebLogic 5.1 classloader - ignore the // exception upon first invocation. try { if (connectionProperties != null) { connectionProperties.put("user", username); connectionProperties.put("password", pass); pci = new PooledConnectionImpl(DriverManager.getConnection( getUrl(), connectionProperties)); } else { pci = new PooledConnectionImpl(DriverManager.getConnection( getUrl(), username, pass)); } pci.setAccessToUnderlyingConnectionAllowed(isAccessToUnderlyingConnectionAllowed()); } catch (final ClassCircularityError e) { if (connectionProperties != null) { pci = new PooledConnectionImpl(DriverManager.getConnection( getUrl(), connectionProperties)); } else { pci = new PooledConnectionImpl(DriverManager.getConnection( getUrl(), username, pass)); } pci.setAccessToUnderlyingConnectionAllowed(isAccessToUnderlyingConnectionAllowed()); } KeyedObjectPool<PStmtKeyCPDS, PoolablePreparedStatement<PStmtKeyCPDS>> stmtPool = null; if (isPoolPreparedStatements()) { final GenericKeyedObjectPoolConfig config = new GenericKeyedObjectPoolConfig(); config.setMaxTotalPerKey(Integer.MAX_VALUE); config.setBlockWhenExhausted(false); config.setMaxWaitMillis(0); config.setMaxIdlePerKey(getMaxIdle()); if (getMaxPreparedStatements() <= 0) { // since there is no limit, create a prepared statement pool with an eviction thread // evictor settings are the same as the connection pool settings. 
config.setTimeBetweenEvictionRunsMillis(getTimeBetweenEvictionRunsMillis()); config.setNumTestsPerEvictionRun(getNumTestsPerEvictionRun()); config.setMinEvictableIdleTimeMillis(getMinEvictableIdleTimeMillis()); } else { // since there is limit, create a prepared statement pool without an eviction thread // pool has LRU functionality so when the limit is reached, 15% of the pool is cleared. // see org.apache.tomcat.dbcp.pool2.impl.GenericKeyedObjectPool.clearOldest method config.setMaxTotal(getMaxPreparedStatements()); config.setTimeBetweenEvictionRunsMillis(-1); config.setNumTestsPerEvictionRun(0); config.setMinEvictableIdleTimeMillis(0); } stmtPool = new GenericKeyedObjectPool<>(pci, config); pci.setStatementPool(stmtPool); } return pci; } @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException { throw new SQLFeatureNotSupportedException(); } // ---------------------------------------------------------------------- // Referenceable implementation /** * <CODE>Referenceable</CODE> implementation. 
*/ @Override public Reference getReference() throws NamingException { // this class implements its own factory final String factory = getClass().getName(); final Reference ref = new Reference(getClass().getName(), factory, null); ref.add(new StringRefAddr("description", getDescription())); ref.add(new StringRefAddr("driver", getDriver())); ref.add(new StringRefAddr("loginTimeout", String.valueOf(getLoginTimeout()))); ref.add(new StringRefAddr("password", getPassword())); ref.add(new StringRefAddr("user", getUser())); ref.add(new StringRefAddr("url", getUrl())); ref.add(new StringRefAddr("poolPreparedStatements", String.valueOf(isPoolPreparedStatements()))); ref.add(new StringRefAddr("maxIdle", String.valueOf(getMaxIdle()))); ref.add(new StringRefAddr("timeBetweenEvictionRunsMillis", String.valueOf(getTimeBetweenEvictionRunsMillis()))); ref.add(new StringRefAddr("numTestsPerEvictionRun", String.valueOf(getNumTestsPerEvictionRun()))); ref.add(new StringRefAddr("minEvictableIdleTimeMillis", String.valueOf(getMinEvictableIdleTimeMillis()))); ref.add(new StringRefAddr("maxPreparedStatements", String.valueOf(getMaxPreparedStatements()))); return ref; } // ---------------------------------------------------------------------- // ObjectFactory implementation /** * implements ObjectFactory to create an instance of this class */ @Override public Object getObjectInstance(final Object refObj, final Name name, final Context context, final Hashtable<?,?> env) throws Exception { // The spec says to return null if we can't create an instance // of the reference DriverAdapterCPDS cpds = null; if (refObj instanceof Reference) { final Reference ref = (Reference)refObj; if (ref.getClassName().equals(getClass().getName())) { RefAddr ra = ref.get("description"); if (ra != null && ra.getContent() != null) { setDescription(ra.getContent().toString()); } ra = ref.get("driver"); if (ra != null && ra.getContent() != null) { setDriver(ra.getContent().toString()); } ra = ref.get("url"); if (ra 
!= null && ra.getContent() != null) { setUrl(ra.getContent().toString()); } ra = ref.get("user"); if (ra != null && ra.getContent() != null) { setUser(ra.getContent().toString()); } ra = ref.get("password"); if (ra != null && ra.getContent() != null) { setPassword(ra.getContent().toString()); } ra = ref.get("poolPreparedStatements"); if (ra != null && ra.getContent() != null) { setPoolPreparedStatements(Boolean.valueOf( ra.getContent().toString()).booleanValue()); } ra = ref.get("maxIdle"); if (ra != null && ra.getContent() != null) { setMaxIdle(Integer.parseInt(ra.getContent().toString())); } ra = ref.get("timeBetweenEvictionRunsMillis"); if (ra != null && ra.getContent() != null) { setTimeBetweenEvictionRunsMillis( Integer.parseInt(ra.getContent().toString())); } ra = ref.get("numTestsPerEvictionRun"); if (ra != null && ra.getContent() != null) { setNumTestsPerEvictionRun( Integer.parseInt(ra.getContent().toString())); } ra = ref.get("minEvictableIdleTimeMillis"); if (ra != null && ra.getContent() != null) { setMinEvictableIdleTimeMillis( Integer.parseInt(ra.getContent().toString())); } ra = ref.get("maxPreparedStatements"); if (ra != null && ra.getContent() != null) { setMaxPreparedStatements( Integer.parseInt(ra.getContent().toString())); } ra = ref.get("accessToUnderlyingConnectionAllowed"); if (ra != null && ra.getContent() != null) { setAccessToUnderlyingConnectionAllowed( Boolean.valueOf(ra.getContent().toString()).booleanValue()); } cpds = this; } } return cpds; } /** * Throws an IllegalStateException, if a PooledConnection has already * been requested. */ private void assertInitializationAllowed() throws IllegalStateException { if (getConnectionCalled) { throw new IllegalStateException(GET_CONNECTION_CALLED); } } // ---------------------------------------------------------------------- // Properties /** * Gets the connection properties passed to the JDBC driver. * * @return the JDBC connection properties used when creating connections. 
*/ public Properties getConnectionProperties() { return connectionProperties; } /** * <p>Sets the connection properties passed to the JDBC driver.</p> * * <p>If <code>props</code> contains "user" and/or "password" * properties, the corresponding instance properties are set. If these * properties are not present, they are filled in using * {@link #getUser()}, {@link #getPassword()} when {@link #getPooledConnection()} * is called, or using the actual parameters to the method call when * {@link #getPooledConnection(String, String)} is called. Calls to * {@link #setUser(String)} or {@link #setPassword(String)} overwrite the values * of these properties if <code>connectionProperties</code> is not null.</p> * * @param props Connection properties to use when creating new connections. * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setConnectionProperties(final Properties props) { assertInitializationAllowed(); connectionProperties = props; if (connectionProperties.containsKey("user")) { setUser(connectionProperties.getProperty("user")); } if (connectionProperties.containsKey("password")) { setPassword(connectionProperties.getProperty("password")); } } /** * Gets the value of description. This property is here for use by * the code which will deploy this datasource. It is not used * internally. * * @return value of description, may be null. * @see #setDescription(String) */ public String getDescription() { return description; } /** * Sets the value of description. This property is here for use by * the code which will deploy this datasource. It is not used * internally. * * @param v Value to assign to description. */ public void setDescription(final String v) { this.description = v; } /** * Gets the value of password for the default user. * @return value of password. */ public String getPassword() { return password; } /** * Sets the value of password for the default user. * @param v Value to assign to password. 
* @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setPassword(final String v) { assertInitializationAllowed(); this.password = v; if (connectionProperties != null) { connectionProperties.setProperty("password", v); } } /** * Gets the value of url used to locate the database for this datasource. * @return value of url. */ public String getUrl() { return url; } /** * Sets the value of URL string used to locate the database for this datasource. * @param v Value to assign to url. * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setUrl(final String v) { assertInitializationAllowed(); this.url = v; } /** * Gets the value of default user (login or username). * @return value of user. */ public String getUser() { return user; } /** * Sets the value of default user (login or username). * @param v Value to assign to user. * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setUser(final String v) { assertInitializationAllowed(); this.user = v; if (connectionProperties != null) { connectionProperties.setProperty("user", v); } } /** * Gets the driver classname. * @return value of driver. */ public String getDriver() { return driver; } /** * Sets the driver classname. Setting the driver classname cause the * driver to be registered with the DriverManager. * @param v Value to assign to driver. * @throws ClassNotFoundException Driver class was not found * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setDriver(final String v) throws ClassNotFoundException { assertInitializationAllowed(); this.driver = v; // make sure driver is registered Class.forName(v); } /** * Gets the maximum time in seconds that this data source can wait * while attempting to connect to a database. NOT USED. */ @Override public int getLoginTimeout() { return loginTimeout; } /** * Gets the log writer for this data source. 
NOT USED. */ @Override public PrintWriter getLogWriter() { return logWriter; } /** * Sets the maximum time in seconds that this data source will wait * while attempting to connect to a database. NOT USED. */ @Override public void setLoginTimeout(final int seconds) { loginTimeout = seconds; } /** * Sets the log writer for this data source. NOT USED. */ @Override public void setLogWriter(final PrintWriter out) { logWriter = out; } // ------------------------------------------------------------------ // PreparedStatement pool properties /** * Flag to toggle the pooling of <code>PreparedStatement</code>s * @return value of poolPreparedStatements. */ public boolean isPoolPreparedStatements() { return poolPreparedStatements; } /** * Flag to toggle the pooling of <code>PreparedStatement</code>s * @param v true to pool statements. * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setPoolPreparedStatements(final boolean v) { assertInitializationAllowed(); this.poolPreparedStatements = v; } /** * Gets the maximum number of statements that can remain idle in the * pool, without extra ones being released, or negative for no limit. * @return the value of maxIdle */ public int getMaxIdle() { return this.maxIdle; } /** * Gets the maximum number of statements that can remain idle in the * pool, without extra ones being released, or negative for no limit. * * @param maxIdle The maximum number of statements that can remain idle * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setMaxIdle(final int maxIdle) { assertInitializationAllowed(); this.maxIdle = maxIdle; } /** * Gets the number of milliseconds to sleep between runs of the * idle object evictor thread. * When non-positive, no idle object evictor thread will be * run. 
* @return the value of the evictor thread timer * @see #setTimeBetweenEvictionRunsMillis(long) */ public long getTimeBetweenEvictionRunsMillis() { return _timeBetweenEvictionRunsMillis; } /** * Sets the number of milliseconds to sleep between runs of the * idle object evictor thread. * When non-positive, no idle object evictor thread will be * run. * @param timeBetweenEvictionRunsMillis The time between runs * @see #getTimeBetweenEvictionRunsMillis() * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setTimeBetweenEvictionRunsMillis( final long timeBetweenEvictionRunsMillis) { assertInitializationAllowed(); _timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis; } /** * Gets the number of statements to examine during each run of the * idle object evictor thread (if any). * * @see #setNumTestsPerEvictionRun(int) * @see #setTimeBetweenEvictionRunsMillis(long) * @return the number of statements */ public int getNumTestsPerEvictionRun() { return _numTestsPerEvictionRun; } /** * Sets the number of statements to examine during each run of the * idle object evictor thread (if any). * <p> * When a negative value is supplied, <tt>ceil({*link #numIdle})/abs({*link #getNumTestsPerEvictionRun})</tt> * tests will be run. I.e., when the value is <i>-n</i>, roughly one <i>n</i>th of the * idle objects will be tested per run. * * @param numTestsPerEvictionRun number of statements to examine per run * @see #getNumTestsPerEvictionRun() * @see #setTimeBetweenEvictionRunsMillis(long) * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setNumTestsPerEvictionRun(final int numTestsPerEvictionRun) { assertInitializationAllowed(); _numTestsPerEvictionRun = numTestsPerEvictionRun; } /** * Gets the minimum amount of time a statement may sit idle in the pool * before it is eligible for eviction by the idle object evictor * (if any). 
* * @see #setMinEvictableIdleTimeMillis(int) * @see #setTimeBetweenEvictionRunsMillis(long) * @return the amount of time */ public int getMinEvictableIdleTimeMillis() { return _minEvictableIdleTimeMillis; } /** * Sets the minimum amount of time a statement may sit idle in the pool * before it is eligible for eviction by the idle object evictor * (if any). * When non-positive, no objects will be evicted from the pool * due to idle time alone. * @param minEvictableIdleTimeMillis minimum time to set (in ms) * @see #getMinEvictableIdleTimeMillis() * @see #setTimeBetweenEvictionRunsMillis(long) * @throws IllegalStateException if {@link #getPooledConnection()} has been called */ public void setMinEvictableIdleTimeMillis(final int minEvictableIdleTimeMillis) { assertInitializationAllowed(); _minEvictableIdleTimeMillis = minEvictableIdleTimeMillis; } /** * Returns the value of the accessToUnderlyingConnectionAllowed property. * * @return <code>true</code> if access to the underlying is allowed, * <code>false</code> otherwise. */ public synchronized boolean isAccessToUnderlyingConnectionAllowed() { return this.accessToUnderlyingConnectionAllowed; } /** * Sets the value of the accessToUnderlyingConnectionAllowed property. * It controls if the PoolGuard allows access to the underlying connection. * (Default: false) * * @param allow Access to the underlying connection is granted when true. */ public synchronized void setAccessToUnderlyingConnectionAllowed(final boolean allow) { this.accessToUnderlyingConnectionAllowed = allow; } /** * Gets the maximum number of prepared statements. * * @return maxPrepartedStatements value */ public int getMaxPreparedStatements() { return _maxPreparedStatements; } /** * Sets the maximum number of prepared statements. * @param maxPreparedStatements the new maximum number of prepared * statements */ public void setMaxPreparedStatements(final int maxPreparedStatements) { _maxPreparedStatements = maxPreparedStatements; } }
/* * NOTE: This copyright does *not* cover user programs that use HQ * program services by normal system calls through the application * program interfaces provided as part of the Hyperic Plug-in Development * Kit or the Hyperic Client Development Kit - this is merely considered * normal use of the program, and does *not* fall under the heading of * "derived work". * * Copyright (C) [2004-2008], Hyperic, Inc. * This file is part of HQ. * * HQ is free software; you can redistribute it and/or modify * it under the terms version 2 of the GNU General Public License as * published by the Free Software Foundation. This program is distributed * in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA. 
*/ package org.hyperic.tools.ant.dbupgrade; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.tools.ant.BuildException; import org.hyperic.hq.measurement.MeasurementConstants; import org.hyperic.hq.measurement.shared.MeasRange; import org.hyperic.hq.measurement.shared.MeasRangeObj; import org.hyperic.util.jdbc.DBUtil; public class SST_AvailRLEUpgrader extends SchemaSpecTask { public static final String logCtx = SST_AvailRLEUpgrader.class.getName(); public static final String SCHEMA_MOD_IN_PROGRESS = " *** UPGRADE TASK: migrating availability data to" + " Run Length Encoded Availability"; private static final List dataTables = new ArrayList(); private static final String TAB_MEAS = MeasurementConstants.TAB_MEAS; private static final String AVAILABILITY = MeasurementConstants.CAT_AVAILABILITY.toUpperCase(); private static final String TAB_MEAS_TEMPL = "EAM_MEASUREMENT_TEMPL"; private static final String TAB_AVAIL_RLE = "HQ_AVAIL_DATA_RLE"; private static final long ONE_HR = 1000*3600; private static final long SIX_HRS = 6*ONE_HR; private static final long ONE_DAY = 24*ONE_HR; private static final long MAX_TIMESTAMP = Long.MAX_VALUE; private static final long BATCH_SIZE = 1000; private static final String TAB_DATA_1H = MeasurementConstants.TAB_DATA_1H; private static final String TAB_DATA_6H = MeasurementConstants.TAB_DATA_6H; private static final String TAB_DATA_1D = MeasurementConstants.TAB_DATA_1D; public SST_AvailRLEUpgrader () {} private void initDataTables(Connection conn) throws SQLException { // these must be ordered from the latest table to the oldest List ranges = MeasRangeObj.getInstance().getRanges(); for (Iterator i=ranges.iterator(); i.hasNext(); ) { MeasRange range = (MeasRange)i.next(); 
dataTables.add(new TableObj(range.getTable(), ONE_HR, getMinTimestamp(range.getTable(), conn), getMaxTimestamp(range.getTable(), conn))); } // need to sort in descending order Collections.sort(dataTables); dataTables.add(new TableObj(TAB_DATA_1H, ONE_HR*10, getMinTimestamp(TAB_DATA_1H, conn), getMaxTimestamp(TAB_DATA_1H, conn))); dataTables.add(new TableObj(TAB_DATA_6H, SIX_HRS*10, getMinTimestamp(TAB_DATA_6H, conn), getMaxTimestamp(TAB_DATA_6H, conn))); dataTables.add(new TableObj(TAB_DATA_1D, ONE_DAY*10, getMinTimestamp(TAB_DATA_1D, conn), getMaxTimestamp(TAB_DATA_1D, conn))); } public void execute() throws BuildException { Map avails = new HashMap(); try { Connection conn = getConnection(); initDataTables(conn); log(SCHEMA_MOD_IN_PROGRESS); for (Iterator i=dataTables.iterator(); i.hasNext(); ) { TableObj table = (TableObj)i.next(); log("Migrating Table: "+table.getTable() + " min: "+table.getMinTimestamp() + " max: "+table.getMaxTimestamp()); setAvailData(avails, table, conn); } insertAvailData(avails, conn); } catch (SQLException e) { throw new BuildException(logCtx+": " + e.getMessage(), e); } } private void insertAvailData(Map avails, Connection conn) throws SQLException { String sql = "INSERT INTO " + TAB_AVAIL_RLE + " (measurement_id, " + "startime, endtime, availval) VALUES (?, ?, ?, ?)"; PreparedStatement pstmt = conn.prepareStatement(sql); List debugList = new ArrayList((int)BATCH_SIZE); try { for (Iterator i=avails.entrySet().iterator(); i.hasNext(); ) { Map.Entry entry = (Map.Entry)i.next(); int mid = ((Integer)entry.getKey()).intValue(); List list = (List)entry.getValue(); int ii=0; for (Iterator it=list.iterator(); it.hasNext(); ii++) { if (0 == (ii % BATCH_SIZE) && ii != 0) { int[] res = pstmt.executeBatch(); checkResult(res, debugList); debugList.clear(); pstmt.clearBatch(); } AvailData data = (AvailData)it.next(); //log("\tmid," + mid + // ":startime," + data.getStartTime() + // ":endtime," + data.getEndTime() + // ":availVal," + 
data.getAvailVal()); pstmt.clearParameters(); pstmt.setInt(1, mid); pstmt.setLong(2, data.getStartTime()); pstmt.setLong(3, data.getEndTime()); pstmt.setDouble(4, data.getAvailVal()); debugList.add(data); pstmt.addBatch(); } } int[] res = pstmt.executeBatch(); checkResult(res, debugList); } finally { DBUtil.closeStatement(logCtx, pstmt); } } private void checkResult(int[] res, List pts) { Iterator it = pts.iterator(); for (int i=0; i<res.length; i++) { AvailData pt = (AvailData)it.next(); if (res[i] == Statement.EXECUTE_FAILED) { log("ERROR inserting datapoint -> " + pt); } } } private long getMaxTimestamp(String table, Connection conn) throws SQLException { String sql = "SELECT max(timestamp) from " + table; ResultSet rs = null; Statement stmt = null; try { stmt = conn.createStatement(); rs = stmt.executeQuery(sql); if (rs.next()) { return rs.getLong(1); } return -1; } finally { DBUtil.closeJDBCObjects(logCtx, null, stmt, rs); } } private long getMinTimestamp(String table, Connection conn) throws SQLException { String sql = "SELECT min(timestamp) from " + table; ResultSet rs = null; Statement stmt = null; try { stmt = conn.createStatement(); rs = stmt.executeQuery(sql); if (rs.next()) { return rs.getLong(1); } return -1; } finally { DBUtil.closeJDBCObjects(logCtx, null, stmt, rs); } } private void setAvailData(Map avails, TableObj table, Connection conn) throws SQLException { long min = table.getMinTimestamp(); long max = table.getMaxTimestamp(); String sql = "SELECT timestamp, value, measurement_id" + " FROM " + table.getTable() + " d, " + TAB_MEAS + " m, " + TAB_MEAS_TEMPL + " t" + " WHERE d.timestamp between ? and ?" 
+ " AND d.measurement_id = m.id" + " AND t.id = m.template_id AND upper(t.alias) = '" + AVAILABILITY + "'" + " ORDER BY d.timestamp desc, d.measurement_id"; PreparedStatement pstmt = conn.prepareStatement(sql); ResultSet rs = null; try { long interval = table.getInterval(); for (long i=max; i>min; i-=interval) { pstmt.setLong(1, (i-interval)); pstmt.setLong(2, i); rs = pstmt.executeQuery(); int timestamp_col = rs.findColumn("timestamp"); int value_col = rs.findColumn("value"); int measId_col = rs.findColumn("measurement_id"); while (rs.next()) { long timestamp = rs.getLong(timestamp_col); double value = rs.getDouble(value_col); int measId = rs.getInt(measId_col); setAvail(avails, timestamp, value, measId); } DBUtil.closeResultSet(logCtx, rs); } } finally { DBUtil.closeJDBCObjects(logCtx, null, pstmt, rs); } } private void setAvail(Map avails, long timestamp, double value, int id) { List list; Integer mid = new Integer(id); if (null == (list = (List)avails.get(mid))) { AvailData data = new AvailData(timestamp, MAX_TIMESTAMP, id, value); list = new ArrayList(); list.add(data); avails.put(mid, list); } else { AvailData last = (AvailData)list.get(list.size()-1); // the timestamps in the rollup tables may overlap the more recent // tables due to delays in deletion after compression. 
If we see // overlapping just ignore the datapoint if (timestamp >= last.getStartTime()) { return; } if (value != last.getAvailVal()) { AvailData data = new AvailData(timestamp, last.getStartTime(), id, value); list.add(data); } else { last.setStartTime(timestamp); } } } private class TableObj implements Comparable { private String _table; private long _interval; private long _minTime; private long _maxTime; public TableObj(String table, long interval, long minTime, long maxTime) { _minTime = minTime; _maxTime = maxTime; _table = table; _interval = interval; } public String getTable() { return _table; } public long getInterval() { return _interval; } public long getMinTimestamp() { return _minTime; } public long getMaxTimestamp() { return _maxTime; } public int compareTo(Object rhs) throws ClassCastException { return compareTo((TableObj)rhs); } public int compareTo(TableObj rhs) throws ClassCastException { Long min = new Long(_minTime); Long rmin = new Long(rhs._minTime); // want to sort in descending order return rmin.compareTo(min); } } private class AvailData { long _startTime; long _endtime; int _mid; double _availval; public AvailData(long starttime, long endtime, int metric_id, double availval) { super(); _startTime = starttime; _endtime = endtime; _mid = metric_id; _availval = availval; } public long getStartTime() { return _startTime; } public void setStartTime(long startTime) { _startTime = startTime; } public long getEndTime() { return _endtime; } public int getMetric() { return _mid; } public double getAvailVal() { return _availval; } public String toString() { return "mid:"+_mid+";startime:"+_startTime+";endtime:"+_endtime+";availval:"+_availval; } } }
/******************************************************************************* * Copyright 2017 Cognizant Technology Solutions * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. ******************************************************************************/ package com.cognizant.devops.platformservice.webhook.service; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.StringTokenizer; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.springframework.stereotype.Service; import org.springframework.util.StringUtils; import com.cognizant.devops.platformcommons.constants.PlatformServiceConstants; import com.cognizant.devops.platformcommons.core.util.ValidationUtils; import com.cognizant.devops.platformcommons.exception.InsightsCustomException; import com.cognizant.devops.platformdal.webhookConfig.WebHookConfig; import com.cognizant.devops.platformdal.webhookConfig.WebHookConfigDAL; import com.cognizant.devops.platformdal.webhookConfig.WebhookDerivedConfig; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; @Service("webhookConfigurationService") public class WebHookServiceImpl implements IWebHook { private static final Logger log = LogManager.getLogger(WebHookServiceImpl.class); WebHookConfigDAL webhookConfigurationDAL = new WebHookConfigDAL(); @Override public Boolean saveWebHookConfiguration(JsonObject registerWebhookjson) throws 
InsightsCustomException { try { WebHookConfig webHookConfig = populateWebHookConfiguration(registerWebhookjson); return webhookConfigurationDAL.saveWebHookConfiguration(webHookConfig); } catch (InsightsCustomException e) { log.error("Error while saving the webhook .. {} ", e.getMessage()); throw new InsightsCustomException(e.getMessage()); } } @Override public List<WebHookConfig> getRegisteredWebHooks() throws InsightsCustomException { try { List<WebHookConfig> webhookConfigList = webhookConfigurationDAL.getAllWebHookConfigurations(); return webhookConfigList; } catch (Exception e) { log.error("Error getting all webhook config.. ", e); throw new InsightsCustomException(e.toString()); } } /** * Validation of the Response Template which has been entered by the user * * @param responseTemplate * @return * @throws InsightsCustomException */ private Boolean checkResponseTemplate(String responseTemplate) throws InsightsCustomException { try { String responseTemplateValidated = ValidationUtils.cleanXSS(responseTemplate); StringTokenizer st = new StringTokenizer(responseTemplateValidated, ","); int tokenCount = st.countTokens(); int maxTokenCount = 70; if(tokenCount > maxTokenCount) { log.debug("In checkResponseTemplate ==== parameter count exceeds max limit {}",tokenCount); throw new InsightsCustomException("Response template parameter count exceeds max limit."); } else { for(int i=0; i < tokenCount; i++){ String keyValuePairs = st.nextToken(); String validatedString = ValidationUtils.cleanXSS(keyValuePairs); int count = StringUtils.countOccurrencesOf(validatedString, "="); if (count != 1) { throw new InsightsCustomException(PlatformServiceConstants.INCORRECT_RESPONSE_TEMPLATE); } else { String[] dataKeyMapper = validatedString.split("="); log.debug(" {} , {} ", dataKeyMapper[0].trim(), dataKeyMapper[1].trim()); } } return true; } } catch (InsightsCustomException e) { log.error("Error in Response Template.. 
{}", e.getMessage()); throw new InsightsCustomException(PlatformServiceConstants.INCORRECT_RESPONSE_TEMPLATE); } } /** * Populating the data received from the user into the Object of the entity class. * * @param registerWebhookjson * @return * @throws InsightsCustomException */ private WebHookConfig populateWebHookConfiguration(JsonObject registerWebhookjson) throws InsightsCustomException { try { WebHookConfig webhookConfiguration = new WebHookConfig(); String responseTemplate = registerWebhookjson.get("responseTemplate").getAsString(); if (responseTemplate != "") { checkResponseTemplate(responseTemplate); } String dynamicTemplate = registerWebhookjson.get("dynamicTemplate").getAsString(); Boolean isUpdateRequired = registerWebhookjson.get("isUpdateRequired").getAsBoolean(); String eventConfig=registerWebhookjson.get("eventConfig").getAsString(); Boolean isEventProcessing = registerWebhookjson.get("isEventProcessing").getAsBoolean(); String webhookName = registerWebhookjson.get("webhookName").getAsString(); JsonArray derivedOperationsArray = registerWebhookjson.get("derivedOperations").getAsJsonArray(); Set<WebhookDerivedConfig> setWebhookDerivedConfigs = new HashSet<WebhookDerivedConfig>(); webhookConfiguration.setDataFormat(registerWebhookjson.get("dataformat").getAsString()); webhookConfiguration.setLabelName(registerWebhookjson.get("labelDisplay").getAsString().toUpperCase()); webhookConfiguration.setToolName(registerWebhookjson.get("toolName").getAsString()); webhookConfiguration.setMQChannel(registerWebhookjson.get("mqchannel").getAsString()); webhookConfiguration.setWebHookName(webhookName); webhookConfiguration.setSubscribeStatus(registerWebhookjson.get("statussubscribe").getAsBoolean()); webhookConfiguration.setResponseTemplate(responseTemplate); webhookConfiguration.setDynamicTemplate(dynamicTemplate); webhookConfiguration.setFieldUsedForUpdate(registerWebhookjson.get("fieldUsedForUpdate").getAsString()); 
// NOTE(review): this chunk begins mid-method — the head of populateWebHookConfiguration(...)
// (where webhookConfiguration, responseTemplate, dynamicTemplate, eventConfig,
// isEventProcessing, isUpdateRequired, webhookName, derivedOperationsArray and
// setWebhookDerivedConfigs are declared) lies before this view. Code is unchanged;
// only comments were added.
webhookConfiguration.setEventConfigJson(eventConfig);
webhookConfiguration.setEventProcessing(isEventProcessing);
// Normalize empty strings to null so the persisted entity stores NULL rather than "".
if (responseTemplate.isEmpty()) {
    webhookConfiguration.setResponseTemplate(null);
}
if (dynamicTemplate.isEmpty()) {
    webhookConfiguration.setDynamicTemplate(null);
}
if (eventConfig.isEmpty()) {
    webhookConfiguration.setEventConfigJson(null);
}
webhookConfiguration.setIsUpdateRequired(isUpdateRequired);
// NOTE(review): if isUpdateRequired (a Boolean) can ever be null this unboxing NPEs — confirm
// upstream validation guarantees it is non-null.
if (!isUpdateRequired.booleanValue()) {
    webhookConfiguration.setFieldUsedForUpdate(null);
}
// Build one WebhookDerivedConfig per element of the incoming derived-operations array.
for (JsonElement webhookDerivedConfigJson : derivedOperationsArray) {
    WebhookDerivedConfig webhookDerivedConfig = new WebhookDerivedConfig();
    JsonObject receivedObject = webhookDerivedConfigJson.getAsJsonObject();
    int wid = receivedObject.get("wid").getAsInt();
    webhookDerivedConfig.setOperationName(receivedObject.get("operationName").getAsString());
    // operationFields is stored as its raw JSON text, not a parsed value.
    webhookDerivedConfig.setOperationFields(receivedObject.get("operationFields").toString());
    webhookDerivedConfig.setWebhookName(webhookName);
    // wid == -1 is the sentinel for "no existing id" (presumably a new record) — only set a
    // real id when one was supplied.
    if (wid != -1) {
        webhookDerivedConfig.setWid(wid);
    }
    setWebhookDerivedConfigs.add(webhookDerivedConfig);
}
webhookConfiguration.setWebhookDerivedConfig(setWebhookDerivedConfigs);
return webhookConfiguration;
} catch (InsightsCustomException e) {
    // NOTE(review): re-wrapping with only e.getMessage() discards the original stack trace;
    // consider rethrowing e directly or passing it as the cause.
    log.error(e.getMessage());
    throw new InsightsCustomException(e.getMessage());
}
}

/**
 * Deletes the webhook configuration identified by the given name via the DAL.
 *
 * @param webhookname name of the webhook to remove
 * @return {@link PlatformServiceConstants#SUCCESS} on success
 * @throws InsightsCustomException if deletion fails for any reason
 */
@Override
public String uninstallWebhook(String webhookname) throws InsightsCustomException {
    try {
        webhookConfigurationDAL.deleteWebhookConfigurations(webhookname);
        return PlatformServiceConstants.SUCCESS;
    } catch (Exception e) {
        log.error("Error while un-installing webhook..", e);
        // e.toString() keeps the exception class name in the message; the cause itself is
        // logged above but not chained into the thrown exception.
        throw new InsightsCustomException(e.toString());
    }
}

/**
 * Updates an existing webhook from the supplied registration JSON.
 *
 * @param registerWebhookjson validated webhook registration payload
 * @return true if the DAL reports a successful update, false otherwise
 * @throws InsightsCustomException if population or persistence fails
 */
@Override
public Boolean updateWebHook(JsonObject registerWebhookjson) throws InsightsCustomException {
    Boolean status = false;
    try {
        WebHookConfig webHookConfig = populateWebHookConfiguration(registerWebhookjson);
        status = webhookConfigurationDAL.updateWebHookConfiguration(webHookConfig);
    } catch (Exception e) {
        log.error("Error in updating the webhook.. ", e);
        throw new InsightsCustomException(e.toString());
    }
    return status;
}

/**
 * Toggles the subscribe/active status flag of a webhook.
 *
 * Expects the JSON to contain string "webhookName" and boolean "statussubscribe";
 * a missing key here throws (caught below and wrapped).
 *
 * @param updateWebhookJsonValidated validated status-update payload
 * @return {@link PlatformServiceConstants#SUCCESS} on success
 * @throws InsightsCustomException if the payload is malformed or the DAL update fails
 */
public String updateWebhookStatus(JsonObject updateWebhookJsonValidated) throws InsightsCustomException {
    try {
        String webhookName = updateWebhookJsonValidated.get("webhookName").getAsString();
        Boolean statussubscribe = updateWebhookJsonValidated.get("statussubscribe").getAsBoolean();
        webhookConfigurationDAL.updateWebhookStatus(webhookName, statussubscribe);
        return PlatformServiceConstants.SUCCESS;
    } catch (Exception e) {
        log.error("Error while updating webhook status..", e);
        throw new InsightsCustomException(e.toString());
    }
}
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.tasks;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ResultDeduplicator;
import org.elasticsearch.action.StepListener;
import org.elasticsearch.action.support.ChannelActionListener;
import org.elasticsearch.action.support.GroupedActionListener;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.EmptyTransportResponseHandler;
import org.elasticsearch.transport.NodeDisconnectedException;
import org.elasticsearch.transport.NodeNotConnectedException;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequest;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Objects;

/**
 * Cancels a {@link CancellableTask} together with its descendant tasks, which may live on
 * other nodes. Descendants are stopped by broadcasting a "ban" for the parent task id over
 * the transport layer ({@link #BAN_PARENT_ACTION_NAME}); each receiving node cancels its
 * matching child tasks and prevents new ones from registering until the ban is removed.
 */
public class TaskCancellationService {
    public static final String BAN_PARENT_ACTION_NAME = "internal:admin/tasks/ban";
    private static final Logger logger = LogManager.getLogger(TaskCancellationService.class);
    private final TransportService transportService;
    private final TaskManager taskManager;
    // Collapses concurrent cancel requests for the same (task, waitForCompletion) pair into a
    // single execution; all callers' listeners are completed with the one result.
    private final ResultDeduplicator<CancelRequest, Void> deduplicator = new ResultDeduplicator<>();

    public TaskCancellationService(TransportService transportService) {
        this.transportService = transportService;
        this.taskManager = transportService.getTaskManager();
        // Handle incoming set-ban / remove-ban requests from other nodes on the calling thread.
        transportService.registerRequestHandler(
            BAN_PARENT_ACTION_NAME,
            ThreadPool.Names.SAME,
            BanParentTaskRequest::new,
            new BanParentRequestHandler()
        );
    }

    private String localNodeId() {
        return transportService.getLocalNode().getId();
    }

    /**
     * Deduplication key for in-flight cancellations: identity is the task plus whether the
     * caller wants to wait for descendant completion.
     */
    private static class CancelRequest {
        final CancellableTask task;
        final boolean waitForCompletion;

        CancelRequest(CancellableTask task, boolean waitForCompletion) {
            this.task = task;
            this.waitForCompletion = waitForCompletion;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            final CancelRequest that = (CancelRequest) o;
            return waitForCompletion == that.waitForCompletion && Objects.equals(task, that.task);
        }

        @Override
        public int hashCode() {
            return Objects.hash(task, waitForCompletion);
        }
    }

    /**
     * Public entry point: cancels the task and its descendants, deduplicating concurrent
     * identical requests.
     *
     * @param task              the task to cancel
     * @param reason            human-readable cancellation reason, propagated with the bans
     * @param waitForCompletion if true the listener completes only after descendants finished
     * @param finalListener     notified when the cancellation (per waitForCompletion) is done
     */
    void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitForCompletion, ActionListener<Void> finalListener) {
        deduplicator.executeOnce(
            new CancelRequest(task, waitForCompletion),
            finalListener,
            (r, listener) -> doCancelTaskAndDescendants(task, reason, waitForCompletion, listener)
        );
    }

    void doCancelTaskAndDescendants(CancellableTask task, String reason, boolean waitForCompletion, ActionListener<Void> listener) {
        final TaskId taskId = task.taskInfo(localNodeId(), false).getTaskId();
        if (task.shouldCancelChildrenOnCancellation()) {
            logger.trace("cancelling task [{}] and its descendants", taskId);
            StepListener<Void> completedListener = new StepListener<>();
            // Three events must happen before "completed": (1) bans set on all child
            // connections, (2) all child tasks finished, (3) the local task cancelled.
            GroupedActionListener<Void> groupedListener = new GroupedActionListener<>(completedListener.map(r -> null), 3);
            Collection<Transport.Connection> childConnections = taskManager.startBanOnChildTasks(task.getId(), reason, () -> {
                logger.trace("child tasks of parent [{}] are completed", taskId);
                groupedListener.onResponse(null);
            });
            taskManager.cancel(task, reason, () -> {
                logger.trace("task [{}] is cancelled", taskId);
                groupedListener.onResponse(null);
            });
            StepListener<Void> setBanListener = new StepListener<>();
            setBanOnChildConnections(reason, waitForCompletion, task, childConnections, setBanListener);
            setBanListener.addListener(groupedListener);
            // If we start unbanning when the last child task completed and that child task executed with a specific user, then unban
            // requests are denied because internal requests can't run with a user. We need to remove bans with the current thread context.
            final Runnable removeBansRunnable = transportService.getThreadPool()
                .getThreadContext()
                .preserveContext(() -> removeBanOnChildConnections(task, childConnections));
            // We remove bans after all child tasks are completed although in theory we can do it on a per-connection basis.
            completedListener.whenComplete(r -> removeBansRunnable.run(), e -> removeBansRunnable.run());
            // if wait_for_completion is true, then only return when (1) bans are placed on child connections, (2) child tasks are
            // completed or failed, (3) the main task is cancelled. Otherwise, return after bans are placed on child connections.
            if (waitForCompletion) {
                completedListener.addListener(listener);
            } else {
                setBanListener.addListener(listener);
            }
        } else {
            // No descendants to cancel: only the local task needs cancelling.
            logger.trace("task [{}] doesn't have any children that should be cancelled", taskId);
            if (waitForCompletion) {
                taskManager.cancel(task, reason, () -> listener.onResponse(null));
            } else {
                taskManager.cancel(task, reason, () -> {});
                listener.onResponse(null);
            }
        }
    }

    /**
     * Sends a set-ban request for the given parent task to every child connection and
     * completes {@code listener} once all sends have been acknowledged (or failed).
     * Disconnected-node failures are logged at debug only — a disconnect already kills the
     * remote child tasks, so the missing ban is harmless.
     */
    private void setBanOnChildConnections(
        String reason,
        boolean waitForCompletion,
        CancellableTask task,
        Collection<Transport.Connection> childConnections,
        ActionListener<Void> listener
    ) {
        if (childConnections.isEmpty()) {
            listener.onResponse(null);
            return;
        }
        final TaskId taskId = new TaskId(localNodeId(), task.getId());
        logger.trace("cancelling child tasks of [{}] on child connections {}", taskId, childConnections);
        GroupedActionListener<Void> groupedListener = new GroupedActionListener<>(listener.map(r -> null), childConnections.size());
        final BanParentTaskRequest banRequest = BanParentTaskRequest.createSetBanParentTaskRequest(taskId, reason, waitForCompletion);
        for (Transport.Connection connection : childConnections) {
            assert TransportService.unwrapConnection(connection) == connection : "Child connection must be unwrapped";
            transportService.sendRequest(
                connection,
                BAN_PARENT_ACTION_NAME,
                banRequest,
                TransportRequestOptions.EMPTY,
                new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
                    @Override
                    public void handleResponse(TransportResponse.Empty response) {
                        logger.trace("sent ban for tasks with the parent [{}] for connection [{}]", taskId, connection);
                        groupedListener.onResponse(null);
                    }

                    @Override
                    public void handleException(TransportException exp) {
                        final Throwable cause = ExceptionsHelper.unwrapCause(exp);
                        // A security failure here would mean the internal ban action was
                        // rejected, which should never happen.
                        assert cause instanceof ElasticsearchSecurityException == false;
                        if (isUnimportantBanFailure(cause)) {
                            logger.debug(
                                new ParameterizedMessage(
                                    "cannot send ban for tasks with the parent [{}] on connection [{}]",
                                    taskId,
                                    connection
                                ),
                                exp
                            );
                        } else if (logger.isDebugEnabled()) {
                            // Full stack trace only when debug is on; terse message otherwise.
                            logger.warn(
                                new ParameterizedMessage(
                                    "cannot send ban for tasks with the parent [{}] on connection [{}]",
                                    taskId,
                                    connection
                                ),
                                exp
                            );
                        } else {
                            logger.warn(
                                "cannot send ban for tasks with the parent [{}] on connection [{}]: {}",
                                taskId,
                                connection,
                                exp.getMessage()
                            );
                        }
                        groupedListener.onFailure(exp);
                    }
                }
            );
        }
    }

    /**
     * Sends a remove-ban request to every child connection. Fire-and-forget: failures are
     * only logged because a lingering ban on a disconnected node is harmless.
     */
    private void removeBanOnChildConnections(CancellableTask task, Collection<Transport.Connection> childConnections) {
        final BanParentTaskRequest request = BanParentTaskRequest.createRemoveBanParentTaskRequest(new TaskId(localNodeId(), task.getId()));
        for (Transport.Connection connection : childConnections) {
            assert TransportService.unwrapConnection(connection) == connection : "Child connection must be unwrapped";
            logger.trace("Sending remove ban for tasks with the parent [{}] for connection [{}]", request.parentTaskId, connection);
            transportService.sendRequest(
                connection,
                BAN_PARENT_ACTION_NAME,
                request,
                TransportRequestOptions.EMPTY,
                new EmptyTransportResponseHandler(ThreadPool.Names.SAME) {
                    @Override
                    public void handleException(TransportException exp) {
                        final Throwable cause = ExceptionsHelper.unwrapCause(exp);
                        assert cause instanceof ElasticsearchSecurityException == false;
                        if (isUnimportantBanFailure(cause)) {
                            logger.debug(
                                new ParameterizedMessage(
                                    "failed to remove ban for tasks with the parent [{}] on connection [{}]",
                                    request.parentTaskId,
                                    connection
                                ),
                                exp
                            );
                        } else if (logger.isDebugEnabled()) {
                            logger.warn(
                                new ParameterizedMessage(
                                    "failed to remove ban for tasks with the parent [{}] on connection [{}]",
                                    request.parentTaskId,
                                    connection
                                ),
                                exp
                            );
                        } else {
                            logger.warn(
                                "failed to remove ban for tasks with the parent [{}] on connection [{}]: {}",
                                request.parentTaskId,
                                connection,
                                exp.getMessage()
                            );
                        }
                    }
                }
            );
        }
    }

    // A ban failure on a node we lost contact with is unimportant: disconnection already
    // cancels the remote child tasks.
    private static boolean isUnimportantBanFailure(Throwable cause) {
        return cause instanceof NodeDisconnectedException || cause instanceof NodeNotConnectedException;
    }

    /**
     * Wire request used both to set a ban ({@code ban == true}, carries a reason) and to
     * remove one ({@code ban == false}).
     */
    private static class BanParentTaskRequest extends TransportRequest {
        private final TaskId parentTaskId;
        private final boolean ban;
        private final boolean waitForCompletion;
        private final String reason;

        static BanParentTaskRequest createSetBanParentTaskRequest(TaskId parentTaskId, String reason, boolean waitForCompletion) {
            return new BanParentTaskRequest(parentTaskId, reason, waitForCompletion);
        }

        static BanParentTaskRequest createRemoveBanParentTaskRequest(TaskId parentTaskId) {
            return new BanParentTaskRequest(parentTaskId);
        }

        private BanParentTaskRequest(TaskId parentTaskId, String reason, boolean waitForCompletion) {
            this.parentTaskId = parentTaskId;
            this.ban = true;
            this.reason = reason;
            this.waitForCompletion = waitForCompletion;
        }

        private BanParentTaskRequest(TaskId parentTaskId) {
            this.parentTaskId = parentTaskId;
            this.ban = false;
            this.reason = null;
            this.waitForCompletion = false;
        }

        private BanParentTaskRequest(StreamInput in) throws IOException {
            super(in);
            parentTaskId = TaskId.readFromStream(in);
            ban = in.readBoolean();
            // reason is only serialized for set-ban requests.
            reason = ban ? in.readString() : null;
            // waitForCompletion was added to the wire format in 7.8.0; older peers never send it.
            if (in.getVersion().onOrAfter(Version.V_7_8_0)) {
                waitForCompletion = in.readBoolean();
            } else {
                waitForCompletion = false;
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            parentTaskId.writeTo(out);
            out.writeBoolean(ban);
            if (ban) {
                out.writeString(reason);
            }
            if (out.getVersion().onOrAfter(Version.V_7_8_0)) {
                out.writeBoolean(waitForCompletion);
            }
        }
    }

    /**
     * Handles incoming ban requests on this node: sets or removes the ban in the local
     * {@link TaskManager} and (for set-ban) recursively cancels the matching child tasks.
     */
    private class BanParentRequestHandler implements TransportRequestHandler<BanParentTaskRequest> {
        @Override
        public void messageReceived(final BanParentTaskRequest request, final TransportChannel channel, Task task) throws Exception {
            if (request.ban) {
                logger.debug(
                    "Received ban for the parent [{}] on the node [{}], reason: [{}]",
                    request.parentTaskId,
                    localNodeId(),
                    request.reason
                );
                final List<CancellableTask> childTasks = taskManager.setBan(request.parentTaskId, request.reason, channel);
                // One listener slot per child task plus one sentinel (fired immediately below)
                // so the group completes even when childTasks is empty.
                final GroupedActionListener<Void> listener = new GroupedActionListener<>(
                    new ChannelActionListener<>(channel, BAN_PARENT_ACTION_NAME, request).map(r -> TransportResponse.Empty.INSTANCE),
                    childTasks.size() + 1
                );
                for (CancellableTask childTask : childTasks) {
                    cancelTaskAndDescendants(childTask, request.reason, request.waitForCompletion, listener);
                }
                listener.onResponse(null);
            } else {
                logger.debug("Removing ban for the parent [{}] on the node [{}]", request.parentTaskId, localNodeId());
                taskManager.removeBan(request.parentTaskId);
                channel.sendResponse(TransportResponse.Empty.INSTANCE);
            }
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.operator;

import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import io.airlift.units.DataSize;
import io.prestosql.RowPagesBuilder;
import io.prestosql.spi.Page;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.block.SortOrder;
import io.prestosql.spi.type.Type;
import io.prestosql.testing.TestingTaskContext;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;
import org.testng.annotations.Test;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;

import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.units.DataSize.Unit.GIGABYTE;
import static io.prestosql.SessionTestUtils.TEST_SESSION;
import static io.prestosql.block.BlockAssertions.createLongSequenceBlock;
import static io.prestosql.operator.BenchmarkWindowOperator.Context.ROWS_PER_PAGE;
import static io.prestosql.operator.BenchmarkWindowOperator.Context.TOTAL_PAGES;
import static io.prestosql.operator.TestWindowOperator.ROW_NUMBER;
import static io.prestosql.operator.TestWindowOperator.createFactoryUnbounded;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.openjdk.jmh.annotations.Mode.AverageTime;
import static org.openjdk.jmh.annotations.Scope.Thread;
import static org.testng.Assert.assertEquals;

/**
 * JMH benchmark for the window operator, measuring average time to drain a fixed set of
 * input pages through ROW_NUMBER with varying degrees of pre-grouped / pre-sorted input
 * (0 = ungrouped .. 3 = fully grouped and sorted). The TestNG methods at the bottom are
 * cheap sanity checks that each parameter combination runs end-to-end.
 */
@State(Thread)
@OutputTimeUnit(MILLISECONDS)
@BenchmarkMode(AverageTime)
@Fork(3)
@Warmup(iterations = 5)
@Measurement(iterations = 10, time = 2, timeUnit = SECONDS)
public class BenchmarkWindowOperator
{
    /**
     * Per-thread benchmark state: builds the operator factory and the synthetic input pages.
     * Columns: 0 = group id, 1 = partition id, 2..3 = long sequences used as payload/sort keys.
     */
    @State(Thread)
    public static class Context
    {
        public static final int NUMBER_OF_GROUP_COLUMNS = 2;
        public static final int TOTAL_PAGES = 140;
        public static final int ROWS_PER_PAGE = 10000;
        // Window partitioning is always on (group, partition) = channels 0 and 1.
        private static final List<Integer> PARTITION_CHANNELS = Ints.asList(0, 1);

        @Param({"10", "20", "100"})
        public int rowsPerPartition;

        // 0: ungrouped; 1: partially grouped; 2: fully grouped; 3: fully grouped and sorted.
        @Param({"0", "1", "2", "3"})
        public int numberOfPregroupedColumns;

        @Param({"10", "50", "100"})
        public int partitionsPerGroup;

        private ExecutorService executor;
        private ScheduledExecutorService scheduledExecutor;
        private OperatorFactory operatorFactory;
        private List<Page> pages;

        @Setup
        public void setup()
        {
            executor = newCachedThreadPool(daemonThreadsNamed("test-executor-%s"));
            scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed("test-scheduledExecutor-%s"));
            createOperatorFactoryAndGenerateTestData(numberOfPregroupedColumns);
        }

        @TearDown
        public void cleanup()
        {
            executor.shutdownNow();
            scheduledExecutor.shutdownNow();
        }

        // Chooses the pre-grouped/pre-sorted channel configuration passed to the factory
        // based on how many leading partition channels are already grouped.
        private void createOperatorFactoryAndGenerateTestData(int numberOfPreGroupedColumns)
        {
            pages = generateTestData();
            if (numberOfPreGroupedColumns == 0) {
                // Ungrouped
                operatorFactory = createFactoryUnbounded(
                        ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT),
                        Ints.asList(0, 1, 2, 3),
                        ROW_NUMBER,
                        PARTITION_CHANNELS,
                        Ints.asList(),
                        Ints.asList(3),
                        ImmutableList.of(SortOrder.ASC_NULLS_LAST),
                        0,
                        new DummySpillerFactory(),
                        false);
            }
            else if (numberOfPreGroupedColumns < NUMBER_OF_GROUP_COLUMNS) {
                // Partially grouped
                operatorFactory = createFactoryUnbounded(
                        ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT),
                        Ints.asList(0, 1, 2, 3),
                        ROW_NUMBER,
                        PARTITION_CHANNELS,
                        Ints.asList(1),
                        Ints.asList(3),
                        ImmutableList.of(SortOrder.ASC_NULLS_LAST),
                        0,
                        new DummySpillerFactory(),
                        false);
            }
            else {
                // Fully grouped and (potentially) sorted
                operatorFactory = createFactoryUnbounded(
                        ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT),
                        Ints.asList(0, 1, 2, 3),
                        ROW_NUMBER,
                        PARTITION_CHANNELS,
                        Ints.asList(0, 1),
                        Ints.asList(3),
                        ImmutableList.of(SortOrder.ASC_NULLS_LAST),
                        (numberOfPreGroupedColumns - NUMBER_OF_GROUP_COLUMNS),
                        new DummySpillerFactory(),
                        false);
            }
        }

        private List<Page> generateTestData()
        {
            List<Type> typesArray = new ArrayList<>();
            int currentPartitionIdentifier = 1;

            typesArray.add(BIGINT);
            typesArray.add(BIGINT);
            typesArray.add(BIGINT);
            typesArray.add(BIGINT);

            RowPagesBuilder pagesBuilder = buildPages(currentPartitionIdentifier, typesArray);
            return pagesBuilder.build();
        }

        /**
         * Builds TOTAL_PAGES pages of ROWS_PER_PAGE rows each. Group and partition ids are
         * monotonically increasing; a new partition starts every rowsPerPartition rows and a
         * new group every partitionsPerGroup partitions, so the data arrives clustered on
         * the partition channels (a precondition for the pre-grouped factory variants).
         */
        private RowPagesBuilder buildPages(int currentPartitionIdentifier, List<Type> typesArray)
        {
            int groupIdentifier = 100;
            RowPagesBuilder rowPagesBuilder = RowPagesBuilder.rowPagesBuilder(false, ImmutableList.of(0), typesArray);

            for (int i = 0; i < TOTAL_PAGES; i++) {
                BlockBuilder firstColumnBlockBuilder = BIGINT.createBlockBuilder(null, ROWS_PER_PAGE);
                BlockBuilder secondColumnBlockBuilder = BIGINT.createBlockBuilder(null, ROWS_PER_PAGE);
                int currentNumberOfRowsInPartition = 0;
                int numberOfPartitionsInCurrentGroup = 0;
                int currentGroupIdentifier = groupIdentifier++;

                for (int j = 0; j < ROWS_PER_PAGE; j++) {
                    if (currentNumberOfRowsInPartition == rowsPerPartition) {
                        ++currentPartitionIdentifier;
                        ++numberOfPartitionsInCurrentGroup;
                        currentNumberOfRowsInPartition = 0;
                    }
                    if (numberOfPartitionsInCurrentGroup == partitionsPerGroup) {
                        numberOfPartitionsInCurrentGroup = 0;
                        currentGroupIdentifier = groupIdentifier++;
                    }
                    firstColumnBlockBuilder.writeLong(currentGroupIdentifier);
                    secondColumnBlockBuilder.writeLong(currentPartitionIdentifier);
                    ++currentNumberOfRowsInPartition;
                }

                rowPagesBuilder.addBlocksPage(
                        firstColumnBlockBuilder.build(),
                        secondColumnBlockBuilder.build(),
                        createLongSequenceBlock(0, ROWS_PER_PAGE),
                        createLongSequenceBlock(0, ROWS_PER_PAGE));
            }
            return rowPagesBuilder;
        }

        public TaskContext createTaskContext()
        {
            // 2 GB memory limit so the benchmark never spills or hits memory errors.
            return TestingTaskContext.createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(2, GIGABYTE));
        }

        public OperatorFactory getOperatorFactory()
        {
            return operatorFactory;
        }

        public List<Page> getPages()
        {
            return pages;
        }
    }

    /**
     * Drives all input pages through the operator and collects every output page.
     * The loop counter caps iterations at 1M as a safety net against a hung operator.
     */
    @Benchmark
    public List<Page> benchmark(BenchmarkWindowOperator.Context context)
    {
        DriverContext driverContext = context.createTaskContext().addPipelineContext(0, true, true, false).addDriverContext();
        Operator operator = context.getOperatorFactory().createOperator(driverContext);

        Iterator<Page> input = context.getPages().iterator();
        ImmutableList.Builder<Page> outputPages = ImmutableList.builder();

        boolean finishing = false;
        for (int loops = 0; !operator.isFinished() && loops < 1_000_000; loops++) {
            if (operator.needsInput()) {
                if (input.hasNext()) {
                    Page inputPage = input.next();
                    operator.addInput(inputPage);
                }
                else if (!finishing) {
                    operator.finish();
                    finishing = true;
                }
            }

            Page outputPage = operator.getOutput();
            if (outputPage != null) {
                outputPages.add(outputPage);
            }
        }

        return outputPages.build();
    }

    @Test
    public void verifyUnGroupedWithMultiplePartitions()
    {
        verify(10, 0, false);
    }

    @Test
    public void verifyUnGroupedWithSinglePartition()
    {
        verify(10, 0, true);
    }

    @Test
    public void verifyPartiallyGroupedWithMultiplePartitions()
    {
        verify(10, 1, false);
    }

    @Test
    public void verifyPartiallyGroupedWithSinglePartition()
    {
        verify(10, 1, true);
    }

    @Test
    public void verifyFullyGroupedWithMultiplePartitions()
    {
        verify(10, 2, false);
    }

    @Test
    public void verifyFullyGroupedWithSinglePartition()
    {
        verify(10, 2, true);
    }

    @Test
    public void verifyFullyGroupedAndFullySortedWithMultiplePartitions()
    {
        verify(10, 3, false);
    }

    @Test
    public void verifyFullyGroupedAndFullySortedWithSinglePartition()
    {
        verify(10, 3, true);
    }

    // Sanity check: generated data has the expected shape and the benchmark body completes.
    private void verify(
            int numberOfRowsPerPartition,
            int numberOfPreGroupedColumns,
            boolean useSinglePartition)
    {
        Context context = new Context();

        context.rowsPerPartition = numberOfRowsPerPartition;
        context.numberOfPregroupedColumns = numberOfPreGroupedColumns;

        if (useSinglePartition) {
            context.partitionsPerGroup = 1;
            context.rowsPerPartition = ROWS_PER_PAGE;
        }

        context.setup();

        assertEquals(TOTAL_PAGES, context.getPages().size());
        for (int i = 0; i < TOTAL_PAGES; i++) {
            assertEquals(ROWS_PER_PAGE, context.getPages().get(i).getPositionCount());
        }

        benchmark(context);
        context.cleanup();
    }

    public static void main(String[] args)
            throws RunnerException
    {
        Options options = new OptionsBuilder()
                .verbosity(VerboseMode.NORMAL)
                .include(".*" + BenchmarkWindowOperator.class.getSimpleName() + ".*")
                .build();
        new Runner(options).run();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.math3.geometry.partitioning.utilities;

import java.util.Arrays;

import org.apache.commons.math3.util.FastMath;

/** This class implements an ordering operation for T-uples.
 *
 * <p>Ordering is done by encoding all components of the T-uple into a
 * single scalar value and using this value as the sorting
 * key. Encoding is performed using the method invented by Georg
 * Cantor in 1877 when he proved it was possible to establish a
 * bijection between a line and a plane. The binary representations of
 * the components of the T-uple are mixed together to form a single
 * scalar. This means that the 2<sup>k</sup> bit of component 0 is
 * followed by the 2<sup>k</sup> bit of component 1, then by the
 * 2<sup>k</sup> bit of component 2 up to the 2<sup>k</sup> bit of
 * component {@code t}, which is followed by the 2<sup>k-1</sup>
 * bit of component 0, followed by the 2<sup>k-1</sup> bit of
 * component 1 ... The binary representations are extended as needed
 * to handle numbers with different scales and a suitable
 * 2<sup>p</sup> offset is added to the components in order to avoid
 * negative numbers (this offset is adjusted as needed during the
 * comparison operations).</p>
 *
 * <p>The more interesting property of the encoding method for our
 * purpose is that it allows to select all the points that are in a
 * given range. This is depicted in dimension 2 by the following
 * picture:</p>
 *
 * <img src="doc-files/OrderedTuple.png" />
 *
 * <p>This picture shows a set of 100000 random 2-D pairs having their
 * first component between -50 and +150 and their second component
 * between -350 and +50. We wanted to extract all pairs having their
 * first component between +30 and +70 and their second component
 * between -120 and -30. We built the lower left point at coordinates
 * (30, -120) and the upper right point at coordinates (70, -30). All
 * points smaller than the lower left point are drawn in red and all
 * points larger than the upper right point are drawn in blue. The
 * green points are between the two limits. This picture shows that
 * all the desired points are selected, along with spurious points. In
 * this case, we get 15790 points, 4420 of which really belonging to
 * the desired rectangle. It is possible to extract very small
 * subsets. As an example extracting from the same 100000 points set
 * the points having their first component between +30 and +31 and
 * their second component between -91 and -90, we get a subset of 11
 * points, 2 of which really belonging to the desired rectangle.</p>
 *
 * <p>the previous selection technique can be applied in all
 * dimensions, still using two points to define the interval. The
 * first point will have all its components set to their lower bounds
 * while the second point will have all its components set to their
 * upper bounds.</p>
 *
 * <p>T-uples with negative infinite or positive infinite components
 * are sorted logically.</p>
 *
 * <p>Since the specification of the {@code Comparator} interface
 * allows only {@code ClassCastException} errors, some arbitrary
 * choices have been made to handle specific cases. The rationale for
 * these choices is to keep <em>regular</em> and consistent T-uples
 * together.</p>
 * <ul>
 * <li>instances with different dimensions are sorted according to
 * their dimension regardless of their components values</li>
 * <li>instances with {@code Double.NaN} components are sorted
 * after all other ones (even after instances with positive infinite
 * components</li>
 * <li>instances with both positive and negative infinite components
 * are considered as if they had {@code Double.NaN}
 * components</li>
 * </ul>
 *
 * @version $Id: OrderedTuple.java 1591835 2014-05-02 09:04:01Z tn $
 * @since 3.0
 */
public class OrderedTuple implements Comparable<OrderedTuple> {

    /** Sign bit mask. */
    private static final long SIGN_MASK     = 0x8000000000000000L;

    /** Exponent bits mask. */
    private static final long EXPONENT_MASK = 0x7ff0000000000000L;

    /** Mantissa bits mask. */
    private static final long MANTISSA_MASK = 0x000fffffffffffffL;

    /** Implicit MSB for normalized numbers. */
    private static final long IMPLICIT_ONE  = 0x0010000000000000L;

    /** Double components of the T-uple. */
    private double[] components;

    /** Offset scale (power of two added to all components so the encoded values are non-negative). */
    private int offset;

    /** Least Significant Bit scale (smallest power of two present in any component's mantissa). */
    private int lsb;

    /** Ordering encoding of the double components (bit-interleaved, big-endian across the array). */
    private long[] encoding;

    /** Positive infinity marker. */
    private boolean posInf;

    /** Negative infinity marker. */
    private boolean negInf;

    /** Not A Number marker. */
    private boolean nan;

    /** Build an ordered T-uple from its components.
     * @param components double components of the T-uple
     */
    public OrderedTuple(final double ... components) {
        // defensive copy: the caller's array is never aliased
        this.components = components.clone();
        int msb = Integer.MIN_VALUE;
        lsb     = Integer.MAX_VALUE;
        posInf  = false;
        negInf  = false;
        nan     = false;

        // scan the components to classify them and find the global MSB/LSB scales
        for (int i = 0; i < components.length; ++i) {
            if (Double.isInfinite(components[i])) {
                if (components[i] < 0) {
                    negInf = true;
                } else {
                    posInf = true;
                }
            } else if (Double.isNaN(components[i])) {
                nan = true;
            } else {
                final long b = Double.doubleToLongBits(components[i]);
                final long m = mantissa(b);
                if (m != 0) {
                    final int e = exponent(b);
                    msb = FastMath.max(msb, e + computeMSB(m));
                    lsb = FastMath.min(lsb, e + computeLSB(m));
                }
            }
        }

        if (posInf && negInf) {
            // instance cannot be sorted logically
            posInf = false;
            negInf = false;
            nan    = true;
        }

        if (lsb <= msb) {
            // encode the T-uple with the specified offset
            encode(msb + 16);
        } else {
            // all components were +/-0.0 (or non-finite): a single zero word suffices
            encoding = new long[] {
                0x0L
            };
        }

    }

    /** Encode the T-uple with a given offset.
     * @param minOffset minimal scale of the offset to add to all
     * components (must be greater than the MSBs of all components)
     */
    private void encode(final int minOffset) {

        // choose an offset with some margins
        offset  = minOffset + 31;
        offset -= offset % 32;

        if ((encoding != null) && (encoding.length == 1) && (encoding[0] == 0x0L)) {
            // the components are all zeroes
            return;
        }

        // allocate an integer array to encode the components (we use only
        // 63 bits per element because there is no unsigned long in Java)
        final int neededBits  = offset + 1 - lsb;
        final int neededLongs = (neededBits + 62) / 63;
        encoding = new long[components.length * neededLongs];

        // mix the bits from all components: walk scales from the offset down,
        // taking one bit per component at each scale (Cantor interleaving)
        int  eIndex = 0;
        int  shift  = 62;
        long word   = 0x0L;
        for (int k = offset; eIndex < encoding.length; --k) {
            for (int vIndex = 0; vIndex < components.length; ++vIndex) {
                if (getBit(vIndex, k) != 0) {
                    word |= 0x1L << shift;
                }
                if (shift-- == 0) {
                    encoding[eIndex++] = word;
                    word  = 0x0L;
                    shift = 62;
                }
            }
        }

    }

    /** Compares this ordered T-uple with the specified object.

     * <p>The ordering method is detailed in the general description of
     * the class. Its main property is to be consistent with distance:
     * geometrically close T-uples stay close to each other when stored
     * in a sorted collection using this comparison method.</p>

     * <p>T-uples with negative infinite, positive infinite are sorted
     * logically.</p>

     * <p>Some arbitrary choices have been made to handle specific
     * cases. The rationale for these choices is to keep
     * <em>normal</em> and consistent T-uples together.</p>
     * <ul>
     * <li>instances with different dimensions are sorted according to
     * their dimension regardless of their components values</li>
     * <li>instances with {@code Double.NaN} components are sorted
     * after all other ones (even after instances with positive infinite
     * components</li>
     * <li>instances with both positive and negative infinite components
     * are considered as if they had {@code Double.NaN}
     * components</li>
     * </ul>

     * @param ot T-uple to compare instance with
     * @return a negative integer if the instance is less than the
     * object, zero if they are equal, or a positive integer if the
     * instance is greater than the object
     */
    public int compareTo(final OrderedTuple ot) {
        if (components.length == ot.components.length) {
            if (nan) {
                return +1;
            } else if (ot.nan) {
                return -1;
            } else if (negInf || ot.posInf) {
                return -1;
            } else if (posInf || ot.negInf) {
                return +1;
            } else {

                // NOTE: both instances must share the same offset before their encodings
                // are comparable; the smaller-offset instance is re-encoded here (this
                // intentionally mutates the encoding cache of one of the two objects).
                if (offset < ot.offset) {
                    encode(ot.offset);
                } else if (offset > ot.offset) {
                    ot.encode(offset);
                }

                // lexicographic comparison of the interleaved words
                final int limit = FastMath.min(encoding.length, ot.encoding.length);
                for (int i = 0; i < limit; ++i) {
                    if (encoding[i] < ot.encoding[i]) {
                        return -1;
                    } else if (encoding[i] > ot.encoding[i]) {
                        return +1;
                    }
                }

                if (encoding.length < ot.encoding.length) {
                    return -1;
                } else if (encoding.length > ot.encoding.length) {
                    return +1;
                } else {
                    return 0;
                }

            }
        }

        return components.length - ot.components.length;

    }

    /** {@inheritDoc} */
    @Override
    public boolean equals(final Object other) {
        if (this == other) {
            return true;
        } else if (other instanceof OrderedTuple) {
            // consistent with compareTo: equal means the encodings compare equal
            return compareTo((OrderedTuple) other) == 0;
        } else {
            return false;
        }
    }

    /** {@inheritDoc} */
    @Override
    public int hashCode() {
        // the following constants are arbitrary small primes
        final int multiplier = 37;
        final int trueHash   = 97;
        final int falseHash  = 71;

        // hash fields and combine them
        // (we rely on the multiplier to have different combined weights
        //  for all int fields and all boolean fields)
        int hash = Arrays.hashCode(components);
        hash = hash * multiplier + offset;
        hash = hash * multiplier + lsb;
        hash = hash * multiplier + (posInf ? trueHash : falseHash);
        hash = hash * multiplier + (negInf ? trueHash : falseHash);
        hash = hash * multiplier + (nan    ? trueHash : falseHash);

        return hash;

    }

    /** Get the components array.
     * @return array containing the T-uple components
     */
    public double[] getComponents() {
        // defensive copy so callers cannot mutate internal state
        return components.clone();
    }

    /** Extract the sign from the bits of a double.
     * @param bits binary representation of the double
     * @return sign bit (zero if positive, non zero if negative)
     */
    private static long sign(final long bits) {
        return bits & SIGN_MASK;
    }

    /** Extract the exponent from the bits of a double.
     * @param bits binary representation of the double
     * @return exponent (unbiased and shifted so it is the scale of the mantissa's LSB:
     * raw biased exponent minus 1075 = 1023 bias + 52 mantissa bits)
     */
    private static int exponent(final long bits) {
        return ((int) ((bits & EXPONENT_MASK) >> 52)) - 1075;
    }

    /** Extract the mantissa from the bits of a double.
     * @param bits binary representation of the double
     * @return mantissa
     */
    private static long mantissa(final long bits) {
        return ((bits & EXPONENT_MASK) == 0) ?
               ((bits & MANTISSA_MASK) << 1) :        // subnormal number
               (IMPLICIT_ONE | (bits & MANTISSA_MASK)); // normal number
    }

    /** Compute the most significant bit of a long.
     * @param l long from which the most significant bit is requested
     * @return scale of the most significant bit of {@code l},
     * or 0 if {@code l} is zero
     * @see #computeLSB
     */
    private static int computeMSB(final long l) {

        // binary search on the bit position, halving the mask width each step
        long ll = l;
        long mask  = 0xffffffffL;
        int  scale = 32;
        int  msb   = 0;

        while (scale != 0) {
            if ((ll & mask) != ll) {
                // some bit above the mask is set: the MSB is in the upper half
                msb |= scale;
                ll >>= scale;
            }
            scale >>= 1;
            mask  >>= scale;
        }

        return msb;

    }

    /** Compute the least significant bit of a long.
     * @param l long from which the least significant bit is requested
     * @return scale of the least significant bit of {@code l},
     * or 63 if {@code l} is zero
     * @see #computeMSB
     */
    private static int computeLSB(final long l) {

        // mirror of computeMSB: binary search from the high half downwards
        long ll = l;
        long mask  = 0xffffffff00000000L;
        int  scale = 32;
        int  lsb   = 0;

        while (scale != 0) {
            if ((ll & mask) == ll) {
                // all set bits are above the mask boundary: the LSB is in the upper half
                lsb |= scale;
                ll >>= scale;
            }
            scale >>= 1;
            mask  >>= scale;
        }

        return lsb;

    }

    /** Get a bit from the mantissa of a double.
     * @param i index of the component
     * @param k scale of the requested bit
     * @return the specified bit (either 0 or 1), after the offset has
     * been added to the double
     */
    private int getBit(final int i, final int k) {
        final long bits = Double.doubleToLongBits(components[i]);
        final int  e    = exponent(bits);
        if ((k < e) || (k > offset)) {
            // below the mantissa's LSB or above the offset: contributes nothing
            return 0;
        } else if (k == offset) {
            // the offset bit itself encodes the sign (offset addition flips it)
            return (sign(bits) == 0L) ? 1 : 0;
        } else if (k > (e + 52)) {
            // above the mantissa: 0 for positive values, 1 for negative (two's-complement fill)
            return (sign(bits) == 0L) ? 0 : 1;
        } else {
            // inside the mantissa: negate it first for negative values
            final long mantissa = (sign(bits) == 0L) ? mantissa(bits) : -mantissa(bits);
            return (int) ((mantissa >> (k - e)) & 0x1L);
        }
    }

}
package org.apache.taverna.scufl2.api.common;
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.net.URI;
import java.util.Collection;
import java.util.UUID;
import java.util.regex.Matcher;

import org.apache.taverna.scufl2.api.annotation.Annotation;

/**
 * Abstract implementation of a {@link Named} {@link WorkflowBean}.
 * <p>
 * A name is always non-null and validated against {@code INVALID_NAME}
 * (inherited from {@link Named}); renaming a bean that is a {@link Child}
 * temporarily detaches it from its parent so the parent's name-keyed
 * collections stay consistent (see {@link #setName(String)}).
 *
 * @author Alan R Williams
 * @author Stian Soiland-Reyes
 */
public abstract class AbstractNamed extends AbstractCloneable implements Named {
	private String name;

	/**
	 * Constructs a {@link Named} {@link WorkflowBean} with a random UUID as the
	 * name, guaranteeing a valid, practically-unique default.
	 */
	public AbstractNamed() {
		setName(UUID.randomUUID().toString());
	}

	/**
	 * Constructs a {@link Named} {@link WorkflowBean} with the specified name.
	 *
	 * @param name
	 *            the name of the <code>Named</code> <code>WorkflowBean</code>.
	 *            <strong>Must not</strong> be <code>null</code> or an empty
	 *            String.
	 */
	public AbstractNamed(String name) {
		setName(name);
	}

	/**
	 * Orders beans first by class canonical name, then by bean name.
	 * <p>
	 * NOTE(review): this ordering ignores the parent, while {@link #equals(Object)}
	 * takes the parent into account — so compareTo() == 0 does not imply equals().
	 * Intentional (see the disabled block below) but worth knowing when using
	 * sorted collections.
	 */
	@Override
	public int compareTo(Object o) {
		if (!(o instanceof AbstractNamed))
			// Other comparables go first
			return 1;
		AbstractNamed other = (AbstractNamed) o;
		if (other == this)
			return 0;
		/**
		 * Disabled as this means the order changes depending on setParents
		 * being called or not; could cause a DataLink to appear twice in
		 * workflow.getDataLinks().
		 *
		 * if (this instanceof Child) { if (!(other instanceof Child)) { // He's
		 * orphan, he's sorted first return 1; } WorkflowBean parent =
		 * ((Child<?>) this).getParent(); WorkflowBean otherParent = ((Child<?>)
		 * other).getParent(); if (parent instanceof Comparable && otherParent
		 * instanceof Comparable) { int comparedParents = ((Comparable)
		 * parent).compareTo(otherParent); if (comparedParents != 0) { return
		 * comparedParents; } } } else { if (other instanceof Child) { // We're
		 * orphan, we're first return -1; } }
		 */
		if (getClass() != other.getClass()) {
			int classCompare = getClass().getCanonicalName().compareTo(
					other.getClass().getCanonicalName());
			if (classCompare != 0)
				// Allow having say InputPorts and OutputPorts in the same
				// sorted list
				return classCompare;
		}
		// We're the same class, let's compare the names
		return getName().compareTo(other.getName());
	}

	/**
	 * Two named beans are equal when they are the same class, have equal names,
	 * and — when they are {@link Child}ren — have equal parents; {@link Typed}
	 * beans must additionally share the same type URI.
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		AbstractNamed other = (AbstractNamed) obj;
		if (!getName().equals(other.getName()))
			return false;
		if (this instanceof Child) {
			WorkflowBean parent = ((Child<?>) this).getParent();
			WorkflowBean otherParent = ((Child<?>) other).getParent();
			// Parent comparison: equal parents (or both null) required
			if (parent != null)
				return parent.equals(otherParent);
			if (parent == null && otherParent != null)
				return false;
		}
		if (this instanceof Typed) {
			URI myId = ((Typed) this).getType();
			URI otherId = ((Typed) obj).getType();
			if (myId != null)
				return myId.equals(otherId);
			if (myId == null && otherId != null)
				return false;
		}
		return true;
	}

	@Override
	public String getName() {
		return name;
	}

	/**
	 * Hash is derived from the name and (for {@link Child} beans) the parent,
	 * matching the fields consulted by {@link #equals(Object)}.
	 */
	@SuppressWarnings({ "rawtypes" })
	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + (name == null ? 0 : name.hashCode());
		if (this instanceof Child) {
			WorkflowBean parent = ((Child) this).getParent();
			if (parent != null)
				result = prime * result + parent.hashCode();
		}
		return result;
	}

	/**
	 * Sets (or changes) the name, rejecting null and names matching
	 * {@code INVALID_NAME}.
	 * <p>
	 * For a {@link Child} with a parent, the bean is detached, renamed, then
	 * re-attached — in exactly that order — so that any name-keyed collection
	 * held by the parent is re-indexed under the new name.
	 *
	 * @throws NullPointerException
	 *             if {@code name} is null
	 * @throws IllegalArgumentException
	 *             if {@code name} contains a disallowed character
	 */
	@Override
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public void setName(String name) {
		if (name == null)
			throw new NullPointerException("Name can't be null");
		Matcher invalidMatcher = INVALID_NAME.matcher(name);
		if (invalidMatcher.find())
			// http://dev.mygrid.org.uk/issues/browse/SCUFL2-87
			// TODO: Any other characters that must be disallowed?
			throw new IllegalArgumentException("Name invalid in position "
					+ invalidMatcher.start() + ": '" + name + "'");
		if (this instanceof Child) {
			Child child = (Child) this;
			WorkflowBean parent = child.getParent();
			if (parent != null) {
				// Detach before renaming so the parent re-indexes us on
				// re-attach
				child.setParent(null);
				this.name = name;
				// Might overwrite other Named object with same name
				child.setParent(parent);
			}
		}
		this.name = name;
	}

	@Override
	public String toString() {
		return getClass().getSimpleName() + " \"" + getName() + '"';
	}

	/** Copies the name onto the clone (invoked by the cloning machinery). */
	@Override
	protected void cloneInto(WorkflowBean clone, Cloning cloning) {
		AbstractNamed namedClone = (AbstractNamed) clone;
		namedClone.setName(getName());
	}

	// Derived operations

	/**
	 * Get all the annotations that pertain to this workflow element.
	 * Only supported for {@link Child} elements; root elements must override.
	 *
	 * @return The collection of annotations.
	 * @throws UnsupportedOperationException
	 *             if this bean is not a {@link Child}
	 * @see Scufl2Tools#annotationsFor(Child)
	 */
	public Collection<Annotation> getAnnotations() {
		if (this instanceof Child)
			return getTools().annotationsFor((Child<?>) this);
		throw new UnsupportedOperationException(
				"operation needs to be overridden for root elements");
	}

	/**
	 * Get the URI of this workflow element.
	 *
	 * @return The absolute URI.
	 * @see URITools#uriForBean(WorkflowBean)
	 */
	public URI getURI() {
		return getUriTools().uriForBean(this);
	}

	/**
	 * Get the URI of this workflow element relative to another workflow
	 * element.
	 *
	 * @return The relative URI.
	 * @see URITools#relativeUriForBean(WorkflowBean,WorkflowBean)
	 */
	public URI getRelativeURI(WorkflowBean relativeTo) {
		return getUriTools().relativeUriForBean(this, relativeTo);
	}
}
package com.easemob.chatuidemo.widget;

import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.RectF;
import android.graphics.Shader;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.util.AttributeSet;
import android.widget.ImageView;

import com.easemob.qixin.R;

/**
 * ImageView that clips its drawable to a circle (with an optional colored
 * border), rendered via a {@link BitmapShader} instead of a clip path.
 * <p>
 * The scale type is fixed to {@code CENTER_CROP}; attempting to set any other
 * scale type throws. All image-setter overrides re-run {@link #setup()} so the
 * shader always tracks the current drawable.
 */
public class CircleImageView extends ImageView{

	// Only supported scale type; enforced in setScaleType().
	private static final ScaleType SCALE_TYPE = ScaleType.CENTER_CROP;

	private static final Bitmap.Config BITMAP_CONFIG = Bitmap.Config.ARGB_8888;
	// 1x1 bitmap is enough to sample a ColorDrawable's solid color.
	private static final int COLORDRAWABLE_DIMENSION = 1;

	private static final int DEFAULT_BORDER_WIDTH = 0;
	private static final int DEFAULT_BORDER_COLOR = Color.BLACK;

	// Rect of the drawable area (inset by the border) and of the full view.
	private final RectF mDrawableRect = new RectF();
	private final RectF mBorderRect = new RectF();

	// Maps bitmap coordinates into mDrawableRect (center-crop).
	private final Matrix mShaderMatrix = new Matrix();
	private final Paint mBitmapPaint = new Paint();
	private final Paint mBorderPaint = new Paint();

	private int mBorderColor = DEFAULT_BORDER_COLOR;
	private int mBorderWidth = DEFAULT_BORDER_WIDTH;

	private Bitmap mBitmap;
	private BitmapShader mBitmapShader;
	private int mBitmapWidth;
	private int mBitmapHeight;

	private float mDrawableRadius;
	private float mBorderRadius;

	// mReady latches after init(); setup() calls arriving earlier (e.g. from
	// ImageView's constructor invoking setImageDrawable) are deferred via
	// mSetupPending and replayed once init() runs.
	private boolean mReady;
	private boolean mSetupPending;

	public CircleImageView(Context context) {
		super(context);
		init();
	}

	public CircleImageView(Context context, AttributeSet attrs) {
		this(context, attrs, 0);
	}

	public CircleImageView(Context context, AttributeSet attrs, int defStyle) {
		super(context, attrs, defStyle);

		// Read border width/color from XML attributes.
		TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CircleImageView, defStyle, 0);

		mBorderWidth = a.getDimensionPixelSize(R.styleable.CircleImageView_border_width, DEFAULT_BORDER_WIDTH);
		mBorderColor = a.getColor(R.styleable.CircleImageView_border_color, DEFAULT_BORDER_COLOR);

		a.recycle();

		init();
	}

	private void init() {
		super.setScaleType(SCALE_TYPE);
		mReady = true;

		// Replay a setup() that was requested before we were ready.
		if (mSetupPending) {
			setup();
			mSetupPending = false;
		}
	}

	@Override
	public ScaleType getScaleType() {
		return SCALE_TYPE;
	}

	/**
	 * Only {@code CENTER_CROP} is supported; anything else throws.
	 */
	@Override
	public void setScaleType(ScaleType scaleType) {
		if (scaleType != SCALE_TYPE) {
			throw new IllegalArgumentException(String.format("ScaleType %s not supported.", scaleType));
		}
	}

	@Override
	protected void onDraw(Canvas canvas) {
		if (getDrawable() == null) {
			return;
		}

		// Circle filled with the shader-backed bitmap, then the border ring.
		canvas.drawCircle(getWidth() / 2, getHeight() / 2, mDrawableRadius, mBitmapPaint);
		if (mBorderWidth != 0) {
			canvas.drawCircle(getWidth() / 2, getHeight() / 2, mBorderRadius, mBorderPaint);
		}
	}

	@Override
	protected void onSizeChanged(int w, int h, int oldw, int oldh) {
		super.onSizeChanged(w, h, oldw, oldh);
		// Radii and the shader matrix depend on the view size; recompute.
		setup();
	}

	public int getBorderColor() {
		return mBorderColor;
	}

	public void setBorderColor(int borderColor) {
		if (borderColor == mBorderColor) {
			return;
		}

		mBorderColor = borderColor;
		mBorderPaint.setColor(mBorderColor);
		// Color change only needs a redraw, not a full setup().
		invalidate();
	}

	public int getBorderWidth() {
		return mBorderWidth;
	}

	public void setBorderWidth(int borderWidth) {
		if (borderWidth == mBorderWidth) {
			return;
		}

		mBorderWidth = borderWidth;
		// Width change alters the rects/radii, so rebuild everything.
		setup();
	}

	@Override
	public void setImageBitmap(Bitmap bm) {
		super.setImageBitmap(bm);
		mBitmap = bm;
		setup();
	}

	@Override
	public void setImageDrawable(Drawable drawable) {
		super.setImageDrawable(drawable);
		mBitmap = getBitmapFromDrawable(drawable);
		setup();
	}

	@Override
	public void setImageResource(int resId) {
		super.setImageResource(resId);
		mBitmap = getBitmapFromDrawable(getDrawable());
		setup();
	}

	@Override
	public void setImageURI(Uri uri) {
		super.setImageURI(uri);
		mBitmap = getBitmapFromDrawable(getDrawable());
		setup();
	}

	/**
	 * Renders an arbitrary drawable into a Bitmap so it can back a shader.
	 * BitmapDrawables are unwrapped directly; ColorDrawables become a 1x1
	 * bitmap; everything else is drawn at its intrinsic size.
	 *
	 * @return the bitmap, or null if {@code drawable} is null or allocation
	 *         runs out of memory
	 */
	private Bitmap getBitmapFromDrawable(Drawable drawable) {
		if (drawable == null) {
			return null;
		}

		if (drawable instanceof BitmapDrawable) {
			return ((BitmapDrawable) drawable).getBitmap();
		}

		try {
			Bitmap bitmap;

			if (drawable instanceof ColorDrawable) {
				bitmap = Bitmap.createBitmap(COLORDRAWABLE_DIMENSION, COLORDRAWABLE_DIMENSION, BITMAP_CONFIG);
			} else {
				bitmap = Bitmap.createBitmap(drawable.getIntrinsicWidth(), drawable.getIntrinsicHeight(), BITMAP_CONFIG);
			}

			Canvas canvas = new Canvas(bitmap);
			drawable.setBounds(0, 0, canvas.getWidth(), canvas.getHeight());
			drawable.draw(canvas);
			return bitmap;
		} catch (OutOfMemoryError e) {
			// Best-effort: caller treats a null bitmap as "no image".
			return null;
		}
	}

	/**
	 * (Re)builds the shader, paints, rects, and radii from the current bitmap
	 * and view size, then invalidates. Deferred via mSetupPending when called
	 * before init() completes. NOTE(review): if called before layout
	 * (getWidth() == 0) the radii come out zero until onSizeChanged re-runs it.
	 */
	private void setup() {
		if (!mReady) {
			mSetupPending = true;
			return;
		}

		if (mBitmap == null) {
			return;
		}

		mBitmapShader = new BitmapShader(mBitmap, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP);

		mBitmapPaint.setAntiAlias(true);
		mBitmapPaint.setShader(mBitmapShader);

		mBorderPaint.setStyle(Paint.Style.STROKE);
		mBorderPaint.setAntiAlias(true);
		mBorderPaint.setColor(mBorderColor);
		mBorderPaint.setStrokeWidth(mBorderWidth);

		mBitmapHeight = mBitmap.getHeight();
		mBitmapWidth = mBitmap.getWidth();

		mBorderRect.set(0, 0, getWidth(), getHeight());
		// Border ring is centered on its stroke, hence the half-width inset.
		mBorderRadius = Math.min((mBorderRect.height() - mBorderWidth) / 2, (mBorderRect.width() - mBorderWidth) / 2);

		mDrawableRect.set(mBorderWidth, mBorderWidth, mBorderRect.width() - mBorderWidth, mBorderRect.height() - mBorderWidth);
		mDrawableRadius = Math.min(mDrawableRect.height() / 2, mDrawableRect.width() / 2);

		updateShaderMatrix();
		invalidate();
	}

	/**
	 * Computes the center-crop transform: scale so the bitmap covers
	 * mDrawableRect on both axes, then center the overflow axis.
	 */
	private void updateShaderMatrix() {
		float scale;
		float dx = 0;
		float dy = 0;

		mShaderMatrix.set(null);

		if (mBitmapWidth * mDrawableRect.height() > mDrawableRect.width() * mBitmapHeight) {
			// Bitmap is wider than the target aspect: fit height, center x.
			scale = mDrawableRect.height() / (float) mBitmapHeight;
			dx = (mDrawableRect.width() - mBitmapWidth * scale) * 0.5f;
		} else {
			// Bitmap is taller: fit width, center y.
			scale = mDrawableRect.width() / (float) mBitmapWidth;
			dy = (mDrawableRect.height() - mBitmapHeight * scale) * 0.5f;
		}

		mShaderMatrix.setScale(scale, scale);
		// Snap translation to whole pixels, then offset past the border.
		mShaderMatrix.postTranslate((int) (dx + 0.5f) + mBorderWidth, (int) (dy + 0.5f) + mBorderWidth);

		mBitmapShader.setLocalMatrix(mShaderMatrix);
	}
}
package mat.client.measure.measuredetails;

import java.util.List;

import org.gwtbootstrap3.client.ui.Button;
import org.gwtbootstrap3.client.ui.ButtonToolBar;
import org.gwtbootstrap3.client.ui.constants.ButtonType;
import org.gwtbootstrap3.client.ui.constants.Pull;

import com.google.gwt.event.dom.client.KeyUpEvent;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;

import mat.client.buttons.CancelButton;
import mat.client.buttons.DeleteButton;
import mat.client.buttons.SaveButton;
import mat.client.measure.measuredetails.navigation.MeasureDetailsNavigation;
import mat.client.measure.measuredetails.views.MeasureDetailViewInterface;
import mat.client.measure.measuredetails.views.MeasureDetailsViewFactory;
import mat.client.measure.measuredetails.views.ReferencesView;
import mat.client.shared.ConfirmationDialogBox;
import mat.client.shared.ErrorMessageAlert;
import mat.client.shared.MatContext;
import mat.client.shared.MatDetailItem;
import mat.client.shared.MeasureDetailsConstants.MeasureDetailsItems;
import mat.client.shared.MessageAlert;
import mat.client.shared.MessagePanel;
import mat.client.shared.SpacerWidget;
import mat.shared.measure.measuredetails.models.MeasureDetailsComponentModel;
import mat.shared.measure.measuredetails.models.MeasureDetailsModel;

/**
 * Composite view for the "Measure Details" tab: a navigation panel on the
 * left and a detail-specific component view (built via
 * {@link MeasureDetailsViewFactory}) on the right, plus heading, message
 * panel, and save/cancel/delete toolbars.
 */
public class MeasureDetailsView {
	private VerticalPanel mainPanel = new VerticalPanel();
	private HorizontalPanel mainContentPanel = new HorizontalPanel();
	private HorizontalPanel headingPanel = new HorizontalPanel();
	private HorizontalPanel saveButtonPanel = new HorizontalPanel();
	private VerticalPanel widgetComponentPanel = new VerticalPanel();
	private ErrorMessageAlert errorAlert = new ErrorMessageAlert();
	private MatDetailItem currentMeasureDetail;
	private MeasureDetailViewInterface componentDetailView;
	// True when the measure may not be edited. (Renamed from the inverted
	// "isMeasureEditable", which actually stored the read-only flag.)
	private boolean isReadOnly;
	private SaveButton saveButton = new SaveButton("Measure Details");
	private CancelButton cancelButton = new CancelButton("MeasureDetails");
	private DeleteButton deleteMeasureButton = new DeleteButton("Measure Details", "Delete Measure");
	private Button viewHumanReadableButton;
	private MeasureDetailsModel measureDetailsModel;
	private MessagePanel messagePanel;
	private MeasureDetailsObserver measureDetailsObserver;
	private HTML headingHTML;

	/**
	 * Builds the full details view for the given detail item.
	 *
	 * @param measureDetailsModel model backing every component view
	 * @param measureDetail detail item initially displayed
	 * @param navigationPanel left-hand navigation widget
	 * @param measureDetailsObserver observer forwarded to the component view
	 */
	public MeasureDetailsView(MeasureDetailsModel measureDetailsModel, MeasureDetailsItems measureDetail,
			MeasureDetailsNavigation navigationPanel, MeasureDetailsObserver measureDetailsObserver) {
		currentMeasureDetail = measureDetail;
		this.measureDetailsModel = measureDetailsModel;
		HorizontalPanel errorPanel = new HorizontalPanel();
		errorPanel.add(errorAlert);
		mainPanel.add(errorPanel);
		buildMeasureDetailsButtonPanel();
		mainContentPanel.add(navigationPanel.getWidget());
		mainContentPanel.setWidth("850px");
		// buildDetailView also stores the observer on this instance (bug fix:
		// the constructor argument used to be dropped on the floor).
		widgetComponentPanel = buildDetailView(currentMeasureDetail, measureDetailsObserver);
		mainContentPanel.add(widgetComponentPanel);
		mainContentPanel.getElement().setId("measureDetailsView_ContentPanel");
		mainPanel.add(mainContentPanel);
		mainPanel.setStyleName("contentPanel");
	}

	/** Rebuilds the heading and (re)creates the message panel beneath it. */
	private void buildHeading() {
		headingPanel.clear();
		headingHTML = new HTML();
		headingHTML.setHTML("<h4><b>" + currentMeasureDetail.displayName() + "</b></h4>");
		headingHTML.getElement().setId("measureDetailsView_HeadingContent");
		headingHTML.setTitle(currentMeasureDetail.displayName());
		// Focusable so setFocusOnFirstElement() can land here as a fallback.
		headingHTML.getElement().setTabIndex(0);
		headingPanel.add(headingHTML);
		headingPanel.getElement().setId("measureDetailsView_HeadingPanel");
		widgetComponentPanel.add(headingPanel);
		messagePanel = new MessagePanel();
		messagePanel.setWidth("625px");
		widgetComponentPanel.add(messagePanel);
		widgetComponentPanel.add(new SpacerWidget());
	}

	/**
	 * Adds the cancel/save toolbar. Populations and component-measures pages
	 * manage their own persistence, so they get no save panel.
	 */
	private void buildSavePanel(MatDetailItem currentMeasureDetail) {
		ButtonToolBar buttonToolBar = new ButtonToolBar();
		if (currentMeasureDetail == MeasureDetailsItems.REFERENCES) {
			buttonToolBar.add(cancelButton);
		}
		if (currentMeasureDetail != MeasureDetailsItems.POPULATIONS
				&& currentMeasureDetail != MeasureDetailsItems.COMPONENT_MEASURES) {
			widgetComponentPanel.add(new SpacerWidget());
			saveButtonPanel.clear();
			saveButtonPanel.setWidth("625px");
			saveButtonPanel.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
			saveButton.setPull(Pull.RIGHT);
			buttonToolBar.add(saveButton);
			saveButtonPanel.add(buttonToolBar);
			widgetComponentPanel.add(saveButtonPanel);
		}
	}

	/** Builds the top toolbar with delete and human-readable buttons. */
	private void buildMeasureDetailsButtonPanel() {
		deleteMeasureButton.getElement().setId("MeasureDetailsView.deleteMeasureButton");
		mainPanel.add(new SpacerWidget());
		HorizontalPanel panel = new HorizontalPanel();
		panel.setWidth("100%");
		ButtonToolBar toolbar = new ButtonToolBar();
		toolbar.add(deleteMeasureButton);
		viewHumanReadableButton = buildHumanReadableButton();
		toolbar.add(viewHumanReadableButton);
		panel.add(toolbar);
		mainPanel.add(panel);
		mainPanel.add(new SpacerWidget());
	}

	/** Creates the "View Human Readable" button and wires its click handler. */
	private Button buildHumanReadableButton() {
		viewHumanReadableButton = new Button("View Human Readable");
		viewHumanReadableButton.setTitle("View Human Readable");
		viewHumanReadableButton.getElement().setAttribute("id", "view_human_readable_button");
		viewHumanReadableButton.setType(ButtonType.PRIMARY);
		viewHumanReadableButton.setPull(Pull.RIGHT);
		viewHumanReadableButton.addClickHandler(event -> generateHumanReadableForMeasureDetails());
		return viewHumanReadableButton;
	}

	/**
	 * Fetches the human-readable HTML for the current measure via RPC and
	 * shows it in a popup window; alerts the generic error message on failure.
	 */
	private void generateHumanReadableForMeasureDetails() {
		messagePanel.clearAlerts();
		MatContext.get().getMeasureService().getHumanReadableForMeasureDetails(
				MatContext.get().getCurrentMeasureId(), new AsyncCallback<String>() {
					@Override
					public void onSuccess(String result) {
						showHumanReadableDialogBox(result,
								measureDetailsModel.getGeneralInformationModel().getMeasureName());
					}

					@Override
					public void onFailure(Throwable caught) {
						Window.alert(MatContext.get().getMessageDelegate().getGenericErrorMessage());
					}
				});
	}

	/**
	 * Opens a browser popup and writes the human-readable document into it.
	 * JSNI; the dummy same-origin URL keeps the popup scriptable.
	 */
	public static native void showHumanReadableDialogBox(String result, String measureName) /*-{
		var dummyURL = window.location.protocol + "//" + window.location.hostname + ":" + window.location.port + "/" + "Mat.html";
		var humanReadableWindow = window.open(dummyURL, "", "width=1200,height=700,scrollbars=yes,resizable=yes");
		if (humanReadableWindow && humanReadableWindow.top) {
			humanReadableWindow.document.write(result);
			humanReadableWindow.document.title = measureName;
		}
	}-*/;

	/**
	 * (Re)builds the right-hand component view for the given detail item.
	 *
	 * @param currentMeasureDetail detail item to display
	 * @param measureDetailsObserver observer handed to the component view
	 * @return the rebuilt component panel
	 */
	public VerticalPanel buildDetailView(MatDetailItem currentMeasureDetail,
			MeasureDetailsObserver measureDetailsObserver) {
		this.currentMeasureDetail = currentMeasureDetail;
		// BUG FIX: the observer parameter was previously ignored and the
		// (possibly never-initialized) field was passed to the factory.
		this.measureDetailsObserver = measureDetailsObserver;
		widgetComponentPanel.clear();
		buildHeading();
		componentDetailView = MeasureDetailsViewFactory.get().getMeasureDetailComponentView(
				measureDetailsModel, currentMeasureDetail, this.measureDetailsObserver, messagePanel);
		widgetComponentPanel.add(componentDetailView.getWidget());
		widgetComponentPanel.setWidth("100%");
		widgetComponentPanel.setStyleName("marginLeft15px");
		widgetComponentPanel.getElement().setId("measureDetailsView_ComponentPanel");
		buildSavePanel(currentMeasureDetail);
		// Re-apply the current read-only state to the freshly built view.
		setReadOnly(isReadOnly);
		return widgetComponentPanel;
	}

	/** Focuses the view's first element, falling back to the heading. */
	public void setFocusOnFirstElement() {
		if (componentDetailView.getFirstElement() != null) {
			componentDetailView.getFirstElement().getElement().focus();
		} else {
			headingHTML.getElement().focus();
		}
	}

	/**
	 * Keyboard escape hatch for rich-text editors: Ctrl+Shift+Tab jumps to the
	 * heading, Ctrl+Tab jumps to the save button.
	 */
	private void handleRichTextTabOut(KeyUpEvent keyUpEvent) {
		if (keyUpEvent.getNativeEvent().getCtrlKey() && keyUpEvent.getNativeEvent().getShiftKey()
				&& keyUpEvent.getNativeEvent().getKeyCode() == 9) {
			keyUpEvent.getNativeEvent().preventDefault();
			DOM.getElementById("measureDetailsView_HeadingContent").focus();
		} else if (keyUpEvent.getNativeEvent().getCtrlKey()
				&& keyUpEvent.getNativeEvent().getKeyCode() == 9) {
			keyUpEvent.getNativeEvent().preventDefault();
			saveButton.setFocus(true);
		}
	}

	/**
	 * Convenience overload that also swaps in a new model before rebuilding.
	 * The navigation panel parameter is kept for interface compatibility.
	 */
	public VerticalPanel buildDetailView(MeasureDetailsModel measureDetailsModel, MatDetailItem currentMeasureDetail,
			MeasureDetailsNavigation navigationPanel, MeasureDetailsObserver measureDetailsObserver) {
		this.currentMeasureDetail = currentMeasureDetail;
		this.measureDetailsModel = measureDetailsModel;
		return buildDetailView(currentMeasureDetail, measureDetailsObserver);
	}

	public Widget getWidget() {
		return mainPanel;
	}

	public Button getDeleteMeasureButton() {
		return this.deleteMeasureButton;
	}

	public MessageAlert getErrorMessageAlert() {
		return errorAlert;
	}

	/**
	 * Enables/disables the action buttons and the component view.
	 *
	 * @param isReadOnly true when the measure may not be edited
	 */
	public void setReadOnly(boolean isReadOnly) {
		boolean enabled = !isReadOnly;
		saveButton.setEnabled(enabled);
		cancelButton.setEnabled(enabled);
		deleteMeasureButton.setEnabled(enabled);
		componentDetailView.setReadOnly(isReadOnly);
		this.isReadOnly = isReadOnly;
	}

	/** Clears the component view and every alert. */
	public void clear() {
		componentDetailView.clear();
		messagePanel.clearAlerts();
		errorAlert.clearAlert();
	}

	/** Clears message alerts, including the References dirty-check banner. */
	public void clearAlerts() {
		messagePanel.clearAlerts();
		hideReferencesDirtyCheck();
	}

	// Hides the References view's dirty-check banner when that view is active.
	// Shared by clearAlerts() and displayDirtyCheck().
	private void hideReferencesDirtyCheck() {
		if (currentMeasureDetail == MeasureDetailsItems.REFERENCES) {
			ReferencesView referencesView = (ReferencesView) getComponentDetailView();
			referencesView.hideDirtyCheck();
		}
	}

	public ConfirmationDialogBox getSaveConfirmation() {
		return componentDetailView.getSaveConfirmation();
	}

	/** Clears alerts and resets the component view's form fields. */
	public void resetForm() {
		messagePanel.clearAlerts();
		componentDetailView.resetForm();
	}

	public MeasureDetailsComponentModel getMeasureDetailsComponentModel() {
		return componentDetailView.getMeasureDetailsComponentModel();
	}

	public SaveButton getSaveButton() {
		return saveButton;
	}

	public void setSaveButton(SaveButton saveButton) {
		this.saveButton = saveButton;
	}

	public CancelButton getCancelButton() {
		return cancelButton;
	}

	public void setCancelButton(CancelButton cancelButton) {
		this.cancelButton = cancelButton;
	}

	public void displayErrorMessage(List<String> messages) {
		messagePanel.clearAlerts();
		messagePanel.getErrorMessageAlert().createAlert(messages);
	}

	public void displayErrorMessage(String message) {
		messagePanel.clearAlerts();
		messagePanel.getErrorMessageAlert().createAlert(message);
	}

	public void displaySuccessMessage(String message) {
		messagePanel.clearAlerts();
		messagePanel.getSuccessMessageAlert().createAlert(message);
	}

	public void displayWarning(String message) {
		messagePanel.clearAlerts();
		messagePanel.getWarningMessageAlert().createAlert(message);
	}

	/** Shows the unsaved-changes warning, replacing any existing alerts. */
	public void displayDirtyCheck() {
		hideReferencesDirtyCheck();
		messagePanel.clearAlerts();
		messagePanel.getWarningConfirmationMessageAlert().createWarningAlert();
	}

	public MatDetailItem getCurrentMeasureDetail() {
		return currentMeasureDetail;
	}

	public void setCurrentMeasureDetail(MatDetailItem currentMeasureDetail) {
		this.currentMeasureDetail = currentMeasureDetail;
	}

	public MessagePanel getMessagePanel() {
		return messagePanel;
	}

	public MeasureDetailViewInterface getComponentDetailView() {
		return componentDetailView;
	}

	public MeasureDetailsObserver getMeasureDetailsObserver() {
		return measureDetailsObserver;
	}

	public void setMeasureDetailsObserver(MeasureDetailsObserver measureDetailsObserver) {
		this.measureDetailsObserver = measureDetailsObserver;
	}
}
/* * Copyright (C) 2004-2012 * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.kohsuke.rngom.parse.xml; import java.util.Enumeration; import java.util.Hashtable; import java.util.Stack; import java.util.Vector; import java.util.List; import java.util.ArrayList; import java.util.Arrays; import org.kohsuke.rngom.ast.builder.Annotations; import org.kohsuke.rngom.ast.builder.CommentList; import org.kohsuke.rngom.ast.builder.DataPatternBuilder; import org.kohsuke.rngom.ast.builder.Div; import org.kohsuke.rngom.ast.builder.ElementAnnotationBuilder; import org.kohsuke.rngom.ast.builder.Grammar; import org.kohsuke.rngom.ast.builder.GrammarSection; import org.kohsuke.rngom.ast.builder.Include; import org.kohsuke.rngom.ast.builder.IncludedGrammar; import org.kohsuke.rngom.ast.builder.NameClassBuilder; import org.kohsuke.rngom.ast.builder.SchemaBuilder; import org.kohsuke.rngom.ast.builder.Scope; import org.kohsuke.rngom.ast.om.Location; import org.kohsuke.rngom.ast.om.ParsedElementAnnotation; import org.kohsuke.rngom.ast.om.ParsedNameClass; import org.kohsuke.rngom.ast.om.ParsedPattern; import org.kohsuke.rngom.parse.Context; import org.kohsuke.rngom.parse.IllegalSchemaException; import org.kohsuke.rngom.parse.Parseable; import org.kohsuke.rngom.util.Localizer; import org.kohsuke.rngom.util.Uri; import org.kohsuke.rngom.xml.sax.AbstractLexicalHandler; import org.kohsuke.rngom.xml.sax.XmlBaseHandler; import org.kohsuke.rngom.xml.util.Naming; import org.kohsuke.rngom.xml.util.WellKnownNamespaces; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.ErrorHandler; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; import org.xml.sax.helpers.DefaultHandler; class SchemaParser { private static final String relaxngURIPrefix = WellKnownNamespaces.RELAX_NG.substring(0, WellKnownNamespaces.RELAX_NG.lastIndexOf('/') + 1); static final String 
relaxng10URI = WellKnownNamespaces.RELAX_NG; private static final Localizer localizer = new Localizer(new Localizer(Parseable.class), SchemaParser.class); private String relaxngURI; private final XMLReader xr; private final ErrorHandler eh; private final SchemaBuilder schemaBuilder; /** * The value of the {@link SchemaBuilder#getNameClassBuilder()} for the * {@link #schemaBuilder} object. */ private final NameClassBuilder nameClassBuilder; private ParsedPattern startPattern; private Locator locator; private final XmlBaseHandler xmlBaseHandler = new XmlBaseHandler(); private final ContextImpl context = new ContextImpl(); private boolean hadError = false; private Hashtable patternTable; private Hashtable nameClassTable; static class PrefixMapping { final String prefix; final String uri; final PrefixMapping next; PrefixMapping(String prefix, String uri, PrefixMapping next) { this.prefix = prefix; this.uri = uri; this.next = next; } } static abstract class AbstractContext extends DtdContext implements Context { PrefixMapping prefixMapping; AbstractContext() { prefixMapping = new PrefixMapping("xml", WellKnownNamespaces.XML, null); } AbstractContext(AbstractContext context) { super(context); prefixMapping = context.prefixMapping; } public String resolveNamespacePrefix(String prefix) { for (PrefixMapping p = prefixMapping; p != null; p = p.next) { if (p.prefix.equals(prefix)) { return p.uri; } } return null; } public Enumeration prefixes() { Vector v = new Vector(); for (PrefixMapping p = prefixMapping; p != null; p = p.next) { if (!v.contains(p.prefix)) { v.addElement(p.prefix); } } return v.elements(); } public Context copy() { return new SavedContext(this); } } static class SavedContext extends AbstractContext { private final String baseUri; SavedContext(AbstractContext context) { super(context); this.baseUri = context.getBaseUri(); } public String getBaseUri() { return baseUri; } } class ContextImpl extends AbstractContext { public String getBaseUri() { return 
xmlBaseHandler.getBaseUri(); } } static interface CommentHandler { void comment(String value); } abstract class Handler implements ContentHandler, CommentHandler { CommentList comments; CommentList getComments() { CommentList tem = comments; comments = null; return tem; } public void comment(String value) { if (comments == null) { comments = schemaBuilder.makeCommentList(); } comments.addComment(value, makeLocation()); } public void processingInstruction(String target, String date) { } public void skippedEntity(String name) { } public void ignorableWhitespace(char[] ch, int start, int len) { } public void startDocument() { } public void endDocument() { } public void startPrefixMapping(String prefix, String uri) { context.prefixMapping = new PrefixMapping(prefix, uri, context.prefixMapping); } public void endPrefixMapping(String prefix) { context.prefixMapping = context.prefixMapping.next; } public void setDocumentLocator(Locator loc) { locator = loc; xmlBaseHandler.setLocator(loc); } } abstract class State extends Handler { State parent; String nsInherit; String ns; String datatypeLibrary; /** * The current scope, or null if there's none. */ Scope scope; Location startLocation; Annotations annotations; void set() { xr.setContentHandler(this); } abstract State create(); abstract State createChildState(String localName) throws SAXException; void setParent(State parent) { this.parent = parent; this.nsInherit = parent.getNs(); this.datatypeLibrary = parent.datatypeLibrary; this.scope = parent.scope; this.startLocation = makeLocation(); if (parent.comments != null) { annotations = schemaBuilder.makeAnnotations(parent.comments, getContext()); parent.comments = null; } else if (parent instanceof RootState) { annotations = schemaBuilder.makeAnnotations(null, getContext()); } } String getNs() { return ns == null ? 
nsInherit : ns; } boolean isRelaxNGElement(String uri) throws SAXException { return uri.equals(relaxngURI); } public void startElement(String namespaceURI, String localName, String qName, Attributes atts) throws SAXException { xmlBaseHandler.startElement(); if (isRelaxNGElement(namespaceURI)) { State state = createChildState(localName); if (state == null) { xr.setContentHandler(new Skipper(this)); return; } state.setParent(this); state.set(); state.attributes(atts); } else { checkForeignElement(); ForeignElementHandler feh = new ForeignElementHandler(this, getComments()); feh.startElement(namespaceURI, localName, qName, atts); xr.setContentHandler(feh); } } public void endElement(String namespaceURI, String localName, String qName) throws SAXException { xmlBaseHandler.endElement(); parent.set(); end(); } void setName(String name) throws SAXException { error("illegal_name_attribute"); } void setOtherAttribute(String name, String value) throws SAXException { error("illegal_attribute_ignored", name); } void endAttributes() throws SAXException { } void checkForeignElement() throws SAXException { } void attributes(Attributes atts) throws SAXException { int len = atts.getLength(); for (int i = 0; i < len; i++) { String uri = atts.getURI(i); if (uri.length() == 0) { String name = atts.getLocalName(i); if (name.equals("name")) { setName(atts.getValue(i).trim()); } else if (name.equals("ns")) { ns = atts.getValue(i); } else if (name.equals("datatypeLibrary")) { datatypeLibrary = atts.getValue(i); checkUri(datatypeLibrary); if (!datatypeLibrary.equals("") && !Uri.isAbsolute(datatypeLibrary)) { error("relative_datatype_library"); } if (Uri.hasFragmentId(datatypeLibrary)) { error("fragment_identifier_datatype_library"); } datatypeLibrary = Uri.escapeDisallowedChars(datatypeLibrary); } else { setOtherAttribute(name, atts.getValue(i)); } } else if (uri.equals(relaxngURI)) { error("qualified_attribute", atts.getLocalName(i)); } else if (uri.equals(WellKnownNamespaces.XML) && 
atts.getLocalName(i).equals("base")) {
                xmlBaseHandler.xmlBaseAttribute(atts.getValue(i));
            } else {
                // Any other qualified attribute is preserved as an annotation.
                if (annotations == null) {
                    annotations = schemaBuilder.makeAnnotations(null, getContext());
                }
                annotations.addAttribute(uri, atts.getLocalName(i), findPrefix(atts.getQName(i), uri), atts.getValue(i), startLocation);
            }
        }
        endAttributes();
    }

    // Called when this state's element ends; builds and hands off the result.
    abstract void end() throws SAXException;

    void endChild(ParsedPattern pattern) {
        // XXX cannot happen; throw exception
    }

    void endChild(ParsedNameClass nc) {
        // XXX cannot happen; throw exception
    }

    @Override
    public void startDocument() {
    }

    @Override
    public void endDocument() {
        // Attach any comments trailing the last pattern in the document.
        if (comments != null && startPattern != null) {
            startPattern = schemaBuilder.commentAfter(startPattern, comments);
            comments = null;
        }
    }

    // Only whitespace character data is allowed between pattern elements.
    public void characters(char[] ch, int start, int len) throws SAXException {
        for (int i = 0; i < len; i++) {
            switch (ch[start + i]) {
            case ' ':
            case '\r':
            case '\n':
            case '\t':
                break;
            default:
                error("illegal_characters_ignored");
                break;
            }
        }
    }

    boolean isPatternNamespaceURI(String s) {
        return s.equals(relaxngURI);
    }

    // A completed foreign (annotation) element is added to this state's annotations.
    void endForeignChild(ParsedElementAnnotation ea) {
        if (annotations == null) {
            annotations = schemaBuilder.makeAnnotations(null, getContext());
        }
        annotations.addElement(ea);
    }

    // Folds buffered comments into the leading annotations.
    void mergeLeadingComments() {
        if (comments != null) {
            if (annotations == null) {
                annotations = schemaBuilder.makeAnnotations(comments, getContext());
            } else {
                annotations.addLeadingComment(comments);
            }
            comments = null;
        }
    }
}

// Handles the subtree of a foreign (non-RELAX NG) element, converting it into
// an element annotation; nested foreign elements are tracked with a stack.
class ForeignElementHandler extends Handler {
    final State nextState;      // state to restore when the subtree ends
    ElementAnnotationBuilder builder;
    final Stack builderStack = new Stack();
    StringBuffer textBuf;
    Location textLoc;

    ForeignElementHandler(State nextState, CommentList comments) {
        this.nextState = nextState;
        this.comments = comments;
    }

    public void startElement(String namespaceURI, String localName, String qName, Attributes atts) {
        flushText();
        if (builder != null) {
            builderStack.push(builder);
        }
        Location loc = makeLocation();
        builder = schemaBuilder.makeElementAnnotationBuilder(namespaceURI, localName, findPrefix(qName, namespaceURI), loc, getComments(), getContext());
        int len = atts.getLength();
        for (int i = 0; i < len; i++) {
            String uri = atts.getURI(i);
            builder.addAttribute(uri, atts.getLocalName(i), findPrefix(atts.getQName(i), uri), atts.getValue(i), loc);
        }
    }

    public void endElement(String namespaceURI, String localName, String qName) {
        flushText();
        if (comments != null) {
            builder.addComment(getComments());
        }
        ParsedElementAnnotation ea = builder.makeElementAnnotation();
        if (builderStack.empty()) {
            // Outermost foreign element finished: hand the annotation back and
            // restore the saved parser state.
            nextState.endForeignChild(ea);
            nextState.set();
        } else {
            builder = (ElementAnnotationBuilder) builderStack.pop();
            builder.addElement(ea);
        }
    }

    public void characters(char ch[], int start, int length) {
        if (textBuf == null) {
            textBuf = new StringBuffer();
        }
        textBuf.append(ch, start, length);
        if (textLoc == null) {
            textLoc = makeLocation();
        }
    }

    @Override
    public void comment(String value) {
        flushText();
        super.comment(value);
    }

    // Emits buffered character data (if any) into the current builder.
    void flushText() {
        if (textBuf != null && textBuf.length() != 0) {
            builder.addText(textBuf.toString(), textLoc, getComments());
            textBuf.setLength(0);
        }
        textLoc = null;
    }
}

// Silently skips an unrecognized element and its whole subtree, then restores
// the saved state when the matching end-tag is seen.
static class Skipper extends DefaultHandler implements CommentHandler {
    int level = 1;
    final State nextState;

    Skipper(State nextState) {
        this.nextState = nextState;
    }

    @Override
    public void startElement(String namespaceURI, String localName, String qName, Attributes atts) throws SAXException {
        ++level;
    }

    @Override
    public void endElement(String namespaceURI, String localName, String qName) throws SAXException {
        if (--level == 0) {
            nextState.set();
        }
    }

    public void comment(String value) {
    }
}

// Base state for elements that must be empty (empty, text, ref, value, ...).
abstract class EmptyContentState extends State {
    State createChildState(String localName) throws SAXException {
        error("expected_empty", localName);
        return null;
    }

    abstract ParsedPattern makePattern() throws SAXException;

    void end() throws SAXException {
        if (comments != null) {
            if (annotations == null) {
                annotations =
schemaBuilder.makeAnnotations(null, getContext());
            }
            annotations.addComment(comments);
            comments = null;
        }
        parent.endChild(makePattern());
    }
}

static private final int INIT_CHILD_ALLOC = 5;

// Base state for pattern elements that contain child patterns.
abstract class PatternContainerState extends State {
    List<ParsedPattern> childPatterns;

    State createChildState(String localName) throws SAXException {
        State state = (State) patternTable.get(localName);
        if (state == null) {
            error("expected_pattern", localName);
            return null;
        }
        return state.create();
    }

    // A single annotation-free child is returned as-is; otherwise the children
    // are wrapped in a group.
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        if (patterns.size() == 1 && anno == null) {
            return patterns.get(0);
        }
        return schemaBuilder.makeGroup(patterns, loc, anno);
    }

    @Override
    void endChild(ParsedPattern pattern) {
        if (childPatterns == null) {
            childPatterns = new ArrayList<ParsedPattern>(INIT_CHILD_ALLOC);
        }
        childPatterns.add(pattern);
    }

    @Override
    void endForeignChild(ParsedElementAnnotation ea) {
        // Always record the annotation on this state, whether or not any child
        // patterns exist yet; if one does, also attach it after the last child.
        super.endForeignChild(ea);
        if (childPatterns != null) {
            int idx = childPatterns.size() - 1;
            childPatterns.set(idx, schemaBuilder.annotateAfter(childPatterns.get(idx), ea));
        }
    }

    void end() throws SAXException {
        if (childPatterns == null) {
            // No children: report and substitute an error pattern.
            error("missing_children");
            endChild(schemaBuilder.makeErrorPattern());
        }
        if (comments != null) {
            int idx = childPatterns.size() - 1;
            childPatterns.set(idx, schemaBuilder.commentAfter(childPatterns.get(idx), comments));
            comments = null;
        }
        sendPatternToParent(buildPattern(childPatterns, startLocation, annotations));
    }

    void sendPatternToParent(ParsedPattern p) {
        parent.endChild(p);
    }
}

class GroupState extends PatternContainerState {
    State create() {
        return new GroupState();
    }
}

class ZeroOrMoreState extends PatternContainerState {
    State create() {
        return new ZeroOrMoreState();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        return schemaBuilder.makeZeroOrMore(super.buildPattern(patterns, loc, null), loc, anno);
    }
}

class OneOrMoreState extends PatternContainerState {
    State create() {
        return new OneOrMoreState();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        return schemaBuilder.makeOneOrMore(super.buildPattern(patterns, loc, null), loc, anno);
    }
}

class OptionalState extends PatternContainerState {
    State create() {
        return new OptionalState();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        return schemaBuilder.makeOptional(super.buildPattern(patterns, loc, null), loc, anno);
    }
}

class ListState extends PatternContainerState {
    State create() {
        return new ListState();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        return schemaBuilder.makeList(super.buildPattern(patterns, loc, null), loc, anno);
    }
}

class ChoiceState extends
PatternContainerState {
    State create() {
        return new ChoiceState();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        return schemaBuilder.makeChoice(patterns, loc, anno);
    }
}

class InterleaveState extends PatternContainerState {
    State create() {
        return new InterleaveState();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) {
        return schemaBuilder.makeInterleave(patterns, loc, anno);
    }
}

class MixedState extends PatternContainerState {
    State create() {
        return new MixedState();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        return schemaBuilder.makeMixed(super.buildPattern(patterns, loc, null), loc, anno);
    }
}

// Receiver for a name class produced by a NameClassChildState.
static interface NameClassRef {
    void setNameClass(ParsedNameClass nc);
}

// State for <element>: the name class comes either from the name attribute or
// from a name-class child element.
class ElementState extends PatternContainerState implements NameClassRef {
    ParsedNameClass nameClass;
    boolean nameClassWasAttribute;
    String name;

    @Override
    void setName(String name) {
        this.name = name;
    }

    public void setNameClass(ParsedNameClass nc) {
        nameClass = nc;
    }

    @Override
    void endAttributes() throws SAXException {
        if (name != null) {
            nameClass = expandName(name, getNs(), null);
            nameClassWasAttribute = true;
        } else {
            // No name attribute: a name-class child element must come first.
            new NameClassChildState(this, this).set();
        }
    }

    State create() {
        return new ElementState();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        return schemaBuilder.makeElement(nameClass, super.buildPattern(patterns, loc, null), loc, anno);
    }

    @Override
    void endForeignChild(ParsedElementAnnotation ea) {
        // Annotations appearing between the name-class child and the first
        // child pattern attach to the name class itself.
        if (nameClassWasAttribute || childPatterns != null || nameClass == null) {
            super.endForeignChild(ea);
        } else {
            nameClass = nameClassBuilder.annotateAfter(nameClass, ea);
        }
    }
}

// Top-level state: accepts the document's single root pattern (or, when
// processing an include, the root <grammar>).
class RootState extends PatternContainerState {
    IncludedGrammar grammar;

    RootState() {
    }

    RootState(IncludedGrammar grammar, Scope scope, String ns) {
        this.grammar = grammar;
        this.scope = scope;
        this.nsInherit = ns;
        this.datatypeLibrary = "";
    }

    State create() {
        return new RootState();
    }

    @Override
    State createChildState(String localName) throws SAXException {
        if (grammar == null) {
            return super.createChildState(localName);
        }
        if (localName.equals("grammar")) {
            return new MergeGrammarState(grammar);
        }
        error("expected_grammar", localName);
        return null;
    }

    @Override
    void checkForeignElement() throws SAXException {
        error("root_bad_namespace_uri", WellKnownNamespaces.RELAX_NG);
    }

    @Override
    void endChild(ParsedPattern pattern) {
        startPattern = pattern;
    }

    @Override
    boolean isRelaxNGElement(String uri) throws SAXException {
        // Accept any URI sharing the RELAX NG prefix, warning when its version
        // part differs from the supported namespace; remember the URI used.
        if (!uri.startsWith(relaxngURIPrefix)) {
            return false;
        }
        if (!uri.equals(WellKnownNamespaces.RELAX_NG)) {
            warning("wrong_uri_version", WellKnownNamespaces.RELAX_NG.substring(relaxngURIPrefix.length()), uri.substring(relaxngURIPrefix.length()));
        }
        relaxngURI = uri;
        return true;
    }
}

class NotAllowedState extends EmptyContentState {
    State create() {
        return new NotAllowedState();
    }

    ParsedPattern makePattern() {
        return schemaBuilder.makeNotAllowed(startLocation, annotations);
    }
}

class EmptyState extends EmptyContentState {
    State create() {
        return new EmptyState();
    }

    ParsedPattern makePattern() {
        return schemaBuilder.makeEmpty(startLocation, annotations);
    }
}

class TextState extends EmptyContentState {
    State create() {
        return new TextState();
    }

    ParsedPattern makePattern() {
        return schemaBuilder.makeText(startLocation, annotations);
    }
}

// State for <value>: collects its character content and an optional type.
class ValueState extends EmptyContentState {
    final StringBuffer buf = new StringBuffer();
    String type;

    State create() {
        return new ValueState();
    }

    @Override
    void setOtherAttribute(String name, String value) throws SAXException {
        if (name.equals("type")) {
            type = checkNCName(value.trim());
        } else {
            super.setOtherAttribute(name, value);
        }
    }

    @Override
    public void characters(char[] ch, int start, int len) {
        buf.append(ch, start, len);
    }

    @Override
    void
checkForeignElement() throws SAXException {
        error("value_contains_foreign_element");
    }

    ParsedPattern makePattern() throws SAXException {
        // With no type attribute the value uses the built-in token datatype.
        if (type == null) {
            return makePattern("", "token");
        } else {
            return makePattern(datatypeLibrary, type);
        }
    }

    @Override
    void end() throws SAXException {
        mergeLeadingComments();
        super.end();
    }

    ParsedPattern makePattern(String datatypeLibrary, String type) {
        return schemaBuilder.makeValue(datatypeLibrary, type, buf.toString(), getContext(), getNs(), startLocation, annotations);
    }
}

// State for <data>: a datatype with optional <param> children and at most one
// trailing <except> child.
class DataState extends State {
    String type;
    ParsedPattern except = null;
    DataPatternBuilder dpb = null;

    State create() {
        return new DataState();
    }

    State createChildState(String localName) throws SAXException {
        if (localName.equals("param")) {
            if (except != null) {
                error("param_after_except");
            }
            return new ParamState(dpb);
        }
        if (localName.equals("except")) {
            if (except != null) {
                error("multiple_except");
            }
            return new ChoiceState();
        }
        error("expected_param_except", localName);
        return null;
    }

    @Override
    void setOtherAttribute(String name, String value) throws SAXException {
        if (name.equals("type")) {
            type = checkNCName(value.trim());
        } else {
            super.setOtherAttribute(name, value);
        }
    }

    @Override
    void endAttributes() throws SAXException {
        if (type == null) {
            error("missing_type_attribute");
        } else {
            dpb = schemaBuilder.makeDataPatternBuilder(datatypeLibrary, type, startLocation);
        }
    }

    void end() throws SAXException {
        ParsedPattern p;
        if (dpb != null) {
            if (except != null) {
                p = dpb.makePattern(except, startLocation, annotations);
            } else {
                p = dpb.makePattern(startLocation, annotations);
            }
        } else {
            // Missing/invalid type: substitute an error pattern.
            p = schemaBuilder.makeErrorPattern();
        }
        // XXX need to capture comments
        parent.endChild(p);
    }

    @Override
    void endChild(ParsedPattern pattern) {
        except = pattern;
    }
}

// State for <param>: a named datatype parameter whose value is its text content.
class ParamState extends State {
    private final StringBuffer buf = new StringBuffer();
    private final DataPatternBuilder dpb;
    private String name;

    ParamState(DataPatternBuilder dpb) {
        this.dpb = dpb;
    }

    State create() {
        return new ParamState(null);
    }

    @Override
    void setName(String name) throws SAXException {
        this.name = checkNCName(name);
    }

    @Override
    void endAttributes() throws SAXException {
        if (name == null) {
            error("missing_name_attribute");
        }
    }

    State createChildState(String localName) throws SAXException {
        error("expected_empty", localName);
        return null;
    }

    @Override
    public void characters(char[] ch, int start, int len) {
        buf.append(ch, start, len);
    }

    @Override
    void checkForeignElement() throws SAXException {
        error("param_contains_foreign_element");
    }

    void end() throws SAXException {
        // Nothing to record if the name was missing or the builder was never
        // created (bad enclosing <data>).
        if (name == null) {
            return;
        }
        if (dpb == null) {
            return;
        }
        mergeLeadingComments();
        dpb.addParam(name, buf.toString(), getContext(), getNs(), startLocation, annotations);
    }
}

// State for <attribute>: like <element> but defaults its content to text and
// allows at most one child pattern.
class AttributeState extends PatternContainerState implements NameClassRef {
    ParsedNameClass nameClass;
    boolean nameClassWasAttribute;
    String name;

    State create() {
        return new AttributeState();
    }

    @Override
    void setName(String name) {
        this.name = name;
    }

    public void setNameClass(ParsedNameClass nc) {
        nameClass = nc;
    }

    @Override
    void endAttributes() throws SAXException {
        if (name != null) {
            // Unlike <element>, an unprefixed attribute name defaults to the
            // empty namespace rather than the inherited one.
            String nsUse;
            if (ns != null) {
                nsUse = ns;
            } else {
                nsUse = "";
            }
            nameClass = expandName(name, nsUse, null);
            nameClassWasAttribute = true;
        } else {
            new NameClassChildState(this, this).set();
        }
    }

    @Override
    void endForeignChild(ParsedElementAnnotation ea) {
        if (nameClassWasAttribute || childPatterns != null || nameClass == null) {
            super.endForeignChild(ea);
        } else {
            nameClass = nameClassBuilder.annotateAfter(nameClass, ea);
        }
    }

    @Override
    void end() throws SAXException {
        // An empty <attribute> implies text content.
        if (childPatterns == null) {
            endChild(schemaBuilder.makeText(startLocation, null));
        }
        super.end();
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        return schemaBuilder.makeAttribute(nameClass, super.buildPattern(patterns, loc, null), loc, anno);
    }

    @Override
    State createChildState(String
localName) throws SAXException {
        State tem = super.createChildState(localName);
        if (tem != null && childPatterns != null) {
            error("attribute_multi_pattern");
        }
        return tem;
    }
}

// Base state for elements that allow at most one child pattern.
abstract class SinglePatternContainerState extends PatternContainerState {
    @Override
    State createChildState(String localName) throws SAXException {
        if (childPatterns == null) {
            return super.createChildState(localName);
        }
        error("too_many_children");
        return null;
    }
}

// State for grammar-content containers: define/start/include/div children.
class GrammarSectionState extends State {
    GrammarSection section;

    GrammarSectionState() {
    }

    GrammarSectionState(GrammarSection section) {
        this.section = section;
    }

    State create() {
        return new GrammarSectionState(null);
    }

    State createChildState(String localName) throws SAXException {
        if (localName.equals("define")) {
            return new DefineState(section);
        }
        if (localName.equals("start")) {
            return new StartState(section);
        }
        if (localName.equals("include")) {
            Include include = section.makeInclude();
            if (include != null) {
                return new IncludeState(include);
            }
        }
        if (localName.equals("div")) {
            return new DivState(section.makeDiv());
        }
        error("expected_define", localName);
        // XXX better errors
        return null;
    }

    void end() throws SAXException {
        if (comments != null) {
            section.topLevelComment(comments);
            comments = null;
        }
    }

    @Override
    void endForeignChild(ParsedElementAnnotation ea) {
        section.topLevelAnnotation(ea);
    }
}

class DivState extends GrammarSectionState {
    final Div div;

    DivState(Div div) {
        super(div);
        this.div = div;
    }

    @Override
    void end() throws SAXException {
        super.end();
        div.endDiv(startLocation, annotations);
    }
}

// State for <include>: resolves the href and parses the included grammar.
class IncludeState extends GrammarSectionState {
    String href;
    final Include include;

    IncludeState(Include include) {
        super(include);
        this.include = include;
    }

    @Override
    void setOtherAttribute(String name, String value) throws SAXException {
        if (name.equals("href")) {
            href = value;
            checkUri(href);
        } else {
            super.setOtherAttribute(name, value);
        }
    }

    @Override
    void endAttributes() throws SAXException {
        if (href == null) {
            error("missing_href_attribute");
        } else {
            href = resolve(href);
        }
    }

    @Override
    void end() throws SAXException {
        super.end();
        if (href != null) {
            try {
                include.endInclude(parseable, href, getNs(), startLocation, annotations);
            } catch (IllegalSchemaException e) {
                // Errors in the included schema were already reported through
                // the error handler; parsing of this document continues.
            }
        }
    }
}

class MergeGrammarState extends GrammarSectionState {
    final IncludedGrammar grammar;

    MergeGrammarState(IncludedGrammar grammar) {
        super(grammar);
        this.grammar = grammar;
    }

    @Override
    void end() throws SAXException {
        super.end();
        parent.endChild(grammar.endIncludedGrammar(startLocation, annotations));
    }
}

// State for <grammar>: introduces a new scope for named definitions.
class GrammarState extends GrammarSectionState {
    Grammar grammar;

    @Override
    void setParent(State parent) {
        super.setParent(parent);
        grammar = schemaBuilder.makeGrammar(scope);
        section = grammar;
        scope = grammar;
    }

    @Override
    State create() {
        return new GrammarState();
    }

    @Override
    void end() throws SAXException {
        super.end();
        parent.endChild(grammar.endGrammar(startLocation, annotations));
    }
}

// State for <ref>: a reference to a named definition in the current grammar.
class RefState extends EmptyContentState {
    String name;

    State create() {
        return new RefState();
    }

    @Override
    void endAttributes() throws SAXException {
        if (name == null) {
            error("missing_name_attribute");
        }
    }

    @Override
    void setName(String name) throws SAXException {
        this.name = checkNCName(name);
    }

    ParsedPattern makePattern() throws SAXException {
        if (name == null) {
            return schemaBuilder.makeErrorPattern();
        }
        if (scope == null) {
            error("ref_outside_grammar", name);
            return schemaBuilder.makeErrorPattern();
        } else {
            return scope.makeRef(name, startLocation, annotations);
        }
    }
}

class ParentRefState extends RefState {
    @Override
    State create() {
        return new ParentRefState();
    }

    @Override
    ParsedPattern makePattern() throws SAXException {
        if (name == null) {
            return schemaBuilder.makeErrorPattern();
        }
        if (scope == null) {
            error("parent_ref_outside_grammar", name);
            return schemaBuilder.makeErrorPattern();
        } else {
            return scope.makeParentRef(name, startLocation, annotations);
        }
    }
}

class ExternalRefState extends
EmptyContentState {
    String href;

    State create() {
        return new ExternalRefState();
    }

    @Override
    void setOtherAttribute(String name, String value) throws SAXException {
        if (name.equals("href")) {
            href = value;
            checkUri(href);
        } else {
            super.setOtherAttribute(name, value);
        }
    }

    @Override
    void endAttributes() throws SAXException {
        if (href == null) {
            error("missing_href_attribute");
        } else {
            href = resolve(href);
        }
    }

    ParsedPattern makePattern() {
        if (href != null) {
            try {
                return schemaBuilder.makeExternalRef(parseable, href, getNs(), scope, startLocation, annotations);
            } catch (IllegalSchemaException e) {
                // Fall through to an error pattern; the problem was already
                // reported through the error handler.
            }
        }
        return schemaBuilder.makeErrorPattern();
    }
}

// Base state for <define> and <start>, handling the combine attribute.
abstract class DefinitionState extends PatternContainerState {
    GrammarSection.Combine combine = null;
    final GrammarSection section;

    DefinitionState(GrammarSection section) {
        this.section = section;
    }

    @Override
    void setOtherAttribute(String name, String value) throws SAXException {
        if (name.equals("combine")) {
            value = value.trim();
            if (value.equals("choice")) {
                combine = GrammarSection.COMBINE_CHOICE;
            } else if (value.equals("interleave")) {
                combine = GrammarSection.COMBINE_INTERLEAVE;
            } else {
                error("combine_attribute_bad_value", value);
            }
        } else {
            super.setOtherAttribute(name, value);
        }
    }

    @Override
    ParsedPattern buildPattern(List<ParsedPattern> patterns, Location loc, Annotations anno) throws SAXException {
        // Annotations are attached to the definition, not the pattern itself.
        return super.buildPattern(patterns, loc, null);
    }
}

class DefineState extends DefinitionState {
    String name;

    DefineState(GrammarSection section) {
        super(section);
    }

    State create() {
        return new DefineState(null);
    }

    @Override
    void setName(String name) throws SAXException {
        this.name = checkNCName(name);
    }

    @Override
    void endAttributes() throws SAXException {
        if (name == null) {
            error("missing_name_attribute");
        }
    }

    @Override
    void sendPatternToParent(ParsedPattern p) {
        if (name != null) {
            section.define(name, combine, p, startLocation, annotations);
        }
    }
}

class StartState extends DefinitionState {
    StartState(GrammarSection section) {
        super(section);
    }

    State create() {
        return new StartState(null);
    }

    @Override
    void sendPatternToParent(ParsedPattern p) {
        section.define(GrammarSection.START, combine, p, startLocation, annotations);
    }

    @Override
    State createChildState(String localName) throws SAXException {
        State tem = super.createChildState(localName);
        if (tem != null && childPatterns != null) {
            error("start_multi_pattern");
        }
        return tem;
    }
}

// Base state for elements whose children are name classes.
abstract class NameClassContainerState extends State {
    State createChildState(String localName) throws SAXException {
        State state = (State) nameClassTable.get(localName);
        if (state == null) {
            error("expected_name_class", localName);
            return null;
        }
        return state.create();
    }
}

// Transient state that parses one name-class child on behalf of a pattern
// state (element/attribute) and then returns control to it.
class NameClassChildState extends NameClassContainerState {
    final State prevState;
    final NameClassRef nameClassRef;

    State create() {
        return null;
    }

    NameClassChildState(State prevState, NameClassRef nameClassRef) {
        this.prevState = prevState;
        this.nameClassRef = nameClassRef;
        setParent(prevState.parent);
        this.ns = prevState.ns;
    }

    @Override
    void endChild(ParsedNameClass nameClass) {
        nameClassRef.setNameClass(nameClass);
        prevState.set();
    }

    @Override
    void endForeignChild(ParsedElementAnnotation ea) {
        prevState.endForeignChild(ea);
    }

    void end() throws SAXException {
        // Reached only when no name-class child appeared at all.
        nameClassRef.setNameClass(nameClassBuilder.makeErrorNameClass());
        error("missing_name_class");
        prevState.set();
        prevState.end();
    }
}

abstract class NameClassBaseState extends State {
    abstract ParsedNameClass makeNameClass() throws SAXException;

    void end() throws SAXException {
        parent.endChild(makeNameClass());
    }
}

// State for <name>: a single (possibly prefixed) name given as text content.
class NameState extends NameClassBaseState {
    final StringBuffer buf = new StringBuffer();

    State createChildState(String localName) throws SAXException {
        error("expected_name", localName);
        return null;
    }

    State create() {
        return new NameState();
    }

    @Override
    public void characters(char[] ch, int start, int len) {
        buf.append(ch, start, len);
    }

    @Override
    void checkForeignElement() throws SAXException {
error("name_contains_foreign_element");
    }

    ParsedNameClass makeNameClass() throws SAXException {
        mergeLeadingComments();
        return expandName(buf.toString().trim(), getNs(), annotations);
    }
}

// Contexts controlling which name classes may appear inside an <except>.
private static final int PATTERN_CONTEXT = 0;
private static final int ANY_NAME_CONTEXT = 1;
private static final int NS_NAME_CONTEXT = 2;
private SAXParseable parseable;

// State for <anyName>, optionally restricted by an <except> child.
class AnyNameState extends NameClassBaseState {
    ParsedNameClass except = null;

    State create() {
        return new AnyNameState();
    }

    State createChildState(String localName) throws SAXException {
        if (localName.equals("except")) {
            if (except != null) {
                error("multiple_except");
            }
            return new NameClassChoiceState(getContext());
        }
        error("expected_except", localName);
        return null;
    }

    // Name-class context for nested <except> content (see the constants above).
    int getContext() {
        return ANY_NAME_CONTEXT;
    }

    ParsedNameClass makeNameClass() {
        if (except == null) {
            return makeNameClassNoExcept();
        } else {
            return makeNameClassExcept(except);
        }
    }

    ParsedNameClass makeNameClassNoExcept() {
        return nameClassBuilder.makeAnyName(startLocation, annotations);
    }

    ParsedNameClass makeNameClassExcept(ParsedNameClass except) {
        return nameClassBuilder.makeAnyName(except, startLocation, annotations);
    }

    @Override
    void endChild(ParsedNameClass nameClass) {
        except = nameClass;
    }
}

class NsNameState extends AnyNameState {
    @Override
    State create() {
        return new NsNameState();
    }

    @Override
    ParsedNameClass makeNameClassNoExcept() {
        return nameClassBuilder.makeNsName(getNs(), null, null);
    }

    @Override
    ParsedNameClass makeNameClassExcept(ParsedNameClass except) {
        return nameClassBuilder.makeNsName(getNs(), except, null, null);
    }

    @Override
    int getContext() {
        return NS_NAME_CONTEXT;
    }
}

// State for a <choice> of name classes; also used for <except> contents, where
// the context field restricts which name classes are legal.
class NameClassChoiceState extends NameClassContainerState {
    private ParsedNameClass[] nameClasses;
    private int nNameClasses;
    private int context;

    NameClassChoiceState() {
        this.context = PATTERN_CONTEXT;
    }

    NameClassChoiceState(int context) {
        this.context = context;
    }

    @Override
    void setParent(State parent) {
        super.setParent(parent);
        // Nested choices inherit the enclosing choice's context.
        if (parent instanceof NameClassChoiceState) {
            this.context = ((NameClassChoiceState) parent).context;
        }
    }

    State create() {
        return new NameClassChoiceState();
    }

    @Override
    State createChildState(String localName) throws SAXException {
        if (localName.equals("anyName")) {
            if (context >= ANY_NAME_CONTEXT) {
                error(context == ANY_NAME_CONTEXT ? "any_name_except_contains_any_name" : "ns_name_except_contains_any_name");
                return null;
            }
        } else if (localName.equals("nsName")) {
            if (context == NS_NAME_CONTEXT) {
                error("ns_name_except_contains_ns_name");
                return null;
            }
        }
        return super.createChildState(localName);
    }

    @Override
    void endChild(ParsedNameClass nc) {
        // Grow the array by doubling when full.
        if (nameClasses == null) {
            nameClasses = new ParsedNameClass[INIT_CHILD_ALLOC];
        } else if (nNameClasses >= nameClasses.length) {
            ParsedNameClass[] newNameClasses = new ParsedNameClass[nameClasses.length * 2];
            System.arraycopy(nameClasses, 0, newNameClasses, 0, nameClasses.length);
            nameClasses = newNameClasses;
        }
        nameClasses[nNameClasses++] = nc;
    }

    @Override
    void endForeignChild(ParsedElementAnnotation ea) {
        if (nNameClasses == 0) {
            super.endForeignChild(ea);
        } else {
            nameClasses[nNameClasses - 1] = nameClassBuilder.annotateAfter(nameClasses[nNameClasses - 1], ea);
        }
    }

    void end() throws SAXException {
        if (nNameClasses == 0) {
            error("missing_name_class");
            parent.endChild(nameClassBuilder.makeErrorNameClass());
            return;
        }
        if (comments != null) {
            nameClasses[nNameClasses - 1] = nameClassBuilder.commentAfter(nameClasses[nNameClasses - 1], comments);
            comments = null;
        }
        parent.endChild(nameClassBuilder.makeChoice(Arrays.asList(nameClasses).subList(0, nNameClasses), startLocation, annotations));
    }
}

// Maps RELAX NG pattern element names to prototype states.
private void initPatternTable() {
    patternTable = new Hashtable();
    patternTable.put("zeroOrMore", new ZeroOrMoreState());
    patternTable.put("oneOrMore", new OneOrMoreState());
    patternTable.put("optional", new OptionalState());
    patternTable.put("list", new ListState());
    patternTable.put("choice", new ChoiceState());
    patternTable.put("interleave", new
InterleaveState());
    patternTable.put("group", new GroupState());
    patternTable.put("mixed", new MixedState());
    patternTable.put("element", new ElementState());
    patternTable.put("attribute", new AttributeState());
    patternTable.put("empty", new EmptyState());
    patternTable.put("text", new TextState());
    patternTable.put("value", new ValueState());
    patternTable.put("data", new DataState());
    patternTable.put("notAllowed", new NotAllowedState());
    patternTable.put("grammar", new GrammarState());
    patternTable.put("ref", new RefState());
    patternTable.put("parentRef", new ParentRefState());
    patternTable.put("externalRef", new ExternalRefState());
}

// Maps name-class element names to prototype states.
private void initNameClassTable() {
    nameClassTable = new Hashtable();
    nameClassTable.put("name", new NameState());
    nameClassTable.put("anyName", new AnyNameState());
    nameClassTable.put("nsName", new NsNameState());
    nameClassTable.put("choice", new NameClassChoiceState());
}

// Returns the parsed start pattern, failing if any error was reported.
public ParsedPattern getParsedPattern() throws IllegalSchemaException {
    if (hadError) {
        throw new IllegalSchemaException();
    }
    return startPattern;
}

// error(...) overloads: localize the message, mark the parse as failed, and
// forward to the error handler (if any).
private void error(String key) throws SAXException {
    error(key, locator);
}

private void error(String key, String arg) throws SAXException {
    error(key, arg, locator);
}

void error(String key, String arg1, String arg2) throws SAXException {
    error(key, arg1, arg2, locator);
}

private void error(String key, Locator loc) throws SAXException {
    error(new SAXParseException(localizer.message(key), loc));
}

private void error(String key, String arg, Locator loc) throws SAXException {
    error(new SAXParseException(localizer.message(key, arg), loc));
}

private void error(String key, String arg1, String arg2, Locator loc) throws SAXException {
    error(new SAXParseException(localizer.message(key, arg1, arg2), loc));
}

private void error(SAXParseException e) throws SAXException {
    hadError = true;
    if (eh != null) {
        eh.error(e);
    }
}

// warning(...) overloads mirror error(...) but do not mark the parse failed.
void warning(String key) throws SAXException {
    warning(key, locator);
}

private void warning(String key, String arg) throws SAXException {
    warning(key, arg, locator);
}

private void warning(String key, String arg1, String arg2) throws SAXException {
    warning(key, arg1, arg2, locator);
}

private void warning(String key, Locator loc) throws SAXException {
    warning(new SAXParseException(localizer.message(key), loc));
}

private void warning(String key, String arg, Locator loc) throws SAXException {
    warning(new SAXParseException(localizer.message(key, arg), loc));
}

private void warning(String key, String arg1, String arg2, Locator loc) throws SAXException {
    warning(new SAXParseException(localizer.message(key, arg1, arg2), loc));
}

private void warning(SAXParseException e) throws SAXException {
    if (eh != null) {
        eh.warning(e);
    }
}

// Wires this parser onto the XMLReader: error handler, DTD handler, an
// optional lexical handler for comment capture, the element dispatch tables,
// and the initial root state.
SchemaParser(SAXParseable parseable, XMLReader xr, ErrorHandler eh, SchemaBuilder schemaBuilder, IncludedGrammar grammar, Scope scope, String inheritedNs) throws SAXException {
    this.parseable = parseable;
    this.xr = xr;
    this.eh = eh;
    this.schemaBuilder = schemaBuilder;
    this.nameClassBuilder = schemaBuilder.getNameClassBuilder();
    if (eh != null) {
        xr.setErrorHandler(eh);
    }
    xr.setDTDHandler(context);
    if (schemaBuilder.usesComments()) {
        try {
            xr.setProperty("http://xml.org/sax/properties/lexical-handler", new LexicalHandlerImpl());
        } catch (SAXNotRecognizedException e) {
            // Comment capture is best-effort: warn and continue without it.
            warning("no_comment_support", xr.getClass().getName());
        } catch (SAXNotSupportedException e) {
            warning("no_comment_support", xr.getClass().getName());
        }
    }
    initPatternTable();
    initNameClassTable();
    new RootState(grammar, scope, inheritedNs).set();
}

private Context getContext() {
    return context;
}

// Forwards XML comments (outside the DTD) to whichever handler is active.
class LexicalHandlerImpl extends AbstractLexicalHandler {
    private boolean inDtd = false;

    @Override
    public void startDTD(String s, String s1, String s2) throws SAXException {
        inDtd = true;
    }

    @Override
    public void endDTD() throws SAXException {
        inDtd = false;
    }

    @Override
    public void comment(char[] chars, int start, int length) throws SAXException {
        if (!inDtd) {
            ((CommentHandler)
xr.getContentHandler()).comment(new String(chars, start, length));
        }
    }
}

// Expands a name (possibly prefix:local) into a name class, resolving the
// prefix against the in-scope namespace declarations.
private ParsedNameClass expandName(String name, String ns, Annotations anno) throws SAXException {
    int ic = name.indexOf(':');
    if (ic == -1) {
        return nameClassBuilder.makeName(ns, checkNCName(name), null, null, anno);
    }
    String prefix = checkNCName(name.substring(0, ic));
    String localName = checkNCName(name.substring(ic + 1));
    for (PrefixMapping tem = context.prefixMapping; tem != null; tem = tem.next) {
        if (tem.prefix.equals(prefix)) {
            return nameClassBuilder.makeName(tem.uri, localName, prefix, null, anno);
        }
    }
    error("undefined_prefix", prefix);
    return nameClassBuilder.makeName("", localName, null, null, anno);
}

// Finds a prefix for the given URI: prefer the one embedded in qName, else
// search the in-scope prefix mappings; may return null.
private String findPrefix(String qName, String uri) {
    String prefix = null;
    if (qName == null || qName.equals("")) {
        for (PrefixMapping p = context.prefixMapping; p != null; p = p.next) {
            if (p.uri.equals(uri)) {
                prefix = p.prefix;
                break;
            }
        }
    } else {
        int off = qName.indexOf(':');
        if (off > 0) {
            prefix = qName.substring(0, off);
        }
    }
    return prefix;
}

// Reports (but still returns) strings that are not valid NCNames.
private String checkNCName(String str) throws SAXException {
    if (!Naming.isNcname(str)) {
        error("invalid_ncname", str);
    }
    return str;
}

// Resolves an href against the current xml:base, rejecting fragment ids.
private String resolve(String systemId) throws SAXException {
    if (Uri.hasFragmentId(systemId)) {
        error("href_fragment_id");
    }
    systemId = Uri.escapeDisallowedChars(systemId);
    return Uri.resolve(xmlBaseHandler.getBaseUri(), systemId);
}

// Current source location from the SAX locator, or null if unavailable.
private Location makeLocation() {
    if (locator == null) {
        return null;
    }
    return schemaBuilder.makeLocation(locator.getSystemId(), locator.getLineNumber(), locator.getColumnNumber());
}

private void checkUri(String s) throws SAXException {
    if (!Uri.isValid(s)) {
        error("invalid_uri", s);
    }
}
}
/* * NOTE: This copyright does *not* cover user programs that use HQ * program services by normal system calls through the application * program interfaces provided as part of the Hyperic Plug-in Development * Kit or the Hyperic Client Development Kit - this is merely considered * normal use of the program, and does *not* fall under the heading of * "derived work". * * Copyright (C) [2004-2008], Hyperic, Inc. * This file is part of HQ. * * HQ is free software; you can redistribute it and/or modify * it under the terms version 2 of the GNU General Public License as * published by the Free Software Foundation. This program is distributed * in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA. 
*/ package org.hyperic.hq.autoinventory.server.session; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import javax.annotation.PostConstruct; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hyperic.hq.agent.AgentConnectionException; import org.hyperic.hq.agent.AgentRemoteException; import org.hyperic.hq.appdef.Agent; import org.hyperic.hq.appdef.server.session.*; import org.hyperic.hq.appdef.shared.AIAppdefResourceValue; import org.hyperic.hq.appdef.shared.AIPlatformValue; import org.hyperic.hq.appdef.shared.AIQueueConstants; import org.hyperic.hq.appdef.shared.AIQueueManager; import org.hyperic.hq.appdef.shared.AIServerValue; import org.hyperic.hq.appdef.shared.AgentManager; import org.hyperic.hq.appdef.shared.AgentNotFoundException; import org.hyperic.hq.appdef.shared.AppdefEntityID; import org.hyperic.hq.appdef.shared.AppdefUtil; import org.hyperic.hq.appdef.shared.ConfigFetchException; import org.hyperic.hq.appdef.shared.ConfigManager; import org.hyperic.hq.appdef.shared.PlatformManager; import org.hyperic.hq.appdef.shared.PlatformNotFoundException; import org.hyperic.hq.appdef.shared.PlatformValue; import org.hyperic.hq.appdef.shared.ServerManager; import org.hyperic.hq.appdef.shared.ServerTypeValue; import org.hyperic.hq.appdef.shared.ValidationException; import org.hyperic.hq.authz.server.session.AuthzSubject; import org.hyperic.hq.authz.server.session.Resource; import org.hyperic.hq.authz.server.shared.ResourceDeletedException; import org.hyperic.hq.authz.shared.AuthzConstants; import org.hyperic.hq.authz.shared.AuthzSubjectManager; import org.hyperic.hq.authz.shared.PermissionException; import org.hyperic.hq.authz.shared.PermissionManager; import org.hyperic.hq.authz.shared.ResourceManager; import org.hyperic.hq.autoinventory.AIHistory; 
import org.hyperic.hq.autoinventory.AIPlatform; import org.hyperic.hq.autoinventory.AutoinventoryException; import org.hyperic.hq.autoinventory.CompositeRuntimeResourceReport; import org.hyperic.hq.autoinventory.DuplicateAIScanNameException; import org.hyperic.hq.autoinventory.ScanConfigurationCore; import org.hyperic.hq.autoinventory.ScanState; import org.hyperic.hq.autoinventory.ScanStateCore; import org.hyperic.hq.autoinventory.ServerSignature; import org.hyperic.hq.autoinventory.agent.client.AICommandsClient; import org.hyperic.hq.autoinventory.agent.client.AICommandsClientFactory; import org.hyperic.hq.autoinventory.shared.AIScheduleManager; import org.hyperic.hq.autoinventory.shared.AutoinventoryManager; import org.hyperic.hq.common.ApplicationException; import org.hyperic.hq.common.NotFoundException; import org.hyperic.hq.common.SystemException; import org.hyperic.hq.dao.AIHistoryDAO; import org.hyperic.hq.dao.AIPlatformDAO; import org.hyperic.hq.measurement.shared.MeasurementProcessor; import org.hyperic.hq.product.AutoinventoryPluginManager; import org.hyperic.hq.product.GenericPlugin; import org.hyperic.hq.product.PluginException; import org.hyperic.hq.product.PluginNotFoundException; import org.hyperic.hq.product.ProductPlugin; import org.hyperic.hq.product.ServerDetector; import org.hyperic.hq.product.shared.ProductManager; import org.hyperic.hq.scheduler.ScheduleValue; import org.hyperic.hq.scheduler.ScheduleWillNeverFireException; import org.hyperic.util.StringUtil; import org.hyperic.util.config.ConfigResponse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; /** * This class is responsible for managing Autoinventory objects in autoinventory * and their relationships */ @org.springframework.stereotype.Service public class AutoinventoryManagerImpl implements AutoinventoryManager { private Log log = LogFactory.getLog(AutoinventoryManagerImpl.class.getName()); private 
AutoinventoryPluginManager aiPluginManager; private AIScheduleManager aiScheduleManager; private AIHistoryDAO aiHistoryDao; private AIPlatformDAO aiPlatformDao; private ProductManager productManager; private ServerManager serverManager; private ResourceManager resourceManager; private ConfigManager configManager; private AuthzSubjectManager authzSubjectManager; private AIQueueManager aiQueueManager; private PermissionManager permissionManager; private AICommandsClientFactory aiCommandsClientFactory; private ServiceMerger serviceMerger; private RuntimePlatformAndServerMerger runtimePlatformAndServerMerger; private PlatformManager platformManager; private MeasurementProcessor measurementProcessor; private AgentManager agentManager; @Autowired public AutoinventoryManagerImpl(AIHistoryDAO aiHistoryDao, AIPlatformDAO aiPlatformDao, ProductManager productManager, ServerManager serverManager, AIScheduleManager aiScheduleManager, ResourceManager resourceManager, ConfigManager configManager, AuthzSubjectManager authzSubjectManager, AIQueueManager aiQueueManager, PermissionManager permissionManager, AICommandsClientFactory aiCommandsClientFactory, ServiceMerger serviceMerger, RuntimePlatformAndServerMerger runtimePlatformAndServerMerger, PlatformManager platformManager, MeasurementProcessor measurementProcessor, AgentManager agentManager) { this.aiHistoryDao = aiHistoryDao; this.aiPlatformDao = aiPlatformDao; this.productManager = productManager; this.serverManager = serverManager; this.aiScheduleManager = aiScheduleManager; this.resourceManager = resourceManager; this.configManager = configManager; this.authzSubjectManager = authzSubjectManager; this.aiQueueManager = aiQueueManager; this.permissionManager = permissionManager; this.aiCommandsClientFactory = aiCommandsClientFactory; this.serviceMerger = serviceMerger; this.runtimePlatformAndServerMerger = runtimePlatformAndServerMerger; this.platformManager = platformManager; this.measurementProcessor = measurementProcessor; 
this.agentManager = agentManager; } /** * Get server signatures for a set of servertypes. * @param serverTypes A List of ServerTypeValue objects representing the * server types to get signatures for. If this is null, all server * signatures are returned. * @return A Map, where the keys are the names of the ServerTypeValues, and * the values are the ServerSignature objects. */ @Transactional(readOnly = true) public Map<String, ServerSignature> getServerSignatures(AuthzSubject subject, List<ServerTypeValue> serverTypes) throws AutoinventoryException { // Plug server type names into a map for quick retrieval HashMap<String, ServerTypeValue> stNames = null; if (serverTypes != null) { stNames = new HashMap<String, ServerTypeValue>(); ServerTypeValue stValue; for (int i = 0; i < serverTypes.size(); i++) { stValue = (ServerTypeValue) serverTypes.get(i); stNames.put(stValue.getName(), stValue); } } Map<String, GenericPlugin> plugins = aiPluginManager.getPlugins(); Map<String, ServerSignature> results = new HashMap<String, ServerSignature>(); for (String name : plugins.keySet()) { GenericPlugin plugin = (GenericPlugin) plugins.get(name); String pluginName = plugin.getName(); if (!(plugin instanceof ServerDetector)) { log.debug("skipping non-server AI plugin: " + pluginName); continue; } if (stNames != null && stNames.get(pluginName) == null) { log.debug("skipping unrequested AI plugin: " + pluginName); continue; } results.put(pluginName, ((ServerDetector) plugin).getServerSignature()); } return results; } /** * Check if a given Appdef entity supports runtime auto-discovery. * * @param id The entity id to check. * @return true if the given resource supports runtime auto-discovery. 
*/ @Transactional(readOnly = true) public boolean isRuntimeDiscoverySupported(AuthzSubject subject, AppdefEntityID id) { boolean retVal; try { Server server = serverManager.getServerById(id.getId()); if (server == null) { return false; } String pluginName = server.getServerType().getName(); AutoinventoryPluginManager aiPluginManager = (AutoinventoryPluginManager) productManager .getPluginManager(ProductPlugin.TYPE_AUTOINVENTORY); GenericPlugin plugin = aiPluginManager.getPlugin(pluginName); if (plugin instanceof ServerDetector) { retVal = ((ServerDetector) plugin).isRuntimeDiscoverySupported(); } else { retVal = false; } } catch (PluginNotFoundException pne) { return false; } catch (PluginException e) { log.error("Error getting plugin", e); return false; } return retVal; } /** * Turn off runtime-autodiscovery for a server that no longer exists. Use * this method when you know the appdefentity identified by "id" exists, so * that we'll be able to successfully find out which agent we should create * our AICommandsClient from. * @param id The AppdefEntityID of the resource to turn off runtime config * for. */ @Transactional public void turnOffRuntimeDiscovery(AuthzSubject subject, AppdefEntityID id) throws PermissionException { AICommandsClient client; try { client = aiCommandsClientFactory.getClient(id); } catch (AgentNotFoundException e) { throw new SystemException("Error looking up agent for resource " + "(" + id + "): " + e); } try { client.pushRuntimeDiscoveryConfig(id.getType(), id.getID(), null, null, null); } catch (AgentRemoteException e) { throw new SystemException("Error turning off runtime-autodiscovery " + "for resource (" + id + "): " + e); } } /** * Turn off runtime-autodiscovery for a server that no longer exists. We * need this as a separate method call because when the server no longer * exists, we have to manually specify the agent connection to use. * @param id The AppdefEntityID of the resource to turn off runtime config * for. 
* @param agentToken Which agent controls the runtime AI scans for this * resource. */ @Transactional public void turnOffRuntimeDiscovery(AuthzSubject subject, AppdefEntityID id, String agentToken) throws PermissionException { AICommandsClient client; try { client = aiCommandsClientFactory.getClient(agentToken); } catch (AgentNotFoundException e) { throw new SystemException("Error looking up agent for resource " + "(" + id + "): " + e); } try { client.pushRuntimeDiscoveryConfig(id.getType(), id.getID(), null, null, null); } catch (AgentRemoteException e) { throw new SystemException("Error turning off runtime-autodiscovery " + "for resource (" + id + "): " + e); } } /** * Toggle Runtime-AI config for the given server. */ @Transactional public void toggleRuntimeScan(AuthzSubject subject, AppdefEntityID id, boolean enable) throws PermissionException, AutoinventoryException, ResourceDeletedException { Resource res = resourceManager.findResource(id); // if resource is asynchronously deleted ignore if (res == null || res.isInAsyncDeleteState()) { final String m = id + " is asynchronously deleted"; throw new ResourceDeletedException(m); } if (!id.isServer()) { log.warn("toggleRuntimeScan() called for non-server type=" + id); return; } if (!isRuntimeDiscoverySupported(subject, id)) { return; } try { Server server = serverManager.findServerById(id.getId()); server.setRuntimeAutodiscovery(enable); ConfigResponse metricConfig = configManager.getMergedConfigResponse(subject, ProductPlugin.TYPE_MEASUREMENT, id, true); pushRuntimeDiscoveryConfig(subject, server, metricConfig); } catch (ConfigFetchException e) { // No config, no need to turn off auto-discovery. } catch (Exception e) { throw new AutoinventoryException("Error enabling Runtime-AI for " + "server: " + e.getMessage(), e); } } /** * Push the metric ConfigResponse out to an agent so it can perform * runtime-autodiscovery * @param res The appdef entity ID of the server. * @param response The configuration info. 
*/ private void pushRuntimeDiscoveryConfig(AuthzSubject subject, AppdefResource res, ConfigResponse response) throws PermissionException { AppdefEntityID aeid = res.getEntityId(); if (!isRuntimeDiscoverySupported(subject, aeid)) { return; } AICommandsClient client; if (aeid.isServer()) { // Setting the response to null will disable runtime // autodiscovery at the agent. if (!AppdefUtil.areRuntimeScansEnabled((Server) res)) { response = null; } } try { client = aiCommandsClientFactory.getClient(aeid); } catch (AgentNotFoundException e) { throw new SystemException("Error looking up agent for server " + "(" + res + "): " + e); } String typeName = res.getAppdefResourceType().getName(); String name = null; if (!aeid.isServer()) { name = res.getName(); } try { client.pushRuntimeDiscoveryConfig(aeid.getType(), aeid.getID(), typeName, name, response); } catch (AgentRemoteException e) { throw new SystemException("Error pushing metric config response to " + "agent for server (" + res + "): " + e); } } /** * Start an autoinventory scan. * @param aid The appdef entity whose agent we'll talk to. * @param scanConfig The scan configuration to use when scanning. * @param scanName The name of the scan - this is ignored (i.e. it can be * null) for immediate, one-time scans. * @param scanDesc The description of the scan - this is ignored (i.e. it * can be null) for immediate, one-time scans. * @param schedule Described when and how often the scan should run. If this * is null, then the scan will be run as an immediate, one-time only * scan. 
*/ @Transactional public void startScan(AuthzSubject subject, AppdefEntityID aid, ScanConfigurationCore scanConfig, String scanName, String scanDesc, ScheduleValue schedule) throws AgentConnectionException, AgentNotFoundException, AutoinventoryException, DuplicateAIScanNameException, ScheduleWillNeverFireException, PermissionException { try { permissionManager.checkAIScanPermission(subject, aid); ConfigResponse config = configManager.getMergedConfigResponse(subject, ProductPlugin.TYPE_MEASUREMENT, aid, false); if (log.isDebugEnabled()) { log.debug("startScan config=" + config); } scanConfig.setConfigResponse(config); // All scans go through the scheduler. aiScheduleManager.doScheduledScan(subject, aid, scanConfig, scanName, scanDesc, schedule); } catch (ScheduleWillNeverFireException e) { throw e; } catch (DuplicateAIScanNameException ae) { throw ae; } catch (AutoinventoryException ae) { log.warn("Error starting scan: " + StringUtil.getStackTrace(ae)); throw ae; } catch (PermissionException ae) { throw ae; } catch (Exception e) { throw new SystemException("Error starting scan " + "for agent: " + e, e); } } /** * Start an autoinventory scan by agentToken */ @Transactional public void startScan(AuthzSubject subject, String agentToken, ScanConfigurationCore scanConfig) throws AgentConnectionException, AgentNotFoundException, AutoinventoryException, PermissionException { log.info("AutoinventoryManager.startScan called"); // Is there an already-approved platform with this agent token? If so, // re-call using the other startScan method AIPlatform aipLocal = aiPlatformDao.findByAgentToken(agentToken); if (aipLocal == null) { throw new AutoinventoryException("No platform in auto-discovery " + "queue with agentToken=" + agentToken); } PlatformValue pValue; try { pValue = aiQueueManager.getPlatformByAI(subject, aipLocal.getId().intValue()); // It does exist. 
Call the other startScan method so that // authz checks will apply startScan(subject, AppdefEntityID.newPlatformID(pValue.getId()), scanConfig, null, null, null); return; } catch (PlatformNotFoundException e) { log.warn("startScan: no platform exists for queued AIPlatform: " + aipLocal.getId() + ": " + e); } catch (Exception e) { log.error("startScan: error starting scan for AIPlatform: " + aipLocal.getId() + ": " + e, e); throw new SystemException(e); } try { AICommandsClient client = aiCommandsClientFactory.getClient(agentToken); client.startScan(scanConfig); } catch (AgentRemoteException e) { throw new AutoinventoryException(e); } } /** * Stop an autoinventory scan. * @param aid The appdef entity whose agent we'll talk to. */ @Transactional public void stopScan(AuthzSubject subject, AppdefEntityID aid) throws AutoinventoryException { log.info("AutoinventoryManager.stopScan called"); try { AICommandsClient client = aiCommandsClientFactory.getClient(aid); client.stopScan(); } catch (Exception e) { throw new AutoinventoryException("Error stopping scan " + "for agent: " + e, e); } } /** * Get status for an autoinventory scan. * @param aid The appdef entity whose agent we'll talk to. 
*/ @Transactional(readOnly = true) public ScanStateCore getScanStatus(AuthzSubject subject, AppdefEntityID aid) throws AgentNotFoundException, AgentConnectionException, AgentRemoteException, AutoinventoryException { log.info("AutoinventoryManager.getScanStatus called"); ScanStateCore core; try { AICommandsClient client = aiCommandsClientFactory.getClient(aid); core = client.getScanStatus(); } catch (AgentNotFoundException ae) { throw ae; } catch (AgentRemoteException ae) { throw ae; } catch (AgentConnectionException ae) { throw ae; } catch (AutoinventoryException ae) { throw ae; } catch (Exception e) { throw new SystemException("Error getting scan status for agent: " + e, e); } return core; } /** * create AIHistory */ @Transactional public AIHistory createAIHistory(AppdefEntityID id, Integer groupId, Integer batchId, String subjectName, ScanConfigurationCore config, String scanName, String scanDesc, Boolean scheduled, long startTime, long stopTime, long scheduleTime, String status, String errorMessage) throws AutoinventoryException { return aiHistoryDao.create(id, groupId, batchId, subjectName, config, scanName, scanDesc, scheduled, startTime, stopTime, scheduleTime, status, null /* description */, errorMessage); } /** * remove AIHistory */ @Transactional public void removeHistory(AIHistory history) { aiHistoryDao.remove(history); } /** * update AIHistory */ @Transactional public void updateAIHistory(Integer jobId, long endTime, String status, String message) { AIHistory local = aiHistoryDao.findById(jobId); local.setEndTime(endTime); local.setDuration(endTime - local.getStartTime()); local.setStatus(status); local.setMessage(message); } /** * Get status for an autoinventory scan, given the agentToken */ @Transactional(readOnly = true) public ScanStateCore getScanStatusByAgentToken(AuthzSubject subject, String agentToken) throws AgentNotFoundException, AgentConnectionException, AgentRemoteException, AutoinventoryException { 
log.info("AutoinventoryManager.getScanStatus called"); ScanStateCore core; try { AICommandsClient client = aiCommandsClientFactory.getClient(agentToken); core = client.getScanStatus(); } catch (AgentNotFoundException ae) { throw ae; } catch (AgentRemoteException ae) { throw ae; } catch (AgentConnectionException ae) { throw ae; } catch (AutoinventoryException ae) { throw ae; } catch (Exception e) { throw new SystemException("Error getting scan status " + "for agent: " + e, e); } return core; } private static List<Integer> buildAIResourceIds(AIAppdefResourceValue[] aiResources) { List<Integer> ids = new ArrayList<Integer>(); for (int i = 0; i < aiResources.length; i++) { Integer id = aiResources[i].getId(); if (id == null) { continue; // unchanged? } ids.add(id); } return ids; } /** * Called by agents to report platforms, servers, and services detected via * autoinventory scans. * @param agentToken The token identifying the agent that sent the report. * @param stateCore The ScanState that was detected during the autoinventory * scan. */ public AIPlatformValue reportAIData(String agentToken, ScanStateCore stateCore) throws AutoinventoryException { final boolean debug = log.isDebugEnabled(); ScanState state = new ScanState(stateCore); AIPlatformValue aiPlatform = state.getPlatform(); // This could happen if there was a serious error in the scan, // and not even the platform could be detected. if (state.getPlatform() == null) { log.warn("ScanState did not even contain a platform, ignoring."); return null; } // TODO: G log.info("Received auto-inventory report from " + aiPlatform.getFqdn() + "; IPs -> " + ArrayUtils.toString(aiPlatform.getAIIpValues()) + "; CertDN -> " + aiPlatform.getCertdn() + "; (" + state.getAllServers().size() + " servers)"); if (debug) { log.debug("AutoinventoryManager.reportAIData called, " + "scan state=" + state); log.debug("AISERVERS=" + state.getAllServers()); } // In the future we may want this method to act as // another user besides "admin". 
It might make sense to have // a user per-agent, so that actions that are agent-initiated // can be tracked. Of course, this will be difficult when the // agent is reporting itself to the server for the first time. // In that case, we'd have to act as admin and be careful about // what we allow that codepath to do. AuthzSubject subject = getHQAdmin(); aiPlatform.setAgentToken(agentToken); if (debug) { log.debug("AImgr.reportAIData: state.getPlatform()=" + aiPlatform); } addAIServersToAIPlatform(stateCore, state, aiPlatform); aiPlatform = aiQueueManager.queue(subject, aiPlatform, stateCore.getAreServersIncluded(), false, true); approvePlatformDevice(subject, aiPlatform); checkAgentAssignment(subject, agentToken, aiPlatform); return aiPlatform; } private void addAIServersToAIPlatform(ScanStateCore stateCore, ScanState state,AIPlatformValue aiPlatform) throws AutoinventoryException { if (stateCore.getAreServersIncluded()) { // TODO: G Set<AIServerValue> serverSet = state.getAllServers(); for (AIServerValue aiServer : serverSet) { // Ensure the server reported has a valid appdef type try { serverManager.findServerTypeByName(aiServer.getServerTypeName()); } catch (NotFoundException e) { log.error("Ignoring non-existent server type: " + aiServer.getServerTypeName(), e); continue; } aiPlatform.addAIServerValue(aiServer); } } } private void approvePlatformDevice(AuthzSubject subject, AIPlatformValue aiPlatform) { if (aiPlatform.isPlatformDevice()) { log.info("Auto-approving inventory for " + aiPlatform.getFqdn()); List<Integer> ips = buildAIResourceIds(aiPlatform.getAIIpValues()); List<Integer> servers = buildAIResourceIds(aiPlatform.getAIServerValues()); List<Integer> platforms = Collections.singletonList(aiPlatform.getId()); try { aiQueueManager.processQueue(subject, platforms, servers, ips, AIQueueConstants.Q_DECISION_APPROVE); } catch (Exception e) { throw new SystemException(e); } } } private void checkAgentAssignment(AuthzSubject subj, String agentToken, 
AIPlatformValue aiPlatform) { try { Platform platform = platformManager.getPlatformByAIPlatform(subj, aiPlatform); if (platform != null) { Agent agent = platform.getAgent(); if (agent == null || !agent.getAgentToken().equals(agentToken)) { Agent newAgent = agentManager.getAgent(agentToken); String fqdn = platform.getFqdn(); Integer pid = platform.getId(); log.info("reassigning platform agent (fqdn=" + fqdn + ",id=" + pid + ") from=" + agent + " to=" + newAgent); platform.setAgent(newAgent); measurementProcessor.scheduleHierarchyAfterCommit(platform.getResource()); } } } catch (PermissionException e) { // using admin, this should not happen log.error(e,e); } catch (AgentNotFoundException e) { // this is a problem since the agent should already exist in our // inventory before it gets here. log.error(e,e); } } /** * Called by agents to report resources detected at runtime via * monitoring-based autoinventory scans. * * There are some interesting situations that can occur related to * synchronization between the server and agent. If runtime scans are turned * off for a server, but the agent is never notified (for example if the * agent is not running at the time), then the agent is going to eventually * report a runtime scan that includes resources detected by that server's * runtime scan. If this happens, we detect it and take the opportunity to * tell the agent again that it should not perform runtime AI scans for that * server. Any resources reported by that server will be ignored. * * A similar situation occurs when the appdef server has been deleted but * the agent was never notified to turn off runtime AI. We handle this in * the same way, by telling the agent to turn off runtime scans for that * server, and ignoring anything in the report from that server. * * This method will process all platform and server merging, given by the * report. Any services will be added to Zevent queue to be processed in * their own transactions. 
* * @param agentToken The token identifying the agent that sent the report. * @param crrr The CompositeRuntimeResourceReport that was generated during * the runtime autoinventory scan. */ @Transactional public void reportAIRuntimeReport(String agentToken, CompositeRuntimeResourceReport crrr) throws AutoinventoryException, PermissionException, ValidationException, ApplicationException { runtimePlatformAndServerMerger.schedulePlatformAndServerMerges(agentToken, crrr); } /** * Handle ResourceZEvents for enabling runtime autodiscovery. * * @param events A list of ResourceZevents */ @Transactional public void handleResourceEvents(List<ResourceZevent> events) { for (ResourceZevent zevent : events) { AppdefEntityID id = zevent.getAppdefEntityID(); boolean isUpdate = zevent instanceof ResourceUpdatedZevent; // Only servers have runtime AI. if (!id.isServer()) { continue; } // Need to look up the AuthzSubject POJO AuthzSubject subj = authzSubjectManager.findSubjectById(zevent.getAuthzSubjectId()); if (isUpdate) { Server s = serverManager.getServerById(id.getId()); log.info("Toggling Runtime-AI for " + id); try { toggleRuntimeScan(subj, id, s.isRuntimeAutodiscovery()); } catch (ResourceDeletedException e) { log.debug(e); } catch (Exception e) { log.warn("Error toggling runtime-ai for server [" + id + "]", e); } } else { log.info("Enabling Runtime-AI for " + id); try { toggleRuntimeScan(subj, id, true); } catch (ResourceDeletedException e) { log.debug(e); } catch (Exception e) { log.warn("Error enabling runtime-ai for server [" + id + "]", e); } } } } public void invokeAutoApprove(AIPlatformValue aiPlatformValue) throws AutoinventoryException { AuthzSubject subject = getHQAdmin(); List<Integer> ips = buildAIResourceIds(aiPlatformValue.getAIIpValues()); List<Integer> platforms = Collections.singletonList(aiPlatformValue.getId()); List<Integer> servers = new ArrayList<Integer>(); AIServerValue[] aiServerValues = aiPlatformValue.getAIServerValues(); for (AIServerValue 
aiServerValue : aiServerValues) { if (aiServerValue.isAutoApprove() || isServerVirtual(aiServerValue)) { servers.add(aiServerValue.getId()); } } try { aiQueueManager.processQueue(subject, platforms, servers, ips, AIQueueConstants.Q_DECISION_APPROVE); } catch (Exception e) { throw new SystemException(e); } } private boolean isServerVirtual(AIServerValue aiServerValue) { try { ServerType serverType = serverManager.findServerTypeByName(aiServerValue.getServerTypeName()); return serverType.isVirtual(); } catch (NotFoundException exc) { log.error("Ignoring non-existent server type: " + aiServerValue.getServerTypeName(), exc); } return false; } /** * Create an autoinventory manager. * */ @PostConstruct public void createDependentManagers() { // Get reference to the AI plugin manager try { aiPluginManager = (AutoinventoryPluginManager) productManager.getPluginManager( ProductPlugin.TYPE_AUTOINVENTORY); } catch (Throwable e) { log.error("Unable to initialize AI Product Manager.", e); } } private AuthzSubject getHQAdmin() throws AutoinventoryException { try { return authzSubjectManager.getSubjectById(AuthzConstants.rootSubjectId); } catch (Exception e) { throw new AutoinventoryException("Error looking up subject", e); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.realm;

import java.io.IOException;

import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.TextInputCallback;
import javax.security.auth.callback.UnsupportedCallbackException;

import org.apache.tomcat.util.res.StringManager;

/**
 * <p>Implementation of the JAAS <code>CallbackHandler</code> interface,
 * used to negotiate delivery of the username and credentials that were
 * specified to our constructor.  No interaction with the user is required
 * (or possible).</p>
 *
 * <p>This <code>CallbackHandler</code> will pre-digest the supplied
 * password, if required by the <code>&lt;Realm&gt;</code> element in
 * <code>server.xml</code>.</p>
 *
 * <p>At present, <code>JAASCallbackHandler</code> knows how to handle callbacks of
 * type <code>javax.security.auth.callback.NameCallback</code> and
 * <code>javax.security.auth.callback.PasswordCallback</code>.</p>
 *
 * @author Craig R. McClanahan
 * @author Andrew R. Jaquith
 * @version $Id: JAASCallbackHandler.java 939305 2010-04-29 13:43:39Z kkolinko $
 */
public class JAASCallbackHandler implements CallbackHandler {

    // ------------------------------------------------------------ Constructor

    /**
     * Construct a callback handler configured with the specified values.
     * Note that if the <code>JAASRealm</code> instance specifies digested passwords,
     * the <code>password</code> parameter will be pre-digested here.
     *
     * @param realm Our associated JAASRealm instance
     * @param username Username to be authenticated with
     * @param password Password to be authenticated with
     */
    public JAASCallbackHandler(JAASRealm realm, String username, String password) {
        super();
        this.realm = realm;
        this.username = username;
        // Pre-digest the password when the realm is configured with a
        // MessageDigest, so the login module compares like with like.
        if (realm.hasMessageDigest()) {
            this.password = realm.digest(password);
        } else {
            this.password = password;
        }
    }

    /**
     * Construct a callback handler for DIGEST authentication.
     *
     * @param realm Our associated JAASRealm instance
     * @param username Username to be authenticated with
     * @param password Password to be authenticated with
     * @param nonce Server generated nonce
     * @param nc Nonce count
     * @param cnonce Client generated nonce
     * @param qop Quality of protection applied to the message
     * @param realmName Realm name
     * @param md5a2 Second MD5 digest used to calculate the digest
     *            MD5(Method + ":" + uri)
     * @param authMethod The authentication method in use
     */
    public JAASCallbackHandler(JAASRealm realm, String username, String password,
                               String nonce, String nc, String cnonce, String qop,
                               String realmName, String md5a2, String authMethod) {
        this(realm, username, password);
        this.nonce = nonce;
        this.nc = nc;
        this.cnonce = cnonce;
        this.qop = qop;
        this.realmName = realmName;
        this.md5a2 = md5a2;
        this.authMethod = authMethod;
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * The string manager for this package.
     */
    protected static final StringManager sm = StringManager.getManager(Constants.Package);

    /**
     * The password to be authenticated with.
     */
    protected String password = null;

    /**
     * The associated <code>JAASRealm</code> instance.
     */
    protected JAASRealm realm = null;

    /**
     * The username to be authenticated with.
     */
    protected String username = null;

    /**
     * Server generated nonce.
     */
    protected String nonce = null;

    /**
     * Nonce count.
     */
    protected String nc = null;

    /**
     * Client generated nonce.
     */
    protected String cnonce = null;

    /**
     * Quality of protection applied to the message.
     */
    protected String qop;

    /**
     * Realm name.
     */
    protected String realmName;

    /**
     * Second MD5 digest.
     */
    protected String md5a2;

    /**
     * The authentication method to be used. If null, assume BASIC/FORM.
     */
    protected String authMethod;

    // --------------------------------------------------------- Public Methods

    /**
     * Retrieve the information requested in the provided <code>Callbacks</code>.
     * This implementation only recognizes {@link NameCallback},
     * {@link PasswordCallback} and {@link TextInputCallback}.
     * {@link TextInputCallback} is used to pass the various additional
     * parameters required for DIGEST authentication.
     *
     * @param callbacks The set of <code>Callback</code>s to be processed
     *
     * @exception IOException if an input/output error occurs
     * @exception UnsupportedCallbackException if the login method requests
     *  an unsupported callback type
     */
    public void handle(Callback callbacks[]) throws IOException, UnsupportedCallbackException {
        for (Callback callback : callbacks) {
            if (callback instanceof NameCallback) {
                if (realm.getContainer().getLogger().isTraceEnabled()) {
                    realm.getContainer().getLogger().trace(
                        sm.getString("jaasCallback.username", username));
                }
                ((NameCallback) callback).setName(username);
            } else if (callback instanceof PasswordCallback) {
                // PasswordCallback requires a char[]; an empty array stands in
                // for a null password.
                final char[] passwordcontents;
                if (password != null) {
                    passwordcontents = password.toCharArray();
                } else {
                    passwordcontents = new char[0];
                }
                ((PasswordCallback) callback).setPassword(passwordcontents);
            } else if (callback instanceof TextInputCallback) {
                // DIGEST parameters are delivered by prompt name.
                TextInputCallback cb = (TextInputCallback) callback;
                if (cb.getPrompt().equals("nonce")) {
                    cb.setText(nonce);
                } else if (cb.getPrompt().equals("nc")) {
                    cb.setText(nc);
                } else if (cb.getPrompt().equals("cnonce")) {
                    cb.setText(cnonce);
                } else if (cb.getPrompt().equals("qop")) {
                    cb.setText(qop);
                } else if (cb.getPrompt().equals("realmName")) {
                    cb.setText(realmName);
                } else if (cb.getPrompt().equals("md5a2")) {
                    cb.setText(md5a2);
                } else if (cb.getPrompt().equals("authMethod")) {
                    cb.setText(authMethod);
                } else {
                    throw new UnsupportedCallbackException(callback);
                }
            } else {
                throw new UnsupportedCallbackException(callback);
            }
        }
    }
}
/*
 * Copyright 2000-2016 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.client.ui;

import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;

import com.google.gwt.dom.client.Element;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.ui.HTML;
import com.vaadin.client.ApplicationConnection;
import com.vaadin.client.BrowserInfo;
import com.vaadin.client.ComponentConnector;
import com.vaadin.client.ConnectorMap;
import com.vaadin.client.UIDL;
import com.vaadin.client.Util;
import com.vaadin.client.WidgetUtil;
import com.vaadin.shared.ui.embedded.EmbeddedState;

/**
 * Client-side widget for the Embedded component: renders embedded content
 * (Flash objects, browser frames, ...) as raw HTML.
 */
public class VEmbedded extends HTML {
    public static String CLASSNAME = "v-embedded";

    /** For internal use only. May be removed or replaced in the future. */
    public Element browserElement;

    /** For internal use only. May be removed or replaced in the future. */
    public String type;

    /** For internal use only. May be removed or replaced in the future. */
    public String mimetype;

    /** For internal use only. May be removed or replaced in the future. */
    public ApplicationConnection client;

    public VEmbedded() {
        setStyleName(CLASSNAME);
    }

    /**
     * Creates the Object and Embed tags for the Flash plugin so it works
     * cross-browser.
     * <p>
     * For internal use only. May be removed or replaced in the future.
     *
     * @param state
     *            The EmbeddedState
     * @param src
     *            The src attribute
     * @return Tags concatenated into a string
     * @since 8.2
     */
    public String createFlashEmbed(EmbeddedState state, String src) {
        /*
         * To ensure cross-browser compatibility we are using the twice-cooked
         * method to embed flash i.e. we add a OBJECT tag for IE ActiveX and
         * inside it a EMBED for all other browsers.
         */
        StringBuilder html = new StringBuilder();

        // Start the object tag
        html.append("<object ");

        /*
         * Add classid required for ActiveX to recognize the flash. This is a
         * predefined value which ActiveX recognizes and must be the given
         * value. More info can be found on
         * http://kb2.adobe.com/cps/415/tn_4150.html. Allow user to override
         * this by setting his own classid.
         */
        if (state.classId != null) {
            html.append("classid=\""
                    + WidgetUtil.escapeAttribute(state.classId) + "\" ");
        } else {
            html.append(
                    "classid=\"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000\" ");
        }

        /*
         * Add codebase required for ActiveX and must be exactly this according
         * to http://kb2.adobe.com/cps/415/tn_4150.html to work with the above
         * given classid. Limiting Flash version to 6.0.0.0 and above. Allow
         * user to override this by setting his own codebase.
         */
        if (state.codebase != null) {
            html.append("codebase=\""
                    + WidgetUtil.escapeAttribute(state.codebase) + "\" ");
        } else {
            html.append(
                    "codebase=\"http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,0,0\" ");
        }

        // Size is taken from the connector's shared state, not from this
        // widget's element.
        ComponentConnector paintable = ConnectorMap.get(client)
                .getConnector(this);
        String height = paintable.getState().height;
        String width = paintable.getState().width;

        // Add width and height
        html.append("width=\"" + WidgetUtil.escapeAttribute(width) + "\" ");
        html.append("height=\"" + WidgetUtil.escapeAttribute(height) + "\" ");
        html.append("type=\"application/x-shockwave-flash\" ");

        // Codetype
        if (state.codetype != null) {
            html.append("codetype=\""
                    + WidgetUtil.escapeAttribute(state.codetype) + "\" ");
        }

        // Standby
        if (state.standby != null) {
            html.append("standby=\""
                    + WidgetUtil.escapeAttribute(state.standby) + "\" ");
        }

        // Archive
        if (state.archive != null) {
            html.append("archive=\""
                    + WidgetUtil.escapeAttribute(state.archive) + "\" ");
        }

        // End object tag
        html.append('>');

        // Ensure we have a movie parameter; the plugin needs it to know what
        // to load.
        Map<String, String> parameters = state.parameters;
        if (parameters.get("movie") == null) {
            parameters.put("movie", getSrc(src, client));
        }

        // Add parameters to OBJECT (entrySet avoids a second lookup per key)
        for (Map.Entry<String, String> parameter : parameters.entrySet()) {
            html.append("<param ");
            html.append("name=\""
                    + WidgetUtil.escapeAttribute(parameter.getKey()) + "\" ");
            html.append("value=\""
                    + WidgetUtil.escapeAttribute(parameter.getValue())
                    + "\" ");
            html.append("/>");
        }

        // Build inner EMBED tag
        html.append("<embed ");
        html.append("src=\"" + WidgetUtil.escapeAttribute(getSrc(src, client))
                + "\" ");
        html.append("width=\"" + WidgetUtil.escapeAttribute(width) + "\" ");
        html.append("height=\"" + WidgetUtil.escapeAttribute(height) + "\" ");
        html.append("type=\"application/x-shockwave-flash\" ");

        // Add the parameters to the Embed. A trailing space is required so
        // consecutive attributes are separated by whitespace as the HTML
        // attribute syntax demands.
        for (Map.Entry<String, String> parameter : parameters.entrySet()) {
            html.append(WidgetUtil.escapeAttribute(parameter.getKey()));
            html.append('=');
            html.append("\""
                    + WidgetUtil.escapeAttribute(parameter.getValue())
                    + "\" ");
        }

        // End embed tag
        html.append("></embed>");

        if (state.altText != null) {
            html.append(state.altText);
        }

        // End object tag
        html.append("</object>");

        return html.toString();
    }

    /**
     * Returns a map (name -> value) of all parameters in the UIDL.
     * <p>
     * For internal use only. May be removed or replaced in the future.
     *
     * @param uidl
     *            the UIDL to scan for {@code embeddedparam} children
     * @return name-to-value map of the embedded parameters
     */
    public static Map<String, String> getParameters(UIDL uidl) {
        Map<String, String> parameters = new HashMap<>();
        for (Object child : uidl) {
            if (child instanceof UIDL) {
                UIDL childUIDL = (UIDL) child;
                if (childUIDL.getTag().equals("embeddedparam")) {
                    String name = childUIDL.getStringAttribute("name");
                    String value = childUIDL.getStringAttribute("value");
                    parameters.put(name, value);
                }
            }
        }
        return parameters;
    }

    /**
     * Helper to return translated src-attribute from embedded's UIDL.
     * <p>
     * For internal use only. May be removed or replaced in the future.
     *
     * @param src
     *            the src attribute
     * @param client
     *            the application connection used to translate Vaadin URIs
     * @return the translated URL, or an empty string when translation fails
     */
    public String getSrc(String src, ApplicationConnection client) {
        String url = client.translateVaadinUri(src);
        if (url == null) {
            return "";
        }
        return url;
    }

    @Override
    protected void onDetach() {
        if (BrowserInfo.get().isIE()) {
            // Force browser to fire unload event when component is detached
            // from the view (IE doesn't do this automatically)
            if (browserElement != null) {
                /*
                 * src was previously set to javascript:false, but this was not
                 * enough to overcome a bug when detaching an iframe with a pdf
                 * loaded in IE9. about:blank seems to cause the adobe reader
                 * plugin to unload properly before the iframe is removed. See
                 * #7855
                 */
                DOM.setElementAttribute(browserElement, "src", "about:blank");
            }
        }
        super.onDetach();
    }

    @Override
    public void onBrowserEvent(Event event) {
        super.onBrowserEvent(event);
        if (DOM.eventGetType(event) == Event.ONLOAD) {
            // Embedded content finished loading; the layout may need to adapt.
            getLogger().info("Embeddable onload");
            Util.notifyParentOfSizeChange(this, true);
        }
    }

    private static Logger getLogger() {
        return Logger.getLogger(VEmbedded.class.getName());
    }
}
package org.topbraid.spin.model.impl;

import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

import org.apache.jena.enhanced.EnhGraph;
import org.apache.jena.graph.Node;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.vocabulary.RDF;
import org.rspspin.core.Formatter;
import org.rspspin.vocabulary.RSPSPIN;
import org.topbraid.spin.arq.ARQ2SPIN;
import org.topbraid.spin.model.Element;
import org.topbraid.spin.model.ElementList;
import org.topbraid.spin.model.SPINFactory;
import org.topbraid.spin.model.SolutionModifierQuery;
import org.topbraid.spin.model.Values;
import org.topbraid.spin.model.print.PrintContext;
import org.topbraid.spin.model.print.Printable;
import org.topbraid.spin.util.JenaUtil;
import org.topbraid.spin.vocabulary.SP;

/**
 * Base implementation for SPIN query resources, extended with RSP-SPIN
 * support for named windows and output streams.
 */
public abstract class QueryImpl extends AbstractSPINResourceImpl implements SolutionModifierQuery {

    public QueryImpl(Node node, EnhGraph graph) {
        super(node, graph);
    }

    /** Returns the URIs of the query's FROM clauses. */
    public List<String> getFrom() {
        return getStringList(SP.from);
    }

    /** Returns the URIs of the query's FROM NAMED clauses. */
    public List<String> getFromNamed() {
        return getStringList(SP.fromNamed);
    }

    /**
     * Get logical windows as strings for printing.
     *
     * @return sorted list of rendered FROM NAMED WINDOW clauses
     */
    private List<String> getLogicalWindows() {
        List<String> results = new LinkedList<String>();
        StmtIterator it = listProperties(RSPSPIN.fromNamedWindow);
        while (it.hasNext()) {
            Statement stmt = it.nextStatement();
            Resource root = stmt.getObject().asResource();
            // Only handle logical windows here; other window types are
            // rendered by their own getters.
            if (!root.hasProperty(RDF.type, RSPSPIN.LogicalWindow))
                continue;
            RDFNode windowNameNode = root.listProperties(RSPSPIN.windowUri).next().getObject();
            RDFNode streamNameNode = root.listProperties(RSPSPIN.streamUri).next().getObject();
            RDFNode range = getModel().listObjectsOfProperty(root, RSPSPIN.logicalRange).next();
            // The STEP part is optional
            if (root.hasProperty(RSPSPIN.logicalStep)) {
                RDFNode step = root.listProperties(RSPSPIN.logicalStep).next().getObject();
                String window = String.format("FROM NAMED WINDOW %s ON %s [RANGE %s STEP %s]",
                        Formatter.varOrUriAsString(windowNameNode), Formatter.varOrUriAsString(streamNameNode),
                        Formatter.varOrLiteralAsString(range), Formatter.varOrLiteralAsString(step));
                results.add(window);
            } else {
                String window = String.format("FROM NAMED WINDOW %s ON %s [RANGE %s]",
                        Formatter.varOrUriAsString(windowNameNode), Formatter.varOrUriAsString(streamNameNode),
                        Formatter.varOrLiteralAsString(range));
                results.add(window);
            }
        }
        // Sort for deterministic output
        Collections.sort(results);
        return results;
    }

    /**
     * Get logical past windows as strings for printing.
     *
     * @return sorted list of rendered FROM NAMED WINDOW clauses
     */
    private List<String> getLogicalPastWindows() {
        List<String> results = new LinkedList<String>();
        StmtIterator it = listProperties(RSPSPIN.fromNamedWindow);
        while (it.hasNext()) {
            Statement stmt = it.nextStatement();
            Resource root = stmt.getObject().asResource();
            // Only handle logical past windows here
            if (!root.hasProperty(RDF.type, RSPSPIN.LogicalPastWindow))
                continue;
            RDFNode windowNameNode = root.listProperties(RSPSPIN.windowUri).next().getObject();
            RDFNode streamNameNode = root.listProperties(RSPSPIN.streamUri).next().getObject();
            RDFNode from = root.listProperties(RSPSPIN.from).next().getObject();
            RDFNode to = root.listProperties(RSPSPIN.to).next().getObject();
            // The STEP part is optional
            if (root.hasProperty(RSPSPIN.logicalStep)) {
                RDFNode step = root.listProperties(RSPSPIN.logicalStep).next().getObject();
                String window = String.format("FROM NAMED WINDOW %s ON %s [FROM NOW-%s TO NOW-%s STEP %s]",
                        Formatter.varOrUriAsString(windowNameNode), Formatter.varOrUriAsString(streamNameNode),
                        Formatter.varOrLiteralAsString(from), Formatter.varOrLiteralAsString(to),
                        Formatter.varOrLiteralAsString(step));
                results.add(window);
            } else {
                String window = String.format("FROM NAMED WINDOW %s ON %s [FROM NOW-%s TO NOW-%s]",
                        Formatter.varOrUriAsString(windowNameNode), Formatter.varOrUriAsString(streamNameNode),
                        Formatter.varOrLiteralAsString(from), Formatter.varOrLiteralAsString(to));
                results.add(window);
            }
        }
        // Sort for deterministic output
        Collections.sort(results);
        return results;
    }

    /**
     * Get physical windows as strings for printing.
     *
     * @return sorted list of rendered FROM NAMED WINDOW clauses
     */
    private List<String> getPhysicalWindows() {
        List<String> results = new LinkedList<String>();
        StmtIterator it = listProperties(RSPSPIN.fromNamedWindow);
        while (it.hasNext()) {
            Statement stmt = it.nextStatement();
            Resource root = stmt.getObject().asResource();
            // Only handle physical windows here
            if (!root.hasProperty(RDF.type, RSPSPIN.PhysicalWindow))
                continue;
            RDFNode windowNameNode = root.listProperties(RSPSPIN.windowUri).next().getObject();
            RDFNode streamNameNode = root.listProperties(RSPSPIN.streamUri).next().getObject();
            RDFNode range = getModel().listObjectsOfProperty(root, RSPSPIN.physicalRange).next();
            // The STEP part is optional
            if (root.hasProperty(RSPSPIN.physicalStep)) {
                RDFNode step = root.listProperties(RSPSPIN.physicalStep).next().getObject();
                String window = String.format("FROM NAMED WINDOW %s ON %s [ITEM %s STEP %s]",
                        Formatter.varOrUriAsString(windowNameNode), Formatter.varOrUriAsString(streamNameNode),
                        Formatter.varOrLiteralAsString(range), Formatter.varOrLiteralAsString(step));
                results.add(window);
            } else {
                String window = String.format("FROM NAMED WINDOW %s ON %s [ITEM %s]",
                        Formatter.varOrUriAsString(windowNameNode), Formatter.varOrUriAsString(streamNameNode),
                        Formatter.varOrLiteralAsString(range));
                results.add(window);
            }
        }
        // Sort for deterministic output
        Collections.sort(results);
        return results;
    }

    /** Returns the LIMIT value, or null when absent. */
    public Long getLimit() {
        return getLong(SP.limit);
    }

    /** Returns the OFFSET value, or null when absent. */
    public Long getOffset() {
        return getLong(SP.offset);
    }

    /**
     * Collects the lexical forms (for literals) or URIs (for resources) of all
     * objects of the given predicate on this resource.
     *
     * @param predicate the property to collect values of
     * @return list of string values; blank-node objects are skipped
     */
    private List<String> getStringList(Property predicate) {
        List<String> results = new LinkedList<String>();
        StmtIterator it = listProperties(predicate);
        while (it.hasNext()) {
            RDFNode node = it.nextStatement().getObject();
            if (node.isLiteral()) {
                results.add(((Literal) node).getLexicalForm());
            } else if (node.isURIResource()) {
                results.add(((Resource) node).getURI());
            }
        }
        return results;
    }

    @Override
    public Values getValues() {
        Resource values = JenaUtil.getResourceProperty(this, SP.values);
        if (values != null) {
            return values.as(Values.class);
        } else {
            return null;
        }
    }

    /** Returns the WHERE clause as an ElementList, or null when absent. */
    public ElementList getWhere() {
        Statement whereS = getProperty(SP.where);
        if (whereS != null) {
            Element element = SPINFactory.asElement(whereS.getResource());
            return (ElementList) element;
        } else {
            return null;
        }
    }

    public List<Element> getWhereElements() {
        return getElements(SP.where);
    }

    @Override
    public void print(PrintContext p) {
        // Queries stored only as sp:text are printed verbatim; otherwise the
        // query is reconstructed from its SPIN RDF representation.
        String text = ARQ2SPIN.getTextOnly(this);
        if (text != null) {
            if (p.hasInitialBindings()) {
                throw new IllegalArgumentException(
                        "Queries that only have an sp:text cannot be converted to a query string if initial bindings are present.");
            } else {
                p.print(text);
            }
        } else {
            printSPINRDF(p);
        }
    }

    /** Prints this query from its SPIN RDF representation. */
    protected abstract void printSPINRDF(PrintContext p);

    /** Prints FROM, FROM NAMED and FROM NAMED WINDOW clauses. */
    protected void printStringFrom(PrintContext context) {
        for (String from : getFrom()) {
            context.println();
            context.printKeyword("FROM");
            context.print(" <");
            context.print(from);
            context.print(">");
        }
        for (String fromNamed : getFromNamed()) {
            context.println();
            context.printKeyword("FROM NAMED");
            context.print(" <");
            context.print(fromNamed);
            context.print(">");
        }
        for (String window : getLogicalWindows()) {
            context.println();
            context.print(window);
        }
        for (String window : getLogicalPastWindows()) {
            context.println();
            context.print(window);
        }
        for (String window : getPhysicalWindows()) {
            context.println();
            context.print(window);
        }
    }

    /** Prints ORDER BY, LIMIT and OFFSET clauses where present. */
    protected void printSolutionModifiers(PrintContext context) {
        List<RDFNode> orderBy = getList(SP.orderBy);
        if (!orderBy.isEmpty()) {
            context.println();
            context.printIndentation(context.getIndentation());
            context.printKeyword("ORDER BY");
            for (RDFNode node : orderBy) {
                if (node.isResource()) {
                    Resource resource = (Resource) node;
                    if (resource.hasProperty(RDF.type, SP.Asc)) {
                        context.print(" ");
                        context.printKeyword("ASC");
                        context.print(" ");
                        RDFNode expression = resource.getProperty(SP.expression).getObject();
                        printOrderByExpression(context, expression);
                    } else if (resource.hasProperty(RDF.type, SP.Desc)) {
                        context.print(" ");
                        context.printKeyword("DESC");
                        context.print(" ");
                        RDFNode expression = resource.getProperty(SP.expression).getObject();
                        printOrderByExpression(context, expression);
                    } else {
                        context.print(" ");
                        printOrderByExpression(context, node);
                    }
                }
            }
        }
        Long limit = getLimit();
        if (limit != null) {
            context.println();
            context.printIndentation(context.getIndentation());
            context.printKeyword("LIMIT");
            context.print(" " + limit);
        }
        Long offset = getOffset();
        if (offset != null) {
            context.println();
            context.printIndentation(context.getIndentation());
            // Use printKeyword for consistency with LIMIT/ORDER BY above
            context.printKeyword("OFFSET");
            context.print(" " + offset);
        }
    }

    /**
     * Prints a single ORDER BY expression; aggregations and function calls are
     * wrapped in parentheses and printed nested.
     */
    private void printOrderByExpression(PrintContext sb, RDFNode node) {
        if (node instanceof Resource) {
            Resource resource = (Resource) node;
            Printable printable = SPINFactory.asAggregation(resource);
            if (printable == null) {
                printable = SPINFactory.asFunctionCall(resource);
            }
            if (printable != null) {
                sb.print("(");
                PrintContext pc = sb.clone();
                pc.setNested(true);
                printable.print(pc);
                sb.print(")");
                return;
            }
        }
        printNestedExpressionString(sb, node, true);
    }

    /** Prints the VALUES clause when present. */
    protected void printValues(PrintContext p) {
        Values values = getValues();
        if (values != null) {
            p.println();
            values.print(p);
        }
    }

    /** Prints the WHERE clause. */
    protected void printWhere(PrintContext p) {
        p.printIndentation(p.getIndentation());
        p.printKeyword("WHERE");
        printNestedElementList(p, SP.where);
    }

    /** Prints the RSP-QL REGISTER STREAM header when an output stream is set. */
    protected void printOutputStream(PrintContext p) {
        Statement stmt = getProperty(RSPSPIN.hasOutputStream);
        if (stmt == null)
            return;
        RDFNode node = stmt.getObject();
        p.print(String.format("REGISTER STREAM %s AS", Formatter.varOrUriAsString(node)));
        p.println();
    }

    /** Prints the output stream operator (ISTREAM/DSTREAM/RSTREAM) when set. */
    protected void printOutputStreamOperator(PrintContext p) {
        Statement stmt = getProperty(RSPSPIN.hasOutputStreamOperator);
        if (stmt == null)
            return;
        RDFNode node = stmt.getObject();
        if (node.equals(RSPSPIN.Istream))
            p.print("ISTREAM ");
        if (node.equals(RSPSPIN.Dstream))
            p.print("DSTREAM ");
        if (node.equals(RSPSPIN.Rstream))
            p.print("RSTREAM ");
    }
}
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. /** * DescribeVolumesSetItemResponseType.java * * This file was auto-generated from WSDL * by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT) */ package com.amazon.ec2; /** * DescribeVolumesSetItemResponseType bean class */ public class DescribeVolumesSetItemResponseType implements org.apache.axis2.databinding.ADBBean{ /* This type was generated from the piece of schema that had name = DescribeVolumesSetItemResponseType Namespace URI = http://ec2.amazonaws.com/doc/2009-10-31/ Namespace Prefix = ns1 */ private static java.lang.String generatePrefix(java.lang.String namespace) { if(namespace.equals("http://ec2.amazonaws.com/doc/2009-10-31/")){ return "ns1"; } return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix(); } /** * field for VolumeId */ protected java.lang.String localVolumeId ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getVolumeId(){ return localVolumeId; } /** * Auto generated setter method * @param param VolumeId */ public void setVolumeId(java.lang.String param){ this.localVolumeId=param; } /** * field for Size */ protected java.lang.String localSize ; /** * Auto generated getter method * 
@return java.lang.String */ public java.lang.String getSize(){ return localSize; } /** * Auto generated setter method * @param param Size */ public void setSize(java.lang.String param){ this.localSize=param; } /** * field for SnapshotId */ protected java.lang.String localSnapshotId ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getSnapshotId(){ return localSnapshotId; } /** * Auto generated setter method * @param param SnapshotId */ public void setSnapshotId(java.lang.String param){ this.localSnapshotId=param; } /** * field for AvailabilityZone */ protected java.lang.String localAvailabilityZone ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getAvailabilityZone(){ return localAvailabilityZone; } /** * Auto generated setter method * @param param AvailabilityZone */ public void setAvailabilityZone(java.lang.String param){ this.localAvailabilityZone=param; } /** * field for Status */ protected java.lang.String localStatus ; /** * Auto generated getter method * @return java.lang.String */ public java.lang.String getStatus(){ return localStatus; } /** * Auto generated setter method * @param param Status */ public void setStatus(java.lang.String param){ this.localStatus=param; } /** * field for CreateTime */ protected java.util.Calendar localCreateTime ; /** * Auto generated getter method * @return java.util.Calendar */ public java.util.Calendar getCreateTime(){ return localCreateTime; } /** * Auto generated setter method * @param param CreateTime */ public void setCreateTime(java.util.Calendar param){ this.localCreateTime=param; } /** * field for AttachmentSet */ protected com.amazon.ec2.AttachmentSetResponseType localAttachmentSet ; /** * Auto generated getter method * @return com.amazon.ec2.AttachmentSetResponseType */ public com.amazon.ec2.AttachmentSetResponseType getAttachmentSet(){ return localAttachmentSet; } /** * Auto generated setter method * @param param AttachmentSet */ 
public void setAttachmentSet(com.amazon.ec2.AttachmentSetResponseType param){ this.localAttachmentSet=param; } /** * field for TagSet */ protected com.amazon.ec2.ResourceTagSetType localTagSet ; /* This tracker boolean wil be used to detect whether the user called the set method * for this attribute. It will be used to determine whether to include this field * in the serialized XML */ protected boolean localTagSetTracker = false ; /** * Auto generated getter method * @return com.amazon.ec2.ResourceTagSetType */ public com.amazon.ec2.ResourceTagSetType getTagSet(){ return localTagSet; } /** * Auto generated setter method * @param param TagSet */ public void setTagSet(com.amazon.ec2.ResourceTagSetType param){ if (param != null){ //update the setting tracker localTagSetTracker = true; } else { localTagSetTracker = false; } this.localTagSet=param; } /** * isReaderMTOMAware * @return true if the reader supports MTOM */ public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) { boolean isReaderMTOMAware = false; try{ isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE)); }catch(java.lang.IllegalArgumentException e){ isReaderMTOMAware = false; } return isReaderMTOMAware; } /** * * @param parentQName * @param factory * @return org.apache.axiom.om.OMElement */ public org.apache.axiom.om.OMElement getOMElement ( final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{ org.apache.axiom.om.OMDataSource dataSource = new org.apache.axis2.databinding.ADBDataSource(this,parentQName){ public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { DescribeVolumesSetItemResponseType.this.serialize(parentQName,factory,xmlWriter); } }; return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl( parentQName,factory,dataSource); 
} public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ serialize(parentQName,factory,xmlWriter,false); } public void serialize(final javax.xml.namespace.QName parentQName, final org.apache.axiom.om.OMFactory factory, org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter, boolean serializeType) throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{ java.lang.String prefix = null; java.lang.String namespace = null; prefix = parentQName.getPrefix(); namespace = parentQName.getNamespaceURI(); if ((namespace != null) && (namespace.trim().length() > 0)) { java.lang.String writerPrefix = xmlWriter.getPrefix(namespace); if (writerPrefix != null) { xmlWriter.writeStartElement(namespace, parentQName.getLocalPart()); } else { if (prefix == null) { prefix = generatePrefix(namespace); } xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } } else { xmlWriter.writeStartElement(parentQName.getLocalPart()); } if (serializeType){ java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2009-10-31/"); if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){ writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", namespacePrefix+":DescribeVolumesSetItemResponseType", xmlWriter); } else { writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type", "DescribeVolumesSetItemResponseType", xmlWriter); } } namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! 
namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"volumeId", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"volumeId"); } } else { xmlWriter.writeStartElement("volumeId"); } if (localVolumeId==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("volumeId cannot be null!!"); }else{ xmlWriter.writeCharacters(localVolumeId); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"size", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"size"); } } else { xmlWriter.writeStartElement("size"); } if (localSize==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("size cannot be null!!"); }else{ xmlWriter.writeCharacters(localSize); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"snapshotId", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"snapshotId"); } } else { xmlWriter.writeStartElement("snapshotId"); } if (localSnapshotId==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("snapshotId cannot be null!!"); }else{ xmlWriter.writeCharacters(localSnapshotId); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! 
namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"availabilityZone", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"availabilityZone"); } } else { xmlWriter.writeStartElement("availabilityZone"); } if (localAvailabilityZone==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("availabilityZone cannot be null!!"); }else{ xmlWriter.writeCharacters(localAvailabilityZone); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"status", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"status"); } } else { xmlWriter.writeStartElement("status"); } if (localStatus==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("status cannot be null!!"); }else{ xmlWriter.writeCharacters(localStatus); } xmlWriter.writeEndElement(); namespace = "http://ec2.amazonaws.com/doc/2009-10-31/"; if (! 
namespace.equals("")) { prefix = xmlWriter.getPrefix(namespace); if (prefix == null) { prefix = generatePrefix(namespace); xmlWriter.writeStartElement(prefix,"createTime", namespace); xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } else { xmlWriter.writeStartElement(namespace,"createTime"); } } else { xmlWriter.writeStartElement("createTime"); } if (localCreateTime==null){ // write the nil attribute throw new org.apache.axis2.databinding.ADBException("createTime cannot be null!!"); }else{ xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localCreateTime)); } xmlWriter.writeEndElement(); if (localAttachmentSet==null){ throw new org.apache.axis2.databinding.ADBException("attachmentSet cannot be null!!"); } localAttachmentSet.serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","attachmentSet"), factory,xmlWriter); if (localTagSetTracker){ if (localTagSet==null){ throw new org.apache.axis2.databinding.ADBException("tagSet cannot be null!!"); } localTagSet.serialize(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","tagSet"), factory,xmlWriter); } xmlWriter.writeEndElement(); } /** * Util method to write an attribute with the ns prefix */ private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (xmlWriter.getPrefix(namespace) == null) { xmlWriter.writeNamespace(prefix, namespace); xmlWriter.setPrefix(prefix, namespace); } xmlWriter.writeAttribute(namespace,attName,attValue); } /** * Util method to write an attribute without the ns prefix */ private void writeAttribute(java.lang.String namespace,java.lang.String attName, java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{ if (namespace.equals("")) { 
xmlWriter.writeAttribute(attName,attValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace,attName,attValue); } } /** * Util method to write an attribute without the ns prefix */ private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName, javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String attributeNamespace = qname.getNamespaceURI(); java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace); if (attributePrefix == null) { attributePrefix = registerPrefix(xmlWriter, attributeNamespace); } java.lang.String attributeValue; if (attributePrefix.trim().length() > 0) { attributeValue = attributePrefix + ":" + qname.getLocalPart(); } else { attributeValue = qname.getLocalPart(); } if (namespace.equals("")) { xmlWriter.writeAttribute(attName, attributeValue); } else { registerPrefix(xmlWriter, namespace); xmlWriter.writeAttribute(namespace, attName, attributeValue); } } /** * method to handle Qnames */ private void writeQName(javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException { java.lang.String namespaceURI = qname.getNamespaceURI(); if (namespaceURI != null) { java.lang.String prefix = xmlWriter.getPrefix(namespaceURI); if (prefix == null) { prefix = generatePrefix(namespaceURI); xmlWriter.writeNamespace(prefix, namespaceURI); xmlWriter.setPrefix(prefix,namespaceURI); } if (prefix.trim().length() > 0){ xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } else { // i.e this is the default namespace xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } else { xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname)); } } private void writeQNames(javax.xml.namespace.QName[] qnames, 
// NOTE(review): the code below appears to be Axis2 ADB auto-generated databinding code for the
// EC2 schema type "DescribeVolumesSetItemResponseType" (serialization helpers, getPullParser and
// the nested Factory.parse deserializer). Prefer regenerating from the WSDL over hand-editing;
// only comments and line formatting have been touched here.
//
// Continuation of a serialization helper: writes the given QName[] as a space-separated list of
// (optionally prefix-qualified) values, declaring namespace prefixes on the writer as needed.
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
    if (qnames != null) {
        // we have to store this data until last moment since it is not possible to write any
        // namespace data after writing the charactor data
        java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
        java.lang.String namespaceURI = null;
        java.lang.String prefix = null;

        for (int i = 0; i < qnames.length; i++) {
            if (i > 0) {
                stringToWrite.append(" ");
            }
            namespaceURI = qnames[i].getNamespaceURI();
            if (namespaceURI != null) {
                prefix = xmlWriter.getPrefix(namespaceURI);
                if ((prefix == null) || (prefix.length() == 0)) {
                    // namespace not yet bound on the writer: generate, declare and bind a prefix
                    prefix = generatePrefix(namespaceURI);
                    xmlWriter.writeNamespace(prefix, namespaceURI);
                    xmlWriter.setPrefix(prefix,namespaceURI);
                }
                if (prefix.trim().length() > 0){
                    stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                } else {
                    // empty/default prefix: write the value unqualified
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            } else {
                stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
            }
        }
        xmlWriter.writeCharacters(stringToWrite.toString());
    }
}

/**
 * Register a namespace prefix for the given namespace URI on the writer,
 * generating and declaring a unique one if none is bound yet.
 */
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
    java.lang.String prefix = xmlWriter.getPrefix(namespace);
    if (prefix == null) {
        prefix = generatePrefix(namespace);
        // keep generating until the candidate prefix is not already bound to another namespace
        while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
            prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
        }
        xmlWriter.writeNamespace(prefix, namespace);
        xmlWriter.setPrefix(prefix, namespace);
    }
    return prefix;
}

/**
 * databinding method to get an XML representation of this object
 *
 * Required child elements are validated (non-null) as they are queued; the optional
 * tagSet element is emitted only when its tracker flag is set.
 */
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) throws org.apache.axis2.databinding.ADBException{

    java.util.ArrayList elementList = new java.util.ArrayList();
    java.util.ArrayList attribList = new java.util.ArrayList();

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "volumeId"));
    if (localVolumeId != null){
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localVolumeId));
    } else {
        throw new org.apache.axis2.databinding.ADBException("volumeId cannot be null!!");
    }

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "size"));
    if (localSize != null){
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSize));
    } else {
        throw new org.apache.axis2.databinding.ADBException("size cannot be null!!");
    }

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "snapshotId"));
    if (localSnapshotId != null){
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSnapshotId));
    } else {
        throw new org.apache.axis2.databinding.ADBException("snapshotId cannot be null!!");
    }

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "availabilityZone"));
    if (localAvailabilityZone != null){
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localAvailabilityZone));
    } else {
        throw new org.apache.axis2.databinding.ADBException("availabilityZone cannot be null!!");
    }

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "status"));
    if (localStatus != null){
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localStatus));
    } else {
        throw new org.apache.axis2.databinding.ADBException("status cannot be null!!");
    }

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "createTime"));
    if (localCreateTime != null){
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localCreateTime));
    } else {
        throw new org.apache.axis2.databinding.ADBException("createTime cannot be null!!");
    }

    elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "attachmentSet"));
    if (localAttachmentSet==null){
        throw new org.apache.axis2.databinding.ADBException("attachmentSet cannot be null!!");
    }
    elementList.add(localAttachmentSet);

    // optional element: only serialized when the tracker flag says it was set
    if (localTagSetTracker){
        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/", "tagSet"));
        if (localTagSet==null){
            throw new org.apache.axis2.databinding.ADBException("tagSet cannot be null!!");
        }
        elementList.add(localTagSet);
    }

    return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}

/**
 * Factory class that keeps the parse method
 */
public static class Factory{

    /**
     * static method to create the object
     * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
     *                If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
     * Postcondition: If this object is an element, the reader is positioned at its end element
     *                If this object is a complex type, the reader is positioned at the end element of its outer element
     */
    public static DescribeVolumesSetItemResponseType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
        DescribeVolumesSetItemResponseType object = new DescribeVolumesSetItemResponseType();

        int event;
        java.lang.String nillableValue = null;
        java.lang.String prefix ="";
        java.lang.String namespaceuri ="";
        try {
            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            // honour an xsi:type override by delegating to the mapped subtype parser
            if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
                java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type");
                if (fullTypeName!=null){
                    java.lang.String nsPrefix = null;
                    if (fullTypeName.indexOf(":") > -1){
                        nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
                    }
                    nsPrefix = nsPrefix==null?"":nsPrefix;

                    java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
                    if (!"DescribeVolumesSetItemResponseType".equals(type)){
                        //find namespace for the prefix
                        java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                        return (DescribeVolumesSetItemResponseType)com.amazon.ec2.ExtensionMapper.getTypeObject(
                                nsUri,type,reader);
                    }
                }
            }

            // Note all attributes that were handled. Used to differ normal attributes
            // from anyAttributes.
            java.util.Vector handledAttributes = new java.util.Vector();

            reader.next();

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","volumeId").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setVolumeId(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","size").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setSize(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","snapshotId").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setSnapshotId(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","availabilityZone").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setAvailabilityZone(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","status").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setStatus(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","createTime").equals(reader.getName())){
                java.lang.String content = reader.getElementText();
                object.setCreateTime(
                        org.apache.axis2.databinding.utils.ConverterUtil.convertToDateTime(content));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","attachmentSet").equals(reader.getName())){
                object.setAttachmentSet(com.amazon.ec2.AttachmentSetResponseType.Factory.parse(reader));
                reader.next();
            }  // End of if for expected property start element
            else{
                // A start element we are not expecting indicates an invalid parameter was passed
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            // tagSet is optional: its absence is tolerated (deliberately empty else branch)
            if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2009-10-31/","tagSet").equals(reader.getName())){
                object.setTagSet(com.amazon.ec2.ResourceTagSetType.Factory.parse(reader));
                reader.next();
            }  // End of if for expected property start element
            else {
            }

            while (!reader.isStartElement() && !reader.isEndElement()) reader.next();

            if (reader.isStartElement())
                // A start element we are not expecting indicates a trailing invalid property
                throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

        } catch (javax.xml.stream.XMLStreamException e) {
            throw new java.lang.Exception(e);
        }

        return object;
    }

}//end of factory class

}
/* * Copyright (C) 2017 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.loader.csv; import static java.util.stream.Collectors.toList; import java.time.LocalDate; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.TreeMap; import java.util.function.Predicate; import com.google.common.collect.BiMap; import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ListMultimap; import com.google.common.collect.Multimap; import com.google.common.io.CharSource; import com.opengamma.strata.basics.currency.Currency; import com.opengamma.strata.basics.date.DayCount; import com.opengamma.strata.collect.MapStream; import com.opengamma.strata.collect.Messages; import com.opengamma.strata.collect.io.CsvFile; import com.opengamma.strata.collect.io.CsvRow; import com.opengamma.strata.collect.io.ResourceLocator; import com.opengamma.strata.collect.tuple.Pair; import com.opengamma.strata.loader.LoaderUtils; import com.opengamma.strata.market.ValueType; import com.opengamma.strata.market.curve.Curve; import com.opengamma.strata.market.curve.CurveGroupName; import com.opengamma.strata.market.curve.CurveName; import com.opengamma.strata.market.curve.LegalEntityCurveGroup; import com.opengamma.strata.market.curve.LegalEntityGroup; import com.opengamma.strata.market.curve.RepoGroup; import com.opengamma.strata.market.curve.interpolator.CurveExtrapolator; import com.opengamma.strata.market.curve.interpolator.CurveInterpolator; /** * Loads a set of legal entity rates curves into memory by reading from CSV resources. 
* <p> * There are three type of CSV files. * <p> * The first file is the legal entity curve group metadata file. * This file has the following header row:<br /> * {@code Group Name, Curve Type, Reference, Currency, Curve Name}. * <ul> * <li>The 'Group Name' column is the name of the group of curves. * <li>The 'Curve Type' column is the type of the curve, "repo" or "issuer". * <li>The 'Reference' column is the reference group for which the curve is used, legal entity group or repo group. * <li>The 'Currency' column is the reference currency for which the curve is used. * <li>The 'Curve Name' column is the name of the curve. * </ul> * <p> * The second file is the curve settings metadata file. * This file has the following header row:<br /> * {@code Curve Name, Value Type, Day Count, Interpolator, Left Extrapolator, Right Extrapolator}. * <ul> * <li>The 'Curve Name' column is the name of the curve. * <li>The 'Value Type' column is the type of data in the curve, "zero" for zero rates, or "df" for discount factors. * <li>The 'Day Count' column is the name of the day count, such as "Act/365F". * <li>The 'Interpolator' column defines the interpolator to use. * <li>The 'Left Extrapolator' and 'Right Extrapolator' columns define the extrapolators to use. * </ul> * <p> * The third file is the curve values file. * This file has the following header row:<br /> * {@code Valuation Date, Curve Name, Date, Value, Label}. * <ul> * <li>The 'Valuation Date' column provides the valuation date, allowing data from different * days to be stored in the same file * <li>The 'Curve Name' column is the name of the curve. * <li>The 'Date' column is the date associated with the node. * <li>The 'Value' column is value of the curve at the date. * <li>The 'Label' column is the label used to refer to the node. * </ul> * <p> * Each curve must be contained entirely within a single file, but each file may contain more than * one curve. The curve points do not need to be ordered. 
* The files must contain at least one repo curve and one issuer curve. */ public class LegalEntityRatesCurvesCsvLoader { // Column headers for legal entity curve group private static final String GROUPS_NAME = "Group Name"; private static final String GROUPS_CURVE_TYPE = "Curve Type"; private static final String GROUPS_REFERENCE = "Reference"; private static final String GROUPS_CURRENCY = "Currency"; private static final String GROUPS_CURVE_NAME = "Curve Name"; // Names used in the curve type column in the legal entity curve group private static final String REPO = "repo"; private static final String ISSUER = "issuer"; // Column headers for curve setting private static final String SETTINGS_CURVE_NAME = "Curve Name"; private static final String SETTINGS_VALUE_TYPE = "Value Type"; private static final String SETTINGS_DAY_COUNT = "Day Count"; private static final String SETTINGS_INTERPOLATOR = "Interpolator"; private static final String SETTINGS_LEFT_EXTRAPOLATOR = "Left Extrapolator"; private static final String SETTINGS_RIGHT_EXTRAPOLATOR = "Right Extrapolator"; // Column headers for curve nodes private static final String CURVE_DATE = "Valuation Date"; private static final String CURVE_NAME = "Curve Name"; private static final String CURVE_POINT_DATE = "Date"; private static final String CURVE_POINT_VALUE = "Value"; private static final String CURVE_POINT_LABEL = "Label"; /** * Names used in CSV file for value types. */ private static final BiMap<String, ValueType> VALUE_TYPE_MAP = ImmutableBiMap.of( "zero", ValueType.ZERO_RATE, "df", ValueType.DISCOUNT_FACTOR); //------------------------------------------------------------------------- /** * Loads one or more CSV format curve files for a specific date. * <p> * Only those quotes that match the specified date will be loaded. * <p> * If the files contain a duplicate entry an exception will be thrown. 
* * @param marketDataDate the curve date to load * @param groupsResource the curve groups CSV resource * @param settingsResource the curve settings CSV resource * @param curveValueResources the CSV resources for curves * @return the loaded curves, mapped by an identifying key * @throws IllegalArgumentException if the files contain a duplicate entry */ public static ImmutableList<LegalEntityCurveGroup> load( LocalDate marketDataDate, ResourceLocator groupsResource, ResourceLocator settingsResource, Collection<ResourceLocator> curveValueResources) { Collection<CharSource> curveCharSources = curveValueResources.stream().map(r -> r.getCharSource()).collect(toList()); ListMultimap<LocalDate, LegalEntityCurveGroup> map = parse( d -> marketDataDate.equals(d), groupsResource.getCharSource(), settingsResource.getCharSource(), curveCharSources); return ImmutableList.copyOf(map.get(marketDataDate)); } /** * Loads one or more CSV format curve files for all available dates. * <p> * If the files contain a duplicate entry an exception will be thrown. * * @param groupsResource the curve groups CSV resource * @param settingsResource the curve settings CSV resource * @param curveValueResources the CSV resources for curves * @return the loaded curves, mapped by date and identifier * @throws IllegalArgumentException if the files contain a duplicate entry */ public static ImmutableListMultimap<LocalDate, LegalEntityCurveGroup> loadAllDates( ResourceLocator groupsResource, ResourceLocator settingsResource, Collection<ResourceLocator> curveValueResources) { Collection<CharSource> curveCharSources = curveValueResources.stream().map(r -> r.getCharSource()).collect(toList()); return parse(d -> true, groupsResource.getCharSource(), settingsResource.getCharSource(), curveCharSources); } /** * Parses one or more CSV format curve files for all available dates. * <p> * A predicate is specified that is used to filter the dates that are returned. 
* This could match a single date, a set of dates or all dates. * <p> * If the files contain a duplicate entry an exception will be thrown. * * @param datePredicate the predicate used to select the dates * @param groupsCharSource the curve groups CSV character source * @param settingsCharSource the curve settings CSV character source * @param curveValueCharSources the CSV character sources for curves * @return the loaded curves, mapped by date and identifier * @throws IllegalArgumentException if the files contain a duplicate entry */ public static ImmutableListMultimap<LocalDate, LegalEntityCurveGroup> parse( Predicate<LocalDate> datePredicate, CharSource groupsCharSource, CharSource settingsCharSource, Collection<CharSource> curveValueCharSources) { Map<CurveGroupName, Map<Pair<RepoGroup, Currency>, CurveName>> repoGroups = new LinkedHashMap<>(); Map<CurveGroupName, Map<Pair<LegalEntityGroup, Currency>, CurveName>> legalEntityGroups = new LinkedHashMap<>(); parseCurveMaps(groupsCharSource, repoGroups, legalEntityGroups); Map<LocalDate, Map<CurveName, Curve>> allCurves = parseCurves(datePredicate, settingsCharSource, curveValueCharSources); ImmutableListMultimap.Builder<LocalDate, LegalEntityCurveGroup> builder = ImmutableListMultimap.builder(); for (Map.Entry<LocalDate, Map<CurveName, Curve>> curveEntry : allCurves.entrySet()) { LocalDate date = curveEntry.getKey(); Map<CurveName, Curve> curves = curveEntry.getValue(); for (Map.Entry<CurveGroupName, Map<Pair<RepoGroup, Currency>, CurveName>> repoEntry : repoGroups.entrySet()) { CurveGroupName groupName = repoEntry.getKey(); Map<Pair<RepoGroup, Currency>, Curve> repoCurves = MapStream.of(repoEntry.getValue()) .mapValues(name -> queryCurve(name, curves, date, groupName, "Repo")) .toMap(); Map<Pair<LegalEntityGroup, Currency>, Curve> issuerCurves = MapStream.of(legalEntityGroups.get(groupName)) .mapValues(name -> queryCurve(name, curves, date, groupName, "Issuer")) .toMap(); builder.put(date, 
LegalEntityCurveGroup.of(groupName, repoCurves, issuerCurves)); } } return builder.build(); } //------------------------------------------------------------------------- private static Map<LocalDate, Map<CurveName, Curve>> parseCurves( Predicate<LocalDate> datePredicate, CharSource settingsResource, Collection<CharSource> curvesResources) { // load curve settings Map<CurveName, LoadedCurveSettings> settingsMap = parseCurveSettings(settingsResource); // load curves, ensuring curves only be seen once within a date Map<LocalDate, Map<CurveName, Curve>> resultMap = new TreeMap<>(); for (CharSource curvesResource : curvesResources) { Multimap<LocalDate, Curve> fileCurvesByDate = parseSingle(datePredicate, curvesResource, settingsMap); // Ensure curve names are unique, with a good error message for (LocalDate date : fileCurvesByDate.keySet()) { Collection<Curve> fileCurves = fileCurvesByDate.get(date); Map<CurveName, Curve> resultCurves = resultMap.computeIfAbsent(date, d -> new HashMap<>()); for (Curve fileCurve : fileCurves) { if (resultCurves.put(fileCurve.getName(), fileCurve) != null) { throw new IllegalArgumentException( "Rates curve loader found multiple curves with the same name: " + fileCurve.getName()); } } } } return resultMap; } private static Map<CurveName, LoadedCurveSettings> parseCurveSettings(CharSource settingsResource) { ImmutableMap.Builder<CurveName, LoadedCurveSettings> builder = ImmutableMap.builder(); CsvFile csv = CsvFile.of(settingsResource, true); for (CsvRow row : csv.rows()) { String curveNameStr = row.getField(SETTINGS_CURVE_NAME); String valueTypeStr = row.getField(SETTINGS_VALUE_TYPE); String dayCountStr = row.getField(SETTINGS_DAY_COUNT); String interpolatorStr = row.getField(SETTINGS_INTERPOLATOR); String leftExtrapolatorStr = row.getField(SETTINGS_LEFT_EXTRAPOLATOR); String rightExtrapolatorStr = row.getField(SETTINGS_RIGHT_EXTRAPOLATOR); if (!VALUE_TYPE_MAP.containsKey(valueTypeStr.toLowerCase(Locale.ENGLISH))) { throw new 
IllegalArgumentException( Messages.format("Unsupported {} in curve settings: {}", SETTINGS_VALUE_TYPE, valueTypeStr)); } CurveName curveName = CurveName.of(curveNameStr); ValueType valueType = VALUE_TYPE_MAP.get(valueTypeStr.toLowerCase(Locale.ENGLISH)); CurveInterpolator interpolator = CurveInterpolator.of(interpolatorStr); CurveExtrapolator leftExtrap = CurveExtrapolator.of(leftExtrapolatorStr); CurveExtrapolator rightExtrap = CurveExtrapolator.of(rightExtrapolatorStr); // ONE_ONE day count is not used DayCount dayCount = LoaderUtils.parseDayCount(dayCountStr); LoadedCurveSettings settings = LoadedCurveSettings.of( curveName, ValueType.YEAR_FRACTION, valueType, dayCount, interpolator, leftExtrap, rightExtrap); builder.put(curveName, settings); } return builder.build(); } private static Multimap<LocalDate, Curve> parseSingle( Predicate<LocalDate> datePredicate, CharSource curvesResource, Map<CurveName, LoadedCurveSettings> settingsMap) { CsvFile csv = CsvFile.of(curvesResource, true); Map<LoadedCurveKey, List<LoadedCurveNode>> allNodes = new HashMap<>(); for (CsvRow row : csv.rows()) { String dateStr = row.getField(CURVE_DATE); String curveNameStr = row.getField(CURVE_NAME); String pointDateStr = row.getField(CURVE_POINT_DATE); String pointValueStr = row.getField(CURVE_POINT_VALUE); String pointLabel = row.getField(CURVE_POINT_LABEL); LocalDate date = LoaderUtils.parseDate(dateStr); if (datePredicate.test(date)) { LocalDate pointDate = LoaderUtils.parseDate(pointDateStr); double pointValue = Double.valueOf(pointValueStr); LoadedCurveKey key = LoadedCurveKey.of(date, CurveName.of(curveNameStr)); List<LoadedCurveNode> curveNodes = allNodes.computeIfAbsent(key, k -> new ArrayList<>()); curveNodes.add(LoadedCurveNode.of(pointDate, pointValue, pointLabel)); } } return buildCurves(settingsMap, allNodes); } private static Multimap<LocalDate, Curve> buildCurves( Map<CurveName, LoadedCurveSettings> settingsMap, Map<LoadedCurveKey, List<LoadedCurveNode>> allNodes) { 
ImmutableMultimap.Builder<LocalDate, Curve> results = ImmutableMultimap.builder(); for (Map.Entry<LoadedCurveKey, List<LoadedCurveNode>> entry : allNodes.entrySet()) { LoadedCurveKey key = entry.getKey(); LoadedCurveSettings settings = settingsMap.get(key.getCurveName()); if (settings == null) { throw new IllegalArgumentException(Messages.format("Missing settings for curve: {}", key)); } results.put(key.getCurveDate(), settings.createCurve(key.getCurveDate(), entry.getValue())); } return results.build(); } //------------------------------------------------------------------------- private static void parseCurveMaps( CharSource groupsCharSource, Map<CurveGroupName, Map<Pair<RepoGroup, Currency>, CurveName>> repoGroups, Map<CurveGroupName, Map<Pair<LegalEntityGroup, Currency>, CurveName>> legalEntityGroups) { CsvFile csv = CsvFile.of(groupsCharSource, true); for (CsvRow row : csv.rows()) { String curveGroupStr = row.getField(GROUPS_NAME); String curveTypeStr = row.getField(GROUPS_CURVE_TYPE); String referenceStr = row.getField(GROUPS_REFERENCE); String currencyStr = row.getField(GROUPS_CURRENCY); String curveNameStr = row.getField(GROUPS_CURVE_NAME); CurveName curveName = CurveName.of(curveNameStr); createKey( curveName, CurveGroupName.of(curveGroupStr), curveTypeStr, referenceStr, currencyStr, repoGroups, legalEntityGroups); } } private static void createKey( CurveName curveName, CurveGroupName curveGroup, String curveTypeStr, String referenceStr, String currencyStr, Map<CurveGroupName, Map<Pair<RepoGroup, Currency>, CurveName>> repoGroups, Map<CurveGroupName, Map<Pair<LegalEntityGroup, Currency>, CurveName>> legalEntityGroups) { Currency currency = Currency.of(currencyStr); if (REPO.equalsIgnoreCase(curveTypeStr.toLowerCase(Locale.ENGLISH))) { RepoGroup repoGroup = RepoGroup.of(referenceStr); repoGroups.computeIfAbsent(curveGroup, k -> new LinkedHashMap<>()).put(Pair.of(repoGroup, currency), curveName); } else if 
(ISSUER.equalsIgnoreCase(curveTypeStr.toLowerCase(Locale.ENGLISH))) { LegalEntityGroup legalEntiryGroup = LegalEntityGroup.of(referenceStr); legalEntityGroups.computeIfAbsent( curveGroup, k -> new LinkedHashMap<>()).put(Pair.of(legalEntiryGroup, currency), curveName); } else { throw new IllegalArgumentException(Messages.format("Unsupported curve type: {}", curveTypeStr)); } } //------------------------------------------------------------------------- private static Curve queryCurve( CurveName name, Map<CurveName, Curve> curves, LocalDate date, CurveGroupName groupName, String curveType) { Curve curve = curves.get(name); if (curve == null) { throw new IllegalArgumentException( curveType + " curve values for " + name.toString() + " in group " + groupName.getName() + " are missing on " + date.toString()); } return curve; } //------------------------------------------------------------------------- // restricted constructor private LegalEntityRatesCurvesCsvLoader() { } }
/*
 * File: MySet.java
 * Author: Haofei Yan
 * Date: March.7 2016
 */
package set_competiton.hy222ap2;

import java.util.Arrays;
import java.util.BitSet;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;

import set_competiton.math_set.MathSet;

/**
 * A MathSet implementation based on HashSet.
 *
 * @author Haofei Yan
 *
 */
public class MySet implements MathSet {

	// Backing store for the set elements.
	// BUG FIX: this field was declared as java.util.BitSet, but every use site
	// (addAll(Collection), contains(Object), iterator() over Object, enhanced-for
	// over Object) requires a Set<Object>, so the class did not compile.
	// Restored to HashSet<Object>, matching the class javadoc.
	private HashSet<Object> values = new HashSet<Object>();

	/** Creates an empty set. */
	public MySet() {
		values = new HashSet<Object>();
	}

	/** Creates a set containing the distinct elements of the given collection. */
	public MySet(Collection<Object> col) {
		// HashSet.addAll discards duplicates automatically
		values.addAll(col);
	}

	/** Creates a set containing the distinct elements passed as varargs. */
	public MySet(Object... elements) {
		List<Object> list = Arrays.asList(elements);
		values.addAll(list);
	}

	@Override
	public String toString() {
		return values.toString();
	}

	@Override
	public int hashCode() {
		return values.hashCode();
	}

	/** Returns the number of elements in this set. */
	@Override
	public int size() {
		return values.size();
	}

	/** Returns true if this set contains no elements. */
	@Override
	public boolean isEmpty() {
		return size() == 0;
	}

	/**
	 * Two MySet instances are equal when they contain the same elements.
	 */
	@Override
	public boolean equals(Object obj) {
		if (obj instanceof MySet) {
			// HashSet.equals performs exactly the original check:
			// equal size plus mutual containment
			return values.equals(((MySet) obj).values);
		}
		return false;
	}

	@Override
	public boolean contains(Object element) {
		return values.contains(element);
	}

	@Override
	public Iterator<Object> iterator() {
		return values.iterator();
	}

	/** Returns a new set containing the elements present in both sets. */
	@Override
	public MathSet intersection(MathSet ms) {
		MySet other = (MySet) ms;
		HashSet<Object> result = new HashSet<Object>(values);
		result.retainAll(other.values);
		return new MySet(result);
	}

	/** Returns a new set containing the elements present in either set. */
	@Override
	public MathSet union(MathSet ms) {
		MySet other = (MySet) ms;
		HashSet<Object> result = new HashSet<Object>(values);
		result.addAll(other.values);
		return new MySet(result);
	}

	/** Returns a new set containing the elements of this set not present in ms. */
	@Override
	public MathSet difference(MathSet ms) {
		MySet other = (MySet) ms;
		HashSet<Object> result = new HashSet<Object>(values);
		result.removeAll(other.values);
		return new MySet(result);
	}

	/**
	 * Returns an independent copy of this set.
	 * BUG FIX: the previous implementation returned new MySet(values), which
	 * invoked the private adopting constructor and ALIASED the backing set, so
	 * the "copy" shared state with the original. A defensive copy is made now.
	 */
	@Override
	public MathSet copy() {
		return new MySet(new HashSet<Object>(values));
	}

	/*
	 * Private helper constructor that adopts the given set WITHOUT copying;
	 * callers must pass a set they no longer reference.
	 */
	private MySet(HashSet<Object> list) {
		values = list;
	}
}
package com.bearsoft.gwt.ui.containers;

import com.bearsoft.gwt.ui.HasImageResource;
import com.bearsoft.gwt.ui.XElement;
import com.bearsoft.gwt.ui.menu.MenuItemImageText;
import com.bearsoft.gwt.ui.widgets.ImageLabel;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.Scheduler;
import com.google.gwt.core.client.Scheduler.ScheduledCommand;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.logical.shared.HasSelectionHandlers;
import com.google.gwt.event.logical.shared.SelectionEvent;
import com.google.gwt.event.logical.shared.SelectionHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.safehtml.client.SafeHtmlTemplates;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeUri;
import com.google.gwt.safehtml.shared.UriUtils;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasHTML;
import com.google.gwt.user.client.ui.HasText;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.IndexedPanel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.LayoutPanel;
import com.google.gwt.user.client.ui.MenuBar;
import com.google.gwt.user.client.ui.PopupPanel;
import com.google.gwt.user.client.ui.ProvidesResize;
import com.google.gwt.user.client.ui.RequiresResize;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.TabLayoutPanel;
import com.google.gwt.user.client.ui.Widget;

/**
 * A {@link TabLayoutPanel} wrapper that decorates the tab bar with a "chevron"
 * control: scroll-left / scroll-right buttons and a drop-down list of all tabs.
 * Delegates most tab management to the wrapped TabLayoutPanel and re-fires its
 * selection events as {@code SelectionEvent<Widget>}.
 *
 * @author mg
 */
public class TabsDecoratedPanel extends SimplePanel implements RequiresResize, ProvidesResize, IndexedPanel, HasSelectionHandlers<Widget> {

    /** SafeHtml template used to render the inner div of each chevron button. */
    public interface Template extends SafeHtmlTemplates {

        @SafeHtmlTemplates.Template("<div class=\"{0}\"></div>")
        SafeHtml classedDiv(String aClasses);
    }

    private static final Template template = GWT.create(Template.class);

    // Whether the tab bar is rendered above (true) or below (false) the content.
    protected boolean tabsOnTop = true;
    // Container for the three chevron buttons; appended into the tab-bar area.
    protected FlowPanel chevron = new FlowPanel();
    protected Button scrollLeft;
    protected Button scrollRight;
    protected Button tabsList;
    // The wrapped panel that actually hosts tabs and content.
    protected TabLayoutPanel tabs;
    protected double barHeight;
    protected Style.Unit barUnit;
    //
    // Captured from the TabLayoutPanel's internal LayoutPanel (see initWidget override).
    protected LayoutPanel tabBarContainer;
    protected Widget tabBar;
    protected Widget tabsContent;
    //
    // Currently selected content widget, or null when nothing is selected.
    protected Widget selected;

    /**
     * Creates the decorated panel.
     *
     * @param aBarHeight height of the tab bar
     * @param aBarUnit unit of {@code aBarHeight}
     */
    public TabsDecoratedPanel(double aBarHeight, Style.Unit aBarUnit) {
        super();
        barHeight = aBarHeight;
        barUnit = aBarUnit;
        // Anonymous subclass: captures the internal LayoutPanel and normalizes
        // inserted children's sizing styles.
        tabs = new TabLayoutPanel(barHeight, barUnit) {
            @Override
            protected void initWidget(Widget w) {
                super.initWidget(w);
                // NOTE(review): relies on TabLayoutPanel's internal widget being a
                // LayoutPanel — an implementation detail of GWT; the assert guards it.
                assert w instanceof LayoutPanel;
                tabBarContainer = (LayoutPanel) w;
            }

            @Override
            public void insert(Widget child, Widget tab, int beforeIndex) {
                // Clear fixed sizing so the child fills its layer via CSS instead.
                child.getElement().getStyle().clearWidth();
                child.getElement().getStyle().clearHeight();
                // if (child instanceof FocusWidget) {
                child.getElement().getStyle().clearRight();
                child.getElement().getStyle().setWidth(100, Style.Unit.PCT);
                com.bearsoft.gwt.ui.CommonResources.INSTANCE.commons().ensureInjected();
                child.getElement().addClassName(com.bearsoft.gwt.ui.CommonResources.INSTANCE.commons().borderSized());
                // }
                super.insert(child, tab, beforeIndex);
            }

            @Override
            public void selectTab(final int index, boolean fireEvents) {
                super.selectTab(index, fireEvents);
            }
        };
        // Translate index-based selection into widget-based selection events and
        // give the newly shown child a chance to lay itself out.
        tabs.addSelectionHandler(new SelectionHandler<Integer>() {
            @Override
            public void onSelection(SelectionEvent<Integer> event) {
                selected = event.getSelectedItem() != -1 ? tabs.getWidget(event.getSelectedItem()) : null;
                Scheduler.get().scheduleDeferred(new ScheduledCommand() {
                    @Override
                    public void execute() {
                        if (selected instanceof RequiresResize) {
                            ((RequiresResize) selected).onResize();
                        }
                    }
                });
                SelectionEvent.fire(TabsDecoratedPanel.this, selected);
            }
        });
        // NOTE(review): assumes the internal LayoutPanel's child 0 is the tab bar
        // and child 1 the content area — a TabLayoutPanel implementation detail.
        tabBar = tabBarContainer.getWidget(0);
        tabsContent = tabBarContainer.getWidget(1);
        // GWT Layout animations are deprecated because of CSS3 transitions
        tabs.setAnimationDuration(0);
        // Scrolls the tab bar 100px to the right (towards earlier tabs), clamped at 0.
        scrollLeft = new Button(template.classedDiv("tabs-chevron-left"), new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                int offsetLeft = tabBar.getElement().getOffsetLeft();
                if (offsetLeft < 0) {
                    tabBar.getElement().getStyle().setLeft(Math.min(offsetLeft + 100, 0), Style.Unit.PX);
                }
                updateScrolls();
            }
        });
        // Scrolls the tab bar 100px to the left (towards later tabs), clamped so
        // the last tab stays visible.
        scrollRight = new Button(template.classedDiv("tabs-chevron-right"), new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                int newTabBarLeft = calcNewScrollRightPosition();
                tabBar.getElement().getStyle().setLeft(newTabBarLeft, Style.Unit.PX);
                updateScrolls();
            }
        });
        // Pops up a menu listing every tab; picking one selects it and centers
        // its tab button inside the visible part of the tab bar.
        tabsList = new Button(template.classedDiv("tabs-chevron-list"), new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                final PopupPanel pp = new PopupPanel();
                pp.setAutoHideEnabled(true);
                pp.setAutoHideOnHistoryEventsEnabled(true);
                pp.setAnimationEnabled(true);
                MenuBar menu = new MenuBar(true);
                for (int i = 0; i < tabs.getWidgetCount(); i++) {
                    final Widget content = tabs.getWidget(i);
                    Widget w = tabs.getTabWidget(i);
                    if (w instanceof SimplePanel) {
                        SimplePanel sp = (SimplePanel) w;
                        w = sp.getWidget();
                    }
                    // Selects the tab, then scrolls the bar so the chosen tab sits
                    // as close to the center of the visible area as possible.
                    ScheduledCommand tabSelector = new ScheduledCommand() {
                        @Override
                        public void execute() {
                            tabs.selectTab(content);
                            pp.hide();
                            Widget targetTab = tabs.getTabWidget(content);
                            int tabCenterX = targetTab.getParent().getElement().getOffsetLeft() + targetTab.getParent().getElement().getOffsetWidth() / 2;
                            // Visible width excludes the chevron buttons themselves.
                            int tabBarParentWidth = tabBar.getElement().getParentElement().getOffsetWidth() - chevron.getElement().getOffsetWidth();
                            int newOffsetLeft = tabBarParentWidth / 2 - tabCenterX;
                            Widget lastTab = tabs.getTabWidget(tabs.getWidgetCount() - 1);
                            int rightMostX = lastTab.getParent().getElement().getOffsetLeft() + lastTab.getParent().getElement().getOffsetWidth();
                            int width = rightMostX + newOffsetLeft;
                            if (width > tabBarParentWidth) {
                                tabBar.getElement().getStyle().setLeft(Math.min(newOffsetLeft, 0), Style.Unit.PX);
                            } else {
                                tabBar.getElement().getStyle().setLeft(Math.min(tabBarParentWidth - rightMostX, 0), Style.Unit.PX);
                            }
                        }
                    };
                    // Pull an icon from the tab widget when it offers one.
                    SafeUri imageUri = null;
                    if (w instanceof Image) {
                        Image image = (Image) w;
                        imageUri = UriUtils.fromTrustedString(image.getUrl());
                    } else if (w instanceof HasImageResource) {
                        HasImageResource imageHost = (HasImageResource) w;
                        if (imageHost.getImageResource() != null) {
                            imageUri = imageHost.getImageResource().getSafeUri();
                        }
                    }
                    if (w instanceof HasHTML) {
                        HasHTML h = (HasHTML) w;
                        String textAsHtml = h.getHTML();
                        menu.addItem(new MenuItemImageText(textAsHtml != null ? textAsHtml : h.getText(), true, imageUri, tabSelector));
                    } else if (w instanceof HasText) {
                        HasText l = (HasText) w;
                        menu.addItem(new MenuItemImageText(l.getText(), false, imageUri, tabSelector));
                    }
                }
                pp.setWidget(menu);
                // Anchor the popup under the right-most chevron button (the list button).
                Widget lastWidget = chevron.getWidget(chevron.getWidgetCount() - 1);
                pp.setPopupPosition(lastWidget.getAbsoluteLeft(), lastWidget.getAbsoluteTop() + lastWidget.getElement().getOffsetHeight());
                pp.showRelativeTo(lastWidget);
            }
        });
        getElement().getStyle().setPosition(Style.Position.RELATIVE);
        tabs.getElement().getStyle().setPosition(Style.Position.ABSOLUTE);
        tabs.getElement().getStyle().setWidth(100, Style.Unit.PCT);
        tabs.getElement().getStyle().setHeight(100, Style.Unit.PCT);
        setWidget(tabs);
        scrollLeft.getElement().getStyle().setPadding(0, Style.Unit.PX);
        scrollLeft.getElement().getStyle().setMargin(0, Style.Unit.PX);
        scrollRight.getElement().getStyle().setPadding(0, Style.Unit.PX);
        scrollRight.getElement().getStyle().setMargin(0, Style.Unit.PX);
        tabsList.getElement().getStyle().setPadding(0, Style.Unit.PX);
        tabsList.getElement().getStyle().setMargin(0, Style.Unit.PX);
        chevron.add(scrollLeft);
        chevron.add(scrollRight);
        chevron.add(tabsList);
        chevron.getElement().addClassName("tabs-chevron");
        chevron.getElement().getStyle().setPosition(Style.Position.ABSOLUTE);
        assert tabBarContainer != null;
        // The chevron element is placed directly inside the tab-bar layer; it is
        // adopted/orphaned manually in onAttach/onDetach below.
        tabBarContainer.getWidgetContainerElement(tabBar).appendChild(chevron.getElement());
        getElement().<XElement> cast().addResizingTransitionEnd(this);
    }

    /** @return true when the tab bar is shown above the content. */
    public boolean isTabsOnTop() {
        return tabsOnTop;
    }

    /**
     * Moves the tab bar to the top or bottom of the panel.
     *
     * @param aValue true for tabs on top, false for tabs at the bottom
     */
    public void setTabsOnTop(boolean aValue) {
        if (tabsOnTop != aValue) {
            tabsOnTop = aValue;
            applyTabsOnTop();
        }
    }

    /**
     * Re-lays out the tab bar and content layers according to {@link #tabsOnTop},
     * preserving the tab bar's horizontal scroll position across the change.
     */
    protected void applyTabsOnTop() {
        if (tabBar != null && tabBarContainer != null && tabsContent != null) {
            // Remember the scroll offset; the layout calls below reset layer styles.
            final String tabBarLeft = tabBar.getElement().getStyle().getLeft();
            Element tabBarContainerElement = tabBarContainer.getWidgetContainerElement(tabBar);
            tabBarContainerElement.getStyle().clearTop();
            tabBarContainerElement.getStyle().clearHeight();
            tabBarContainerElement.getStyle().clearBottom();
            Element tabContentContainerElement = tabBarContainer.getWidgetContainerElement(tabsContent);
            tabContentContainerElement.getStyle().clearTop();
            tabContentContainerElement.getStyle().clearHeight();
            tabContentContainerElement.getStyle().clearBottom();
            if (tabsOnTop) {
                tabBarContainer.setWidgetTopHeight(tabBar, 0, Style.Unit.PX, barHeight, barUnit);
                tabBarContainer.setWidgetTopBottom(tabsContent, barHeight, barUnit, 0, Style.Unit.PX);
            } else {
                tabBarContainer.setWidgetBottomHeight(tabBar, 0, Style.Unit.PX, barHeight, barUnit);
                tabBarContainer.setWidgetTopBottom(tabsContent, 0, Style.Unit.PX, barHeight, barUnit);
            }
            // Restore the scroll offset after the deferred layout pass.
            Scheduler.get().scheduleDeferred(new ScheduledCommand() {
                @Override
                public void execute() {
                    tabBar.getElement().getStyle().setProperty("left", tabBarLeft);
                }
            });
        }
    }

    @Override
    protected void onAttach() {
        super.onAttach();
        // chevron's element was appended manually, so the widget must be adopted
        // explicitly to participate in the attach lifecycle.
        adopt(chevron);
    }

    @Override
    protected void onDetach() {
        orphan(chevron);
        super.onDetach();
    }

    @Override
    public void onResize() {
        tabs.onResize();
        // Scroll-button enablement depends on post-layout geometry, so defer.
        Scheduler.get().scheduleDeferred(new ScheduledCommand() {
            @Override
            public void execute() {
                updateScrolls();
            }
        });
    }

    /**
     * Computes the next tab-bar left offset for a scroll-right step (100px),
     * clamped so the bar never scrolls past its right-most tab.
     */
    protected int calcNewScrollRightPosition() {
        Widget lastTab = tabs.getTabWidget(tabs.getWidgetCount() - 1);
        int rightMostX = lastTab.getParent().getElement().getOffsetLeft() + lastTab.getParent().getElement().getOffsetWidth();
        int tabBarParentWidth = tabBar.getElement().getParentElement().getOffsetWidth() - chevron.getElement().getOffsetWidth();
        int tabBarMostLeft = Math.min(tabBarParentWidth - rightMostX, 0);
        int nextTabBarLeft = tabBar.getElement().getOffsetLeft() - 100;
        if (nextTabBarLeft < tabBarMostLeft)
            nextTabBarLeft = tabBarMostLeft;
        return nextTabBarLeft;
    }

    /** Enables/disables the scroll buttons based on the current scroll position. */
    protected void updateScrolls() {
        int oldTabBarLeft = tabBar.getElement().getOffsetLeft();
        scrollLeft.setEnabled(oldTabBarLeft < 0);
        //
        int newTabBarLeft = calcNewScrollRightPosition();
        scrollRight.setEnabled(newTabBarLeft != oldTabBarLeft);
    }

    /**
     * Adds a widget to the panel. If the Widget is already attached, it will be
     * moved to the right-most index.
     *
     * @param child
     *            the widget to be added
     * @param text
     *            the text to be shown on its tab
     * @param asHtml
     *            <code>true</code> to treat the specified text as HTML
     */
    public void add(Widget child, String text, boolean asHtml) {
        tabs.insert(child, text, asHtml, tabs.getWidgetCount());
    }

    /**
     * Adds a widget to the panel. If the Widget is already attached, it will be
     * moved to the right-most index.
     *
     * @param child
     *            the widget to be added
     * @param text
     *            the text to be shown on its tab
     * @param asHtml
     *            <code>true</code> to treat the specified text as HTML
     * @param aImage
     *            the image to be shown next to the tab text
     */
    public void add(Widget child, String text, boolean asHtml, ImageResource aImage) {
        tabs.insert(child, new ImageLabel(text, asHtml, aImage), tabs.getWidgetCount());
    }

    /**
     * Adds a widget to the panel. If the Widget is already attached, it will be
     * moved to the right-most index.
     *
     * @param child
     *            the widget to be added
     * @param text
     *            the text to be shown on its tab
     */
    public void add(Widget child, String text) {
        tabs.insert(child, text, tabs.getWidgetCount());
    }

    /**
     * Adds a widget to the panel. If the Widget is already attached, it will be
     * moved to the right-most index.
     *
     * @param child
     *            the widget to be added
     * @param html
     *            the html to be shown on its tab
     */
    public void add(Widget child, SafeHtml html) {
        tabs.add(child, html.asString(), true);
    }

    /**
     * Adds a widget to the panel. If the Widget is already attached, it will be
     * moved to the right-most index.
     *
     * @param child
     *            the widget to be added
     * @param tab
     *            the widget to be placed in the associated tab
     */
    public void add(Widget child, Widget tab) {
        tabs.insert(child, tab, tabs.getWidgetCount());
    }

    /**
     * Inserts a widget into the panel. If the Widget is already attached, it
     * will be moved to the requested index.
     *
     * @param child
     *            the widget to be added
     * @param beforeIndex
     *            the index before which it will be inserted
     */
    public void insert(Widget child, int beforeIndex) {
        tabs.insert(child, "", beforeIndex);
    }

    /**
     * Inserts a widget into the panel. If the Widget is already attached, it
     * will be moved to the requested index.
     *
     * @param child
     *            the widget to be added
     * @param html
     *            the html to be shown on its tab
     * @param beforeIndex
     *            the index before which it will be inserted
     */
    public void insert(Widget child, SafeHtml html, int beforeIndex) {
        tabs.insert(child, html.asString(), true, beforeIndex);
    }

    /**
     * Inserts a widget into the panel. If the Widget is already attached, it
     * will be moved to the requested index.
     *
     * @param child
     *            the widget to be added
     * @param text
     *            the text to be shown on its tab
     * @param asHtml
     *            <code>true</code> to treat the specified text as HTML
     * @param beforeIndex
     *            the index before which it will be inserted
     */
    public void insert(Widget child, String text, boolean asHtml, int beforeIndex) {
        Widget contents;
        if (asHtml) {
            contents = new HTML(text);
        } else {
            contents = new Label(text);
        }
        tabs.insert(child, contents, beforeIndex);
    }

    /**
     * Inserts a widget into the panel. If the Widget is already attached, it
     * will be moved to the requested index.
     *
     * @param child
     *            the widget to be added
     * @param text
     *            the text to be shown on its tab
     * @param beforeIndex
     *            the index before which it will be inserted
     */
    public void insert(Widget child, String text, int beforeIndex) {
        tabs.insert(child, text, false, beforeIndex);
    }

    /**
     * Inserts a widget into the panel. If the Widget is already attached, it
     * will be moved to the requested index.
     *
     * @param child
     *            the widget to be added
     * @param tab
     *            the widget to be placed in the associated tab
     * @param beforeIndex
     *            the index before which it will be inserted
     */
    public void insert(Widget child, Widget tab, int beforeIndex) {
        tabs.insert(child, tab, beforeIndex);
    }

    /**
     * Set the duration of the animated transition between tabs.
     *
     * @param duration
     *            the duration in milliseconds.
     */
    public void setAnimationDuration(int duration) {
        tabs.setAnimationDuration(duration);
    }

    /**
     * Set whether or not transitions slide in vertically or horizontally.
     *
     * @param isVertical
     *            true for vertical transitions, false for horizontal
     */
    public void setAnimationVertical(boolean isVertical) {
        tabs.setAnimationVertical(isVertical);
    }

    /**
     * Sets a tab's HTML contents.
     *
     * Use care when setting an object's HTML; it is an easy way to expose
     * script-based security problems. Consider using
     * {@link #setTabHTML(int, SafeHtml)} or {@link #setTabText(int, String)}
     * whenever possible.
     *
     * @param index
     *            the index of the tab whose HTML is to be set
     * @param html
     *            the tab's new HTML contents
     */
    public void setTabHTML(int index, String html) {
        tabs.setTabHTML(index, html);
    }

    /**
     * Sets a tab's HTML contents.
     *
     * @param index
     *            the index of the tab whose HTML is to be set
     * @param html
     *            the tab's new HTML contents
     */
    public void setTabHTML(int index, SafeHtml html) {
        tabs.setTabHTML(index, html);
    }

    /**
     * Sets a tab's text contents.
     *
     * @param index
     *            the index of the tab whose text is to be set
     * @param text
     *            the object's new text
     */
    public void setTabText(int index, String text) {
        tabs.setTabText(index, text);
    }

    @Override
    public boolean remove(Widget w) {
        return tabs.remove(w);
    }

    @Override
    public Widget getWidget(int index) {
        return tabs.getWidget(index);
    }

    @Override
    public int getWidgetCount() {
        return tabs.getWidgetCount();
    }

    @Override
    public int getWidgetIndex(Widget child) {
        return tabs.getWidgetIndex(child);
    }

    @Override
    public boolean remove(int aIndex) {
        return tabs.remove(aIndex);
    }

    /**
     * Programmatically selects the specified tab and fires events.
     *
     * @param child
     *            the child whose tab is to be selected
     */
    public void selectTab(Widget child) {
        tabs.selectTab(child);
    }

    /**
     * Programmatically selects the specified tab.
     *
     * @param child
     *            the child whose tab is to be selected
     * @param fireEvents
     *            true to fire events, false not to
     */
    public void selectTab(Widget child, boolean fireEvents) {
        tabs.selectTab(child, fireEvents);
    }

    /**
     * Programmatically selects the specified tab and fires events.
     *
     * @param index
     *            the index of the tab to be selected
     */
    public void selectTab(int index) {
        tabs.selectTab(index);
    }

    /**
     * Programmatically selects the specified tab.
     *
     * @param index
     *            the index of the tab to be selected
     * @param fireEvents
     *            true to fire events, false not to
     */
    public void selectTab(int index, boolean fireEvents) {
        tabs.selectTab(index, fireEvents);
    }

    @Override
    public HandlerRegistration addSelectionHandler(SelectionHandler<Widget> handler) {
        return addHandler(handler, SelectionEvent.getType());
    }
}
/* JAT: Java Astrodynamics Toolkit * * Copyright (c) 2002 The JAT Project. All rights reserved. * * This file is part of JAT. JAT is free software; you can * redistribute it and/or modify it under the terms of the * NASA Open Source Agreement, version 1.3 or later. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * NASA Open Source Agreement for more details. * * You should have received a copy of the NASA Open Source Agreement * along with this program; if not, write to the NASA Goddard * Space Flight Center at opensource@gsfc.nasa.gov. * */ package jat.forces; import jat.matvec.data.*; import jat.spacecraft.Spacecraft; import jat.spacetime.BodyRef; import jat.spacetime.EarthTrueOfDateRef; import jat.spacetime.ReferenceFrameTranslater; import jat.spacetime.Time; import jat.timeRef.*; import jat.audio.*; /** * <P> * The CIRA_ExponentialDrag class computes the acceleration due to drag on a satellite * using an exponential Earth atmosphere model. The min altitude is * currently 200 km. To go lower, just need to add more values from * the table. * * Reference: Vallado, Table 8-4. 
* * @author <a href="mailto:dgaylor@users.sourceforge.net">Dave Gaylor * @version 1.0 */ public class CIRA_ExponentialDrag extends AtmosphericDrag{ private int brack; private Matrix dadr; private final static double[] rho_0 = { 2.789E-10, 7.248E-11, 2.418E-11, 9.158E-12, 3.725E-12, 1.585E-12, 6.967E-13, 1.454E-13, 3.614E-14, 1.170E-14, 5.245E-15, 3.019E-15}; private final static double[] H = { 37.105, 45.546, 53.628, 53.298, 58.515, 60.828, 63.822, 71.835, 88.667, 124.64, 181.05, 268.0}; private final static double[] h0 = { 200.0, 250.0, 300.0, 350.0, 400.0, 450.0, 500.0, 600.0, 700.0, 800.0, 900.0, 1000.0}; /** * Constructor * @param cd coefficient of drag * @param area drag cross-sectional area * @param mass mass */ public CIRA_ExponentialDrag (double cd, double area, double mass){ super(cd, area, mass); } /** * Constructor * @param sc Spacecraft parameters */ public CIRA_ExponentialDrag(Spacecraft sc) { super(sc); } /** Compute the atmospheric density using an exponential atmosphere model. * @param ref EarthRef object. Not used. * @param r ECI position vector in meters. * @return Atmospheric density in kg/m^3. */ public double computeDensity(EarthRef ref, VectorN r){ r.checkVectorDimensions(3); // Get the J2000 to TOD transformation Matrix N = ref.TOD(); // Transform r from J2000 to TOD VectorN r_tod = N.times(r); double rmag = r_tod.mag(); // Satellite height Geodetic geod = new Geodetic(r_tod); double height = geod.getHAE()/1000.0; // convert to [km] // check to see if too low if (height < h0[0]) { System.out.println("CIRA_ExponentialDrag: altitude = "+height+" too low. 
Min altitude = "+h0[0]); SoundPlayer.play("C:\\Jat\\jat\\jat\\audio\\sounds\\humanerror.wav"); System.exit(99); // return 0.0; } // find the right height bracket int n = h0.length; int bracket = 0; if (height >= h0[n-1]) { bracket = n - 1; } else { for (int i = 0; i < (n-1); i++) { if ((height >= h0[i]) && (height < h0[i+1])){ bracket = i; } } } // compute the density this.brack = bracket; double rho = rho_0[bracket] * Math.exp((h0[bracket] - height)/H[bracket]); // System.out.println("ced density: "+rho); return rho; } /** Compute the atmospheric density using an exponential atmosphere model. * @param t Time reference object. Not used. * @param ref EarthRef object. Not used. * @param r ECI position vector in meters. * @return Atmospheric density in kg/m^3. */ public double computeDensity(Time t, BodyRef ref, VectorN r) { r.checkVectorDimensions(3); // Translate from J2000 to TOD ReferenceFrameTranslater xlater = new ReferenceFrameTranslater(ref, new EarthTrueOfDateRef(), t); VectorN r_tod = xlater.translatePoint(r); double rmag = r_tod.mag(); // Satellite height Geodetic geod = new Geodetic(r_tod); double height = geod.getHAE()/1000.0; // convert to [km] // check to see if too low if (height < h0[0]) { System.out.println("CIRA_ExponentialDrag: altitude = "+height+" too low. Min altitude = "+h0[0]); SoundPlayer.play("C:\\Jat\\jat\\jat\\audio\\sounds\\humanerror.wav"); System.exit(99); // return 0.0; } // find the right height bracket int n = h0.length; int bracket = 0; if (height >= h0[n-1]) { bracket = n - 1; } else { for (int i = 0; i < (n-1); i++) { if ((height >= h0[i]) && (height < h0[i+1])){ bracket = i; } } } // compute the density this.brack = bracket; double rho = rho_0[bracket] * Math.exp((h0[bracket] - height)/H[bracket]); // System.out.println("ced density: "+rho); return rho; } /** Computes the acceleration due to drag in m/s^2. * @param ref EarthRef object. 
* @param beta Satellite ballistic coefficient (Cd*A/m) * @param r ECI position vector in meters. * @param v ECI velocity vector in meters. * @return acceleration due to drag in m/s^2. */ public void compute(EarthRef ref, VectorN r, VectorN v){ r.checkVectorDimensions(3); v.checkVectorDimensions(3); double rmag = r.mag(); double beta = cd * area / mass; // compute the atmospheric density double rho = computeDensity(ref, r); // compute the relative velocity vector and magnitude VectorN we = new VectorN(0.0, 0.0, omega_e); VectorN wxr = we.crossProduct(r); VectorN vr = v.minus(wxr); double vrmag = vr.mag(); // form -1/2 (Cd*A/m) rho double coeff = -0.5 * beta * rho; double coeff2 = coeff * vrmag; // compute the acceleration in ECI frame (km/s^2) this.drag = vr.times(coeff2); // form partial of drag wrt v Matrix vrvrt = vr.outerProduct(vr); vrvrt = vrvrt.divide(vrmag); Matrix vrm = new Matrix(3); vrm = vrm.times(vrmag); this.dadv = (vrvrt.plus(vrm)).times(coeff); // form partial of drag wrt cd double coeff3 = coeff2 / this.cd; this.dadcd = vr.times(coeff3); // form partial of drag wrt r, see Montenbruck, p. 249 double Hh = H[this.brack]; double coeff4 = -1.0 / (Hh * rmag); VectorN drhodr = r.times(coeff4); Matrix part1 = vr.outerProduct(drhodr); part1 = part1.times(coeff2); Matrix cross = we.cross(); Matrix part2 = this.dadv.times(cross); this.dadr = part1.minus(part2); } /** * Return the partial derivative of acceleration wrt position * @return Matrix containing the partial derivative of acceleration wrt position */ public Matrix partialR(){ return this.dadr; } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.codecommit.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Returns information about errors in a BatchDisassociateApprovalRuleTemplateFromRepositories operation. * </p> * * @see <a * href="http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/BatchDisassociateApprovalRuleTemplateFromRepositoriesError" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class BatchDisassociateApprovalRuleTemplateFromRepositoriesError implements Serializable, Cloneable, StructuredPojo { /** * <p> * The name of the repository where the association with the template was not able to be removed. * </p> */ private String repositoryName; /** * <p> * An error code that specifies whether the repository name was not valid or not found. * </p> */ private String errorCode; /** * <p> * An error message that provides details about why the repository name was either not found or not valid. * </p> */ private String errorMessage; /** * <p> * The name of the repository where the association with the template was not able to be removed. * </p> * * @param repositoryName * The name of the repository where the association with the template was not able to be removed. 
*/ public void setRepositoryName(String repositoryName) { this.repositoryName = repositoryName; } /** * <p> * The name of the repository where the association with the template was not able to be removed. * </p> * * @return The name of the repository where the association with the template was not able to be removed. */ public String getRepositoryName() { return this.repositoryName; } /** * <p> * The name of the repository where the association with the template was not able to be removed. * </p> * * @param repositoryName * The name of the repository where the association with the template was not able to be removed. * @return Returns a reference to this object so that method calls can be chained together. */ public BatchDisassociateApprovalRuleTemplateFromRepositoriesError withRepositoryName(String repositoryName) { setRepositoryName(repositoryName); return this; } /** * <p> * An error code that specifies whether the repository name was not valid or not found. * </p> * * @param errorCode * An error code that specifies whether the repository name was not valid or not found. */ public void setErrorCode(String errorCode) { this.errorCode = errorCode; } /** * <p> * An error code that specifies whether the repository name was not valid or not found. * </p> * * @return An error code that specifies whether the repository name was not valid or not found. */ public String getErrorCode() { return this.errorCode; } /** * <p> * An error code that specifies whether the repository name was not valid or not found. * </p> * * @param errorCode * An error code that specifies whether the repository name was not valid or not found. * @return Returns a reference to this object so that method calls can be chained together. */ public BatchDisassociateApprovalRuleTemplateFromRepositoriesError withErrorCode(String errorCode) { setErrorCode(errorCode); return this; } /** * <p> * An error message that provides details about why the repository name was either not found or not valid. 
* </p> * * @param errorMessage * An error message that provides details about why the repository name was either not found or not valid. */ public void setErrorMessage(String errorMessage) { this.errorMessage = errorMessage; } /** * <p> * An error message that provides details about why the repository name was either not found or not valid. * </p> * * @return An error message that provides details about why the repository name was either not found or not valid. */ public String getErrorMessage() { return this.errorMessage; } /** * <p> * An error message that provides details about why the repository name was either not found or not valid. * </p> * * @param errorMessage * An error message that provides details about why the repository name was either not found or not valid. * @return Returns a reference to this object so that method calls can be chained together. */ public BatchDisassociateApprovalRuleTemplateFromRepositoriesError withErrorMessage(String errorMessage) { setErrorMessage(errorMessage); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getRepositoryName() != null) sb.append("RepositoryName: ").append(getRepositoryName()).append(","); if (getErrorCode() != null) sb.append("ErrorCode: ").append(getErrorCode()).append(","); if (getErrorMessage() != null) sb.append("ErrorMessage: ").append(getErrorMessage()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof BatchDisassociateApprovalRuleTemplateFromRepositoriesError == false) return false; BatchDisassociateApprovalRuleTemplateFromRepositoriesError other = (BatchDisassociateApprovalRuleTemplateFromRepositoriesError) obj; if (other.getRepositoryName() == null ^ this.getRepositoryName() == null) return false; if (other.getRepositoryName() != null && other.getRepositoryName().equals(this.getRepositoryName()) == false) return false; if (other.getErrorCode() == null ^ this.getErrorCode() == null) return false; if (other.getErrorCode() != null && other.getErrorCode().equals(this.getErrorCode()) == false) return false; if (other.getErrorMessage() == null ^ this.getErrorMessage() == null) return false; if (other.getErrorMessage() != null && other.getErrorMessage().equals(this.getErrorMessage()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getRepositoryName() == null) ? 0 : getRepositoryName().hashCode()); hashCode = prime * hashCode + ((getErrorCode() == null) ? 0 : getErrorCode().hashCode()); hashCode = prime * hashCode + ((getErrorMessage() == null) ? 
0 : getErrorMessage().hashCode()); return hashCode; } @Override public BatchDisassociateApprovalRuleTemplateFromRepositoriesError clone() { try { return (BatchDisassociateApprovalRuleTemplateFromRepositoriesError) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.codecommit.model.transform.BatchDisassociateApprovalRuleTemplateFromRepositoriesErrorMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Class DebuggerTreeBase
 * @author Jeka
 */
package com.intellij.debugger.ui.impl;

import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.ui.impl.watch.DebuggerTreeNodeImpl;
import com.intellij.debugger.ui.impl.watch.NodeDescriptorImpl;
import com.intellij.debugger.ui.impl.watch.ValueDescriptorImpl;
import com.intellij.ide.dnd.aware.DnDAwareTree;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.ui.ScreenUtil;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.util.text.StringTokenizer;
import com.intellij.util.ui.GeometryUtil;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;

/**
 * Base class for debugger trees: a {@link DnDAwareTree} that renders debugger nodes
 * and shows rich, scrollable HTML tooltips for node values via {@link TipManager}.
 * The currently shown tooltip and its node are cached so that repeated hover events
 * over the same node reuse the existing tooltip component.
 */
public class DebuggerTreeBase extends DnDAwareTree implements Disposable {
  private final Project myProject;
  // Node whose tooltip is currently displayed; used to avoid rebuilding the tooltip on repeat events.
  private DebuggerTreeNodeImpl myCurrentTooltipNode;
  // The tooltip component currently displayed (or last created); cleared on dispose().
  private JComponent myCurrentTooltip;
  protected final TipManager myTipManager;

  public DebuggerTreeBase(TreeModel model, Project project) {
    super(model);
    myProject = project;
    // TipManager drives tooltip creation/placement; callbacks delegate back to this tree.
    myTipManager = new TipManager(this, new TipManager.TipFactory() {
      @Override
      public JComponent createToolTip(MouseEvent e) {
        return DebuggerTreeBase.this.createToolTip(e);
      }

      @Override
      public MouseEvent createTooltipEvent(MouseEvent candidateEvent) {
        return DebuggerTreeBase.this.createTooltipEvent(candidateEvent);
      }

      @Override
      public boolean isFocusOwner() {
        return DebuggerTreeBase.this.isFocusOwner();
      }
    });
    // Tie the tip manager's lifetime to this tree.
    Disposer.register(this, myTipManager);
    setRootVisible(false);
    setShowsRootHandles(true);
    setCellRenderer(new DebuggerTreeRenderer());
    updateUI();
    TreeUtil.installActions(this);
  }

  /**
   * Builds the tooltip component for the given node: the node's markup tooltip text (if any)
   * followed by {@code tipText} converted to HTML (newlines become {@code <br>}, spaces become
   * non-breaking spaces, other runs are escaped via {@link JDOMUtil#legalizeText}).
   *
   * @param tipText plain-text value to render, may be null (then the tip text is left null)
   * @param node    node supplying optional markup tooltip text
   * @return a borderless {@link JToolTip} holding the rendered content
   */
  private JComponent createTipContent(String tipText, DebuggerTreeNodeImpl node) {
    final JToolTip tooltip = new JToolTip();
    if (tipText == null) {
      tooltip.setTipText(tipText);
    }
    else {
      // NOTE(review): rootSize is computed (visible rect minus doubled border insets) but never
      // read afterwards — presumably a leftover from an earlier sizing scheme; confirm before removing.
      Dimension rootSize = getVisibleRect().getSize();
      Insets borderInsets = tooltip.getBorder().getBorderInsets(tooltip);
      rootSize.width -= (borderInsets.left + borderInsets.right) * 2;
      rootSize.height -= (borderInsets.top + borderInsets.bottom) * 2;
      @NonNls StringBuilder tipBuilder = new StringBuilder();
      final String markupText = node.getMarkupTooltipText();
      if (markupText != null) {
        tipBuilder.append(markupText);
      }
      if (!tipText.isEmpty()) {
        // Tokenize on newlines and spaces, keeping the delimiters, so both can be
        // translated into their HTML equivalents.
        final StringTokenizer tokenizer = new StringTokenizer(tipText, "\n ", true);
        while (tokenizer.hasMoreElements()) {
          final String each = tokenizer.nextElement();
          if ("\n".equals(each)) {
            tipBuilder.append("<br>");
          }
          else if (" ".equals(each)) {
            tipBuilder.append("&nbsp ");
          }
          else {
            tipBuilder.append(JDOMUtil.legalizeText(each));
          }
        }
      }
      tooltip.setTipText(UIUtil.toHtml(tipBuilder.toString(), 0));
    }
    tooltip.setBorder(null);
    return tooltip;
  }

  /**
   * Derives a synthetic MOUSE_MOVED event anchored to a tree row, used as the tooltip trigger.
   * Prefers the row under {@code candidate}'s location (converted into this tree's coordinates);
   * falls back to the current selection when the tree owns focus.
   *
   * @return the synthetic event positioned in the lower quarter of the row bounds,
   *         or null when no suitable row exists
   */
  public MouseEvent createTooltipEvent(MouseEvent candidate) {
    TreePath path = null;
    if (candidate != null) {
      final Point treePoint = SwingUtilities.convertPoint(candidate.getComponent(), candidate.getPoint(), this);
      if (GeometryUtil.isWithin(new Rectangle(0, 0, getWidth(), getHeight()), treePoint)) {
        path = getPathForLocation(treePoint.x, treePoint.y);
      }
    }
    if (path == null) {
      if (isFocusOwner()) {
        path = getSelectionPath();
      }
    }
    if (path == null) return null;
    final int row = getRowForPath(path);
    if (row == -1) return null;
    final Rectangle bounds = getRowBounds(row);
    return new MouseEvent(this, MouseEvent.MOUSE_MOVED, System.currentTimeMillis(), 0, bounds.x,
                          bounds.y + bounds.height - bounds.height / 4, 0, false);
  }

  /**
   * Creates (or reuses) the tooltip for the node under the mouse event.
   * The content is wrapped in a scroll pane and the tooltip rectangle is grown by the tooltip
   * border and scrollbar sizes, capped at 75% of the screen width, then cropped to the screen;
   * if cropping shrank it, the tooltip is shifted left to keep the preferred width where possible.
   *
   * @return the tooltip component, or null when the node has no tip text
   */
  @Nullable
  public JComponent createToolTip(MouseEvent e) {
    final DebuggerTreeNodeImpl node = getNodeToShowTip(e);
    if (node == null) {
      return null;
    }
    // Reuse the visible tooltip when hovering over the same node.
    if (myCurrentTooltip != null && myCurrentTooltip.isShowing() && myCurrentTooltipNode == node) {
      return myCurrentTooltip;
    }
    final String toolTipText = getTipText(node);
    if (toolTipText == null) {
      return null;
    }
    final JComponent tipContent = createTipContent(toolTipText, node);
    final JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(tipContent);
    scrollPane.setBorder(null);
    scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED);
    scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED);
    final Point point = e.getPoint();
    SwingUtilities.convertPointToScreen(point, e.getComponent());
    Rectangle tipRectangle = new Rectangle(point, tipContent.getPreferredSize());
    final Rectangle screen = ScreenUtil.getScreenRectangle(point.x, point.y);
    final JToolTip toolTip = new JToolTip();
    // Clicking inside the tip content hides the tooltip window.
    tipContent.addMouseListener(new HideTooltip(toolTip));
    final Border tooltipBorder = toolTip.getBorder();
    if (tooltipBorder != null) {
      final Insets borderInsets = tooltipBorder.getBorderInsets(this);
      tipRectangle.setSize(tipRectangle.width + borderInsets.left + borderInsets.right,
                           tipRectangle.height + borderInsets.top + borderInsets.bottom);
    }
    toolTip.setLayout(new BorderLayout());
    toolTip.add(scrollPane, BorderLayout.CENTER);
    // Reserve room for scrollbars so they do not obscure content when shown.
    tipRectangle.height += scrollPane.getHorizontalScrollBar().getPreferredSize().height;
    tipRectangle.width += scrollPane.getVerticalScrollBar().getPreferredSize().width;
    // Cap the tooltip at 75% of the screen width.
    final int maxWidth = (int)(screen.width - screen.width * .25);
    if (tipRectangle.width > maxWidth) {
      tipRectangle.width = maxWidth;
    }
    final Dimension prefSize = tipRectangle.getSize();
    ScreenUtil.cropRectangleToFitTheScreen(tipRectangle);
    if (prefSize.width > tipRectangle.width) {
      // Cropping shrank the tooltip: shift it left to regain the preferred width,
      // or clamp it to the right half of the screen when it would fall off the left edge.
      final int delta = prefSize.width - tipRectangle.width;
      tipRectangle.x -= delta;
      if (tipRectangle.x < screen.x) {
        tipRectangle.x = screen.x + maxWidth / 2;
        tipRectangle.width = screen.width - maxWidth / 2;
      }
      else {
        tipRectangle.width += delta;
      }
    }
    toolTip.setPreferredSize(tipRectangle.getSize());
    myCurrentTooltip = toolTip;
    myCurrentTooltipNode = node;
    return myCurrentTooltip;
  }

  /**
   * Returns the tooltip text for a node: the full value text of a {@link ValueDescriptorImpl}
   * when it is multi-line or its row is not fully visible; an empty string when only markup
   * tooltip text exists; null when no tooltip should be shown.
   */
  @Nullable
  private String getTipText(DebuggerTreeNodeImpl node) {
    NodeDescriptorImpl descriptor = node.getDescriptor();
    if (descriptor instanceof ValueDescriptorImpl) {
      String text = ((ValueDescriptorImpl)descriptor).getValueText();
      final String tipText = DebuggerUtilsEx.prepareValueText(text, myProject);
      if (!tipText.isEmpty() &&
          (tipText.indexOf('\n') >= 0 ||
           !getVisibleRect().contains(getRowBounds(getRowForPath(new TreePath(node.getPath())))))) {
        return tipText;
      }
    }
    // Empty (not null) keeps the tooltip alive so the markup text alone can be rendered.
    return node.getMarkupTooltipText() != null ? "" : null;
  }

  /** Returns the debugger node at the event location, or null when none is there. */
  @Nullable
  private DebuggerTreeNodeImpl getNodeToShowTip(MouseEvent event) {
    TreePath path = getPathForLocation(event.getX(), event.getY());
    if (path != null) {
      Object last = path.getLastPathComponent();
      if (last instanceof DebuggerTreeNodeImpl) {
        return (DebuggerTreeNodeImpl)last;
      }
    }
    return null;
  }

  @Override
  public void dispose() {
    // Hide any visible tooltip and drop the cached references.
    final JComponent tooltip = myCurrentTooltip;
    if (tooltip != null) {
      tooltip.setVisible(false);
    }
    myCurrentTooltip = null;
    myCurrentTooltipNode = null;
  }

  public Project getProject() {
    return myProject;
  }

  /** Hides the tooltip's enclosing window when the tip content is action-clicked. */
  private static class HideTooltip extends MouseAdapter {
    private final JToolTip myToolTip;

    HideTooltip(JToolTip toolTip) {
      myToolTip = toolTip;
    }

    @Override
    public void mouseReleased(MouseEvent e) {
      if (UIUtil.isActionClick(e)) {
        final Window wnd = SwingUtilities.getWindowAncestor(myToolTip);
        if (wnd instanceof JWindow) {
          wnd.setVisible(false);
        }
      }
    }
  }
}
/* * Copyright (c) 2021, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.apimgt.gateway; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.synapse.commons.throttle.core.DistributedCounterManager; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import redis.clients.jedis.Response; import redis.clients.jedis.Transaction; /** * Redis Base Distributed Counter Manager for Throttler. 
*/ public class RedisBaseDistributedCountManager implements DistributedCounterManager { private static final Log log = LogFactory.getLog(RedisBaseDistributedCountManager.class); JedisPool redisPool; public RedisBaseDistributedCountManager(JedisPool redisPool) { this.redisPool = redisPool; } @Override public long getCounter(String key) { long startTime = 0; try { String count; startTime = System.currentTimeMillis(); try (Jedis jedis = redisPool.getResource()) { count = jedis.get(key); if (count != null) { long l = Long.parseLong(count); if (log.isDebugEnabled()) { log.debug(String.format("%s Key already exist in redis with value %s", key, l)); } return l; } return 0; } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to getDistributedCounter :" + (System.currentTimeMillis() - startTime)); } } } @Override public void setCounter(String key, long value) { long startTime = 0; try { startTime = System.currentTimeMillis(); asyncGetAndAlterCounter(key, value); } finally { if (log.isDebugEnabled()){ log.debug("Time Taken to setDistributedCounter :" + (System.currentTimeMillis() - startTime)); } } } @Override public long addAndGetCounter(String key, long value) { long startTime = 0; try { startTime = System.currentTimeMillis(); try (Jedis jedis = redisPool.getResource()) { Transaction transaction = jedis.multi(); Response<String> previousResponse = transaction.get(key); Response<Long> incrementedValueResponse = transaction.incrBy(key, value); transaction.exec(); Long incrementedValue = incrementedValueResponse.get(); if (log.isDebugEnabled()) { log.debug(String.format("%s Key increased from %s to %s", key, previousResponse.get(), incrementedValue)); } return incrementedValue; } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to addAndGetDistributedCounter :" + (System.currentTimeMillis() - startTime)); } } } @Override public void removeCounter(String key) { long startTime = 0; try { startTime = System.currentTimeMillis(); try (Jedis jedis = 
redisPool.getResource()) { Transaction transaction = jedis.multi(); transaction.del(key); transaction.exec(); if (log.isDebugEnabled()) { log.debug(String.format("%s Key Removed", key)); } } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to removeCounter :" + (System.currentTimeMillis() - startTime)); } } } @Override public long asyncGetAndAddCounter(String key, long value) { long startTime = 0; try { startTime = System.currentTimeMillis(); try (Jedis jedis = redisPool.getResource()) { long current = 0; Transaction transaction = jedis.multi(); Response<String> currentValue = transaction.get(key); Response<Long> incrementedValue = transaction.incrBy(key, value); transaction.exec(); if (currentValue != null) { current = Long.parseLong(currentValue.get()); } if (log.isDebugEnabled()) { log.info(String.format("%s Key increased from %s to %s", key, current, incrementedValue.get())); } return current; } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to asyncGetAndAddDistributedCounter :" + (System.currentTimeMillis() - startTime)); } } } @Override public long asyncGetAndAlterCounter(String key, long value) { long startTime = 0; try { startTime = System.currentTimeMillis(); try (Jedis jedis = redisPool.getResource()) { long current = 0; Transaction transaction = jedis.multi(); Response<String> currentValue = transaction.get(key); transaction.del(key); Response<Long> incrementedValue = transaction.incrBy(key, value); transaction.exec(); if (currentValue != null) { current = Long.parseLong(currentValue.get()); } if (log.isDebugEnabled()) { log.info(String.format("%s Key increased from %s to %s", key, current, incrementedValue.get())); } return current; } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to asyncGetAndAlterDistributedCounter :" + (System.currentTimeMillis() - startTime)); } } } @Override public long getTimestamp(String key) { long startTime = 0; try { startTime = System.currentTimeMillis(); try (Jedis jedis = 
redisPool.getResource()) { String timeStamp = jedis.get(key); if (timeStamp != null) { return Long.parseLong(timeStamp); } return 0; } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to getSharedTimestamp :" + (System.currentTimeMillis() - startTime)); } } } @Override public void setTimestamp(String key, long timeStamp) { long startTime = 0; try { startTime = System.currentTimeMillis(); try (Jedis jedis = redisPool.getResource()) { Transaction transaction = jedis.multi(); transaction.set(key, String.valueOf(timeStamp)); transaction.exec(); } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to setTimestamp :" + (System.currentTimeMillis() - startTime)); } } } @Override public void removeTimestamp(String key) { long startTime = 0; try { startTime = System.currentTimeMillis(); try (Jedis jedis = redisPool.getResource()) { Transaction transaction = jedis.multi(); transaction.del(key); transaction.exec(); } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to removeTimestamp :" + (System.currentTimeMillis() - startTime)); } } } @Override public void setExpiry(String key, long expiryTimeStamp) { long startTime = 0; try { startTime = System.currentTimeMillis(); try (Jedis jedis = redisPool.getResource()) { Transaction transaction = jedis.multi(); transaction.pexpireAt(key, expiryTimeStamp); transaction.exec(); } } finally { if (log.isDebugEnabled()) { log.debug("Time Taken to setExpiry :" + (System.currentTimeMillis() - startTime)); } } } @Override public boolean isEnable() { return true; } @Override public String getType() { return "redis"; } }
/*
 * Copyright 2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jsqrl.server;

import lombok.extern.slf4j.Slf4j;
import net.i2p.crypto.eddsa.EdDSAEngine;
import net.i2p.crypto.eddsa.EdDSAPublicKey;
import net.i2p.crypto.eddsa.spec.EdDSANamedCurveTable;
import net.i2p.crypto.eddsa.spec.EdDSAParameterSpec;
import net.i2p.crypto.eddsa.spec.EdDSAPublicKeySpec;
import org.jsqrl.config.SqrlConfig;
import org.jsqrl.error.SqrlException;
import org.jsqrl.model.*;
import org.jsqrl.nut.SqrlNut;
import org.jsqrl.service.SqrlAuthenticationService;
import org.jsqrl.service.SqrlNutService;
import org.jsqrl.service.SqrlUserService;
import org.jsqrl.util.SqrlUtil;

import java.security.*;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.HashSet;
import java.util.Set;

/**
 * The main service class that follows the SQRL protocol to
 * process a client SQRL request.
 * <p>
 * Created by Brent Nichols
 */
@Slf4j
public class JSqrlServer {

    // Collaborators injected via the constructor; all are required.
    private final SqrlUserService userService;
    private final SqrlAuthenticationService sqrlAuthenticationService;
    private final SqrlConfig config;
    private final SqrlNutService nutService;
    // Ed25519/SHA-512 curve parameters used for all signature verification.
    private final EdDSAParameterSpec edDsaSpec;

    public JSqrlServer(final SqrlUserService userService,
                       final SqrlAuthenticationService sqrlAuthenticationService,
                       final SqrlConfig config,
                       final SqrlNutService nutService) {
        this.userService = userService;
        this.sqrlAuthenticationService = sqrlAuthenticationService;
        this.config = config;
        this.nutService = nutService;
        edDsaSpec = EdDSANamedCurveTable.getByName(EdDSANamedCurveTable.CURVE_ED25519_SHA512);
    }

    /**
     * This method should be the method called when a user is requesting
     * a nut. This creates the nut that signifies the authentication request,
     * which should get marked as authenticated when the user uses a SQRL client
     * to authenticate.
     *
     * @param ipAddress The requesting IP Address
     * @param qr        If the user used the QR code or not
     * @return Returns the nut to be provided to the user
     */
    public String createAuthenticationRequest(final String ipAddress, final Boolean qr) {
        SqrlNut nut = nutService.createNut(ipAddress, qr);
        String nutString = nutService.getNutString(nut);
        sqrlAuthenticationService.createAuthenticationRequest(nutString, ipAddress);
        log.debug("Creating nut {}", nutString);
        return nutString;
    }

    /**
     * This method should be used to check the authentication status of a nut.
     * This should be a nut created by the createAuthenticationRequest method
     * that was marked as authenticated by using a SQRL client.
     *
     * @param nut       The nut provided by the user
     * @param ipAddress The requesting IP Address
     * @return Returns true if the nut was marked as authenticated
     */
    public Boolean checkAuthenticationStatus(final String nut, final String ipAddress) {
        return nutService.nutBelongsToIp(nut, ipAddress)
                && sqrlAuthenticationService.getAuthenticatedSqrlIdentityKey(nut, ipAddress) != null;
    }

    /**
     * Processes a single SQRL client request: verifies the protocol version and the
     * request signatures, matches the client's identity (current or previous key),
     * executes the requested command (QUERY/IDENT/DISABLE/REMOVE/ENABLE), and builds
     * the response with the accumulated transaction information flags and a fresh nut.
     *
     * @param request   the parsed SQRL client request
     * @param nut       the nut the client is responding to
     * @param ipAddress the requesting IP address
     * @return the SQRL response to return to the client
     */
    public SqrlAuthResponse handleClientRequest(final SqrlClientRequest request,
                                                final String nut,
                                                final String ipAddress) {

        //Build the new nut for this request, retain the QR code
        SqrlNut requestNut = nutService.createNutFromString(nut);

        log.debug("Handling client request for nut {}", nut);

        SqrlNut responseNut = nutService.createNut(ipAddress, requestNut.isQr());
        String responseNutString = nutService.getNutString(responseNut);

        //Prepare the server unlock key value for the response
        String sukResponse = null;

        //Check protocol version first
        if (!request.getRequestVersion().equals(config.getSqrlVersion())) {
            return createResponse(responseNutString, null, TransactionInformationFlag.CLIENT_FAILURE);
        }

        //Validate IDS and PIDS request signatures
        Signature verifier;
        try {
            verifier = new EdDSAEngine(MessageDigest.getInstance("SHA-512"));
            verifyIdSignature(request, verifier);
            verifyPreviousIdSignature(request, verifier);
        } catch (SqrlException | NoSuchAlgorithmException e) {
            log.debug("Unable to verify signature", e);
            return createResponse(responseNutString, null, TransactionInformationFlag.CLIENT_FAILURE);
        }

        String identityKey = request.getIdentityKey();
        String previousIdentityKey = request.getPreviousIdentityKey();

        Set<TransactionInformationFlag> tifs = new HashSet<>();

        //Check nut expiration
        Long nutAge = Duration.between(requestNut.getCreated(), LocalDateTime.now()).getSeconds();
        if (nutAge > config.getNutExpirationSeconds()) {
            // Expired nut: report a transient error; the client may retry with the new nut.
            tifs.add(TransactionInformationFlag.TRANSIENT_ERROR);
        } else {
            //Correlate the requesting nut with the new one that was generated
            sqrlAuthenticationService.linkNut(nut, responseNutString);

            //Add the TIF for an IP match
            if (requestNut.checkIpMatch(responseNut)) {
                tifs.add(TransactionInformationFlag.IP_MATCHED);
            }

            SqrlUser sqrlUser = userService.getUserBySqrlKey(identityKey);
            Boolean sqrlEnabled = true;

            if (sqrlUser != null) {
                //If the user is found, add the TIF for identity match
                tifs.add(TransactionInformationFlag.ID_MATCH);
            } else if (previousIdentityKey != null) {
                //Try their previous identity key if they are carrying one
                sqrlUser = userService.getUserBySqrlKey(previousIdentityKey);
                if (sqrlUser != null) {
                    // Known user presenting a rotated identity: persist the new key.
                    userService.updateIdentityKey(previousIdentityKey, identityKey);
                    tifs.add(TransactionInformationFlag.PREVIOUS_ID_MATCH);
                }
            }

            if (sqrlUser != null && request.getOptionFlags().contains(SqrlOptionFlag.SERVER_UNLOCK_KEY)) {
                // Client asked for its server unlock key; echo it in the response.
                sukResponse = sqrlUser.getServerUnlockKey();
            }

            //Check for disabled status
            if (sqrlUser != null && !sqrlUser.sqrlEnabled()) {
                sqrlEnabled = false;
                tifs.add(TransactionInformationFlag.SQRL_DISABLED);
            }

            //Determine the command
            SqrlCommand command = request.getCommand();
            if (command == null) {
                //Unrecognized command
                tifs.add(TransactionInformationFlag.FUNCTION_NOT_SUPPORTED);
            } else if (command == SqrlCommand.QUERY && sqrlEnabled) {
                //Don't authenticate the user, just provide the client
                //with information on what we know about the user via
                //the transaction information flags.
            } else if (command == SqrlCommand.IDENT && sqrlEnabled) {
                //Authenticate the user
                //Register if needed
                if (sqrlUser == null) {
                    userService.registerSqrlUser(identityKey,
                            request.getServerUnlockKey(),
                            request.getVerifyUnlockKey());
                }
                //Authenticate the user
                sqrlAuthenticationService.authenticateNut(responseNutString, identityKey);
                tifs.add(TransactionInformationFlag.ID_MATCH);
            } else if (command == SqrlCommand.DISABLE && sqrlEnabled) {
                //Disable the user's account
                userService.disableSqrlUser(identityKey);
            } else if (command == SqrlCommand.REMOVE && sqrlEnabled) {
                if (sqrlUser != null) {
                    //Remove the user's account
                    // Destructive operation: requires a valid unlock request signature first.
                    verifyUnlockRequestSignature(request, sqrlUser.getVerifyUnlockKey(), verifier);
                    userService.removeSqrlUser(identityKey);
                } else {
                    tifs.add(TransactionInformationFlag.CLIENT_FAILURE);
                }
            } else if (command == SqrlCommand.ENABLE) {
                // NOTE(review): ENABLE deliberately ignores sqrlEnabled — it must work on a
                // disabled account; confirm this matches the intended protocol behavior.
                if (sqrlUser != null) {
                    //Re-enable the user's account
                    verifyUnlockRequestSignature(request, sqrlUser.getVerifyUnlockKey(), verifier);
                    userService.enableSqrlUser(identityKey);
                } else {
                    tifs.add(TransactionInformationFlag.CLIENT_FAILURE);
                }
            }
        }

        SqrlAuthResponse response = createResponse(
                responseNutString,
                sukResponse,
                tifs.toArray(new TransactionInformationFlag[tifs.size()]));

        log.debug("Response: {}", response);

        return response;
    }

    /**
     * Builds a SQRL response carrying the new nut, the query URI for the next request,
     * the optional server unlock key, and the given transaction information flags.
     */
    private SqrlAuthResponse createResponse(final String nut,
                                            final String suk,
                                            final TransactionInformationFlag... tifs) {
        return SqrlAuthResponse.builder()
                .nut(nut)
                .qry(config.getSqrlBaseUri() + "?nut=" + nut)
                .addTifs(tifs)
                .ver(config.getSqrlVersion())
                .suk(suk).build();
    }

    /** Verifies the request's IDS signature against its identity key; throws SqrlException on failure. */
    private void verifyIdSignature(final SqrlClientRequest request, final Signature verifier) {
        verifySqrlRequestSignature(
                request,
                verifier,
                SqrlUtil.base64UrlDecode(request.getIdentityKey()),
                request.getDecodedIdentitySignature(),
                "Unable to verify ID Signature");
    }

    /** Verifies the URS signature against the stored verify unlock key; throws SqrlException on failure. */
    private void verifyUnlockRequestSignature(final SqrlClientRequest request,
                                              final String verifyUnlockKey,
                                              final Signature verifier) {
        verifySqrlRequestSignature(
                request,
                verifier,
                SqrlUtil.base64UrlDecode(verifyUnlockKey),
                request.getDecodedUnlockRequestSignature(),
                "Unable to verify Unlock Request Signature");
    }

    /** Verifies the PIDS signature when a previous identity key is present; no-op otherwise. */
    private void verifyPreviousIdSignature(final SqrlClientRequest request, final Signature verifier) {
        if (request.getPreviousIdentityKey() != null) {
            verifySqrlRequestSignature(
                    request,
                    verifier,
                    SqrlUtil.base64UrlDecode(request.getPreviousIdentityKey()),
                    request.getDecodedPreviousIdSignature(),
                    "Unable to verify Previous ID Signature");
        }
    }

    /**
     * Verifies {@code signature} over the concatenated client+server parameter strings
     * using the given public {@code key}.
     *
     * @throws SqrlException with {@code errorMessage} when the signature does not verify,
     *                       or wrapping the underlying crypto error
     */
    private void verifySqrlRequestSignature(final SqrlClientRequest request,
                                            final Signature verifier,
                                            final byte[] key,
                                            final byte[] signature,
                                            final String errorMessage) {
        // The signed message is the raw client parameter string followed by the server string.
        byte[] requestMessage = (request.getClient() + request.getServer()).getBytes();
        try {
            if (!verifyEdDSASignature(verifier, key, requestMessage, signature)) {
                throw new SqrlException(errorMessage);
            }
        } catch (InvalidKeyException | SignatureException e) {
            throw new SqrlException("Unable to verify message signature", e);
        }
    }

    /** Runs an Ed25519 verification of {@code signature} over {@code message} with public {@code key}. */
    private Boolean verifyEdDSASignature(final Signature verifier,
                                         final byte[] key,
                                         final byte[] message,
                                         final byte[] signature) throws InvalidKeyException, SignatureException {
        verifier.initVerify(new EdDSAPublicKey(new EdDSAPublicKeySpec(key, edDsaSpec)));
        verifier.update(message);
        return verifier.verify(signature);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jena.ttl.turtle; import org.apache.jena.datatypes.RDFDatatype ; import org.apache.jena.datatypes.TypeMapper ; import org.apache.jena.datatypes.xsd.XSDDatatype ; import org.apache.jena.graph.Node ; import org.apache.jena.graph.NodeFactory ; import org.apache.jena.graph.Triple ; import org.apache.jena.shared.PrefixMapping ; import org.apache.jena.shared.impl.PrefixMappingImpl ; import org.apache.jena.ttl.JenaURIException; import org.apache.jena.ttl.N3IRIResolver; import org.apache.jena.vocabulary.RDF ; @SuppressWarnings("deprecation") public class TurtleParserBase { // Should be the same as ARQ ParserBase and Prologues. 
protected final Node XSD_TRUE = NodeFactory.createLiteral("true", null, XSDDatatype.XSDboolean) ; protected final Node XSD_FALSE = NodeFactory.createLiteral("false", null, XSDDatatype.XSDboolean) ; protected final Node nRDFtype = RDF.type.asNode() ; protected final Node nRDFnil = RDF.nil.asNode() ; protected final Node nRDFfirst = RDF.first.asNode() ; protected final Node nRDFrest = RDF.rest.asNode() ; protected final Node nRDFsubject = RDF.subject.asNode() ; protected final Node nRDFpredicate = RDF.predicate.asNode() ; protected final Node nRDFobject = RDF.object.asNode() ; protected final String SWAP_NS = "http://www.w3.org/2000/10/swap/" ; protected final String SWAP_LOG_NS = "http://www.w3.org/2000/10/swap/log#" ; protected final Node nLogImplies = NodeFactory.createURI(SWAP_LOG_NS+"implies") ; protected final Node nOwlSameAs = NodeFactory.createURI("http://www.w3.org/2002/07/owl#sameAs") ; protected boolean strictTurtle = true ; protected boolean skolomizedBNodes = true ; public TurtleParserBase() {} PrefixMapping prefixMapping = new PrefixMappingImpl() ; N3IRIResolver resolver = new N3IRIResolver() ; protected String getBaseURI() { return resolver.getBaseIRI() ; } public void setBaseURI(String u) { resolver = new N3IRIResolver(u) ; } protected void setBase(String iriStr , int line, int column) { // Already resolved. 
setBaseURI(iriStr) ; } public PrefixMapping getPrefixMapping() { return prefixMapping ; } // label => bNode for construct templates patterns LabelToNodeMap bNodeLabels = new LabelToNodeMap() ; TurtleEventHandler handler = null ; public void setEventHandler(TurtleEventHandler h) { handler = h ; } protected void emitTriple(int line, int col, Triple triple) { handler.triple(line, col, triple) ; } protected void startFormula(int line, int col) { handler.startFormula(line, col) ; } protected void endFormula(int line, int col) {handler.endFormula(line, col) ; } protected void setPrefix(int line, int col, String prefix, String uri) { prefixMapping.setNsPrefix(prefix, uri) ; handler.prefix(line, col, prefix, uri) ; } protected int makePositiveInteger(String lexicalForm) { if ( lexicalForm == null ) return -1 ; return Integer.parseInt(lexicalForm) ; } protected Node createLiteralInteger(String lexicalForm) { return NodeFactory.createLiteral(lexicalForm, null, XSDDatatype.XSDinteger) ; } protected Node createLiteralDouble(String lexicalForm) { return NodeFactory.createLiteral(lexicalForm, null, XSDDatatype.XSDdouble) ; } protected Node createLiteralDecimal(String lexicalForm) { return NodeFactory.createLiteral(lexicalForm, null, XSDDatatype.XSDdecimal) ; } protected Node createLiteral(String lexicalForm, String langTag, Node datatype) { String uri = (datatype==null) ? null : datatype.getURI() ; return createLiteral(lexicalForm, langTag, uri) ; } protected Node createLiteral(String lexicalForm, String langTag, String datatypeURI) { Node n = null ; // Can't have type and lang tag. 
if ( datatypeURI != null) { RDFDatatype dType = TypeMapper.getInstance().getSafeTypeByName(datatypeURI) ; n = NodeFactory.createLiteral(lexicalForm, null, dType) ; } else n = NodeFactory.createLiteral(lexicalForm, langTag, null) ; return n ; } protected long integerValue(String s) { if ( s.startsWith("+") ) s = s.substring(1) ; if ( s.startsWith("0x") ) { // Hex s = s.substring(2) ; return Long.parseLong(s, 16) ; } return Long.parseLong(s) ; } protected double doubleValue(String s) { if ( s.startsWith("+") ) s = s.substring(1) ; double valDouble = Double.parseDouble(s) ; return valDouble ; } protected String stripQuotes(String s) { return s.substring(1,s.length()-1) ; } protected String stripQuotes3(String s) { return s.substring(3,s.length()-3) ; } protected String stripChars(String s, int n) { return s.substring(n, s.length()) ; } protected String resolveQuotedIRI(String iriStr ,int line, int column) { iriStr = stripQuotes(iriStr) ; return resolveIRI(iriStr, line, column) ; } protected String resolveIRI(String iriStr , int line, int column) { if ( isBNodeIRI(iriStr) ) return iriStr ; if ( resolver != null ) iriStr = _resolveIRI(iriStr, line, column) ; return iriStr ; } private String _resolveIRI(String iriStr , int line, int column) { try { iriStr = resolver.resolve(iriStr) ; } catch (JenaURIException ex) { throwParseException(ex.getMessage(), line, column) ; } return iriStr ; } protected String resolvePName(String qname, int line, int column) { String s = myExpandPrefix(prefixMapping, qname) ; if ( s == null ) throwParseException("Unresolved prefixed name: "+qname, line, column) ; return s ; } private static String myExpandPrefix(PrefixMapping prefixMapping, String qname) { String s = prefixMapping.expandPrefix(qname) ; if ( s == null ) return null ; if ( s.equals(qname) ) { // The contract of expandPrefix is to return the original name if // there is no prefix but what s the expanded and original form are // actually the same character string ? 
int colon = qname.indexOf( ':' ); if (colon < 0) return null ; String prefix = qname.substring( 0, colon ) ; if ( prefixMapping.getNsPrefixURI(prefix) != null ) // The original and resolved forms are the same. return s ; return null ; } return s ; } final static String bNodeLabelStart = "_:" ; protected Node createListNode() { return createBNode() ; } // Unlabelled bNode. protected Node createBNode() { return bNodeLabels.allocNode() ; } // Labelled bNode. protected Node createBNode(String label, int line, int column) { return bNodeLabels.asNode(label) ; } protected Node createVariable(String s, int line, int column) { s = s.substring(1) ; // Drop the marker return NodeFactory.createVariable(s) ; } protected Node createNode(String iri) { // Is it a bNode label? i.e. <_:xyz> if ( isBNodeIRI(iri) ) { String s = iri.substring(bNodeLabelStart.length()) ; Node n = NodeFactory.createBlankNode(s) ; return n ; } return NodeFactory.createURI(iri) ; } protected boolean isBNodeIRI(String iri) { return skolomizedBNodes && iri.startsWith(bNodeLabelStart) ; } // protected Node createNodeFromURI(String s, int line, int column) // { // s = stripQuotes(s) ; // String uriStr = s ; // Mutated // // try { // uriStr = resolver.resolve(uriStr) ; // } catch (JenaURIException ex) // { // throw new TurtleParseException(exMsg(ex.getMessage(), line, column)) ; // } // return Node.createURI(uriStr) ; // } protected void throwParseException(String s , int line, int column) { throw new TurtleParseException(exMsg(s, line, column)) ; } protected String fixupPrefix(String prefix, int line, int column) { if ( prefix.endsWith(":") ) prefix = prefix.substring(0, prefix.length()-1) ; return prefix ; } // Utilities to remove escapes // Testing interface public static String unescapeStr(String s) { return unescape(s, '\\', false, 1, 1) ; } // public static String unescapeCodePoint(String s) // { return unescape(s, '\\', true, 1, 1) ; } // // protected String unescapeCodePoint(String s, int line, int 
column) // { return unescape(s, '\\', true, line, column) ; } protected String unescapeStr(String s, int line, int column) { return unescape(s, '\\', false, line, column) ; } // Worker function private static String unescape(String s, char escape, boolean pointCodeOnly, int line, int column) { int i = s.indexOf(escape) ; if ( i == -1 ) return s ; // Dump the initial part straight into the string buffer StringBuilder sb = new StringBuilder(s.substring(0,i)) ; int len = s.length() ; for ( ; i < len ; i++ ) { char ch = s.charAt(i) ; // Keep line and column numbers. switch (ch) { case '\n': case '\r': line++ ; column = 1 ; break ; default: column++ ; break ; } if ( ch != escape ) { sb.append(ch) ; continue ; } // Escape if ( i >= len-1 ) throw new TurtleParseException(exMsg("Illegal escape at end of string", line, column)) ; char ch2 = s.charAt(i+1) ; column = column+1 ; i = i + 1 ; // \\u and \\U if ( ch2 == 'u' ) { // i points to the \ so i+6 is next character if ( i+4 >= len ) throw new TurtleParseException(exMsg("\\u escape too short", line, column)) ; int x = hex(s, i+1, 4, line, column) ; sb.append((char)x) ; // Jump 1 2 3 4 -- already skipped \ and u i = i+4 ; column = column+4 ; continue ; } if ( ch2 == 'U' ) { // i points to the \ so i+6 is next character if ( i+8 >= len ) throw new TurtleParseException(exMsg("\\U escape too short", line, column)) ; int x = hex(s, i+1, 8, line, column) ; sb.append((char)x) ; // Jump 1 2 3 4 5 6 7 8 -- already skipped \ and u i = i+8 ; column = column+8 ; continue ; } // Are we doing just point code escapes? // If so, \X-anything else is legal as a literal "\" and "X" if ( pointCodeOnly ) { sb.append('\\') ; sb.append(ch2) ; i = i + 1 ; continue ; } // Not just codepoints. Must be a legal escape. 
char ch3 = 0 ; switch (ch2) { case 'n': ch3 = '\n' ; break ; case 't': ch3 = '\t' ; break ; case 'r': ch3 = '\r' ; break ; case 'b': ch3 = '\b' ; break ; case 'f': ch3 = '\f' ; break ; case '\'': ch3 = '\'' ; break ; case '\"': ch3 = '\"' ; break ; case '\\': ch3 = '\\' ; break ; default: throw new TurtleParseException(exMsg("Unknown escape: \\"+ch2, line, column)) ; } sb.append(ch3) ; } return sb.toString() ; } // Line and column that started the escape static private int hex(String s, int i, int len, int line, int column) { // if ( i+len >= s.length() ) // { // // } int x = 0 ; for ( int j = i ; j < i+len ; j++ ) { char ch = s.charAt(j) ; column++ ; int k = 0 ; switch (ch) { case '0': k = 0 ; break ; case '1': k = 1 ; break ; case '2': k = 2 ; break ; case '3': k = 3 ; break ; case '4': k = 4 ; break ; case '5': k = 5 ; break ; case '6': k = 6 ; break ; case '7': k = 7 ; break ; case '8': k = 8 ; break ; case '9': k = 9 ; break ; case 'A': case 'a': k = 10 ; break ; case 'B': case 'b': k = 11 ; break ; case 'C': case 'c': k = 12 ; break ; case 'D': case 'd': k = 13 ; break ; case 'E': case 'e': k = 14 ; break ; case 'F': case 'f': k = 15 ; break ; default: throw new TurtleParseException(exMsg("Illegal hex escape: "+ch, line, column)) ; } x = (x<<4)+k ; } return x ; } protected static String exMsg(String msg, int line, int column) { return "Line " + line + ", column " + column + ": " + msg ; } }
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.idea.maven.project;

import com.intellij.ProjectTopics;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.roots.ModuleRootListener;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.events.*;
import com.intellij.openapi.vfs.pointers.VirtualFilePointer;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.util.PathUtil;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.update.Update;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.idea.maven.model.MavenConstants;
import org.jetbrains.idea.maven.utils.MavenMergingUpdateQueue;
import org.jetbrains.idea.maven.utils.MavenUtil;

import java.io.File;
import java.util.*;

/**
 * Watches the sources that can invalidate the Maven project model — pom.xml /
 * profiles.xml files, Maven settings files, project roots, and in-editor
 * document edits — and schedules reading/import tasks in response.
 */
public class MavenProjectsManagerWatcher {

  // Marker user-data on a VirtualFile: when set, a refresh of that file forces
  // import+resolve even if automatic import is disabled.
  // NOTE(review): concatenating the Class object yields "class org...Watcher" +
  // suffix as the key id; getName() was presumably intended, but changing it
  // would change the key's identity — leave as-is.
  public static final Key<Boolean> FORCE_IMPORT_AND_RESOLVE_ON_REFRESH =
    Key.create(MavenProjectsManagerWatcher.class + "FORCE_IMPORT_AND_RESOLVE_ON_REFRESH");

  // Delay (ms) before changed Maven documents are committed and saved.
  private static final int DOCUMENT_SAVE_DELAY = 1000;

  private final Project myProject;
  private final MavenProjectsManager myManager;
  private final MavenProjectsTree myProjectsTree;
  private final MavenGeneralSettings myGeneralSettings;
  private final MavenProjectsProcessor myReadingProcessor;
  private final MavenEmbeddersManager myEmbeddersManager;

  // File pointers for the effective user/global settings.xml files.
  private final List<VirtualFilePointer> mySettingsFilesPointers = new ArrayList<VirtualFilePointer>();
  // LocalFileSystem watch requests registered for the settings directories.
  private final List<LocalFileSystem.WatchRequest> myWatchedRoots = new ArrayList<LocalFileSystem.WatchRequest>();

  // Documents edited since the last flush; guarded by synchronized(myChangedDocuments).
  private final Set<Document> myChangedDocuments = new THashSet<Document>();
  private final MavenMergingUpdateQueue myChangedDocumentsQueue;

  public MavenProjectsManagerWatcher(Project project,
                                     MavenProjectsManager manager,
                                     MavenProjectsTree projectsTree,
                                     MavenGeneralSettings generalSettings,
                                     MavenProjectsProcessor readingProcessor,
                                     MavenEmbeddersManager embeddersManager) {
    myProject = project;
    myManager = manager;
    myProjectsTree = projectsTree;
    myGeneralSettings = generalSettings;
    myReadingProcessor = readingProcessor;
    myEmbeddersManager = embeddersManager;

    myChangedDocumentsQueue = new MavenMergingUpdateQueue(getClass() + ": Document changes queue",
                                                          DOCUMENT_SAVE_DELAY, false, myProject);
  }

  /**
   * Subscribes all listeners (VFS, project roots, editor documents, settings
   * changes) and activates the document-save queue.
   */
  public synchronized void start() {
    final MessageBusConnection myBusConnection =
      myProject.getMessageBus().connect(myChangedDocumentsQueue);
    myBusConnection.subscribe(VirtualFileManager.VFS_CHANGES, new MyFileChangeListener());
    myBusConnection.subscribe(ProjectTopics.PROJECT_ROOTS, new MyRootChangesListener());

    myChangedDocumentsQueue.makeUserAware(myProject);
    myChangedDocumentsQueue.activate();

    // Collects edited Maven documents and, after the merge delay, commits and
    // saves them on the EDT so VFS change events fire and trigger re-reading.
    DocumentAdapter myDocumentListener = new DocumentAdapter() {
      public void documentChanged(DocumentEvent event) {
        Document doc = event.getDocument();
        VirtualFile file = FileDocumentManager.getInstance().getFile(doc);
        if (file == null) return;
        boolean isMavenFile =
          file.getName().equals(MavenConstants.POM_XML) || file.getName().equals(MavenConstants.PROFILES_XML) || isSettingsFile(file);
        if (!isMavenFile) return;

        synchronized (myChangedDocuments) {
          myChangedDocuments.add(doc);
        }
        myChangedDocumentsQueue.queue(new Update(MavenProjectsManagerWatcher.this) {
          public void run() {
            // Snapshot and clear under the lock; process the copy outside it.
            final Set<Document> copy;

            synchronized (myChangedDocuments) {
              copy = new THashSet<Document>(myChangedDocuments);
              myChangedDocuments.clear();
            }

            MavenUtil.invokeLater(myProject, new Runnable() {
              public void run() {
                new WriteAction() {
                  protected void run(Result result) throws Throwable {
                    for (Document each : copy) {
                      PsiDocumentManager.getInstance(myProject).commitDocument(each);
                      FileDocumentManager.getInstance().saveDocument(each);
                    }
                  }
                }.execute();
              }
            });
          }
        });
      }
    };
    EditorFactory.getInstance().getEventMulticaster().addDocumentListener(myDocumentListener, myBusConnection);

    // When the Maven settings paths change, re-point the settings file
    // watchers and reset embedders / re-read projects.
    final MavenGeneralSettings.Listener mySettingsPathsChangesListener = new MavenGeneralSettings.Listener() {
      public void changed() {
        updateSettingsFilePointers();
        onSettingsChange();
      }
    };
    myGeneralSettings.addListener(mySettingsPathsChangesListener);
    Disposer.register(myChangedDocumentsQueue, new Disposable() {
      public void dispose() {
        myGeneralSettings.removeListener(mySettingsPathsChangesListener);
        mySettingsFilesPointers.clear();
      }
    });

    updateSettingsFilePointers();
  }

  /** Re-registers watched roots and file pointers for the effective settings files. */
  private void updateSettingsFilePointers() {
    LocalFileSystem.getInstance().removeWatchedRoots(myWatchedRoots);
    mySettingsFilesPointers.clear();
    addFilePointer(myGeneralSettings.getEffectiveUserSettingsIoFile());
    addFilePointer(myGeneralSettings.getEffectiveGlobalSettingsIoFile());
  }

  /**
   * Watches the settings file's parent directory (non-recursively) and creates
   * a VirtualFilePointer for the file itself, disposed with the queue.
   */
  private void addFilePointer(File settingsFile) {
    if (settingsFile == null) return;

    File parentFile = settingsFile.getParentFile();
    if (parentFile != null) {
      String path = getNormalizedPath(parentFile);
      if (path != null) {
        myWatchedRoots.add(LocalFileSystem.getInstance().addRootToWatch(path, false));
      }
    }

    String path = getNormalizedPath(settingsFile);
    if (path != null) {
      String url = VfsUtil.pathToUrl(path);
      // Listener is a no-op: the pointer exists only so isSettingsFile() can
      // match against a live VirtualFile.
      mySettingsFilesPointers.add(
        VirtualFilePointerManager.getInstance().create(url, myChangedDocumentsQueue, new VirtualFilePointerListener() {
          public void beforeValidityChanged(VirtualFilePointer[] pointers) {
          }

          public void validityChanged(VirtualFilePointer[] pointers) {
          }
        }));
    }
  }

  /** Canonicalizes the path and converts it to forward slashes; null if canonicalization fails. */
  @Nullable
  private static String getNormalizedPath(@NotNull File settingsFile) {
    String canonized = PathUtil.getCanonicalPath(settingsFile.getAbsolutePath());
    return canonized == null ? null : FileUtil.toSystemIndependentName(canonized);
  }

  /** Disposes the queue, which cascades to bus connections, pointers and listeners. */
  public synchronized void stop() {
    Disposer.dispose(myChangedDocumentsQueue);
  }

  public synchronized void addManagedFilesWithProfiles(List<VirtualFile> files, List<String> explicitProfiles) {
    myProjectsTree.addManagedFilesWithProfiles(files, explicitProfiles);
    scheduleUpdateAll();
  }

  @TestOnly
  public synchronized void resetManagedFilesAndProfilesInTests(List<VirtualFile> files, List<String> explicitProfiles) {
    myProjectsTree.resetManagedFilesAndProfiles(files, explicitProfiles);
    scheduleUpdateAll();
  }

  public synchronized void removeManagedFiles(List<VirtualFile> files) {
    myProjectsTree.removeManagedFiles(files);
    scheduleUpdateAll();
  }

  public synchronized void setExplicitProfiles(Collection<String> profiles) {
    myProjectsTree.setExplicitProfiles(profiles);
    scheduleUpdateAll();
  }

  private void scheduleUpdateAll() {
    scheduleUpdateAll(false, true);
  }

  /**
   * Schedules a full re-read of all projects; on completion triggers
   * import+resolve when forced or when automatic import is enabled.
   */
  public void scheduleUpdateAll(boolean force, final boolean forceImportAndResolve) {
    Runnable onCompletion = new Runnable() {
      @Override
      public void run() {
        if (forceImportAndResolve || myManager.getImportingSettings().isImportAutomatically()) {
          myManager.scheduleImportAndResolve();
        }
      }
    };
    myReadingProcessor.scheduleTask(new MavenProjectsProcessorReadingTask(force, myProjectsTree, myGeneralSettings, onCompletion));
  }

  /** Schedules an incremental re-read for the given updated/deleted files. */
  public void scheduleUpdate(List<VirtualFile> filesToUpdate,
                             List<VirtualFile> filesToDelete,
                             boolean force,
                             final boolean forceImportAndResolve) {
    Runnable onCompletion = new Runnable() {
      @Override
      public void run() {
        if (forceImportAndResolve || myManager.getImportingSettings().isImportAutomatically()) {
          myManager.scheduleImportAndResolve();
        }
      }
    };
    myReadingProcessor.scheduleTask(new MavenProjectsProcessorReadingTask(filesToUpdate, filesToDelete, force, myProjectsTree, myGeneralSettings, onCompletion));
  }

  // Settings content changed: drop cached embedders and force a full re-read.
  private void onSettingsChange() {
    myEmbeddersManager.reset();
    scheduleUpdateAll(true, false);
  }

  private void onSettingsXmlChange() {
    myGeneralSettings.changed();
    // onSettingsChange() will be called indirectly by pathsChanged listener on GeneralSettings object
  }

  /** Reconciles managed files with reality whenever project roots change. */
  private class MyRootChangesListener implements ModuleRootListener {
    public void beforeRootsChange(ModuleRootEvent event) {
    }

    public void rootsChanged(ModuleRootEvent event) {
      // todo is this logic necessary?
      List<VirtualFile> existingFiles = myProjectsTree.getProjectsFiles();
      List<VirtualFile> newFiles = new ArrayList<VirtualFile>();
      List<VirtualFile> deletedFiles = new ArrayList<VirtualFile>();

      // Managed files that exist on disk but are not yet in the tree.
      for (VirtualFile f : myProjectsTree.getExistingManagedFiles()) {
        if (!existingFiles.contains(f)) {
          newFiles.add(f);
        }
      }

      // Tree entries whose backing file has disappeared.
      for (VirtualFile f : existingFiles) {
        if (!f.isValid()) deletedFiles.add(f);
      }

      scheduleUpdate(newFiles, deletedFiles, false, false);
    }
  }

  // True when the path names a pom.xml belonging to a potential project.
  private boolean isPomFile(String path) {
    if (!path.endsWith("/" + MavenConstants.POM_XML)) return false;
    return myProjectsTree.isPotentialProject(path);
  }

  // True when the path names a profiles.xml sitting next to a potential project's pom.xml.
  private boolean isProfilesFile(String path) {
    String suffix = "/" + MavenConstants.PROFILES_XML;
    if (!path.endsWith(suffix)) return false;
    int pos = path.lastIndexOf(suffix);
    return myProjectsTree.isPotentialProject(path.substring(0, pos) + "/" + MavenConstants.POM_XML);
  }

  // Path-based match against the watched settings files.
  private boolean isSettingsFile(String path) {
    for (VirtualFilePointer each : mySettingsFilesPointers) {
      VirtualFile f = each.getFile();
      if (f != null && FileUtil.pathsEqual(path, f.getPath())) return true;
    }
    return false;
  }

  // Identity-based match against the watched settings files.
  private boolean isSettingsFile(VirtualFile f) {
    for (VirtualFilePointer each : mySettingsFilesPointers) {
      if (each.getFile() == f) return true;
    }
    return false;
  }

  /**
   * Accumulates relevant VFS changes between before()/after() and turns them
   * into a single scheduleUpdate()/onSettingsXmlChange() call in apply().
   */
  private class MyFileChangeListener extends MyFileChangeListenerBase {
    // Lazily initialized per event batch; null between batches (see initLists()).
    private List<VirtualFile> filesToUpdate;
    private List<VirtualFile> filesToRemove;
    private boolean settingsHaveChanged;
    private boolean forceImportAndResolve;

    protected boolean isRelevant(String path) {
      return isPomFile(path) || isProfilesFile(path) || isSettingsFile(path);
    }

    protected void updateFile(VirtualFile file) {
      doUpdateFile(file, false);
    }

    protected void deleteFile(VirtualFile file) {
      doUpdateFile(file, true);
    }

    // Routes a changed/deleted file into the per-batch buckets.
    private void doUpdateFile(VirtualFile file, boolean remove) {
      initLists();
      if (isSettingsFile(file)) {
        settingsHaveChanged = true;
        return;
      }

      if (file.getUserData(FORCE_IMPORT_AND_RESOLVE_ON_REFRESH) == Boolean.TRUE) {
        forceImportAndResolve = true;
      }

      // A profiles.xml change is treated as a change of its sibling pom.xml.
      VirtualFile pom = getPomFileProfilesFile(file);
      if (pom != null) {
        filesToUpdate.add(pom);
        return;
      }

      if (remove) {
        filesToRemove.add(file);
      }
      else {
        filesToUpdate.add(file);
      }
    }

    // For a profiles.xml, returns the pom.xml in the same directory; else null.
    private VirtualFile getPomFileProfilesFile(VirtualFile f) {
      if (!f.getName().equals(MavenConstants.PROFILES_XML)) return null;
      return f.getParent().findChild(MavenConstants.POM_XML);
    }

    protected void apply() {
      // the save may occur during project close. in this case the background task
      // can not be started since the window has already been closed.
      if (areFileSetsInitialised()) {
        if (settingsHaveChanged) {
          onSettingsXmlChange();
        }
        else {
          filesToUpdate.removeAll(filesToRemove);
          scheduleUpdate(filesToUpdate, filesToRemove, false, forceImportAndResolve);
        }
      }

      clearLists();
    }

    private boolean areFileSetsInitialised() {
      return filesToUpdate != null;
    }

    private void initLists() {
      // Do not use before() method to initialize the lists
      // since the listener can be attached during the update
      // and before method can be skipped.
      // The better way to fix it, of course, is to do something with
      // subscription - add listener not during postStartupActivity
      // but on project initialization to avoid this situation.
      if (areFileSetsInitialised()) return;

      filesToUpdate = new ArrayList<VirtualFile>();
      filesToRemove = new ArrayList<VirtualFile>();
      settingsHaveChanged = false;
      forceImportAndResolve = false;
    }

    private void clearLists() {
      filesToUpdate = null;
      filesToRemove = null;
    }
  }

  /**
   * Skeleton BulkFileListener: subclasses classify paths (isRelevant), receive
   * per-file callbacks, and commit the batch in apply() at the end of after().
   */
  private static abstract class MyFileChangeListenerBase implements BulkFileListener {
    protected abstract boolean isRelevant(String path);

    protected abstract void updateFile(VirtualFile file);

    protected abstract void deleteFile(VirtualFile file);

    protected abstract void apply();

    public void before(List<? extends VFileEvent> events) {
      for (VFileEvent each : events) {
        if (each instanceof VFileDeleteEvent) {
          // Deletes are handled in before() while the subtree is still readable.
          deleteRecursively(((VFileDeleteEvent)each).getFile());
        }
        else {
          if (!isRelevant(each.getPath())) continue;
          if (each instanceof VFilePropertyChangeEvent) {
            // A rename away from a relevant name is a delete of the old file.
            if (((VFilePropertyChangeEvent)each).getPropertyName().equals(VirtualFile.PROP_NAME)) {
              deleteRecursively(((VFilePropertyChangeEvent)each).getFile());
            }
          }
          else if (each instanceof VFileMoveEvent) {
            VFileMoveEvent moveEvent = (VFileMoveEvent)each;
            String newPath = moveEvent.getNewParent().getPath() + "/" + moveEvent.getFile().getName();
            // A move to an irrelevant location is a delete of the old file.
            if (!isRelevant(newPath)) {
              deleteRecursively(moveEvent.getFile());
            }
          }
        }
      }
    }

    private void deleteRecursively(VirtualFile f) {
      if (isRelevant(f.getPath())) deleteFile(f);

      if (f.isDirectory()) {
        // prevent reading directories content if not already cached.
        Iterable<VirtualFile> children = f instanceof NewVirtualFile
                                         ? ((NewVirtualFile)f).iterInDbChildren()
                                         : Arrays.asList(f.getChildren());
        for (VirtualFile each : children) {
          deleteRecursively(each);
        }
      }
    }

    public void after(List<? extends VFileEvent> events) {
      for (VFileEvent each : events) {
        if (!isRelevant(each.getPath())) continue;
        if (each instanceof VFileCreateEvent) {
          VFileCreateEvent createEvent = (VFileCreateEvent)each;
          VirtualFile newChild = createEvent.getParent().findChild(createEvent.getChildName());
          if (newChild != null) {
            updateFile(newChild);
          }
        }
        else if (each instanceof VFileCopyEvent) {
          VFileCopyEvent copyEvent = (VFileCopyEvent)each;
          VirtualFile newChild = copyEvent.getNewParent().findChild(copyEvent.getNewChildName());
          if (newChild != null) {
            updateFile(newChild);
          }
        }
        else if (each instanceof VFileContentChangeEvent) {
          updateFile(((VFileContentChangeEvent)each).getFile());
        }
        else if (each instanceof VFilePropertyChangeEvent) {
          if (((VFilePropertyChangeEvent)each).getPropertyName().equals(VirtualFile.PROP_NAME)) {
            updateFile(((VFilePropertyChangeEvent)each).getFile());
          }
        }
        else if (each instanceof VFileMoveEvent) {
          updateFile(((VFileMoveEvent)each).getFile());
        }
      }
      // Commit the whole batch exactly once.
      apply();
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.runtime.functionobjects; import java.util.HashMap; import org.apache.commons.math3.distribution.AbstractRealDistribution; import org.apache.commons.math3.distribution.ChiSquaredDistribution; import org.apache.commons.math3.distribution.ExponentialDistribution; import org.apache.commons.math3.distribution.FDistribution; import org.apache.commons.math3.distribution.NormalDistribution; import org.apache.commons.math3.distribution.TDistribution; import org.apache.commons.math3.exception.MathArithmeticException; import org.apache.sysml.runtime.DMLRuntimeException; import org.apache.sysml.runtime.util.UtilFunctions; /** * Function object for builtin function that takes a list of name=value parameters. * This class can not be instantiated elsewhere. 
 */
public class ParameterizedBuiltin extends ValueFunction
{
	private static final long serialVersionUID = -5966242955816522697L;

	// Supported parameterized builtins.
	public enum ParameterizedBuiltinCode {
		CDF, INVCDF, RMEMPTY, REPLACE, REXPAND, TRANSFORMAPPLY, TRANSFORMDECODE }
	// Probability distributions usable with CDF/INVCDF.
	public enum ProbabilityDistributionCode {
		INVALID, NORMAL, EXP, CHISQ, F, T }

	// Builtin selector; INVALID distFunc for non-distribution builtins.
	public ParameterizedBuiltinCode bFunc;
	public ProbabilityDistributionCode distFunc;

	// Lookup from DML builtin name (lower case) to builtin code.
	static public HashMap<String, ParameterizedBuiltinCode> String2ParameterizedBuiltinCode;
	static {
		String2ParameterizedBuiltinCode = new HashMap<>();
		String2ParameterizedBuiltinCode.put( "cdf", ParameterizedBuiltinCode.CDF);
		String2ParameterizedBuiltinCode.put( "invcdf", ParameterizedBuiltinCode.INVCDF);
		String2ParameterizedBuiltinCode.put( "rmempty", ParameterizedBuiltinCode.RMEMPTY);
		String2ParameterizedBuiltinCode.put( "replace", ParameterizedBuiltinCode.REPLACE);
		String2ParameterizedBuiltinCode.put( "rexpand", ParameterizedBuiltinCode.REXPAND);
		String2ParameterizedBuiltinCode.put( "transformapply", ParameterizedBuiltinCode.TRANSFORMAPPLY);
		String2ParameterizedBuiltinCode.put( "transformdecode", ParameterizedBuiltinCode.TRANSFORMDECODE);
	}

	// Lookup from distribution name (lower case) to distribution code.
	static public HashMap<String, ProbabilityDistributionCode> String2DistCode;
	static {
		String2DistCode = new HashMap<>();
		String2DistCode.put("normal" , ProbabilityDistributionCode.NORMAL);
		String2DistCode.put("exp" , ProbabilityDistributionCode.EXP);
		String2DistCode.put("chisq" , ProbabilityDistributionCode.CHISQ);
		String2DistCode.put("f" , ProbabilityDistributionCode.F);
		String2DistCode.put("t" , ProbabilityDistributionCode.T);
	}

	// We should create one object for every builtin function that we support
	// (lazily created singletons for the stateless CDF/INVCDF variants;
	// NOTE(review): initialization is not synchronized — benign only if callers
	// tolerate duplicate instances under races).
	private static ParameterizedBuiltin normalObj = null, expObj = null, chisqObj = null, fObj = null, tObj = null;
	private static ParameterizedBuiltin inormalObj = null, iexpObj = null, ichisqObj = null, ifObj = null, itObj = null;

	private ParameterizedBuiltin(ParameterizedBuiltinCode bf) {
		bFunc = bf;
		distFunc = ProbabilityDistributionCode.INVALID;
	}

	private ParameterizedBuiltin(ParameterizedBuiltinCode bf, ProbabilityDistributionCode dist) {
		bFunc = bf;
		distFunc = dist;
	}

	// Factory for builtins that take no distribution argument.
	public static ParameterizedBuiltin getParameterizedBuiltinFnObject (String str) throws DMLRuntimeException {
		return getParameterizedBuiltinFnObject (str, null);
	}

	// Factory; str2 names the distribution for cdf/invcdf (may be null otherwise).
	public static ParameterizedBuiltin getParameterizedBuiltinFnObject (String str, String str2) throws DMLRuntimeException {
		ParameterizedBuiltinCode code = String2ParameterizedBuiltinCode.get(str);
		switch ( code ) {
		case CDF:
			// str2 will point the appropriate distribution
			ProbabilityDistributionCode dcode = String2DistCode.get(str2.toLowerCase());
			switch(dcode) {
			case NORMAL:
				if ( normalObj == null )
					normalObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.CDF, dcode);
				return normalObj;
			case EXP:
				if ( expObj == null )
					expObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.CDF, dcode);
				return expObj;
			case CHISQ:
				if ( chisqObj == null )
					chisqObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.CDF, dcode);
				return chisqObj;
			case F:
				if ( fObj == null )
					fObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.CDF, dcode);
				return fObj;
			case T:
				if ( tObj == null )
					tObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.CDF, dcode);
				return tObj;
			default:
				throw new DMLRuntimeException("Invalid distribution code: " + dcode);
			}

		case INVCDF:
			// str2 will point the appropriate distribution
			ProbabilityDistributionCode distcode = String2DistCode.get(str2.toLowerCase());
			switch(distcode) {
			case NORMAL:
				if ( inormalObj == null )
					inormalObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.INVCDF, distcode);
				return inormalObj;
			case EXP:
				if ( iexpObj == null )
					iexpObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.INVCDF, distcode);
				return iexpObj;
			case CHISQ:
				if ( ichisqObj == null )
					ichisqObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.INVCDF, distcode);
				return ichisqObj;
			case F:
				if ( ifObj == null )
					ifObj = new
					ParameterizedBuiltin(ParameterizedBuiltinCode.INVCDF, distcode);
				return ifObj;
			case T:
				if ( itObj == null )
					itObj = new ParameterizedBuiltin(ParameterizedBuiltinCode.INVCDF, distcode);
				return itObj;
			default:
				throw new DMLRuntimeException("Invalid distribution code: " + distcode);
			}

		// The remaining builtins are stateless; a fresh instance is returned each time.
		case RMEMPTY:
			return new ParameterizedBuiltin(ParameterizedBuiltinCode.RMEMPTY);
		case REPLACE:
			return new ParameterizedBuiltin(ParameterizedBuiltinCode.REPLACE);
		case REXPAND:
			return new ParameterizedBuiltin(ParameterizedBuiltinCode.REXPAND);
		case TRANSFORMAPPLY:
			return new ParameterizedBuiltin(ParameterizedBuiltinCode.TRANSFORMAPPLY);
		case TRANSFORMDECODE:
			return new ParameterizedBuiltin(ParameterizedBuiltinCode.TRANSFORMDECODE);
		default:
			throw new DMLRuntimeException("Invalid parameterized builtin code: " + code);
		}
	}

	/**
	 * Evaluates cdf/invcdf for the configured distribution; all other builtin
	 * codes are handled elsewhere and reaching them here is an error.
	 */
	@Override
	public double execute(HashMap<String,String> params) throws DMLRuntimeException {
		switch(bFunc) {
		case CDF:
		case INVCDF:
			switch(distFunc) {
			case NORMAL:
			case EXP:
			case CHISQ:
			case F:
			case T:
				return computeFromDistribution(distFunc, params, (bFunc==ParameterizedBuiltinCode.INVCDF));
			default:
				throw new DMLRuntimeException("Unsupported distribution (" + distFunc + ").");
			}
		default:
			throw new DMLRuntimeException("ParameterizedBuiltin.execute(): Unknown operation: " + bFunc);
		}
	}

	/**
	 * Helper function to compute distribution-specific cdf (both lowertail and uppertail) and inverse cdf.
	 *
	 * @param dcode probability distribution code
	 * @param params map of parameters ("target" plus distribution-specific keys)
	 * @param inverse true if inverse cdf ("target" is then a probability, else a quantile)
	 * @return cdf or inverse cdf
	 * @throws MathArithmeticException if MathArithmeticException occurs
	 * @throws DMLRuntimeException if DMLRuntimeException occurs
	 */
	private static double computeFromDistribution (ProbabilityDistributionCode dcode, HashMap<String,String> params, boolean inverse ) throws MathArithmeticException, DMLRuntimeException {

		// given value is "quantile" when inverse=false, and it is "probability" when inverse=true
		double val = Double.parseDouble(params.get("target"));

		boolean lowertail = true;
		if(params.get("lower.tail") != null) {
			lowertail = Boolean.parseBoolean(params.get("lower.tail"));
		}

		AbstractRealDistribution distFunction = null;

		switch(dcode) {
		case NORMAL:

			double mean = 0.0, sd = 1.0; // default values for mean and sd

			String mean_s = params.get("mean"), sd_s = params.get("sd");
			if(mean_s != null) mean = Double.parseDouble(mean_s);
			if(sd_s != null) sd = Double.parseDouble(sd_s);

			if ( sd <= 0 )
				throw new DMLRuntimeException("Standard deviation for Normal distribution must be positive (" + sd + ")");

			distFunction = new NormalDistribution(mean, sd);
			break;

		case EXP:
			double exp_rate = 1.0; // default value for 1/mean or rate

			if(params.get("rate") != null)
				exp_rate = Double.parseDouble(params.get("rate"));
			if ( exp_rate <= 0 ) {
				throw new DMLRuntimeException("Rate for Exponential distribution must be positive (" + exp_rate + ")");
			}
			// For exponential distribution: mean = 1/rate
			distFunction = new ExponentialDistribution(1.0/exp_rate);
			break;

		case CHISQ:
			if ( params.get("df") == null ) {
				throw new DMLRuntimeException("" + "Degrees of freedom must be specified for chi-squared distribution " + "(e.g., q=qchisq(0.5, df=20); p=pchisq(target=q, df=1.2))");
			}
			int df = UtilFunctions.parseToInt(params.get("df"));
			if ( df <= 0 ) {
				throw new DMLRuntimeException("Degrees of Freedom for chi-squared distribution must be positive (" + df + ")");
			}
			distFunction = new ChiSquaredDistribution(df);
			break;

		case F:
			if ( params.get("df1") == null || params.get("df2") == null ) {
				throw new DMLRuntimeException("" + "Degrees of freedom must be specified for F distribution " + "(e.g., q = qf(target=0.5, df1=20, df2=30); p=pf(target=q, df1=20, df2=30))");
			}
			int df1 = UtilFunctions.parseToInt(params.get("df1"));
			int df2 = UtilFunctions.parseToInt(params.get("df2"));
			if ( df1 <= 0 || df2 <= 0) {
				throw new DMLRuntimeException("Degrees of Freedom for F distribution must be positive (" + df1 + "," + df2 + ")");
			}
			distFunction = new FDistribution(df1, df2);
			break;

		case T:
			if ( params.get("df") == null ) {
				throw new DMLRuntimeException("" + "Degrees of freedom is needed to compute probabilities from t distribution " + "(e.g., q = qt(target=0.5, df=10); p = pt(target=q, df=10))");
			}
			int t_df = UtilFunctions.parseToInt(params.get("df"));
			if ( t_df <= 0 ) {
				throw new DMLRuntimeException("Degrees of Freedom for t distribution must be positive (" + t_df + ")");
			}
			distFunction = new TDistribution(t_df);
			break;

		default:
			throw new DMLRuntimeException("Invalid distribution code: " + dcode);
		}

		double ret = Double.NaN;
		if(inverse) {
			// inverse cdf
			ret = distFunction.inverseCumulativeProbability(val);
		}
		else if(lowertail) {
			// cdf (lowertail)
			ret = distFunction.cumulativeProbability(val);
		}
		else {
			// cdf (upper tail)
			// TODO: more accurate distribution-specific computation of upper tail probabilities
			ret = 1.0 - distFunction.cumulativeProbability(val);
		}

		return ret;
	}
}
/* Derby - Class org.apache.derbyTesting.functionTests.tests.lang.RoutineTest Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License */ package org.apache.derbyTesting.functionTests.tests.lang; import java.io.UnsupportedEncodingException; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Time; import java.sql.Types; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.CleanDatabaseTestSetup; import org.apache.derbyTesting.junit.JDBC; /** * Set of tests for SQL routines. * This tests mainly the SQL definition of routines * and the server-side behaviour of routines. * Calling of procedures is tested in ProcedureTest. 
 *
 */
public class RoutineTest extends BaseJDBCTestCase {

    // SQLState raised when a NULL is passed to a routine mapped onto a
    // primitive Java parameter.
    private static final String CANNOT_STUFF_NULL_INTO_PRIMITIVE = "39004";

    public RoutineTest(String name) {
        super(name);
    }

    // Wraps the suite in CleanDatabaseTestSetup so each run starts from a
    // clean schema.
    public static Test suite() {
        TestSuite suite = new TestSuite(RoutineTest.class, "RoutineTest");

        return new CleanDatabaseTestSetup(suite);
    }

    /**
     * Test that function result data types are resolved correctly for numeric
     * types that Derby supports that are simply mappable or object mappable.
     */
    public void testFunctionResultDataTypeValidation() throws SQLException
    {
        Statement s = createStatement();

        // SMALLINT -> short
        s.executeUpdate(
        "CREATE FUNCTION SMALLINT_P_SHORT(VARCHAR(10)) RETURNS SMALLINT " +
        "EXTERNAL NAME 'java.lang.Short.parseShort' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        PreparedStatement ps = prepareStatement("VALUES SMALLINT_P_SHORT(?)");
        ps.setString(1, "123");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123");

        // SMALLINT -> Integer
        s.executeUpdate(
        "CREATE FUNCTION SMALLINT_O_INTEGER(VARCHAR(10)) RETURNS SMALLINT " +
        "EXTERNAL NAME 'java.lang.Integer.valueOf' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES SMALLINT_O_INTEGER(?)");
        ps.setString(1, "123");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123");

        // INTEGER -> int
        s.executeUpdate(
        "CREATE FUNCTION INTEGER_P_INT(VARCHAR(10)) RETURNS INTEGER " +
        "EXTERNAL NAME 'java.lang.Integer.parseInt' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES INTEGER_P_INT(?)");
        ps.setString(1, "123");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123");

        // INTEGER -> Integer
        s.executeUpdate(
        "CREATE FUNCTION INTEGER_O_INTEGER(VARCHAR(10)) RETURNS INTEGER " +
        "EXTERNAL NAME 'java.lang.Integer.valueOf' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES INTEGER_O_INTEGER(?)");
        ps.setString(1, "123");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123");

        // BIGINT -> long
        s.executeUpdate(
        "CREATE FUNCTION BIGINT_P_LONG(VARCHAR(10)) RETURNS BIGINT " +
        "EXTERNAL NAME 'java.lang.Long.parseLong' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES BIGINT_P_LONG(?)");
        ps.setString(1, "123");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123");

        // BIGINT -> Long
        s.executeUpdate(
        "CREATE FUNCTION BIGINT_O_LONG(VARCHAR(10)) RETURNS BIGINT " +
        "EXTERNAL NAME 'java.lang.Long.valueOf' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES BIGINT_O_LONG(?)");
        ps.setString(1, "123");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123");

        // REAL -> float
        s.executeUpdate(
        "CREATE FUNCTION REAL_P_FLOAT(VARCHAR(10)) RETURNS REAL " +
        "EXTERNAL NAME 'java.lang.Float.parseFloat' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES REAL_P_FLOAT(?)");
        ps.setString(1, "123.0");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123.0");

        // REAL -> Float
        s.executeUpdate(
        "CREATE FUNCTION REAL_O_FLOAT(VARCHAR(10)) RETURNS REAL " +
        "EXTERNAL NAME 'java.lang.Float.valueOf' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES REAL_O_FLOAT(?)");
        ps.setString(1, "123.0");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123.0");

        // DOUBLE -> double
        s.executeUpdate(
        "CREATE FUNCTION DOUBLE_P_DOUBLE(VARCHAR(10)) RETURNS DOUBLE " +
        "EXTERNAL NAME 'java.lang.Double.parseDouble' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES DOUBLE_P_DOUBLE(?)");
        ps.setString(1, "123.0");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123.0");

        // DOUBLE -> Double
        s.executeUpdate(
        "CREATE FUNCTION DOUBLE_O_DOUBLE(VARCHAR(10)) RETURNS DOUBLE " +
        "EXTERNAL NAME 'java.lang.Double.valueOf' " +
        "LANGUAGE JAVA PARAMETER STYLE JAVA");

        ps = prepareStatement("VALUES DOUBLE_O_DOUBLE(?)");
        ps.setString(1, "123.0");
        JDBC.assertSingleValueResultSet(ps.executeQuery(), "123.0");

        ps.close();
        s.close();
    }

    /**
     * Test that RETURNS NULL ON NULL INPUT works properly with
     * numeric datatypes for null and non-null values.
*/ public void testFunctionReturnsNullOnNullInput() throws SQLException { Statement s = createStatement(); // SMALLINT -> short s.executeUpdate( "CREATE FUNCTION SMALLINT_P_SHORT_RN(VARCHAR(10)) RETURNS SMALLINT " + "EXTERNAL NAME 'java.lang.Short.parseShort' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); PreparedStatement ps = prepareStatement("VALUES SMALLINT_P_SHORT_RN(?)"); ps.setString(1, "123"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123"); ps.setString(1,null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); // SMALLINT -> Integer s.executeUpdate( "CREATE FUNCTION SMALLINT_O_INTEGER_RN(VARCHAR(10)) RETURNS SMALLINT " + "EXTERNAL NAME 'java.lang.Integer.valueOf' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES SMALLINT_O_INTEGER_RN(?)"); ps.setString(1, "123"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123"); ps.setString(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); // INTEGER -> int s.executeUpdate( "CREATE FUNCTION INTEGER_P_INT_RN(VARCHAR(10)) RETURNS INTEGER " + "EXTERNAL NAME 'java.lang.Integer.parseInt' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES INTEGER_P_INT_RN(?)"); ps.setString(1, "123"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123"); // INTEGER -> Integer s.executeUpdate( "CREATE FUNCTION INTEGER_O_INTEGER_RN(VARCHAR(10)) RETURNS INTEGER " + "EXTERNAL NAME 'java.lang.Integer.valueOf' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES INTEGER_O_INTEGER_RN(?)"); ps.setString(1, "123"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123"); ps.setString(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); // BIGINT -> long s.executeUpdate( "CREATE FUNCTION BIGINT_P_LONG_RN(VARCHAR(10)) RETURNS BIGINT " + "EXTERNAL NAME 'java.lang.Long.parseLong' " + "LANGUAGE JAVA 
PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES BIGINT_P_LONG_RN(?)"); ps.setString(1, "123"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123"); ps.setString(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); // BIGINT -> Long s.executeUpdate( "CREATE FUNCTION BIGINT_O_LONG_NR(VARCHAR(10)) RETURNS BIGINT " + "EXTERNAL NAME 'java.lang.Long.valueOf' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES BIGINT_O_LONG_NR(?)"); ps.setString(1, "123"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123"); ps.setString(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); // REAL -> float s.executeUpdate( "CREATE FUNCTION REAL_P_FLOAT_NR(VARCHAR(10)) RETURNS REAL " + "EXTERNAL NAME 'java.lang.Float.parseFloat' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES REAL_P_FLOAT_NR(?)"); ps.setString(1, "123.0"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123.0"); ps.setString(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); // REAL -> Float s.executeUpdate( "CREATE FUNCTION REAL_O_FLOAT_NR(VARCHAR(10)) RETURNS REAL " + "EXTERNAL NAME 'java.lang.Float.valueOf' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES REAL_O_FLOAT_NR(?)"); ps.setString(1, "123.0"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123.0"); ps.setString(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); // DOUBLE -> double s.executeUpdate( "CREATE FUNCTION DOUBLE_P_DOUBLE_NR(VARCHAR(10)) RETURNS DOUBLE " + "EXTERNAL NAME 'java.lang.Double.parseDouble' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES DOUBLE_P_DOUBLE_NR(?)"); ps.setString(1, "123.0"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123.0"); ps.setString(1, null); 
JDBC.assertSingleValueResultSet(ps.executeQuery(), null); // DOBULE -> Double s.executeUpdate( "CREATE FUNCTION DOUBLE_O_DOUBLE_NR(VARCHAR(10)) RETURNS DOUBLE " + "EXTERNAL NAME 'java.lang.Double.valueOf' " + "LANGUAGE JAVA PARAMETER STYLE JAVA " + "RETURNS NULL ON NULL INPUT"); ps = prepareStatement("VALUES DOUBLE_O_DOUBLE_NR(?)"); ps.setString(1, "123.0"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "123.0"); ps.setString(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.close(); s.close(); } /** * Test that functions handle being called or not called * when it is passed a NULL argument correctly. * A function can be declared: * RETURNS NULL ON NULL INPUT - any argument being NULL means the * function is returns NULL without being called. * CALLED ON NULL INPUT (default) - function is always called regardless * of any arguement being NULL. */ public void testFunctionNullHandling() throws SQLException, UnsupportedEncodingException { Statement s = createStatement(); // Create three simple functions that take an integer and // return its value as a VARCHAR(). s.executeUpdate( "CREATE FUNCTION SV_NOCALL(INTEGER) RETURNS VARCHAR(10) " + "RETURNS NULL ON NULL INPUT " + "EXTERNAL NAME 'java.lang.String.valueOf' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); s.executeUpdate("CREATE FUNCTION SV_CALL(INTEGER) RETURNS VARCHAR(10) " + "CALLED ON NULL INPUT " + "EXTERNAL NAME 'java.lang.String.valueOf' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); s.executeUpdate("CREATE FUNCTION SV_DEFAULT(INTEGER) RETURNS VARCHAR(10) " + "EXTERNAL NAME 'java.lang.String.valueOf' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); // Simple cases of calling each function individually // Test each function with non-NULL and NULL values. 
PreparedStatement ps = prepareStatement("VALUES SV_NOCALL(?)"); ps.setInt(1, 42); JDBC.assertSingleValueResultSet(ps.executeQuery(), "42"); ps.setNull(1, Types.INTEGER); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.close(); ps = prepareStatement("VALUES SV_CALL(?)"); ps.setInt(1, 52); JDBC.assertSingleValueResultSet(ps.executeQuery(), "52"); // NULL will attempt to call the function but be blocked // because the Java parameter is a primitive. Since // the call attempt it made it is enough to show the // correct behaviour. ps.setNull(1, Types.INTEGER); assertStatementError("39004", ps); ps.close(); // Default behaviour maps to CALLED ON NULL INPUT ps = prepareStatement("VALUES SV_DEFAULT(?)"); ps.setInt(1, 62); JDBC.assertSingleValueResultSet(ps.executeQuery(), "62"); ps.setNull(1, Types.INTEGER); assertStatementError("39004", ps); ps.close(); // Test that any single argument being null causes NULL to be returned. s.executeUpdate( "CREATE FUNCTION CONCAT_NOCALL(VARCHAR(10), VARCHAR(10)) " + "RETURNS VARCHAR(20) " + "RETURNS NULL ON NULL INPUT " + "EXTERNAL NAME '" + RoutineTest.class.getName() + ".concat' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); s.executeUpdate( "CREATE FUNCTION CONCAT_CALL(VARCHAR(10), VARCHAR(10)) " + "RETURNS VARCHAR(20) " + "CALLED ON NULL INPUT " + "EXTERNAL NAME '" + RoutineTest.class.getName() + ".concat' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); ps = prepareStatement("VALUES CONCAT_NOCALL(?, ?)"); ps.setString(1, "good"); ps.setString(2, "bye"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "goodbye"); ps.setString(1, null); ps.setString(2, "bye"); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.setString(1, "good"); ps.setString(2, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.setString(1, null); ps.setString(2, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.close(); ps = prepareStatement("VALUES CONCAT_CALL(?, ?)"); ps.setString(1, "good"); ps.setString(2, 
"bye"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "goodbye"); ps.setString(1, null); ps.setString(2, "bye"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "s1NULLbye"); ps.setString(1, "good"); ps.setString(2, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), "goods2NULL"); ps.setString(1, null); ps.setString(2, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), "s1NULLs2NULL"); ps.close(); // Now nested calls ps = prepareStatement( "VALUES CONCAT_NOCALL(CONCAT_NOCALL(?, 'RNNI'), CONCAT_CALL(?, 'CONI'))"); ps.setString(1, "p1"); ps.setString(2, "p2"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "p1RNNIp2CONI"); ps.setString(1, null); ps.setString(2, "p2"); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.setString(1, "p1"); ps.setString(2, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), "p1RNNIs1NULLCONI"); ps.setString(1, null); ps.setString(2, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.close(); ps = prepareStatement( "VALUES CONCAT_CALL(CONCAT_NOCALL(?, 'RNNI'), CONCAT_CALL(?, 'CONI'))"); ps.setString(1, "p1"); ps.setString(2, "p2"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "p1RNNIp2CONI"); ps.setString(1, null); ps.setString(2, "p2"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "s1NULLp2CONI"); ps.setString(1, "p1"); ps.setString(2, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), "p1RNNIs1NULLCONI"); ps.setString(1, null); ps.setString(2, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), "s1NULLs1NULLCONI"); ps.close(); // Nested calls with SQL types that do not need casts // and map to primitive types. 
This had issues see DERBY-479 s.executeUpdate( "CREATE FUNCTION SAME_NOCALL(INTEGER) " + "RETURNS INTEGER " + "RETURNS NULL ON NULL INPUT " + "EXTERNAL NAME '" + RoutineTest.class.getName() + ".same' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); s.executeUpdate( "CREATE FUNCTION SAME_CALL(INTEGER) " + "RETURNS INTEGER " + "CALLED ON NULL INPUT " + "EXTERNAL NAME '" + RoutineTest.class.getName() + ".same' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); ps = prepareStatement("VALUES SAME_NOCALL(SAME_NOCALL(?))"); ps.setInt(1, 41); JDBC.assertSingleValueResultSet(ps.executeQuery(), "41"); ps.setNull(1, Types.INTEGER); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.close(); ps = prepareStatement("VALUES SAME_NOCALL(SAME_CALL(?))"); ps.setInt(1, 47); JDBC.assertSingleValueResultSet(ps.executeQuery(), "47"); ps.setNull(1, Types.INTEGER); assertStatementError("39004", ps); // Can't pass NULL into primitive type ps.close(); ps = prepareStatement("VALUES SAME_CALL(SAME_NOCALL(?))"); ps.setInt(1, 41); JDBC.assertSingleValueResultSet(ps.executeQuery(), "41"); ps.setNull(1, Types.INTEGER); assertStatementError("39004", ps); // Can't pass NULL into primitive type ps.close(); ps = prepareStatement("VALUES SAME_CALL(SAME_CALL(?))"); ps.setInt(1, 53); JDBC.assertSingleValueResultSet(ps.executeQuery(), "53"); ps.setNull(1, Types.INTEGER); assertStatementError("39004", ps); // Can't pass NULL into primitive type ps.close(); s.executeUpdate( "CREATE FUNCTION NOON_NOCALL(TIME) " + "RETURNS TIME " + "RETURNS NULL ON NULL INPUT " + "EXTERNAL NAME '" + RoutineTest.class.getName() + ".nullAtNoon' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); s.executeUpdate( "CREATE FUNCTION NOON_CALL(TIME) " + "RETURNS TIME " + "CALLED ON NULL INPUT " + "EXTERNAL NAME '" + RoutineTest.class.getName() + ".nullAtNoon' " + "LANGUAGE JAVA PARAMETER STYLE JAVA"); // Function maps: // NULL to 11:00:00 (if null can be passed) // 11:00:00 to 11:30:00 // 12:00:00 to NULL // any other time to itself Time noon = 
Time.valueOf("12:00:00"); // mapped to null by the function Time tea = Time.valueOf("15:30:00"); ps = prepareStatement("VALUES NOON_NOCALL(?)"); ps.setTime(1, tea); JDBC.assertSingleValueResultSet(ps.executeQuery(), tea.toString()); ps.setTime(1, noon); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.setTime(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.close(); ps = prepareStatement("VALUES NOON_CALL(?)"); ps.setTime(1, tea); JDBC.assertSingleValueResultSet(ps.executeQuery(), tea.toString()); ps.setTime(1, noon); JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.setTime(1, null); JDBC.assertSingleValueResultSet(ps.executeQuery(), "11:00:00"); ps.close(); // All the nested calls in these cases take take the // value 'tea' will return the same value. ps = prepareStatement("VALUES NOON_NOCALL(NOON_NOCALL(?))"); ps.setTime(1, tea); JDBC.assertSingleValueResultSet(ps.executeQuery(), tea.toString()); ps.setTime(1, noon); // noon->NULL->NULL JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.setTime(1, null); // NULL->NULL->NULL JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.close(); ps = prepareStatement("VALUES NOON_NOCALL(NOON_CALL(?))"); ps.setTime(1, tea); JDBC.assertSingleValueResultSet(ps.executeQuery(), tea.toString()); // DERBY-1030 RESULT SHOULD BE NULL // noon->NULL by inner function // NULL->NULL by outer due to RETURN NULL ON NULL INPUT ps.setTime(1, noon); // noon->NULL->NULL JDBC.assertSingleValueResultSet(ps.executeQuery(), null); ps.setTime(1, null); // NULL->11:00:00->11:30:00 JDBC.assertSingleValueResultSet(ps.executeQuery(), "11:30:00"); ps.close(); ps = prepareStatement("VALUES NOON_CALL(NOON_NOCALL(?))"); ps.setTime(1, tea); JDBC.assertSingleValueResultSet(ps.executeQuery(), tea.toString()); ps.setTime(1, noon); // noon->NULL->11:00:00 JDBC.assertSingleValueResultSet(ps.executeQuery(), "11:00:00"); ps.setTime(1, null); // NULL->NULL->11:00:00 
JDBC.assertSingleValueResultSet(ps.executeQuery(), "11:00:00"); ps.close(); ps = prepareStatement("VALUES NOON_CALL(NOON_CALL(?))"); ps.setTime(1, tea); JDBC.assertSingleValueResultSet(ps.executeQuery(), tea.toString()); ps.setTime(1, noon); // noon->NULL->11:00:00 JDBC.assertSingleValueResultSet(ps.executeQuery(), "11:00:00"); ps.setTime(1, null); // NULL->11:00:00->11:30:00 JDBC.assertSingleValueResultSet(ps.executeQuery(), "11:30:00"); ps.close(); s.close(); } /** * Test function with an aggregate argument. DERBY-3649 * @throws SQLException */ public void testAggregateArgument() throws SQLException { Statement s = createStatement(); s.executeUpdate("CREATE TABLE TEST (I INT)"); s.executeUpdate("INSERT INTO TEST VALUES(1)"); s.executeUpdate("INSERT INTO TEST VALUES(2)"); s.executeUpdate("CREATE FUNCTION CheckCount(count integer) RETURNS INTEGER PARAMETER STYLE JAVA NO SQL LANGUAGE JAVA EXTERNAL NAME 'org.apache.derbyTesting.functionTests.tests.lang.RoutineTest.checkCount'"); ResultSet rs = s.executeQuery("select checkCount(count(*)) from test"); JDBC.assertSingleValueResultSet(rs, "2"); } /** * Test that we don't get verification errors trying to cram nulls * into primitive args. See DERBY-4459. */ public void test_4459() throws Exception { Statement s = createStatement(); s.executeUpdate ( "create function getNullInt() returns int language java parameter style java\n" + "external name '" + RoutineTest.class.getName() + ".getNullInt'" ); s.executeUpdate ( "create function negateInt( a int ) returns int language java parameter style java\n" + "external name '" + RoutineTest.class.getName() + ".negateInt'" ); assertStatementError( CANNOT_STUFF_NULL_INTO_PRIMITIVE, s, "values( negateInt( cast( null as int) ) )" ); assertStatementError( CANNOT_STUFF_NULL_INTO_PRIMITIVE, s, "values( negateInt( getNullInt() ) )" ); } /** * DERBY-5749: Too long (non-blank) argument for VARCHAR parameter does not * throw as expected. 
*/ public void test_5749() throws SQLException { Statement s = createStatement(); s.executeUpdate("create table t5749(v varchar(5))"); s.executeUpdate( "create procedure p5749 (a varchar(5)) modifies sql data " + "external name '" + RoutineTest.class.getName() + ".p5749' " + "language java parameter style java"); CallableStatement cs = prepareCall("call p5749(?)"); cs.setString(1, "123456"); // This silently truncates before fix of DERBY-5749 try { cs.execute(); fail(); } catch (SQLException e) { assertSQLState("22001", e); } // This silently truncates also try { s.executeUpdate("call p5749('123456')"); fail(); } catch (SQLException e) { assertSQLState("22001", e); } PreparedStatement ps = prepareStatement("insert into t5749 values(?)"); ps.setString(1, "123456"); // This does not truncate try { ps.execute(); fail(); } catch (SQLException e) { assertSQLState("22001", e); } } /** * DERBY-6511: Make sure that conversions between primitive and wrapper * types work properly. */ public void test_6511() throws Exception { Connection conn = getConnection(); vet_6511( conn, "boolean", "booleanpToBoolean", "booleanToBooleanp", "true" ); vet_6511( conn, "int", "intToInteger", "integerToInt", "1" ); vet_6511( conn, "bigint", "longpToLong", "longToLongp", "1" ); vet_6511( conn, "smallint", "shortpToInteger", "integerToShortp", "1" ); vet_6511( conn, "double", "doublepToDouble", "doubleToDoublep", "1.0" ); vet_6511( conn, "real", "floatpToFloat", "floatToFloatp", "1.0" ); } private void vet_6511 ( Connection conn, String sqlDatatype, String primitiveToWrapperName, String wrapperToPrimitiveName, String dataValue ) throws Exception { createFunction_6511( conn, sqlDatatype, primitiveToWrapperName ); createFunction_6511( conn, sqlDatatype, wrapperToPrimitiveName ); vetChaining_6511( conn, primitiveToWrapperName, primitiveToWrapperName, dataValue ); vetChaining_6511( conn, primitiveToWrapperName, wrapperToPrimitiveName, dataValue ); vetChaining_6511( conn, wrapperToPrimitiveName, 
primitiveToWrapperName, dataValue ); vetChaining_6511( conn, wrapperToPrimitiveName, wrapperToPrimitiveName, dataValue ); dropFunction_6511( conn, primitiveToWrapperName ); dropFunction_6511( conn, wrapperToPrimitiveName ); } private void createFunction_6511 ( Connection conn, String sqlDatatype, String functionName ) throws Exception { goodStatement ( conn, "create function " + functionName + "( val " + sqlDatatype + " ) returns " + sqlDatatype + "\n" + "language java parameter style java deterministic no sql\n" + "external name '" + getClass().getName() + "." + functionName + "'" ); } private void dropFunction_6511 ( Connection conn, String functionName ) throws Exception { goodStatement( conn, "drop function " + functionName ); } private void vetChaining_6511 ( Connection conn, String innerFunctionName, String outerFunctionName, String dataValue ) throws Exception { assertResults ( conn, "values " + outerFunctionName + "( " + innerFunctionName + "( " + dataValue + " ) )", new String[][] { { dataValue }, }, false ); } /* ** Routine implementations called from the tests but do * not use DriverManager so that this test can be used on * J2ME/CDC/Foundation with JSR169. 
*/ public static String concat(String s1, String s2) { if (s1 == null) s1 = "s1NULL"; if (s2 == null) s2 = "s2NULL"; return s1.concat(s2); } public static int same(int i) { return i; } public static Time nullAtNoon(Time t) { if (t == null) return Time.valueOf("11:00:00"); String s = t.toString(); if ("11:00:00".equals(s)) return Time.valueOf("11:30:00"); if ("12:00:00".equals(s)) return null; return t; } public static int checkCount(int count) throws SQLException { // throws ZeroException { if (count == 0) { //throw new ZeroException(); throw new SQLException("No results found", "38777"); } return count; } public static int negateInt( int arg ) { return -arg; } public static Integer getNullInt() { return null; } public static void p5749 (String s) { } // functions for converting between primitive and wrapper types public static Boolean booleanpToBoolean( boolean val ) { return new Boolean( val ); } public static boolean booleanToBooleanp( Boolean val ) throws Exception { if ( val == null ) { throw new Exception( "This method does not allow nulls!" ); } else { return val.booleanValue(); } } public static Integer intToInteger( int val ) { return new Integer( val ); } public static int integerToInt( Integer val ) throws Exception { if ( val == null ) { throw new Exception( "This method does not allow nulls!" ); } else { return val.intValue(); } } public static Long longpToLong( long val ) { return new Long( val ); } public static long longToLongp( Long val ) throws Exception { if ( val == null ) { throw new Exception( "This method does not allow nulls!" ); } else { return val.longValue(); } } public static Integer shortpToInteger( short val ) { return new Integer( val ); } public static short integerToShortp( Integer val ) throws Exception { if ( val == null ) { throw new Exception( "This method does not allow nulls!" 
); } else { return val.shortValue(); } } public static Float floatpToFloat( float val ) { return new Float( val ); } public static float floatToFloatp( Float val ) throws Exception { if ( val == null ) { throw new Exception( "This method does not allow nulls!" ); } else { return val.floatValue(); } } public static Double doublepToDouble( double val ) { return new Double( val ); } public static double doubleToDoublep( Double val ) throws Exception { if ( val == null ) { throw new Exception( "This method does not allow nulls!" ); } else { return val.doubleValue(); } } }
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.concurrent.atomicreference; import com.hazelcast.concurrent.atomicreference.operations.AlterAndGetOperation; import com.hazelcast.concurrent.atomicreference.operations.AlterOperation; import com.hazelcast.concurrent.atomicreference.operations.ApplyOperation; import com.hazelcast.concurrent.atomicreference.operations.CompareAndSetOperation; import com.hazelcast.concurrent.atomicreference.operations.ContainsOperation; import com.hazelcast.concurrent.atomicreference.operations.GetAndAlterOperation; import com.hazelcast.concurrent.atomicreference.operations.GetAndSetOperation; import com.hazelcast.concurrent.atomicreference.operations.GetOperation; import com.hazelcast.concurrent.atomicreference.operations.IsNullOperation; import com.hazelcast.concurrent.atomicreference.operations.SetAndGetOperation; import com.hazelcast.concurrent.atomicreference.operations.SetOperation; import com.hazelcast.core.AsyncAtomicReference; import com.hazelcast.core.IFunction; import com.hazelcast.spi.AbstractDistributedObject; import com.hazelcast.spi.InternalCompletableFuture; import com.hazelcast.spi.NodeEngine; import com.hazelcast.spi.Operation; import static com.hazelcast.util.Preconditions.isNotNull; public class AtomicReferenceProxy<E> extends AbstractDistributedObject<AtomicReferenceService> implements AsyncAtomicReference<E> { private final String name; 
private final int partitionId; public AtomicReferenceProxy(String name, NodeEngine nodeEngine, AtomicReferenceService service) { super(nodeEngine, service); this.name = name; this.partitionId = nodeEngine.getPartitionService().getPartitionId(getNameAsPartitionAwareData()); } @Override public void alter(IFunction<E, E> function) { alterAsync(function).join(); } @Override public InternalCompletableFuture<Void> alterAsync(IFunction<E, E> function) { isNotNull(function, "function"); Operation operation = new AlterOperation(name, toData(function)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<Void> asyncAlter(IFunction<E, E> function) { return alterAsync(function); } @Override public E alterAndGet(IFunction<E, E> function) { return alterAndGetAsync(function).join(); } @Override public InternalCompletableFuture<E> alterAndGetAsync(IFunction<E, E> function) { isNotNull(function, "function"); Operation operation = new AlterAndGetOperation(name, toData(function)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<E> asyncAlterAndGet(IFunction<E, E> function) { return alterAndGetAsync(function); } @Override public E getAndAlter(IFunction<E, E> function) { return getAndAlterAsync(function).join(); } @Override public InternalCompletableFuture<E> getAndAlterAsync(IFunction<E, E> function) { isNotNull(function, "function"); Operation operation = new GetAndAlterOperation(name, toData(function)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<E> asyncGetAndAlter(IFunction<E, E> function) { return getAndAlterAsync(function); } @Override public <R> R apply(IFunction<E, R> function) { return applyAsync(function).join(); } @Override public <R> InternalCompletableFuture<R> applyAsync(IFunction<E, R> function) { isNotNull(function, "function"); Operation operation = new ApplyOperation(name, 
toData(function)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public <R> InternalCompletableFuture<R> asyncApply(IFunction<E, R> function) { return applyAsync(function); } @Override public void clear() { clearAsync().join(); } @Override public InternalCompletableFuture<Void> clearAsync() { return setAsync(null); } @Override public InternalCompletableFuture<Void> asyncClear() { return clearAsync(); } @Override public boolean compareAndSet(E expect, E update) { return compareAndSetAsync(expect, update).join(); } @Override public InternalCompletableFuture<Boolean> compareAndSetAsync(E expect, E update) { Operation operation = new CompareAndSetOperation(name, toData(expect), toData(update)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<Boolean> asyncCompareAndSet(E expect, E update) { return compareAndSetAsync(expect, update); } @Override public E get() { return getAsync().join(); } @Override public InternalCompletableFuture<E> getAsync() { Operation operation = new GetOperation(name) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<E> asyncGet() { return getAsync(); } @Override public boolean contains(E expected) { return containsAsync(expected).join(); } @Override public InternalCompletableFuture<Boolean> containsAsync(E expected) { Operation operation = new ContainsOperation(name, toData(expected)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<Boolean> asyncContains(E value) { return containsAsync(value); } @Override public void set(E newValue) { setAsync(newValue).join(); } @Override public InternalCompletableFuture<Void> setAsync(E newValue) { Operation operation = new SetOperation(name, toData(newValue)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<Void> asyncSet(E 
newValue) { Operation operation = new SetOperation(name, toData(newValue)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public E getAndSet(E newValue) { return getAndSetAsync(newValue).join(); } @Override public InternalCompletableFuture<E> getAndSetAsync(E newValue) { Operation operation = new GetAndSetOperation(name, toData(newValue)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<E> asyncGetAndSet(E newValue) { return getAndSetAsync(newValue); } @Override public E setAndGet(E update) { return asyncSetAndGet(update).join(); } @Override public InternalCompletableFuture<E> asyncSetAndGet(E update) { Operation operation = new SetAndGetOperation(name, toData(update)) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public boolean isNull() { return isNullAsync().join(); } @Override public InternalCompletableFuture<Boolean> isNullAsync() { Operation operation = new IsNullOperation(name) .setPartitionId(partitionId); return invokeOnPartition(operation); } @Override public InternalCompletableFuture<Boolean> asyncIsNull() { return isNullAsync(); } @Override public String getName() { return name; } public int getPartitionId() { return partitionId; } @Override public String getServiceName() { return AtomicReferenceService.SERVICE_NAME; } @Override public String toString() { return "IAtomicReference{" + "name='" + name + '\'' + '}'; } }
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.typeMigration.rules.guava;

import com.intellij.codeInspection.AnonymousCanBeLambdaInspection;
import com.intellij.codeInspection.java18StreamApi.StreamApiConstants;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.SuggestedNameInfo;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.typeMigration.TypeConversionDescriptor;
import com.intellij.refactoring.typeMigration.TypeConversionDescriptorBase;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import com.intellij.util.text.UniqueNameGenerator;
import com.siyeh.ig.psiutils.ParenthesesUtils;
import com.siyeh.ipp.types.ReplaceMethodRefWithLambdaIntention;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;

/**
 * Conversion descriptors used by the Guava-to-Stream type-migration rules to
 * rewrite {@code FluentIterable} call sites ({@code copyInto}, {@code toArray},
 * {@code filter}, {@code transformAndConcat}) into java.util.stream equivalents.
 *
 * @author Dmitry Batkovich
 */
public class FluentIterableConversionUtil {
  private final static Logger LOG = Logger.getInstance(FluentIterableConversionUtil.class);

  /**
   * Rewrites {@code it.copyInto(c)} into {@code it.forEach((c)::add)}.
   */
  static class CopyIntoDescriptor extends TypeConversionDescriptor {
    public CopyIntoDescriptor() {
      super("$it$.copyInto($c$)", "$it$.forEach(($c$)::add)");
    }

    @Override
    public PsiExpression replace(PsiExpression expression) {
      //TODO check parenthesis
      return super.replace(expression);
    }
  }

  /**
   * Builds a descriptor converting {@code q.toArray(SomeType.class)} into
   * {@code q.toArray(n -> new SomeType[n])}, but only when the iterable's single
   * type parameter matches the single type parameter of the class-literal
   * argument's type. Returns null whenever any of those structural
   * preconditions fails.
   *
   * @param initialType the (expected generic) type of the qualifier iterable
   * @param expression  the qualifier expression of the {@code toArray} call;
   *                    its parent must be the {@code toArray} call itself
   */
  @Nullable
  static TypeConversionDescriptor getToArrayDescriptor(PsiType initialType, PsiExpression expression) {
    if (!(initialType instanceof PsiClassType)) {
      return null;
    }
    final PsiType[] parameters = ((PsiClassType)initialType).getParameters();
    if (parameters.length != 1) {
      return null;
    }
    final PsiElement methodCall = expression.getParent();
    if (!(methodCall instanceof PsiMethodCallExpression)) {
      return null;
    }
    final PsiExpression[] expressions = ((PsiMethodCallExpression)methodCall).getArgumentList().getExpressions();
    if (expressions.length != 1) {
      return null;
    }
    final PsiExpression classTypeExpression = expressions[0];
    final PsiType targetType = classTypeExpression.getType();
    if (!(targetType instanceof PsiClassType)) {
      return null;
    }
    final PsiType[] targetParameters = ((PsiClassType)targetType).getParameters();
    if (targetParameters.length != 1) {
      return null;
    }
    if (PsiTypesUtil.compareTypes(parameters[0], targetParameters[0], false)) {
      return new TypeConversionDescriptor("$q$.toArray($type$)", null) {
        // Element type of the source iterable; used to spell the array-constructor
        // reference lambda in the replacement text.
        PsiType myType = parameters[0];

        @Override
        public PsiExpression replace(PsiExpression expression) throws IncorrectOperationException {
          final UniqueNameGenerator nameGenerator = new UniqueNameGenerator();
          final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(expression.getProject());
          // Pick a collision-free local name for the lambda parameter
          // (an int-valued array size), honoring the project code style.
          final String name = codeStyleManager.suggestUniqueVariableName(
            codeStyleManager.suggestVariableName(VariableKind.LOCAL_VARIABLE, null, null, PsiType.INT).names[0], expression, false);
          final String chosenName = nameGenerator.generateUniqueName(name);
          // Replacement template is only known now (it embeds the chosen name),
          // so it is installed lazily before delegating to the base rewrite.
          setReplaceByString("$q$.toArray(" + chosenName + " -> " + " new " + myType.getCanonicalText(false) + "[" + chosenName + "])");
          return super.replace(expression);
        }
      };
    }
    return null;
  }

  /**
   * Chooses the descriptor for a {@code FluentIterable.filter(...)} call based on
   * the parameter type of the resolved {@code filter} overload: a {@code Class}
   * parameter maps to the instanceof-style conversion, a Guava {@code Predicate}
   * parameter maps to a plain {@code Stream.filter} conversion. Returns null for
   * any other overload shape.
   */
  @Nullable
  static TypeConversionDescriptor getFilterDescriptor(PsiMethod method) {
    LOG.assertTrue("filter".equals(method.getName()));
    final PsiParameter[] parameters = method.getParameterList().getParameters();
    if (parameters.length != 1) return null;
    final PsiParameter parameter = parameters[0];
    final PsiType type = parameter.getType();
    if (!(type instanceof PsiClassType)) return null;
    final PsiClass resolvedClass = ((PsiClassType)type).resolve();
    if (resolvedClass == null) return null;
    if (CommonClassNames.JAVA_LANG_CLASS.equals(resolvedClass.getQualifiedName())) {
      return new GuavaFilterInstanceOfConversionDescriptor();
    }
    else if (GuavaPredicateConversionRule.GUAVA_PREDICATE.equals(resolvedClass.getQualifiedName())) {
      return new LambdaParametersTypeConversionDescriptor("$it$.filter($p$)", "$it$." + StreamApiConstants.FILTER + "($p$)");
    }
    return null;
  }

  /**
   * Converts {@code q.transformAndConcat(f)} into {@code q.flatMap(f)}.
   * The function argument is first normalized to a lambda (anonymous class ->
   * lambda, arbitrary expression -> {@code (expr)::apply} -> lambda); then every
   * Iterable-typed return value inside the lambda is wrapped so it yields a
   * {@code Stream}, since flatMap requires a Stream-returning mapper. If any
   * step cannot be performed safely, the original expression is returned
   * unchanged.
   */
  static class TransformAndConcatConversionRule extends LambdaParametersTypeConversionDescriptor {
    public TransformAndConcatConversionRule() {
      super("$q$.transformAndConcat($params$)", "$q$.flatMap($params$)");
    }

    @Override
    public PsiExpression replace(PsiExpression expression) {
      PsiExpression argument = ((PsiMethodCallExpression)expression).getArgumentList().getExpressions()[0];
      PsiAnonymousClass anonymousClass;
      // Step 1: anonymous Function implementations become lambdas when possible.
      if (argument instanceof PsiNewExpression && (anonymousClass = ((PsiNewExpression)argument).getAnonymousClass()) != null) {
        if (AnonymousCanBeLambdaInspection.canBeConvertedToLambda(anonymousClass, true)) {
          argument = AnonymousCanBeLambdaInspection.replacePsiElementWithLambda(argument, true, true);
        };
      }
      final JavaPsiFacade javaPsiFacade = JavaPsiFacade.getInstance(expression.getProject());
      // Step 2: anything that is still not a functional expression is wrapped
      // as a method reference to its apply().
      if (argument != null && !(argument instanceof PsiFunctionalExpression)) {
        argument = (PsiExpression)argument.replace(javaPsiFacade.getElementFactory().createExpressionFromText("(" + argument.getText() + ")::apply", null));
      }
      // Step 3: method references are expanded to lambdas so their return
      // expressions can be rewritten below.
      if (argument instanceof PsiMethodReferenceExpression) {
        argument = ReplaceMethodRefWithLambdaIntention.convertMethodReferenceToLambda((PsiMethodReferenceExpression)argument);
      }
      if (argument instanceof PsiLambdaExpression) {
        // Each entry: a returned expression plus whether its type is a Collection
        // (which has .stream()) as opposed to a bare Iterable.
        List<Pair<PsiExpression, Boolean>> iterableReturnValues = new SmartList<Pair<PsiExpression, Boolean>>();
        final PsiElement body = ((PsiLambdaExpression)argument).getBody();
        final PsiClass collection = javaPsiFacade.findClass(CommonClassNames.JAVA_UTIL_COLLECTION, expression.getResolveScope());
        if (collection == null) return expression;
        final PsiClass iterable = javaPsiFacade.findClass(CommonClassNames.JAVA_LANG_ITERABLE, expression.getResolveScope());
        if (iterable == null) return expression;
        if (body instanceof PsiCodeBlock) {
          // Block body: every return statement must be classifiable, otherwise
          // the conversion is abandoned.
          for (PsiReturnStatement statement : PsiUtil.findReturnStatements((PsiCodeBlock)body)) {
            final PsiExpression retValue = statement.getReturnValue();
            if (!determineType(retValue, iterableReturnValues, iterable, collection)) {
              return expression;
            }
          }
        }
        else if (!(body instanceof PsiExpression) || !determineType((PsiExpression)body, iterableReturnValues, iterable, collection)) {
          return expression;
        }
        for (Pair<PsiExpression, Boolean> returnValueAndIsCollection : iterableReturnValues) {
          convertToStream(returnValueAndIsCollection.getFirst(), returnValueAndIsCollection.getSecond());
        }
      }
      else {
        return expression;
      }
      return super.replace(expression);
    }

    /**
     * Classifies one returned expression. Null-typed returns are tolerated
     * (true, nothing recorded); Iterable-typed returns are recorded together
     * with an is-Collection flag; anything else makes the whole conversion
     * impossible (false).
     */
    private static boolean determineType(PsiExpression retValue,
                                         List<Pair<PsiExpression, Boolean>> iterableReturnValues,
                                         PsiClass iterable,
                                         PsiClass collection) {
      if (retValue == null) return false;
      final PsiType type = retValue.getType();
      if (PsiType.NULL.equals(type)) {
        return true;
      }
      if (type instanceof PsiClassType) {
        final PsiClass resolvedClass = ((PsiClassType)type).resolve();
        if (InheritanceUtil.isInheritorOrSelf(resolvedClass, iterable, true)) {
          final boolean isCollection = InheritanceUtil.isInheritorOrSelf(resolvedClass, collection, true);
          iterableReturnValues.add(Pair.create(retValue, isCollection));
          return true;
        }
      }
      return false;
    }

    /**
     * Replaces a returned Iterable expression with a Stream-producing one:
     * Collections get {@code .stream()}; bare Iterables go through
     * {@code StreamSupport.stream(it.spliterator(), false)}. Wrapping
     * parentheses are only kept when precedence requires them.
     */
    private static void convertToStream(@NotNull PsiExpression returnValue, boolean isCollection) {
      final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(returnValue.getProject());
      PsiExpression newExpression;
      if (isCollection) {
        String expressionAsText = "(" + returnValue.getText() + ").stream()";
        newExpression = elementFactory.createExpressionFromText(expressionAsText, returnValue);
        ParenthesesUtils.removeParentheses(newExpression, false);
      }
      else {
        final String methodCall = "(" + returnValue.getText() + ")";
        final boolean needParentheses = ParenthesesUtils
          .areParenthesesNeeded((PsiParenthesizedExpression)elementFactory.createExpressionFromText(methodCall, null), false);
        String expressionAsText = "java.util.stream.StreamSupport.stream(" +
                                  (needParentheses ? methodCall : methodCall.substring(1, methodCall.length() - 1)) +
                                  ".spliterator(), false)";
        newExpression = elementFactory.createExpressionFromText(expressionAsText, returnValue);
      }
      returnValue.replace(newExpression);
    }
  }

  /**
   * Rewrites {@code it.filter(SomeClass.class)} into
   * {@code it.filter((SomeClass.class)::isInstance)}, dropping redundant
   * parentheses around the qualifier of the new method reference.
   */
  private static class GuavaFilterInstanceOfConversionDescriptor extends TypeConversionDescriptor {
    public GuavaFilterInstanceOfConversionDescriptor() {
      super("$it$.filter($p$)", "$it$." + StreamApiConstants.FILTER + "($p$)");
    }

    @Override
    public PsiExpression replace(PsiExpression expression) {
      final PsiExpression argument = ((PsiMethodCallExpression)expression).getArgumentList().getExpressions()[0];
      final PsiExpression newArgument =
        JavaPsiFacade.getElementFactory(expression.getProject()).createExpressionFromText("(" + argument.getText() + ")::isInstance", argument);
      ParenthesesUtils.removeParentheses((PsiExpression)((PsiMethodReferenceExpression)newArgument).getQualifier(), false);
      argument.replace(newArgument);
      return super.replace(expression);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs; import java.io.FileNotFoundException; import java.io.IOException; import junit.framework.TestCase; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; /** * <p> * A collection of tests for the contract of the {@link FileSystem}. * This test should be used for general-purpose implementations of * {@link FileSystem}, that is, implementations that provide implementations * of all of the functionality of {@link FileSystem}. * </p> * <p> * To test a given {@link FileSystem} implementation create a subclass of this * test and override {@link #setUp()} to initialize the <code>fs</code> * {@link FileSystem} instance variable. 
* </p> */ public abstract class FileSystemContractBaseTest extends TestCase { protected FileSystem fs; private byte[] data = new byte[getBlockSize() * 2]; // two blocks of data { for (int i = 0; i < data.length; i++) { data[i] = (byte) (i % 10); } } @Override protected void tearDown() throws Exception { fs.delete(path("/test"), true); } protected int getBlockSize() { return 1024; } protected String getDefaultWorkingDirectory() { return "/user/" + System.getProperty("user.name"); } protected boolean renameSupported() { return true; } public void testWorkingDirectory() throws Exception { Path workDir = path(getDefaultWorkingDirectory()); assertEquals(workDir, fs.getWorkingDirectory()); fs.setWorkingDirectory(path(".")); assertEquals(workDir, fs.getWorkingDirectory()); fs.setWorkingDirectory(path("..")); assertEquals(workDir.getParent(), fs.getWorkingDirectory()); Path relativeDir = path("hadoop"); fs.setWorkingDirectory(relativeDir); assertEquals(relativeDir, fs.getWorkingDirectory()); Path absoluteDir = path("/test/hadoop"); fs.setWorkingDirectory(absoluteDir); assertEquals(absoluteDir, fs.getWorkingDirectory()); } public void testMkdirs() throws Exception { Path testDir = path("/test/hadoop"); assertFalse(fs.exists(testDir)); assertFalse(fs.isFile(testDir)); assertTrue(fs.mkdirs(testDir)); assertTrue(fs.exists(testDir)); assertFalse(fs.isFile(testDir)); assertTrue(fs.mkdirs(testDir)); assertTrue(fs.exists(testDir)); assertFalse(fs.isFile(testDir)); Path parentDir = testDir.getParent(); assertTrue(fs.exists(parentDir)); assertFalse(fs.isFile(parentDir)); Path grandparentDir = parentDir.getParent(); assertTrue(fs.exists(grandparentDir)); assertFalse(fs.isFile(grandparentDir)); } public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception { Path testDir = path("/test/hadoop"); assertFalse(fs.exists(testDir)); assertTrue(fs.mkdirs(testDir)); assertTrue(fs.exists(testDir)); createFile(path("/test/hadoop/file")); Path testSubDir = 
path("/test/hadoop/file/subdir"); try { fs.mkdirs(testSubDir); fail("Should throw IOException."); } catch (IOException e) { // expected } assertFalse(fs.exists(testSubDir)); Path testDeepSubDir = path("/test/hadoop/file/deep/sub/dir"); try { fs.mkdirs(testDeepSubDir); fail("Should throw IOException."); } catch (IOException e) { // expected } assertFalse(fs.exists(testDeepSubDir)); } public void testGetFileStatusThrowsExceptionForNonExistentFile() throws Exception { try { fs.getFileStatus(path("/test/hadoop/file")); fail("Should throw FileNotFoundException"); } catch (FileNotFoundException e) { // expected } } public void testListStatusReturnsNullForNonExistentFile() throws Exception { assertNull(fs.listStatus(path("/test/hadoop/file"))); } public void testListStatus() throws Exception { Path[] testDirs = { path("/test/hadoop/a"), path("/test/hadoop/b"), path("/test/hadoop/c/1"), }; assertFalse(fs.exists(testDirs[0])); for (Path path : testDirs) { assertTrue(fs.mkdirs(path)); } FileStatus[] paths = fs.listStatus(path("/test")); assertEquals(1, paths.length); assertEquals(path("/test/hadoop"), paths[0].getPath()); paths = fs.listStatus(path("/test/hadoop")); assertEquals(3, paths.length); assertEquals(path("/test/hadoop/a"), paths[0].getPath()); assertEquals(path("/test/hadoop/b"), paths[1].getPath()); assertEquals(path("/test/hadoop/c"), paths[2].getPath()); paths = fs.listStatus(path("/test/hadoop/a")); assertEquals(0, paths.length); } public void testWriteReadAndDeleteEmptyFile() throws Exception { writeReadAndDelete(0); } public void testWriteReadAndDeleteHalfABlock() throws Exception { writeReadAndDelete(getBlockSize() / 2); } public void testWriteReadAndDeleteOneBlock() throws Exception { writeReadAndDelete(getBlockSize()); } public void testWriteReadAndDeleteOneAndAHalfBlocks() throws Exception { writeReadAndDelete(getBlockSize() + (getBlockSize() / 2)); } public void testWriteReadAndDeleteTwoBlocks() throws Exception { writeReadAndDelete(getBlockSize() * 2); 
} private void writeReadAndDelete(int len) throws IOException { Path path = path("/test/hadoop/file"); fs.mkdirs(path.getParent()); FSDataOutputStream out = fs.create(path, false, fs.getConf().getInt("io.file.buffer.size", 4096), (short) 1, getBlockSize()); out.write(data, 0, len); out.close(); assertTrue("Exists", fs.exists(path)); assertEquals("Length", len, fs.getFileStatus(path).getLen()); FSDataInputStream in = fs.open(path); byte[] buf = new byte[len]; in.readFully(0, buf); in.close(); assertEquals(len, buf.length); for (int i = 0; i < buf.length; i++) { assertEquals("Position " + i, data[i], buf[i]); } assertTrue("Deleted", fs.delete(path, false)); assertFalse("No longer exists", fs.exists(path)); } public void testOverwrite() throws IOException { Path path = path("/test/hadoop/file"); fs.mkdirs(path.getParent()); createFile(path); assertTrue("Exists", fs.exists(path)); assertEquals("Length", data.length, fs.getFileStatus(path).getLen()); try { fs.create(path, false); fail("Should throw IOException."); } catch (IOException e) { // Expected } FSDataOutputStream out = fs.create(path, true); out.write(data, 0, data.length); out.close(); assertTrue("Exists", fs.exists(path)); assertEquals("Length", data.length, fs.getFileStatus(path).getLen()); } public void testWriteInNonExistentDirectory() throws IOException { Path path = path("/test/hadoop/file"); assertFalse("Parent doesn't exist", fs.exists(path.getParent())); createFile(path); assertTrue("Exists", fs.exists(path)); assertEquals("Length", data.length, fs.getFileStatus(path).getLen()); assertTrue("Parent exists", fs.exists(path.getParent())); } public void testDeleteNonExistentFile() throws IOException { Path path = path("/test/hadoop/file"); assertFalse("Doesn't exist", fs.exists(path)); assertFalse("No deletion", fs.delete(path, true)); } public void testDeleteRecursively() throws IOException { Path dir = path("/tmp/test/hadoop"); Path file = path("/tmp/test/hadoop/file"); Path subdir = 
path("/tmp/test/hadoop/subdir"); createFile(file); assertTrue("Created subdir", fs.mkdirs(subdir)); assertTrue("File exists", fs.exists(file)); assertTrue("Dir exists", fs.exists(dir)); assertTrue("Subdir exists", fs.exists(subdir)); try { fs.delete(dir, false); fail("Should throw IOException."); } catch (IOException e) { // expected } assertTrue("File still exists", fs.exists(file)); assertTrue("Dir still exists", fs.exists(dir)); assertTrue("Subdir still exists", fs.exists(subdir)); assertTrue("Deleted", fs.delete(dir, true)); assertFalse("File doesn't exist", fs.exists(file)); assertFalse("Dir doesn't exist", fs.exists(dir)); assertFalse("Subdir doesn't exist", fs.exists(subdir)); } public void testDeleteEmptyDirectory() throws IOException { Path dir = path("/tmp/test/hadoop"); assertTrue(fs.mkdirs(dir)); assertTrue("Dir exists", fs.exists(dir)); assertTrue("Deleted", fs.delete(dir, false)); assertFalse("Dir doesn't exist", fs.exists(dir)); } public void testRenameNonExistentPath() throws Exception { if (!renameSupported()) return; Path src = path("/test/hadoop/path"); Path dst = path("/test/new/newpath"); rename(src, dst, false, false, false); } public void testRenameFileMoveToNonExistentDirectory() throws Exception { if (!renameSupported()) return; Path src = path("/test/hadoop/file"); createFile(src); Path dst = path("/test/new/newfile"); rename(src, dst, false, true, false); } public void testRenameFileMoveToExistingDirectory() throws Exception { if (!renameSupported()) return; Path src = path("/test/hadoop/file"); createFile(src); Path dst = path("/test/new/newfile"); fs.mkdirs(dst.getParent()); rename(src, dst, true, false, true); } public void testRenameFileAsExistingFile() throws Exception { if (!renameSupported()) return; Path src = path("/test/hadoop/file"); createFile(src); Path dst = path("/test/new/newfile"); createFile(dst); rename(src, dst, false, true, true); } public void testRenameFileAsExistingDirectory() throws Exception { if 
(!renameSupported()) return; Path src = path("/test/hadoop/file"); createFile(src); Path dst = path("/test/new/newdir"); fs.mkdirs(dst); rename(src, dst, true, false, true); assertTrue("Destination changed", fs.exists(path("/test/new/newdir/file"))); } public void testRenameDirectoryMoveToNonExistentDirectory() throws Exception { if (!renameSupported()) return; Path src = path("/test/hadoop/dir"); fs.mkdirs(src); Path dst = path("/test/new/newdir"); rename(src, dst, false, true, false); } public void testRenameDirectoryMoveToExistingDirectory() throws Exception { if (!renameSupported()) return; Path src = path("/test/hadoop/dir"); fs.mkdirs(src); createFile(path("/test/hadoop/dir/file1")); createFile(path("/test/hadoop/dir/subdir/file2")); Path dst = path("/test/new/newdir"); fs.mkdirs(dst.getParent()); rename(src, dst, true, false, true); assertFalse("Nested file1 exists", fs.exists(path("/test/hadoop/dir/file1"))); assertFalse("Nested file2 exists", fs.exists(path("/test/hadoop/dir/subdir/file2"))); assertTrue("Renamed nested file1 exists", fs.exists(path("/test/new/newdir/file1"))); assertTrue("Renamed nested exists", fs.exists(path("/test/new/newdir/subdir/file2"))); } public void testRenameDirectoryAsExistingFile() throws Exception { if (!renameSupported()) return; Path src = path("/test/hadoop/dir"); fs.mkdirs(src); Path dst = path("/test/new/newfile"); createFile(dst); rename(src, dst, false, true, true); } public void testRenameDirectoryAsExistingDirectory() throws Exception { if (!renameSupported()) return; Path src = path("/test/hadoop/dir"); fs.mkdirs(src); createFile(path("/test/hadoop/dir/file1")); createFile(path("/test/hadoop/dir/subdir/file2")); Path dst = path("/test/new/newdir"); fs.mkdirs(dst); rename(src, dst, true, false, true); assertTrue("Destination changed", fs.exists(path("/test/new/newdir/dir"))); assertFalse("Nested file1 exists", fs.exists(path("/test/hadoop/dir/file1"))); assertFalse("Nested file2 exists", 
fs.exists(path("/test/hadoop/dir/subdir/file2"))); assertTrue("Renamed nested file1 exists", fs.exists(path("/test/new/newdir/dir/file1"))); assertTrue("Renamed nested exists", fs.exists(path("/test/new/newdir/dir/subdir/file2"))); } public void testInputStreamClosedTwice() throws IOException { //HADOOP-4760 according to Closeable#close() closing already-closed //streams should have no effect. Path src = path("/test/hadoop/file"); createFile(src); FSDataInputStream in = fs.open(src); in.close(); in.close(); } public void testOutputStreamClosedTwice() throws IOException { //HADOOP-4760 according to Closeable#close() closing already-closed //streams should have no effect. Path src = path("/test/hadoop/file"); FSDataOutputStream out = fs.create(src); out.writeChar('H'); //write some data out.close(); out.close(); } protected Path path(String pathString) { return new Path(pathString).makeQualified(fs); } protected void createFile(Path path) throws IOException { FSDataOutputStream out = fs.create(path); out.write(data, 0, data.length); out.close(); } private void rename(Path src, Path dst, boolean renameSucceeded, boolean srcExists, boolean dstExists) throws IOException { assertEquals("Rename result", renameSucceeded, fs.rename(src, dst)); assertEquals("Source exists", srcExists, fs.exists(src)); assertEquals("Destination exists", dstExists, fs.exists(dst)); } }
/** * Copyright 2016 Yahoo Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.yahoo.pulsar.broker.admin; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import java.net.URL; import java.util.ArrayList; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import org.apache.bookkeeper.test.PortManager; import org.apache.zookeeper.KeeperException.NoNodeException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testng.Assert; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import com.google.common.collect.BoundType; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Range; import com.google.common.collect.Sets; import com.google.common.hash.Hashing; import com.yahoo.pulsar.broker.PulsarServerException; import com.yahoo.pulsar.broker.PulsarService; import com.yahoo.pulsar.broker.ServiceConfiguration; import com.yahoo.pulsar.broker.auth.MockedPulsarServiceBaseTest; import com.yahoo.pulsar.broker.namespace.NamespaceEphemeralData; import com.yahoo.pulsar.broker.namespace.NamespaceService; import 
com.yahoo.pulsar.client.admin.PulsarAdmin; import com.yahoo.pulsar.client.admin.PulsarAdminException; import com.yahoo.pulsar.client.admin.PulsarAdminException.ConflictException; import com.yahoo.pulsar.client.admin.PulsarAdminException.NotAuthorizedException; import com.yahoo.pulsar.client.admin.PulsarAdminException.NotFoundException; import com.yahoo.pulsar.client.admin.PulsarAdminException.PreconditionFailedException; import com.yahoo.pulsar.client.admin.internal.PropertiesImpl; import com.yahoo.pulsar.client.api.Authentication; import com.yahoo.pulsar.client.api.ClientConfiguration; import com.yahoo.pulsar.client.api.Consumer; import com.yahoo.pulsar.client.api.ConsumerConfiguration; import com.yahoo.pulsar.client.api.Message; import com.yahoo.pulsar.client.api.Producer; import com.yahoo.pulsar.client.api.ProducerConfiguration; import com.yahoo.pulsar.client.api.ProducerConfiguration.MessageRoutingMode; import com.yahoo.pulsar.client.api.PulsarClient; import com.yahoo.pulsar.client.api.SubscriptionType; import com.yahoo.pulsar.common.naming.DestinationName; import com.yahoo.pulsar.common.naming.NamespaceBundle; import com.yahoo.pulsar.common.naming.NamespaceBundleFactory; import com.yahoo.pulsar.common.naming.NamespaceBundles; import com.yahoo.pulsar.common.naming.NamespaceName; import com.yahoo.pulsar.common.policies.data.AuthAction; import com.yahoo.pulsar.common.policies.data.AutoFailoverPolicyData; import com.yahoo.pulsar.common.policies.data.AutoFailoverPolicyType; import com.yahoo.pulsar.common.policies.data.BacklogQuota; import com.yahoo.pulsar.common.policies.data.BacklogQuota.BacklogQuotaType; import com.yahoo.pulsar.common.policies.data.BacklogQuota.RetentionPolicy; import com.yahoo.pulsar.common.policies.data.BrokerAssignment; import com.yahoo.pulsar.common.policies.data.ClusterData; import com.yahoo.pulsar.common.policies.data.NamespaceIsolationData; import com.yahoo.pulsar.common.policies.data.NamespaceOwnershipStatus; import 
com.yahoo.pulsar.common.policies.data.PartitionedTopicStats; import com.yahoo.pulsar.common.policies.data.PersistencePolicies; import com.yahoo.pulsar.common.policies.data.PersistentTopicInternalStats; import com.yahoo.pulsar.common.policies.data.PersistentTopicStats; import com.yahoo.pulsar.common.policies.data.Policies; import com.yahoo.pulsar.common.policies.data.PropertyAdmin; import com.yahoo.pulsar.common.policies.data.RetentionPolicies; import com.yahoo.pulsar.common.util.ObjectMapperFactory; public class AdminApiTest extends MockedPulsarServiceBaseTest { private static final Logger LOG = LoggerFactory.getLogger(AdminApiTest.class); private PulsarService otherPulsar; private PulsarAdmin otheradmin; private NamespaceBundleFactory bundleFactory; private final int SECONDARY_BROKER_PORT = PortManager.nextFreePort(); private final int SECONDARY_BROKER_WEBSERVICE_PORT = PortManager.nextFreePort(); @BeforeMethod @Override public void setup() throws Exception { conf.setLoadBalancerEnabled(true); super.internalSetup(); bundleFactory = new NamespaceBundleFactory(pulsar, Hashing.crc32()); // create otherbroker to test redirect on calls that need // namespace ownership ServiceConfiguration otherconfig = new ServiceConfiguration(); otherconfig.setBrokerServicePort(SECONDARY_BROKER_PORT); otherconfig.setWebServicePort(SECONDARY_BROKER_WEBSERVICE_PORT); otherconfig.setLoadBalancerEnabled(false); otherconfig.setBindOnLocalhost(true); otherconfig.setClusterName("test"); otherPulsar = startBroker(otherconfig); otheradmin = new PulsarAdmin(new URL("http://127.0.0.1" + ":" + SECONDARY_BROKER_WEBSERVICE_PORT), (Authentication) null); // Setup namespaces admin.clusters().createCluster("use", new ClusterData("http://127.0.0.1" + ":" + BROKER_WEBSERVICE_PORT)); PropertyAdmin propertyAdmin = new PropertyAdmin(Lists.newArrayList("role1", "role2"), Sets.newHashSet("use")); admin.properties().createProperty("prop-xyz", propertyAdmin); 
admin.namespaces().createNamespace("prop-xyz/use/ns1"); } @AfterMethod @Override public void cleanup() throws Exception { super.internalCleanup(); otheradmin.close(); otherPulsar.close(); } @DataProvider(name = "numBundles") public static Object[][] numBundles() { return new Object[][] { { 1 }, { 4 } }; } @DataProvider(name = "bundling") public static Object[][] bundling() { return new Object[][] { { 0 }, { 4 } }; } @Test public void clusters() throws PulsarAdminException { admin.clusters().createCluster("usw", new ClusterData("http://broker.messaging.use.example.com" + ":" + BROKER_WEBSERVICE_PORT)); assertEquals(admin.clusters().getClusters(), Lists.newArrayList("use", "usw")); assertEquals(admin.clusters().getCluster("use"), new ClusterData("http://127.0.0.1" + ":" + BROKER_WEBSERVICE_PORT)); admin.clusters().updateCluster("usw", new ClusterData("http://new-broker.messaging.usw.example.com" + ":" + BROKER_WEBSERVICE_PORT)); assertEquals(admin.clusters().getClusters(), Lists.newArrayList("use", "usw")); assertEquals(admin.clusters().getCluster("usw"), new ClusterData("http://new-broker.messaging.usw.example.com" + ":" + BROKER_WEBSERVICE_PORT)); admin.clusters().updateCluster("usw", new ClusterData("http://new-broker.messaging.usw.example.com" + ":" + BROKER_WEBSERVICE_PORT, "https://new-broker.messaging.usw.example.com" + ":" + BROKER_WEBSERVICE_PORT_TLS)); assertEquals(admin.clusters().getClusters(), Lists.newArrayList("use", "usw")); assertEquals(admin.clusters().getCluster("usw"), new ClusterData("http://new-broker.messaging.usw.example.com" + ":" + BROKER_WEBSERVICE_PORT, "https://new-broker.messaging.usw.example.com" + ":" + BROKER_WEBSERVICE_PORT_TLS)); admin.clusters().deleteCluster("usw"); assertEquals(admin.clusters().getClusters(), Lists.newArrayList("use")); admin.namespaces().deleteNamespace("prop-xyz/use/ns1"); admin.clusters().deleteCluster("use"); assertEquals(admin.clusters().getClusters(), Lists.newArrayList()); // Check name validation try { 
admin.clusters().createCluster("bf!", new ClusterData("http://dummy.messaging.example.com")); fail("should have failed"); } catch (PulsarAdminException e) { assertTrue(e instanceof PreconditionFailedException); } } @Test public void clusterNamespaceIsolationPolicies() throws PulsarAdminException { try { // create String policyName1 = "policy-1"; NamespaceIsolationData nsPolicyData1 = new NamespaceIsolationData(); nsPolicyData1.namespaces = new ArrayList<String>(); nsPolicyData1.namespaces.add("other/use/other.*"); nsPolicyData1.primary = new ArrayList<String>(); nsPolicyData1.primary.add("prod1-broker[4-6].messaging.use.example.com"); nsPolicyData1.secondary = new ArrayList<String>(); nsPolicyData1.secondary.add("prod1-broker.*.messaging.use.example.com"); nsPolicyData1.auto_failover_policy = new AutoFailoverPolicyData(); nsPolicyData1.auto_failover_policy.policy_type = AutoFailoverPolicyType.min_available; nsPolicyData1.auto_failover_policy.parameters = new HashMap<String, String>(); nsPolicyData1.auto_failover_policy.parameters.put("min_limit", "1"); nsPolicyData1.auto_failover_policy.parameters.put("usage_threshold", "100"); admin.clusters().createNamespaceIsolationPolicy("use", policyName1, nsPolicyData1); String policyName2 = "policy-2"; NamespaceIsolationData nsPolicyData2 = new NamespaceIsolationData(); nsPolicyData2.namespaces = new ArrayList<String>(); nsPolicyData2.namespaces.add("other/use/other.*"); nsPolicyData2.primary = new ArrayList<String>(); nsPolicyData2.primary.add("prod1-broker[4-6].messaging.use.example.com"); nsPolicyData2.secondary = new ArrayList<String>(); nsPolicyData2.secondary.add("prod1-broker.*.messaging.use.example.com"); nsPolicyData2.auto_failover_policy = new AutoFailoverPolicyData(); nsPolicyData2.auto_failover_policy.policy_type = AutoFailoverPolicyType.min_available; nsPolicyData2.auto_failover_policy.parameters = new HashMap<String, String>(); nsPolicyData2.auto_failover_policy.parameters.put("min_limit", "1"); 
nsPolicyData2.auto_failover_policy.parameters.put("usage_threshold", "100"); admin.clusters().createNamespaceIsolationPolicy("use", policyName2, nsPolicyData2); // verify create indirectly with get Map<String, NamespaceIsolationData> policiesMap = admin.clusters().getNamespaceIsolationPolicies("use"); assertEquals(policiesMap.get(policyName1), nsPolicyData1); assertEquals(policiesMap.get(policyName2), nsPolicyData2); // verify update of primary nsPolicyData1.primary.remove(0); nsPolicyData1.primary.add("prod1-broker[1-2].messaging.use.example.com"); admin.clusters().updateNamespaceIsolationPolicy("use", policyName1, nsPolicyData1); // verify primary change policiesMap = admin.clusters().getNamespaceIsolationPolicies("use"); assertEquals(policiesMap.get(policyName1), nsPolicyData1); // verify update of secondary nsPolicyData1.secondary.remove(0); nsPolicyData1.secondary.add("prod1-broker[3-4].messaging.use.example.com"); admin.clusters().updateNamespaceIsolationPolicy("use", policyName1, nsPolicyData1); // verify secondary change policiesMap = admin.clusters().getNamespaceIsolationPolicies("use"); assertEquals(policiesMap.get(policyName1), nsPolicyData1); // verify update of failover policy limit nsPolicyData1.auto_failover_policy.parameters.put("min_limit", "10"); admin.clusters().updateNamespaceIsolationPolicy("use", policyName1, nsPolicyData1); // verify min_limit change policiesMap = admin.clusters().getNamespaceIsolationPolicies("use"); assertEquals(policiesMap.get(policyName1), nsPolicyData1); // verify update of failover usage_threshold limit nsPolicyData1.auto_failover_policy.parameters.put("usage_threshold", "80"); admin.clusters().updateNamespaceIsolationPolicy("use", policyName1, nsPolicyData1); // verify usage_threshold change policiesMap = admin.clusters().getNamespaceIsolationPolicies("use"); assertEquals(policiesMap.get(policyName1), nsPolicyData1); // verify single get NamespaceIsolationData policy1Data = 
admin.clusters().getNamespaceIsolationPolicy("use", policyName1); assertEquals(policy1Data, nsPolicyData1); // verify creation of more than one policy admin.clusters().createNamespaceIsolationPolicy("use", policyName2, nsPolicyData1); try { admin.clusters().getNamespaceIsolationPolicy("use", "no-such-policy"); fail("should have raised exception"); } catch (PulsarAdminException e) { assertTrue(e instanceof NotFoundException); } // verify delete cluster failed try { admin.clusters().deleteCluster("use"); fail("should have raised exception"); } catch (PulsarAdminException e) { assertTrue(e instanceof PreconditionFailedException); } // verify delete admin.clusters().deleteNamespaceIsolationPolicy("use", policyName1); admin.clusters().deleteNamespaceIsolationPolicy("use", policyName2); try { admin.clusters().getNamespaceIsolationPolicy("use", policyName1); fail("should have raised exception"); } catch (PulsarAdminException e) { assertTrue(e instanceof NotFoundException); } try { admin.clusters().getNamespaceIsolationPolicy("use", policyName2); fail("should have raised exception"); } catch (PulsarAdminException e) { assertTrue(e instanceof NotFoundException); } try { admin.clusters().getNamespaceIsolationPolicies("usc"); fail("should have raised exception"); } catch (PulsarAdminException e) { assertTrue(e instanceof PreconditionFailedException); } try { admin.clusters().getNamespaceIsolationPolicy("usc", "no-such-cluster"); fail("should have raised exception"); } catch (PulsarAdminException e) { assertTrue(e instanceof PreconditionFailedException); } try { admin.clusters().createNamespaceIsolationPolicy("usc", "no-such-cluster", nsPolicyData1); fail("should have raised exception"); } catch (PulsarAdminException e) { assertTrue(e instanceof PreconditionFailedException); } try { admin.clusters().updateNamespaceIsolationPolicy("usc", "no-such-cluster", policy1Data); fail("should have raised exception"); } catch (PulsarAdminException e) { assertTrue(e instanceof 
PreconditionFailedException); } } catch (PulsarAdminException e) { LOG.warn("TEST FAILED [{}]", e.getMessage()); throw e; } } @Test public void brokers() throws Exception { List<String> list = admin.brokers().getActiveBrokers("use"); Assert.assertNotNull(list); Assert.assertEquals(list.size(), 2); Map<String, NamespaceOwnershipStatus> nsMap = admin.brokers().getOwnedNamespaces("use", list.get(0)); // since sla-monitor ns is not created nsMap.size() == 1 (for HeartBeat Namespace) Assert.assertEquals(1, nsMap.size()); for (String ns : nsMap.keySet()) { NamespaceOwnershipStatus nsStatus = nsMap.get(ns); if (ns.equals(NamespaceService.getHeartbeatNamespace(pulsar.getHost(), pulsar.getConfiguration()) + "/0x00000000_0xffffffff")) { assertEquals(nsStatus.broker_assignment, BrokerAssignment.shared); assertFalse(nsStatus.is_controlled); assertTrue(nsStatus.is_active); } } admin.namespaces().deleteNamespace("prop-xyz/use/ns1"); admin.clusters().deleteCluster("use"); assertEquals(admin.clusters().getClusters(), Lists.newArrayList()); } @Test(enabled = true) public void properties() throws PulsarAdminException { Set<String> allowedClusters = Sets.newHashSet("use"); PropertyAdmin propertyAdmin = new PropertyAdmin(Lists.newArrayList("role1", "role2"), allowedClusters); admin.properties().updateProperty("prop-xyz", propertyAdmin); assertEquals(admin.properties().getProperties(), Lists.newArrayList("prop-xyz")); assertEquals(admin.properties().getPropertyAdmin("prop-xyz"), propertyAdmin); PropertyAdmin newPropertyAdmin = new PropertyAdmin(Lists.newArrayList("role3", "role4"), allowedClusters); admin.properties().updateProperty("prop-xyz", newPropertyAdmin); assertEquals(admin.properties().getPropertyAdmin("prop-xyz"), newPropertyAdmin); admin.namespaces().deleteNamespace("prop-xyz/use/ns1"); admin.properties().deleteProperty("prop-xyz"); assertEquals(admin.properties().getProperties(), Lists.newArrayList()); // Check name validation try { 
admin.properties().createProperty("prop-xyz&", propertyAdmin); fail("should have failed"); } catch (PulsarAdminException e) { assertTrue(e instanceof PreconditionFailedException); } } @Test(invocationCount = 1) public void namespaces() throws PulsarAdminException, PulsarServerException, Exception { admin.clusters().createCluster("usw", new ClusterData()); PropertyAdmin propertyAdmin = new PropertyAdmin(Lists.newArrayList("role1", "role2"), Sets.newHashSet("use", "usw")); admin.properties().updateProperty("prop-xyz", propertyAdmin); assertEquals(admin.namespaces().getPolicies("prop-xyz/use/ns1").bundles, Policies.defaultBundle()); admin.namespaces().createNamespace("prop-xyz/use/ns2"); admin.namespaces().createNamespace("prop-xyz/use/ns3", 4); assertEquals(admin.namespaces().getPolicies("prop-xyz/use/ns3").bundles.numBundles, 4); assertEquals(admin.namespaces().getPolicies("prop-xyz/use/ns3").bundles.boundaries.size(), 5); admin.namespaces().deleteNamespace("prop-xyz/use/ns3"); try { admin.namespaces().createNamespace("non-existing/usw/ns1"); fail("Should not have passed"); } catch (NotFoundException e) { // Ok } assertEquals(admin.namespaces().getNamespaces("prop-xyz"), Lists.newArrayList("prop-xyz/use/ns1", "prop-xyz/use/ns2")); assertEquals(admin.namespaces().getNamespaces("prop-xyz", "use"), Lists.newArrayList("prop-xyz/use/ns1", "prop-xyz/use/ns2")); try { admin.namespaces().createNamespace("prop-xyz/usc/ns1"); fail("Should not have passed"); } catch (NotAuthorizedException e) { // Ok, got the non authorized exception since usc cluster is not in the allowed clusters list. 
}

// Grant a role every AuthAction on the namespace, then verify the grant is
// visible both through the full Policies object and the permissions view.
admin.namespaces().grantPermissionOnNamespace("prop-xyz/use/ns1", "my-role", EnumSet.allOf(AuthAction.class));

Policies policies = new Policies();
policies.auth_policies.namespace_auth.put("my-role", EnumSet.allOf(AuthAction.class));

assertEquals(admin.namespaces().getPolicies("prop-xyz/use/ns1"), policies);
assertEquals(admin.namespaces().getPermissions("prop-xyz/use/ns1"), policies.auth_policies.namespace_auth);

// No destinations have been created in the namespace yet.
assertEquals(admin.namespaces().getDestinations("prop-xyz/use/ns1"), Lists.newArrayList());

// Revoke the grant and verify the policies return to the empty-auth state.
admin.namespaces().revokePermissionsOnNamespace("prop-xyz/use/ns1", "my-role");
policies.auth_policies.namespace_auth.remove("my-role");

assertEquals(admin.namespaces().getPolicies("prop-xyz/use/ns1"), policies);

// Persistence policy: the default is (1, 1, 1, 0.0); set a new one and read it back.
assertEquals(admin.namespaces().getPersistence("prop-xyz/use/ns1"), new PersistencePolicies(1, 1, 1, 0.0));
admin.namespaces().setPersistence("prop-xyz/use/ns1", new PersistencePolicies(3, 2, 1, 10.0));

assertEquals(admin.namespaces().getPersistence("prop-xyz/use/ns1"), new PersistencePolicies(3, 2, 1, 10.0));

// Force topic creation and namespace being loaded
Producer producer = pulsarClient.createProducer("persistent://prop-xyz/use/ns1/my-topic");
producer.close();
admin.persistentTopics().delete("persistent://prop-xyz/use/ns1/my-topic");

// Unload the full bundle range of the namespace.
admin.namespaces().unloadNamespaceBundle("prop-xyz/use/ns1", "0x00000000_0xffffffff");

NamespaceName ns = new NamespaceName("prop-xyz/use/ns1");
// Now, w/ bundle policies, we will use default bundle
NamespaceBundle defaultBundle = bundleFactory.getFullBundle(ns);
int i = 0;
// Poll the ownership cache (up to ~10s) until the owner entry disappears,
// which signals the unload completed.
for (; i < 10; i++) {
    try {
        NamespaceEphemeralData data1 = pulsar.getNamespaceService().getOwnershipCache().getOwner(defaultBundle);
        LOG.info("Waiting for unload namespace {} to complete. 
Current service unit isDisabled: {}", defaultBundle, data1.isDisabled()); Thread.sleep(1000); } catch (NoNodeException nne) { break; } } assertTrue(i < 10); admin.namespaces().deleteNamespace("prop-xyz/use/ns1"); assertEquals(admin.namespaces().getNamespaces("prop-xyz", "use"), Lists.newArrayList("prop-xyz/use/ns2")); try { admin.namespaces().unload("prop-xyz/use/ns1"); fail("should have raised exception"); } catch (Exception e) { // OK excepted } // Force topic creation and namespace being loaded producer = pulsarClient.createProducer("persistent://prop-xyz/use/ns2/my-topic"); producer.close(); admin.persistentTopics().delete("persistent://prop-xyz/use/ns2/my-topic"); // both unload and delete should succeed for ns2 on other broker with a redirect // otheradmin.namespaces().unload("prop-xyz/use/ns2"); } @Test(enabled = true) public void persistentTopics() throws Exception { assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), Lists.newArrayList()); // Force to create a destination publishMessagesOnPersistentTopic("persistent://prop-xyz/use/ns1/ds2", 0); assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), Lists.newArrayList("persistent://prop-xyz/use/ns1/ds2")); // create consumer and subscription URL pulsarUrl = new URL("http://127.0.0.1" + ":" + BROKER_WEBSERVICE_PORT); ClientConfiguration clientConf = new ClientConfiguration(); clientConf.setStatsInterval(0, TimeUnit.SECONDS); PulsarClient client = PulsarClient.create(pulsarUrl.toString(), clientConf); ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Exclusive); Consumer consumer = client.subscribe("persistent://prop-xyz/use/ns1/ds2", "my-sub", conf); assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1/ds2"), Lists.newArrayList("my-sub")); publishMessagesOnPersistentTopic("persistent://prop-xyz/use/ns1/ds2", 10); PersistentTopicStats topicStats = 
admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1/ds2");
// Stats after publishing 10 messages: one subscription with one consumer,
// a backlog of 10, and no tracked publishers (publisher already closed).
assertEquals(topicStats.subscriptions.keySet(), Sets.newTreeSet(Lists.newArrayList("my-sub")));
assertEquals(topicStats.subscriptions.get("my-sub").consumers.size(), 1);
assertEquals(topicStats.subscriptions.get("my-sub").msgBacklog, 10);
assertEquals(topicStats.publishers.size(), 0);

// Internal (managed-ledger) stats should expose the same cursor.
PersistentTopicInternalStats internalStats = admin.persistentTopics()
        .getInternalStats("persistent://prop-xyz/use/ns1/ds2");
assertEquals(internalStats.cursors.keySet(), Sets.newTreeSet(Lists.newArrayList("my-sub")));

// Peek 3 messages without consuming; payloads are "message-0".."message-2".
List<Message> messages = admin.persistentTopics().peekMessages("persistent://prop-xyz/use/ns1/ds2", "my-sub", 3);
assertEquals(messages.size(), 3);
for (int i = 0; i < 3; i++) {
    String expectedMessage = "message-" + i;
    assertEquals(messages.get(i).getData(), expectedMessage.getBytes());
}

// Peeking more than the backlog (15 > 10) returns only what is available.
messages = admin.persistentTopics().peekMessages("persistent://prop-xyz/use/ns1/ds2", "my-sub", 15);
assertEquals(messages.size(), 10);
for (int i = 0; i < 10; i++) {
    String expectedMessage = "message-" + i;
    assertEquals(messages.get(i).getData(), expectedMessage.getBytes());
}

// Skip 5 of the 10 backlogged messages, then skip the rest.
admin.persistentTopics().skipMessages("persistent://prop-xyz/use/ns1/ds2", "my-sub", 5);
topicStats = admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1/ds2");
assertEquals(topicStats.subscriptions.get("my-sub").msgBacklog, 5);

admin.persistentTopics().skipAllMessages("persistent://prop-xyz/use/ns1/ds2", "my-sub");
topicStats = admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1/ds2");
assertEquals(topicStats.subscriptions.get("my-sub").msgBacklog, 0);

consumer.close();
client.close();

// With the consumer gone, the subscription can be deleted.
admin.persistentTopics().deleteSubscription("persistent://prop-xyz/use/ns1/ds2", "my-sub");
assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1/ds2"), Lists.newArrayList());

topicStats = admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1/ds2"); 
assertEquals(topicStats.subscriptions.keySet(), Sets.newTreeSet()); assertEquals(topicStats.publishers.size(), 0); try { admin.persistentTopics().skipAllMessages("persistent://prop-xyz/use/ns1/ds2", "my-sub"); } catch (NotFoundException e) { } admin.persistentTopics().delete("persistent://prop-xyz/use/ns1/ds2"); try { admin.persistentTopics().delete("persistent://prop-xyz/use/ns1/ds1"); fail("Should have received 404"); } catch (NotFoundException e) { } assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), Lists.newArrayList()); } @Test public void partitionedTopics() throws Exception { admin.persistentTopics().createPartitionedTopic("persistent://prop-xyz/use/ns1/ds1", 4); assertEquals( admin.persistentTopics().getPartitionedTopicMetadata("persistent://prop-xyz/use/ns1/ds1").partitions, 4); // check if the virtual topic doesn't get created List<String> destinations = admin.persistentTopics().getList("prop-xyz/use/ns1"); assertEquals(destinations.size(), 0); assertEquals( admin.persistentTopics().getPartitionedTopicMetadata("persistent://prop-xyz/use/ns1/ds2").partitions, 0); // create consumer and subscription URL pulsarUrl = new URL("http://127.0.0.1" + ":" + BROKER_WEBSERVICE_PORT); ClientConfiguration clientConf = new ClientConfiguration(); clientConf.setStatsInterval(0, TimeUnit.SECONDS); PulsarClient client = PulsarClient.create(pulsarUrl.toString(), clientConf); ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Exclusive); Consumer consumer = client.subscribe("persistent://prop-xyz/use/ns1/ds1", "my-sub", conf); assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1/ds1"), Lists.newArrayList("my-sub")); Consumer consumer1 = client.subscribe("persistent://prop-xyz/use/ns1/ds1", "my-sub-1", conf); assertEquals(Sets.newHashSet(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1/ds1")), Sets.newHashSet("my-sub", "my-sub-1")); consumer1.close(); 
admin.persistentTopics().deleteSubscription("persistent://prop-xyz/use/ns1/ds1", "my-sub-1");
assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1/ds1"),
        Lists.newArrayList("my-sub"));

// Publish 10 messages round-robin across the 4 partitions.
ProducerConfiguration prodConf = new ProducerConfiguration();
prodConf.setMessageRoutingMode(MessageRoutingMode.RoundRobinPartition);
Producer producer = client.createProducer("persistent://prop-xyz/use/ns1/ds1", prodConf);
for (int i = 0; i < 10; i++) {
    String message = "message-" + i;
    producer.send(message.getBytes());
}

// Publishing materializes the per-partition topics in the namespace listing.
assertEquals(Sets.newHashSet(admin.persistentTopics().getList("prop-xyz/use/ns1")), Sets.newHashSet(
        "persistent://prop-xyz/use/ns1/ds1-partition-0", "persistent://prop-xyz/use/ns1/ds1-partition-1",
        "persistent://prop-xyz/use/ns1/ds1-partition-2", "persistent://prop-xyz/use/ns1/ds1-partition-3"));

// test cumulative stats for partitioned topic
PartitionedTopicStats topicStats = admin.persistentTopics()
        .getPartitionedStats("persistent://prop-xyz/use/ns1/ds1", false);
assertEquals(topicStats.subscriptions.keySet(), Sets.newTreeSet(Lists.newArrayList("my-sub")));
assertEquals(topicStats.subscriptions.get("my-sub").consumers.size(), 1);
assertEquals(topicStats.subscriptions.get("my-sub").msgBacklog, 10);
assertEquals(topicStats.publishers.size(), 1);
// Without perPartition=true, the partitions map stays empty.
assertEquals(topicStats.partitions, Maps.newHashMap());

// test per partition stats for partitioned topic
topicStats = admin.persistentTopics().getPartitionedStats("persistent://prop-xyz/use/ns1/ds1", true);
assertEquals(topicStats.metadata.partitions, 4);
assertEquals(topicStats.partitions.keySet(),
        Sets.newHashSet("persistent://prop-xyz/use/ns1/ds1-partition-0",
                "persistent://prop-xyz/use/ns1/ds1-partition-1", "persistent://prop-xyz/use/ns1/ds1-partition-2",
                "persistent://prop-xyz/use/ns1/ds1-partition-3"));

PersistentTopicStats partitionStats = topicStats.partitions
        .get("persistent://prop-xyz/use/ns1/ds1-partition-0");
assertEquals(partitionStats.publishers.size(), 1);
assertEquals(partitionStats.subscriptions.get("my-sub").consumers.size(), 1);
// NOTE(review): this 3-arg assertEquals resolves to the (actual, expected, delta)
// overload — round-robin puts ~10/4 messages on partition-0, so 3 +/- 1 is
// presumably the intent; confirm the tolerance is deliberate.
assertEquals(partitionStats.subscriptions.get("my-sub").msgBacklog, 3, 1);

// Per-message skip is not supported at the partitioned-topic level.
try {
    admin.persistentTopics().skipMessages("persistent://prop-xyz/use/ns1/ds1", "my-sub", 5);
    fail("skip messages for partitioned topics should fail");
} catch (Exception e) {
    // ok
}

// skip-all is supported and must clear the cumulative backlog.
admin.persistentTopics().skipAllMessages("persistent://prop-xyz/use/ns1/ds1", "my-sub");
topicStats = admin.persistentTopics().getPartitionedStats("persistent://prop-xyz/use/ns1/ds1", false);
assertEquals(topicStats.subscriptions.get("my-sub").msgBacklog, 0);

producer.close();
consumer.close();

admin.persistentTopics().deleteSubscription("persistent://prop-xyz/use/ns1/ds1", "my-sub");
assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1/ds1"), Lists.newArrayList());

// Re-creating an existing partitioned topic must be rejected.
try {
    admin.persistentTopics().createPartitionedTopic("persistent://prop-xyz/use/ns1/ds1", 32);
    fail("Should have failed as the partitioned topic already exists");
} catch (ConflictException ce) {
}

producer = client.createProducer("persistent://prop-xyz/use/ns1/ds1");

destinations = admin.persistentTopics().getList("prop-xyz/use/ns1");
assertEquals(destinations.size(), 4);

// Deletion is refused while a producer keeps the topic busy.
try {
    admin.persistentTopics().deletePartitionedTopic("persistent://prop-xyz/use/ns1/ds1");
    fail("The topic is busy");
} catch (PreconditionFailedException pfe) {
    // ok
}

producer.close();
client.close();

admin.persistentTopics().deletePartitionedTopic("persistent://prop-xyz/use/ns1/ds1");

// After deletion the metadata reports 0 partitions, and the topic can be
// re-created with a different partition count.
assertEquals(
        admin.persistentTopics().getPartitionedTopicMetadata("persistent://prop-xyz/use/ns1/ds1").partitions,
        0);

admin.persistentTopics().createPartitionedTopic("persistent://prop-xyz/use/ns1/ds1", 32);

assertEquals(
        admin.persistentTopics().getPartitionedTopicMetadata("persistent://prop-xyz/use/ns1/ds1").partitions,
        32);

// Deleting a never-created partitioned topic must 404.
try {
    admin.persistentTopics().deletePartitionedTopic("persistent://prop-xyz/use/ns1/ds2");
    fail("Should have failed as the partitioned topic was not 
created"); } catch (NotFoundException nfe) { } admin.persistentTopics().deletePartitionedTopic("persistent://prop-xyz/use/ns1/ds1"); // delete a partitioned topic in a global namespace admin.persistentTopics().createPartitionedTopic("persistent://prop-xyz/global/ns1/ds1", 4); admin.persistentTopics().deletePartitionedTopic("persistent://prop-xyz/global/ns1/ds1"); } @Test(dataProvider = "numBundles") public void testDeleteNamespaceBundle(Integer numBundles) throws Exception { admin.namespaces().deleteNamespace("prop-xyz/use/ns1"); admin.namespaces().createNamespace("prop-xyz/use/ns1-bundles", numBundles); // since we have 2 brokers running, we try to let both of them acquire bundle ownership admin.lookups().lookupDestination("persistent://prop-xyz/use/ns1-bundles/ds1"); admin.lookups().lookupDestination("persistent://prop-xyz/use/ns1-bundles/ds2"); admin.lookups().lookupDestination("persistent://prop-xyz/use/ns1-bundles/ds3"); admin.lookups().lookupDestination("persistent://prop-xyz/use/ns1-bundles/ds4"); assertEquals(admin.namespaces().getDestinations("prop-xyz/use/ns1-bundles"), Lists.newArrayList()); admin.namespaces().deleteNamespace("prop-xyz/use/ns1-bundles"); assertEquals(admin.namespaces().getNamespaces("prop-xyz", "use"), Lists.newArrayList()); } @Test public void testNamespaceSplitBundle() throws Exception { // Force to create a destination final String namespace = "prop-xyz/use/ns1"; final String topicName = (new StringBuilder("persistent://")).append(namespace).append("/ds2").toString(); Producer producer = pulsarClient.createProducer(topicName); producer.send("message".getBytes()); publishMessagesOnPersistentTopic(topicName, 0); assertEquals(admin.persistentTopics().getList(namespace), Lists.newArrayList(topicName)); try { admin.namespaces().splitNamespaceBundle(namespace, "0x00000000_0xffffffff"); } catch (Exception e) { fail("split bundle shouldn't have thrown exception"); } // bundle-factory cache must have updated split bundles NamespaceBundles 
bundles = bundleFactory.getBundles(new NamespaceName(namespace)); String[] splitRange = { namespace + "/0x00000000_0x7fffffff", namespace + "/0x7fffffff_0xffffffff" }; for (int i = 0; i < bundles.getBundles().size(); i++) { assertEquals(bundles.getBundles().get(i).toString(), splitRange[i]); } producer.close(); } @Test public void testNamespaceUnloadBundle() throws Exception { assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), Lists.newArrayList()); // Force to create a destination publishMessagesOnPersistentTopic("persistent://prop-xyz/use/ns1/ds2", 0); assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), Lists.newArrayList("persistent://prop-xyz/use/ns1/ds2")); // create consumer and subscription ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Exclusive); Consumer consumer = pulsarClient.subscribe("persistent://prop-xyz/use/ns1/ds2", "my-sub", conf); assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1/ds2"), Lists.newArrayList("my-sub")); // Create producer Producer producer = pulsarClient.createProducer("persistent://prop-xyz/use/ns1/ds2"); for (int i = 0; i < 10; i++) { String message = "message-" + i; producer.send(message.getBytes()); } consumer.close(); producer.close(); try { admin.namespaces().unloadNamespaceBundle("prop-xyz/use/ns1", "0x00000000_0xffffffff"); } catch (Exception e) { fail("Unload shouldn't have throw exception"); } // check that no one owns the namespace NamespaceBundle bundle = bundleFactory.getBundle(new NamespaceName("prop-xyz/use/ns1"), Range.range(0L, BoundType.CLOSED, 0xffffffffL, BoundType.CLOSED)); assertFalse(pulsar.getNamespaceService().isServiceUnitOwned(bundle)); assertFalse(otherPulsar.getNamespaceService().isServiceUnitOwned(bundle)); pulsarClient.shutdown(); LOG.info("--- RELOAD ---"); // Force reload of namespace and wait for topic to be ready for (int i = 0; i < 30; i++) { try { 
admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1/ds2"); break; } catch (PulsarAdminException e) { LOG.warn("Failed to get topic stats.. {}", e.getMessage()); Thread.sleep(1000); } } admin.persistentTopics().deleteSubscription("persistent://prop-xyz/use/ns1/ds2", "my-sub"); admin.persistentTopics().delete("persistent://prop-xyz/use/ns1/ds2"); } @Test(dataProvider = "numBundles") public void testNamespaceBundleUnload(Integer numBundles) throws Exception { admin.namespaces().createNamespace("prop-xyz/use/ns1-bundles", numBundles); assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1-bundles"), Lists.newArrayList()); // Force to create a destination publishMessagesOnPersistentTopic("persistent://prop-xyz/use/ns1-bundles/ds2", 0); assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1-bundles"), Lists.newArrayList("persistent://prop-xyz/use/ns1-bundles/ds2")); // create consumer and subscription ConsumerConfiguration conf = new ConsumerConfiguration(); Consumer consumer = pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds2", "my-sub", conf); assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1-bundles/ds2"), Lists.newArrayList("my-sub")); // Create producer Producer producer = pulsarClient.createProducer("persistent://prop-xyz/use/ns1-bundles/ds2"); for (int i = 0; i < 10; i++) { String message = "message-" + i; producer.send(message.getBytes()); } NamespaceBundle bundle = (NamespaceBundle) pulsar.getNamespaceService() .getBundle(DestinationName.get("persistent://prop-xyz/use/ns1-bundles/ds2")); consumer.close(); producer.close(); admin.namespaces().unloadNamespaceBundle("prop-xyz/use/ns1-bundles", bundle.getBundleRange()); // check that no one owns the namespace bundle assertFalse(pulsar.getNamespaceService().isServiceUnitOwned(bundle)); assertFalse(otherPulsar.getNamespaceService().isServiceUnitOwned(bundle)); LOG.info("--- RELOAD ---"); // Force reload of namespace and wait for topic 
to be ready for (int i = 0; i < 30; i++) { try { admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1-bundles/ds2"); break; } catch (PulsarAdminException e) { LOG.warn("Failed to get topic stats.. {}", e.getMessage()); Thread.sleep(1000); } } admin.persistentTopics().deleteSubscription("persistent://prop-xyz/use/ns1-bundles/ds2", "my-sub"); admin.persistentTopics().delete("persistent://prop-xyz/use/ns1-bundles/ds2"); } @Test(dataProvider = "bundling") public void testClearBacklogOnNamespace(Integer numBundles) throws Exception { admin.namespaces().createNamespace("prop-xyz/use/ns1-bundles", numBundles); // create consumer and subscription pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds2", "my-sub"); pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds2", "my-sub-1"); pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds2", "my-sub-2"); pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds1", "my-sub"); pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds1", "my-sub-1"); // Create producer Producer producer = pulsarClient.createProducer("persistent://prop-xyz/use/ns1-bundles/ds2"); for (int i = 0; i < 10; i++) { String message = "message-" + i; producer.send(message.getBytes()); } producer.close(); // Create producer Producer producer1 = pulsarClient.createProducer("persistent://prop-xyz/use/ns1-bundles/ds1"); for (int i = 0; i < 10; i++) { String message = "message-" + i; producer1.send(message.getBytes()); } producer1.close(); admin.namespaces().clearNamespaceBacklogForSubscription("prop-xyz/use/ns1-bundles", "my-sub"); long backlog = admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1-bundles/ds2").subscriptions .get("my-sub").msgBacklog; assertEquals(backlog, 0); backlog = admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1-bundles/ds1").subscriptions .get("my-sub").msgBacklog; assertEquals(backlog, 0); backlog = 
admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1-bundles/ds1").subscriptions .get("my-sub-1").msgBacklog; assertEquals(backlog, 10); admin.namespaces().clearNamespaceBacklog("prop-xyz/use/ns1-bundles"); backlog = admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1-bundles/ds1").subscriptions .get("my-sub-1").msgBacklog; assertEquals(backlog, 0); backlog = admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1-bundles/ds2").subscriptions .get("my-sub-1").msgBacklog; assertEquals(backlog, 0); backlog = admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1-bundles/ds2").subscriptions .get("my-sub-2").msgBacklog; assertEquals(backlog, 0); } @Test(dataProvider = "bundling") public void testUnsubscribeOnNamespace(Integer numBundles) throws Exception { admin.namespaces().createNamespace("prop-xyz/use/ns1-bundles", numBundles); // create consumer and subscription Consumer consumer1 = pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds2", "my-sub"); Consumer consumer2 = pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds2", "my-sub-1"); /* Consumer consumer3 = */ pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds2", "my-sub-2"); Consumer consumer4 = pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds1", "my-sub"); Consumer consumer5 = pulsarClient.subscribe("persistent://prop-xyz/use/ns1-bundles/ds1", "my-sub-1"); try { admin.namespaces().unsubscribeNamespace("prop-xyz/use/ns1-bundles", "my-sub"); fail("should have failed"); } catch (PulsarAdminException.PreconditionFailedException e) { // ok } consumer1.close(); try { admin.namespaces().unsubscribeNamespace("prop-xyz/use/ns1-bundles", "my-sub"); fail("should have failed"); } catch (PulsarAdminException.PreconditionFailedException e) { // ok } consumer4.close(); admin.namespaces().unsubscribeNamespace("prop-xyz/use/ns1-bundles", "my-sub"); 
assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1-bundles/ds2"), Lists.newArrayList("my-sub-1", "my-sub-2")); assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1-bundles/ds1"), Lists.newArrayList("my-sub-1")); consumer2.close(); consumer5.close(); admin.namespaces().unsubscribeNamespace("prop-xyz/use/ns1-bundles", "my-sub-1"); assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1-bundles/ds2"), Lists.newArrayList("my-sub-2")); assertEquals(admin.persistentTopics().getSubscriptions("persistent://prop-xyz/use/ns1-bundles/ds1"), Lists.newArrayList()); } long messageTimestamp = System.currentTimeMillis(); long secondTimestamp = System.currentTimeMillis(); private void publishMessagesOnPersistentTopic(String topicName, int messages) throws Exception { publishMessagesOnPersistentTopic(topicName, messages, 0); } private void publishMessagesOnPersistentTopic(String topicName, int messages, int startIdx) throws Exception { Producer producer = pulsarClient.createProducer(topicName); for (int i = startIdx; i < (messages + startIdx); i++) { String message = "message-" + i; producer.send(message.getBytes()); } producer.close(); } @Test public void backlogQuotas() throws Exception { assertEquals(admin.namespaces().getBacklogQuotaMap("prop-xyz/use/ns1"), Maps.newTreeMap()); Map<BacklogQuotaType, BacklogQuota> quotaMap = admin.namespaces().getBacklogQuotaMap("prop-xyz/use/ns1"); assertEquals(quotaMap.size(), 0); assertEquals(quotaMap.get(BacklogQuotaType.destination_storage), null); admin.namespaces().setBacklogQuota("prop-xyz/use/ns1", new BacklogQuota(1 * 1024 * 1024 * 1024, RetentionPolicy.producer_exception)); quotaMap = admin.namespaces().getBacklogQuotaMap("prop-xyz/use/ns1"); assertEquals(quotaMap.size(), 1); assertEquals(quotaMap.get(BacklogQuotaType.destination_storage), new BacklogQuota(1 * 1024 * 1024 * 1024, RetentionPolicy.producer_exception)); 
admin.namespaces().removeBacklogQuota("prop-xyz/use/ns1"); quotaMap = admin.namespaces().getBacklogQuotaMap("prop-xyz/use/ns1"); assertEquals(quotaMap.size(), 0); assertEquals(quotaMap.get(BacklogQuotaType.destination_storage), null); } @Test public void statsOnNonExistingDestinations() throws Exception { try { admin.persistentTopics().getStats("persistent://prop-xyz/use/ns1/ghostTopic"); fail("The topic doesn't exist"); } catch (NotFoundException e) { // OK } } @Test public void testDeleteFailedReturnCode() throws Exception { String topicName = "persistent://prop-xyz/use/ns1/my-topic"; Producer producer = pulsarClient.createProducer(topicName); try { admin.persistentTopics().delete(topicName); fail("The topic is busy"); } catch (PreconditionFailedException e) { // OK } producer.close(); Consumer consumer = pulsarClient.subscribe(topicName, "sub"); try { admin.persistentTopics().delete(topicName); fail("The topic is busy"); } catch (PreconditionFailedException e) { // OK } try { admin.persistentTopics().deleteSubscription(topicName, "sub"); fail("The topic is busy"); } catch (PreconditionFailedException e) { // Ok } consumer.close(); // Now should succeed admin.persistentTopics().delete(topicName); } private static class IncompatiblePropertyAdmin { public Set<String> allowedClusters; public int someNewIntField; public String someNewString; } @Test public void testJacksonWithTypeDifferencies() throws Exception { String expectedJson = "{\"adminRoles\":[\"role1\",\"role2\"],\"allowedClusters\":[\"usw\",\"use\"]}"; IncompatiblePropertyAdmin r1 = ObjectMapperFactory.getThreadLocal().reader(IncompatiblePropertyAdmin.class) .readValue(expectedJson); assertEquals(r1.allowedClusters, Sets.newHashSet("use", "usw")); assertEquals(r1.someNewIntField, 0); assertEquals(r1.someNewString, null); } @Test public void testBackwardCompatiblity() throws Exception { assertEquals(admin.properties().getProperties(), Lists.newArrayList("prop-xyz")); 
assertEquals(admin.properties().getPropertyAdmin("prop-xyz").getAdminRoles(), Lists.newArrayList("role1", "role2")); assertEquals(admin.properties().getPropertyAdmin("prop-xyz").getAllowedClusters(), Sets.newHashSet("use")); // Try to deserialize property JSON with IncompatiblePropertyAdmin format // it should succeed ignoring missing fields PropertiesImpl properties = (PropertiesImpl) admin.properties(); IncompatiblePropertyAdmin result = properties.request(properties.getWebTarget().path("prop-xyz")) .get(IncompatiblePropertyAdmin.class); assertEquals(result.allowedClusters, Sets.newHashSet("use")); assertEquals(result.someNewIntField, 0); assertEquals(result.someNewString, null); admin.namespaces().deleteNamespace("prop-xyz/use/ns1"); admin.properties().deleteProperty("prop-xyz"); assertEquals(admin.properties().getProperties(), Lists.newArrayList()); } @Test public void persistentTopicsCursorReset() throws Exception { admin.namespaces().setRetention("prop-xyz/use/ns1", new RetentionPolicies(10, 10)); assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), Lists.newArrayList()); String topicName = "persistent://prop-xyz/use/ns1/cursorreset"; // create consumer and subscription ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Exclusive); Consumer consumer = pulsarClient.subscribe(topicName, "my-sub", conf); assertEquals(admin.persistentTopics().getSubscriptions(topicName), Lists.newArrayList("my-sub")); publishMessagesOnPersistentTopic(topicName, 5, 0); // Allow at least 1ms for messages to have different timestamps Thread.sleep(1); long messageTimestamp = System.currentTimeMillis(); publishMessagesOnPersistentTopic(topicName, 5, 5); List<Message> messages = admin.persistentTopics().peekMessages(topicName, "my-sub", 10); assertEquals(messages.size(), 10); for (int i = 0; i < 10; i++) { Message message = consumer.receive(); consumer.acknowledge(message); } // messages should still be available due to 
retention try { admin.persistentTopics().resetCursor(topicName, "my-sub", messageTimestamp); } catch (PulsarAdminException e) { // due to active consumer assertTrue(e instanceof PulsarAdminException.PreconditionFailedException, e.getMessage()); } consumer.close(); // retry after closing consumer admin.persistentTopics().resetCursor(topicName, "my-sub", messageTimestamp); consumer = pulsarClient.subscribe(topicName, "my-sub", conf); int receivedAfterReset = 0; for (int i = 4; i < 10; i++) { Message message = consumer.receive(); consumer.acknowledge(message); ++receivedAfterReset; String expected = "message-" + i; assertEquals(message.getData(), expected.getBytes()); } assertEquals(receivedAfterReset, 6); consumer.close(); admin.persistentTopics().deleteSubscription(topicName, "my-sub"); assertEquals(admin.persistentTopics().getSubscriptions(topicName), Lists.newArrayList()); admin.persistentTopics().delete(topicName); } @Test public void persistentTopicsCursorResetAfterReset() throws Exception { admin.namespaces().setRetention("prop-xyz/use/ns1", new RetentionPolicies(10, 10)); assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), Lists.newArrayList()); String topicName = "persistent://prop-xyz/use/ns1/cursorresetafterreset"; // create consumer and subscription ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Exclusive); Consumer consumer = pulsarClient.subscribe(topicName, "my-sub", conf); assertEquals(admin.persistentTopics().getSubscriptions(topicName), Lists.newArrayList("my-sub")); publishMessagesOnPersistentTopic(topicName, 5, 0); // Allow at least 1ms for messages to have different timestamps Thread.sleep(1); long firstTimestamp = System.currentTimeMillis(); publishMessagesOnPersistentTopic(topicName, 3, 5); Thread.sleep(1); long secondTimestamp = System.currentTimeMillis(); publishMessagesOnPersistentTopic(topicName, 2, 8); List<Message> messages = 
admin.persistentTopics().peekMessages(topicName, "my-sub", 10); assertEquals(messages.size(), 10); messages.forEach(message -> { LOG.info("Peeked message: {}", new String(message.getData())); }); for (int i = 0; i < 10; i++) { Message message = consumer.receive(); consumer.acknowledge(message); } // messages should still be available due to retention consumer.close(); admin.persistentTopics().resetCursor(topicName, "my-sub", firstTimestamp); messages = admin.persistentTopics().peekMessages(topicName, "my-sub", 10); assertEquals(messages.size(), 6); messages.forEach(message -> { LOG.info("Peeked message: {}", new String(message.getData())); }); consumer = pulsarClient.subscribe(topicName, "my-sub", conf); int receivedAfterReset = 0; // Should received messages from 4-9 for (int i = 4; i < 10; i++) { Message message = consumer.receive(); consumer.acknowledge(message); ++receivedAfterReset; String expected = "message-" + i; assertEquals(new String(message.getData()), expected); } assertEquals(receivedAfterReset, 6); consumer.close(); // Reset at 2nd timestamp receivedAfterReset = 0; admin.persistentTopics().resetCursor(topicName, "my-sub", secondTimestamp); consumer = pulsarClient.subscribe(topicName, "my-sub", conf); // Should received messages from 7-9 for (int i = 7; i < 10; i++) { Message message = consumer.receive(); consumer.acknowledge(message); ++receivedAfterReset; String expected = "message-" + i; assertEquals(new String(message.getData()), expected); } assertEquals(receivedAfterReset, 3); consumer.close(); admin.persistentTopics().deleteSubscription(topicName, "my-sub"); assertEquals(admin.persistentTopics().getSubscriptions(topicName), Lists.newArrayList()); admin.persistentTopics().delete(topicName); } @Test public void partitionedTopicsCursorReset() throws Exception { admin.namespaces().setRetention("prop-xyz/use/ns1", new RetentionPolicies(10, 10)); String topicName = "persistent://prop-xyz/use/ns1/partitionedcursorreset"; 
admin.persistentTopics().createPartitionedTopic(topicName, 4); // create consumer and subscription ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Exclusive); Consumer consumer = pulsarClient.subscribe(topicName, "my-sub", conf); List<String> destinations = admin.persistentTopics().getList("prop-xyz/use/ns1"); assertEquals(destinations.size(), 4); assertEquals(admin.persistentTopics().getSubscriptions(topicName), Lists.newArrayList("my-sub")); publishMessagesOnPersistentTopic(topicName, 5, 0); Thread.sleep(1); long timestamp = System.currentTimeMillis(); publishMessagesOnPersistentTopic(topicName, 5, 5); for (int i = 0; i < 10; i++) { Message message = consumer.receive(); consumer.acknowledge(message); } // messages should still be available due to retention try { admin.persistentTopics().resetCursor(topicName, "my-sub", timestamp); } catch (PulsarAdminException e) { // due to active consumer assertTrue(e instanceof PulsarAdminException.ServerSideErrorException, e.getMessage()); } consumer.close(); // retry after closing consumer admin.persistentTopics().resetCursor(topicName, "my-sub", timestamp); consumer = pulsarClient.subscribe(topicName, "my-sub", conf); Set<String> expectedMessages = Sets.newHashSet(); Set<String> receivedMessages = Sets.newHashSet(); for (int i = 4; i < 10; i++) { Message message = consumer.receive(); consumer.acknowledge(message); expectedMessages.add("message-" + i); receivedMessages.add(new String(message.getData())); } receivedMessages.removeAll(expectedMessages); assertEquals(receivedMessages.size(), 0); consumer.close(); admin.persistentTopics().deleteSubscription(topicName, "my-sub"); admin.persistentTopics().deletePartitionedTopic(topicName); } @Test public void persistentTopicsInvalidCursorReset() throws Exception { admin.namespaces().setRetention("prop-xyz/use/ns1", new RetentionPolicies(10, 10)); assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), 
Lists.newArrayList()); String topicName = "persistent://prop-xyz/use/ns1/invalidcursorreset"; // Force to create a destination publishMessagesOnPersistentTopic(topicName, 0); assertEquals(admin.persistentTopics().getList("prop-xyz/use/ns1"), Lists.newArrayList(topicName)); // create consumer and subscription URL pulsarUrl = new URL("http://127.0.0.1" + ":" + BROKER_WEBSERVICE_PORT); ClientConfiguration clientConf = new ClientConfiguration(); clientConf.setStatsInterval(0, TimeUnit.SECONDS); PulsarClient client = PulsarClient.create(pulsarUrl.toString(), clientConf); ConsumerConfiguration conf = new ConsumerConfiguration(); conf.setSubscriptionType(SubscriptionType.Exclusive); Consumer consumer = client.subscribe(topicName, "my-sub", conf); assertEquals(admin.persistentTopics().getSubscriptions(topicName), Lists.newArrayList("my-sub")); publishMessagesOnPersistentTopic(topicName, 10); List<Message> messages = admin.persistentTopics().peekMessages(topicName, "my-sub", 10); assertEquals(messages.size(), 10); for (int i = 0; i < 10; i++) { Message message = consumer.receive(); consumer.acknowledge(message); } CompletableFuture<Void> closeFuture = consumer.closeAsync(); closeFuture.get(); // use invalid timestamp try { admin.persistentTopics().resetCursor(topicName, "my-sub", System.currentTimeMillis() - 190000); } catch (Exception e) { // fail the test throw e; } admin.persistentTopics().resetCursor(topicName, "my-sub", System.currentTimeMillis() + 90000); consumer = client.subscribe(topicName, "my-sub", conf); consumer.close(); client.close(); admin.persistentTopics().deleteSubscription(topicName, "my-sub"); assertEquals(admin.persistentTopics().getSubscriptions(topicName), Lists.newArrayList()); admin.persistentTopics().delete(topicName); } @Test public void testObjectWithUnknowProperties() { class CustomPropertyAdmin extends PropertyAdmin { @SuppressWarnings("unused") public int newProperty; } PropertyAdmin pa = new PropertyAdmin(Lists.newArrayList("test_appid1", 
"test_appid2"), Sets.newHashSet("use")); CustomPropertyAdmin cpa = new CustomPropertyAdmin(); cpa.setAdminRoles(pa.getAdminRoles()); cpa.setAllowedClusters(pa.getAllowedClusters()); cpa.newProperty = 100; try { admin.properties().createProperty("test-property", cpa); } catch (Exception e) { fail("Should not happen."); } } }
/* TopFunctionSubscriber.java A publication of data of type TopFunction This file is derived from code automatically generated by the rtiddsgen command: rtiddsgen -language java -example <arch> .idl Example publication of type TopFunction automatically generated by 'rtiddsgen' To test them follow these steps: (1) Compile this file and the example subscription. (2) Start the subscription on the same domain used for with the command java TopFunctionSubscriber <domain_id> <sample_count> (3) Start the publication with the command java TopFunctionPublisher <domain_id> <sample_count> (4) [Optional] Specify the list of discovery initial peers and multicast receive addresses via an environment variable or a file (in the current working directory) called NDDS_DISCOVERY_PEERS. You can run any number of publishers and subscribers programs, and can add and remove them dynamically from the domain. Example: To run the example application on domain <domain_id>: Ensure that $(NDDSHOME)/lib/<arch> is on the dynamic library path for Java. 
On UNIX systems: add $(NDDSHOME)/lib/<arch> to the 'LD_LIBRARY_PATH' environment variable On Windows systems: add %NDDSHOME%\lib\<arch> to the 'Path' environment variable Run the Java applications: java -Djava.ext.dirs=$NDDSHOME/class TopFunctionPublisher <domain_id> java -Djava.ext.dirs=$NDDSHOME/class TopFunctionSubscriber <domain_id> modification history ------------ ------- */ import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Arrays; import java.util.ArrayList; import com.rti.dds.domain.*; import com.rti.dds.infrastructure.*; import com.rti.dds.subscription.*; import com.rti.dds.topic.*; import com.rti.ndds.config.*; // =========================================================================== public class TopFunctionSubscriber { // ----------------------------------------------------------------------- // Public Methods // ----------------------------------------------------------------------- public static void main(String[] args) { // --- Get domain ID --- // int domainId = 0; if (args.length >= 1) { domainId = Integer.valueOf(args[0]).intValue(); } // -- Get max loop count; 0 means infinite loop --- // int sampleCount = 0; if (args.length >= 2) { sampleCount = Integer.valueOf(args[1]).intValue(); } float filteredCpuUsage = 0.02f; System.out.println("Filter for CPU Usage >=..."); String input = System.console().readLine(); if (!input.trim().equals("")) filteredCpuUsage = Float.parseFloat(input); float filteredMemUsage = 0.50f; System.out.println("Filter for MEM Usage >=..."); input = System.console().readLine(); if (!input.trim().equals("")) filteredMemUsage = Float.parseFloat(input); /* Uncomment this to turn on additional logging Logger.get_instance().set_verbosity_by_category( LogCategory.NDDS_CONFIG_LOG_CATEGORY_API, LogVerbosity.NDDS_CONFIG_LOG_VERBOSITY_STATUS_ALL); */ // --- Run --- // subscriberMain(domainId, sampleCount, filteredCpuUsage, filteredMemUsage); } // 
----------------------------------------------------------------------- // Private Methods // ----------------------------------------------------------------------- // --- Constructors: ----------------------------------------------------- private TopFunctionSubscriber() { super(); } // ----------------------------------------------------------------------- private static void subscriberMain(int domainId, int sampleCount, float filteredCpuUsage, float filteredMemUsage) { DomainParticipant participant = null; Subscriber subscriber = null; Topic topic = null; DataReaderListener listener = null; TopFunctionDataReader reader = null; try { // --- Create participant --- // /* To customize participant QoS, use the configuration file USER_QOS_PROFILES.xml */ DomainParticipantQos participant_qos = new DomainParticipantQos(); DomainParticipantFactory.TheParticipantFactory .get_default_participant_qos(participant_qos); participant_qos.resource_limits.type_code_max_serialized_length = 10000; participant_qos.resource_limits.contentfilter_property_max_length = 32000; participant_qos.resource_limits.topic_data_max_length = 8 * 512; participant = DomainParticipantFactory.TheParticipantFactory. 
create_participant( domainId, participant_qos, null /* listener */, StatusKind.STATUS_MASK_NONE); if (participant == null) { System.err.println("create_participant error\n"); return; } // --- Create subscriber --- // /* To customize subscriber QoS, use the configuration file USER_QOS_PROFILES.xml */ subscriber = participant.create_subscriber( DomainParticipant.SUBSCRIBER_QOS_DEFAULT, null /* listener */, StatusKind.STATUS_MASK_NONE); if (subscriber == null) { System.err.println("create_subscriber error\n"); return; } // --- Create topic --- // /* Register type before creating topic */ String typeName = TopFunctionTypeSupport.get_type_name(); TopFunctionTypeSupport.register_type(participant, typeName); /* To customize topic QoS, use the configuration file USER_QOS_PROFILES.xml */ topic = participant.create_topic( "CS464/564 Project 2 cwebber", typeName, DomainParticipant.TOPIC_QOS_DEFAULT, null /* listener */, StatusKind.STATUS_MASK_NONE); if (topic == null) { System.err.println("create_topic error\n"); return; } TopFilter customfilter = new TopFilter(); participant.register_contentfilter("TopFilter", customfilter); System.out.println("registered"); ArrayList<String> params = new ArrayList<>(); params.add(Float.toString(filteredCpuUsage)); params.add(Float.toString(filteredMemUsage)); params.add("valid"); StringSeq parameters = new StringSeq(params); ContentFilteredTopic filteredtopic = participant .create_contentfilteredtopic_with_filter( "CS464/564 Project 2 cwebber", topic, "%0 %1 %2 cpuUsage memUsage", parameters, "TopFilter"); if (filteredtopic == null) { System.err.println("create_contentfilteredtopic_with_filter error, using alternate.\n"); } System.out.println("compiled"); // --- Create reader --- // listener = new TopFunctionListener(filteredCpuUsage, filteredMemUsage); /* To customize data reader QoS, use the configuration file USER_QOS_PROFILES.xml */ DataReaderQos datareader_qos = new DataReaderQos(); 
subscriber.get_default_datareader_qos(datareader_qos); datareader_qos.ownership.kind = OwnershipQosPolicyKind.EXCLUSIVE_OWNERSHIP_QOS; if (filteredtopic == null) { reader = (TopFunctionDataReader) subscriber.create_datareader( topic, datareader_qos, listener, StatusKind.STATUS_MASK_ALL); } else { reader = (TopFunctionDataReader) subscriber.create_datareader( filteredtopic, datareader_qos, listener, StatusKind.STATUS_MASK_ALL); } if (reader == null) { System.err.println("create_datareader error\n"); return; } // --- Wait for data --- // final long receivePeriodSec = 4; for (int count = 0; (sampleCount == 0) || (count < sampleCount); ++count) { System.out.println("TopFunction subscriber sleeping for " + receivePeriodSec + " sec..."); try { Thread.sleep(receivePeriodSec * 1000); // in millisec } catch (InterruptedException ix) { System.err.println("INTERRUPTED"); break; } } } finally { // --- Shutdown --- // if(participant != null) { participant.delete_contained_entities(); DomainParticipantFactory.TheParticipantFactory. delete_participant(participant); } /* RTI Connext provides the finalize_instance() method for users who want to release memory used by the participant factory singleton. Uncomment the following block of code for clean destruction of the participant factory singleton. 
*/ //DomainParticipantFactory.finalize_instance(); } } // ----------------------------------------------------------------------- // Private Types // ----------------------------------------------------------------------- // ======================================================================= private static class TopFunctionListener extends DataReaderAdapter { public TopFunctionListener(float cpu, float mem) { _cpu = cpu; _mem = mem; } public float _cpu = 0f; public float _mem = 0f; TopFunctionSeq _dataSeq = new TopFunctionSeq(); SampleInfoSeq _infoSeq = new SampleInfoSeq(); public void on_data_available(DataReader reader) { TopFunctionDataReader TopFunctionReader = (TopFunctionDataReader)reader; try { TopFunctionReader.take( _dataSeq, _infoSeq, ResourceLimitsQosPolicy.LENGTH_UNLIMITED, SampleStateKind.ANY_SAMPLE_STATE, ViewStateKind.ANY_VIEW_STATE, InstanceStateKind.ANY_INSTANCE_STATE); for(int i = 0; i < _dataSeq.size(); ++i) { SampleInfo info = (SampleInfo)_infoSeq.get(i); if (info.valid_data) { TopFunction inp = (TopFunction)_dataSeq.get(i); if (inp.cpuUsage >= _cpu && inp.memUsage >= _mem) { System.out.println(inp.toString("Received",0)); } } } } catch (RETCODE_NO_DATA noData) { // No data to process } finally { TopFunctionReader.return_loan(_dataSeq, _infoSeq); } } } }
package com.codahale.metrics.graphite;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.net.SocketFactory;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * A client to a Carbon server that sends all metrics after they have been pickled in configurable sized batches
 */
public class PickledGraphite implements GraphiteSender {

    /** A single (name, timestamp, value) datapoint awaiting pickling. */
    static class MetricTuple {
        String name;
        long timestamp;
        String value;

        MetricTuple(String name, long timestamp, String value) {
            this.name = name;
            this.timestamp = timestamp;
            this.value = value;
        }
    }

    /**
     * Minimally necessary pickle opcodes.
     */
    private static final char
            MARK = '(',
            STOP = '.',
            LONG = 'L',
            STRING = 'S',
            APPEND = 'a',
            LIST = 'l',
            TUPLE = 't',
            QUOTE = '\'',
            LF = '\n';

    private static final Logger LOGGER = LoggerFactory.getLogger(PickledGraphite.class);
    private final static int DEFAULT_BATCH_SIZE = 100;

    private final int batchSize;
    // graphite expects a python-pickled list of nested tuples.
    private final List<MetricTuple> metrics = new ArrayList<>();

    // Exactly one of (hostname, port) or address is set, per constructor.
    private final String hostname;
    private final int port;
    private final InetSocketAddress address;
    private final SocketFactory socketFactory;
    private final Charset charset;

    private Socket socket;
    private Writer writer;
    private int failures;

    /**
     * Creates a new client which connects to the given address using the default {@link SocketFactory}. This defaults
     * to a batchSize of 100
     *
     * @param address the address of the Carbon server
     */
    public PickledGraphite(InetSocketAddress address) {
        this(address, DEFAULT_BATCH_SIZE);
    }

    /**
     * Creates a new client which connects to the given address using the default {@link SocketFactory}.
     *
     * @param address   the address of the Carbon server
     * @param batchSize how many metrics are bundled into a single pickle request to graphite
     */
    public PickledGraphite(InetSocketAddress address, int batchSize) {
        this(address, SocketFactory.getDefault(), batchSize);
    }

    /**
     * Creates a new client which connects to the given address and socket factory.
     *
     * @param address       the address of the Carbon server
     * @param socketFactory the socket factory
     * @param batchSize     how many metrics are bundled into a single pickle request to graphite
     */
    public PickledGraphite(InetSocketAddress address, SocketFactory socketFactory, int batchSize) {
        this(address, socketFactory, UTF_8, batchSize);
    }

    /**
     * Creates a new client which connects to the given address and socket factory using the given character set.
     *
     * @param address       the address of the Carbon server
     * @param socketFactory the socket factory
     * @param charset       the character set used by the server
     * @param batchSize     how many metrics are bundled into a single pickle request to graphite
     */
    public PickledGraphite(InetSocketAddress address, SocketFactory socketFactory, Charset charset, int batchSize) {
        this.address = address;
        this.hostname = null;
        this.port = -1;
        this.socketFactory = socketFactory;
        this.charset = charset;
        this.batchSize = batchSize;
    }

    /**
     * Creates a new client which connects to the given address using the default {@link SocketFactory}. This defaults
     * to a batchSize of 100
     *
     * @param hostname the hostname of the Carbon server
     * @param port     the port of the Carbon server
     */
    public PickledGraphite(String hostname, int port) {
        this(hostname, port, DEFAULT_BATCH_SIZE);
    }

    /**
     * Creates a new client which connects to the given address using the default {@link SocketFactory}.
     *
     * @param hostname  the hostname of the Carbon server
     * @param port      the port of the Carbon server
     * @param batchSize how many metrics are bundled into a single pickle request to graphite
     */
    public PickledGraphite(String hostname, int port, int batchSize) {
        this(hostname, port, SocketFactory.getDefault(), batchSize);
    }

    /**
     * Creates a new client which connects to the given address and socket factory.
     *
     * @param hostname      the hostname of the Carbon server
     * @param port          the port of the Carbon server
     * @param socketFactory the socket factory
     * @param batchSize     how many metrics are bundled into a single pickle request to graphite
     */
    public PickledGraphite(String hostname, int port, SocketFactory socketFactory, int batchSize) {
        this(hostname, port, socketFactory, UTF_8, batchSize);
    }

    /**
     * Creates a new client which connects to the given address and socket factory using the given character set.
     *
     * @param hostname      the hostname of the Carbon server
     * @param port          the port of the Carbon server
     * @param socketFactory the socket factory
     * @param charset       the character set used by the server
     * @param batchSize     how many metrics are bundled into a single pickle request to graphite
     */
    public PickledGraphite(String hostname, int port, SocketFactory socketFactory, Charset charset, int batchSize) {
        this.address = null;
        this.hostname = hostname;
        this.port = port;
        this.socketFactory = socketFactory;
        this.charset = charset;
        this.batchSize = batchSize;
    }

    @Override
    public void connect() throws IllegalStateException, IOException {
        if (isConnected()) {
            throw new IllegalStateException("Already connected");
        }

        // Resolve lazily so DNS changes between reconnects are picked up.
        InetSocketAddress address = this.address;
        if (address == null) {
            address = new InetSocketAddress(hostname, port);
        }
        if (address.getAddress() == null) {
            throw new UnknownHostException(address.getHostName());
        }

        this.socket = socketFactory.createSocket(address.getAddress(), address.getPort());
        this.writer = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream(), charset));
    }

    @Override
    public boolean isConnected() {
        return socket != null && socket.isConnected() && !socket.isClosed();
    }

    /**
     * Convert the metric to a python tuple of the form:
     * <p/>
     * (timestamp, (name, value))
     * <p/>
     * And add it to the list of metrics. If we reach the batch size, write them out.
     *
     * @param name      the name of the metric
     * @param value     the value of the metric
     * @param timestamp the timestamp of the metric
     * @throws IOException if there was an error sending the metric
     */
    @Override
    public void send(String name, String value, long timestamp) throws IOException {
        metrics.add(new MetricTuple(sanitize(name), timestamp, sanitize(value)));

        if (metrics.size() >= batchSize) {
            writeMetrics();
        }
    }

    @Override
    public void flush() throws IOException {
        writeMetrics();
        if (writer != null) {
            writer.flush();
        }
    }

    @Override
    public void close() throws IOException {
        try {
            flush();
            if (writer != null) {
                writer.close();
            }
        } catch (IOException ex) {
            if (socket != null) {
                socket.close();
            }
            // Propagate the failure: the previous code swallowed it here,
            // so callers never learned that the final batch may have been
            // lost even though this method declares IOException.
            throw ex;
        } finally {
            this.socket = null;
            this.writer = null;
        }
    }

    @Override
    public int getFailures() {
        return failures;
    }

    /**
     * 1. Run the pickler script to package all the pending metrics into a single message
     * 2. Send the message to graphite
     * 3. Clear out the list of metrics
     */
    private void writeMetrics() throws IOException {
        if (!metrics.isEmpty()) {
            try {
                byte[] payload = pickleMetrics(metrics);
                // Carbon's pickle receiver expects a 4-byte big-endian length
                // header followed by the pickled payload.
                byte[] header = ByteBuffer.allocate(4).putInt(payload.length).array();

                @SuppressWarnings("resource")
                OutputStream outputStream = socket.getOutputStream();
                outputStream.write(header);
                outputStream.write(payload);
                outputStream.flush();

                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Wrote {} metrics", metrics.size());
                }
            } catch (IOException e) {
                this.failures++;
                throw e;
            } finally {
                // if there was an error, we might miss some data. for now, drop those on the floor and
                // try to keep going.
                metrics.clear();
            }
        }
    }

    /**
     * Pickles the given tuples into a python list of (name, (timestamp, value)) tuples.
     *
     * See: http://readthedocs.org/docs/graphite/en/1.0/feeding-carbon.html
     *
     * @throws IOException shouldn't happen because we write to memory.
     */
    byte[] pickleMetrics(List<MetricTuple> metrics) throws IOException {
        // Extremely rough estimate of 75 bytes per message
        ByteArrayOutputStream out = new ByteArrayOutputStream(metrics.size() * 75);
        Writer pickled = new OutputStreamWriter(out, charset);

        pickled.append(MARK);
        pickled.append(LIST);

        for (MetricTuple tuple : metrics) {
            // start the outer tuple
            pickled.append(MARK);

            // the metric name is a string.
            pickled.append(STRING);
            // the single quotes are to match python's repr("abcd")
            pickled.append(QUOTE);
            pickled.append(tuple.name);
            pickled.append(QUOTE);
            pickled.append(LF);

            // start the inner tuple
            pickled.append(MARK);

            // timestamp is a long
            pickled.append(LONG);
            pickled.append(Long.toString(tuple.timestamp));
            // the trailing L is to match python's repr(long(1234))
            pickled.append(LONG);
            pickled.append(LF);

            // and the value is a string.
            pickled.append(STRING);
            pickled.append(QUOTE);
            pickled.append(tuple.value);
            pickled.append(QUOTE);
            pickled.append(LF);

            pickled.append(TUPLE); // inner close
            pickled.append(TUPLE); // outer close

            pickled.append(APPEND);
        }

        // every pickle ends with STOP
        pickled.append(STOP);

        pickled.flush();
        return out.toByteArray();
    }

    protected String sanitize(String s) {
        return GraphiteSanitize.sanitize(s);
    }
}
package org.jopenocr.network;

import java.awt.AWTEvent;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.event.MouseEvent;
import java.awt.image.PixelGrabber;

import javax.swing.JPanel;

/**
 * A Swing panel the user draws a character into with the mouse. The drawing
 * can be auto-cropped to its bounding box and downsampled into the grid held
 * by an attached {@link Sample} component.
 */
public class Entry extends JPanel {

    public static final long serialVersionUID = 1;

    /**
     * The image that the user is drawing into.
     */
    protected Image entryImage;

    /**
     * A graphics handle to the image that the
     * user is drawing into.
     */
    protected Graphics entryGraphics;

    /**
     * The last x that the user was drawing at.
     */
    protected int lastX = -1;

    /**
     * The last y that the user was drawing at.
     */
    protected int lastY = -1;

    /**
     * The down sample component used with this
     * component.
     */
    protected Sample sample;

    /**
     * Specifies the left boundary of the cropping
     * rectangle.
     */
    protected int downSampleLeft;

    /**
     * Specifies the right boundary of the cropping
     * rectangle.
     */
    protected int downSampleRight;

    /**
     * Specifies the top boundary of the cropping
     * rectangle.
     */
    protected int downSampleTop;

    /**
     * Specifies the bottom boundary of the cropping
     * rectangle.
     */
    protected int downSampleBottom;

    /**
     * The downsample ratio for x.
     */
    protected double ratioX;

    /**
     * The downsample ratio for y
     */
    protected double ratioY;

    /**
     * The pixel map of what the user has drawn.
     * Used to downsample it.
     */
    protected int pixelMap[];

    /**
     * The constructor. Enables the AWT events this component processes
     * directly (mouse press/drag and component events).
     */
    public Entry() {
        enableEvents(AWTEvent.MOUSE_MOTION_EVENT_MASK
                | AWTEvent.MOUSE_EVENT_MASK
                | AWTEvent.COMPONENT_EVENT_MASK);
    }

    /**
     * Setup the internal image that the user draws onto.
     */
    protected void initImage() {
        entryImage = createImage(getWidth(), getHeight());
        entryGraphics = entryImage.getGraphics();
        entryGraphics.setColor(Color.white);
        entryGraphics.fillRect(0, 0, getWidth(), getHeight());
    }

    /**
     * Paint the drawn image, the component border, and the current cropping
     * box in red. (This intentionally overrides paint(), not paintComponent(),
     * matching the original AWT-style rendering of this component.)
     *
     * @param g The graphics context
     */
    @Override
    public void paint(Graphics g) {
        if (entryImage == null)
            initImage();
        g.drawImage(entryImage, 0, 0, this);
        g.setColor(Color.black);
        g.drawRect(0, 0, getWidth(), getHeight());
        g.setColor(Color.red);
        g.drawRect(downSampleLeft, downSampleTop,
                downSampleRight - downSampleLeft,
                downSampleBottom - downSampleTop);
    }

    /**
     * Record the stroke start point on mouse press.
     *
     * @param e The event.
     */
    @Override
    protected void processMouseEvent(MouseEvent e) {
        if (e.getID() != MouseEvent.MOUSE_PRESSED)
            return;
        lastX = e.getX();
        lastY = e.getY();
    }

    /**
     * Draw a line segment from the previous point to the current one while
     * the mouse is dragged.
     *
     * @param e The event.
     */
    @Override
    protected void processMouseMotionEvent(MouseEvent e) {
        if (e.getID() != MouseEvent.MOUSE_DRAGGED)
            return;
        entryGraphics.setColor(Color.black);
        entryGraphics.drawLine(lastX, lastY, e.getX(), e.getY());
        getGraphics().drawImage(entryImage, 0, 0, this);
        lastX = e.getX();
        lastY = e.getY();
    }

    /**
     * Set the sample control to use. The
     * sample control displays a downsampled
     * version of the character.
     *
     * @param s the down sample component
     */
    public void setSample(Sample s) {
        sample = s;
    }

    /**
     * Get the down sample component to be used
     * with this component.
     *
     * @return The down sample component.
     */
    public Sample getSample() {
        return sample;
    }

    /**
     * This method is called internally to
     * see if there are any pixels in the given
     * scan line. This method is used to perform
     * autocropping.
     *
     * @param y The horizontal line to scan.
     * @return True if there were no drawn pixels in this horizontal line
     *         (-1 is the white/background pixel value here).
     */
    protected boolean hLineClear(int y) {
        int w = entryImage.getWidth(this);
        for (int i = 0; i < w; i++) {
            if (pixelMap[(y * w) + i] != -1)
                return false;
        }
        return true;
    }

    /**
     * Determine whether a vertical scan line is free of drawn pixels.
     *
     * @param x The vertical line to scan.
     * @return True if there are no drawn pixels in the
     *         specified vertical line.
     */
    protected boolean vLineClear(int x) {
        int w = entryImage.getWidth(this);
        int h = entryImage.getHeight(this);
        for (int i = 0; i < h; i++) {
            if (pixelMap[(i * w) + x] != -1)
                return false;
        }
        return true;
    }

    /**
     * This method is called to automatically
     * crop the image so that whitespace is
     * removed: it finds the bounding box of the drawn pixels.
     * NOTE(review): if the image is completely blank, none of the bounds
     * are updated and the previous values are kept.
     *
     * @param w The width of the image.
     * @param h The height of the image
     */
    protected void findBounds(int w, int h) {
        /** Top line */
        for (int y = 0; y < h; y++) {
            if (!hLineClear(y)) {
                downSampleTop = y;
                break;
            }
        }
        /** Bottom line */
        for (int y = h - 1; y >= 0; y--) {
            if (!hLineClear(y)) {
                downSampleBottom = y;
                break;
            }
        }
        /** Left line */
        for (int x = 0; x < w; x++) {
            if (!vLineClear(x)) {
                downSampleLeft = x;
                break;
            }
        }
        /** Right line */
        for (int x = w - 1; x >= 0; x--) {
            if (!vLineClear(x)) {
                downSampleRight = x;
                break;
            }
        }
    }

    /**
     * Called to downsample a quadrant of the image.
     *
     * @param x The x coordinate of the resulting downsample.
     * @param y The y coordinate of the resulting downsample.
     * @return Returns true if there were ANY pixels in the specified quadrant.
     */
    protected boolean downSampleQuadrant(int x, int y) {
        int w = entryImage.getWidth(this);
        int h = entryImage.getHeight(this);
        int startX = (int) (downSampleLeft + (x * ratioX));
        int startY = (int) (downSampleTop + (y * ratioY));
        // The scan below is inclusive of endX/endY; clamp them to the last
        // pixel row/column. Previously, when the crop box touched the right
        // or bottom edge of the image, (int)(start + ratio) could land one
        // cell past the image and index outside pixelMap
        // (ArrayIndexOutOfBoundsException).
        int endX = Math.min((int) (startX + ratioX), w - 1);
        int endY = Math.min((int) (startY + ratioY), h - 1);

        for (int yy = startY; yy <= endY; yy++) {
            for (int xx = startX; xx <= endX; xx++) {
                int loc = xx + (yy * w);
                if (pixelMap[loc] != -1)
                    return true;
            }
        }
        return false;
    }

    /**
     * Called to downsample the image and store it in the down sample
     * component: grabs the pixels, auto-crops, then maps the crop box onto
     * the sample grid cell by cell.
     */
    public void downSample() {
        int w = entryImage.getWidth(this);
        int h = entryImage.getHeight(this);

        PixelGrabber grabber = new PixelGrabber(entryImage, 0, 0, w, h, true);
        try {
            grabber.grabPixels();
            pixelMap = (int[]) grabber.getPixels();
            findBounds(w, h);

            // Each sample grid cell covers ratioX x ratioY source pixels.
            SampleData data = sample.getData();
            ratioX = (double) (downSampleRight - downSampleLeft) / (double) data.getWidth();
            ratioY = (double) (downSampleBottom - downSampleTop) / (double) data.getHeight();

            for (int y = 0; y < data.getHeight(); y++)
                for (int x = 0; x < data.getWidth(); x++)
                    data.setData(x, y, downSampleQuadrant(x, y));

            sample.repaint();
            repaint();
        } catch (InterruptedException e) {
            // Pixel grab aborted; leave the sample untouched and re-flag
            // the interrupt for callers.
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Called to clear the image and reset the cropping box.
     */
    public void clear() {
        if (entryImage == null)
            initImage();
        this.entryGraphics.setColor(Color.white);
        this.entryGraphics.fillRect(0, 0, getWidth(), getHeight());
        this.downSampleBottom = 0;
        this.downSampleTop = 0;
        this.downSampleLeft = 0;
        this.downSampleRight = 0;
        repaint();
    }

    /**
     * Draw the given image centered onto the entry image, if it fits.
     *
     * @param image the image to copy in
     * @return false if the image is larger than this component, true otherwise
     */
    public boolean setBufferedImage(Image image) {
        if (entryImage == null)
            initImage();
        int x = getWidth();
        int y = getHeight();
        if (x < image.getWidth(this) || y < image.getHeight(this))
            return false;
        int ix = image.getWidth(this) / 2;
        int iy = image.getHeight(this) / 2;
        entryGraphics.drawImage(image, x / 2 - ix, y / 2 - iy, this);
        return true;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.query.topn;

import com.google.common.base.Preconditions;
import com.google.common.base.Predicates;
import org.apache.druid.collections.NonBlockingPool;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.query.Result;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.extraction.ExtractionFn;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.segment.SegmentMissingException;
import org.apache.druid.segment.StorageAdapter;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.filter.Filters;
import org.joda.time.Interval;

import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.List;

/**
 * Per-segment execution engine for TopN queries: chooses a {@link TopNAlgorithm} for the queried
 * dimension and maps segment cursors into {@link TopNResultValue} results.
 */
public class TopNQueryEngine
{
  // Off-heap buffer pool handed to the pooled (off-heap) TopN algorithms.
  private final NonBlockingPool<ByteBuffer> bufferPool;

  public TopNQueryEngine(NonBlockingPool<ByteBuffer> bufferPool)
  {
    this.bufferPool = bufferPool;
  }

  /**
   * Do the thing - process a {@link StorageAdapter} into a {@link Sequence} of {@link TopNResultValue}, with one of the
   * fine {@link TopNAlgorithm} available chosen based on the type of column being aggregated. The algorithm provides a
   * mapping function to process rows from the adapter {@link org.apache.druid.segment.Cursor} to apply
   * {@link AggregatorFactory} and create or update {@link TopNResultValue}
   *
   * @throws SegmentMissingException if {@code adapter} is null (segment was unmapped under us)
   */
  public Sequence<Result<TopNResultValue>> query(
      final TopNQuery query,
      final StorageAdapter adapter,
      final @Nullable TopNQueryMetrics queryMetrics
  )
  {
    if (adapter == null) {
      throw new SegmentMissingException(
          "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
      );
    }

    final List<Interval> queryIntervals = query.getQuerySegmentSpec().getIntervals();
    final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
    final Granularity granularity = query.getGranularity();
    final TopNMapFn mapFn = getMapFn(query, adapter, queryMetrics);

    // TopN is a per-segment operation; by the time we get here the interval should already be a single chunk.
    Preconditions.checkArgument(
        queryIntervals.size() == 1,
        "Can only handle a single interval, got[%s]",
        queryIntervals
    );

    return Sequences.filter(
        Sequences.map(
            adapter.makeCursors(
                filter,
                queryIntervals.get(0),
                query.getVirtualColumns(),
                granularity,
                query.isDescending(),
                queryMetrics
            ),
            input -> {
              if (queryMetrics != null) {
                queryMetrics.cursor(input);
              }
              return mapFn.apply(input, queryMetrics);
            }
        ),
        // a cursor may produce no result for a granularity bucket; drop those
        Predicates.notNull()
    );
  }

  /**
   * Choose the best {@link TopNAlgorithm} for the given query.
   */
  private TopNMapFn getMapFn(
      final TopNQuery query,
      final StorageAdapter adapter,
      final @Nullable TopNQueryMetrics queryMetrics
  )
  {
    final String dimension = query.getDimensionSpec().getDimension();
    final int cardinality = adapter.getDimensionCardinality(dimension);
    if (queryMetrics != null) {
      queryMetrics.dimensionCardinality(cardinality);
    }

    // Worst-case per-row aggregation footprint; feeds the selector's pooled/heap decision.
    int numBytesPerRecord = 0;
    for (AggregatorFactory aggregatorFactory : query.getAggregatorSpecs()) {
      numBytesPerRecord += aggregatorFactory.getMaxIntermediateSizeWithNulls();
    }

    final TopNAlgorithmSelector selector = new TopNAlgorithmSelector(cardinality, numBytesPerRecord);
    // lets the query (metric spec / dimension spec) set selector hints such as aggregateAllMetrics
    query.initTopNAlgorithmSelector(selector);

    final ColumnCapabilities columnCapabilities = query.getVirtualColumns()
                                                       .getColumnCapabilitiesWithFallback(adapter, dimension);

    final TopNAlgorithm<?, ?> topNAlgorithm;
    if (canUsePooledAlgorithm(selector, query, columnCapabilities)) {
      // pool based algorithm selection, if we can
      if (selector.isAggregateAllMetrics()) {
        // if sorted by dimension we should aggregate all metrics in a single pass, use the regular pooled algorithm for
        // this
        topNAlgorithm = new PooledTopNAlgorithm(adapter, query, bufferPool);
      } else if (selector.isAggregateTopNMetricFirst() || query.getContextBoolean("doAggregateTopNMetricFirst", false)) {
        // for high cardinality dimensions with larger result sets we aggregate with only the ordering aggregation to
        // compute the first 'n' values, and then for the rest of the metrics but for only the 'n' values
        topNAlgorithm = new AggregateTopNMetricFirstAlgorithm(adapter, query, bufferPool);
      } else {
        // anything else, use the regular pooled algorithm
        topNAlgorithm = new PooledTopNAlgorithm(adapter, query, bufferPool);
      }
    } else {
      // heap based algorithm selection, if we must
      if (selector.isHasExtractionFn() && dimension.equals(ColumnHolder.TIME_COLUMN_NAME)) {
        // TimeExtractionTopNAlgorithm can work on any single-value dimension of type long.
        // We might be able to use this for any long column with an extraction function, that is
        // ValueType.LONG.equals(columnCapabilities.getType())
        // but this needs investigation to ensure that it is an improvement over HeapBasedTopNAlgorithm

        // A special TimeExtractionTopNAlgorithm is required since HeapBasedTopNAlgorithm
        // currently relies on the dimension cardinality to support lexicographic sorting
        topNAlgorithm = new TimeExtractionTopNAlgorithm(adapter, query);
      } else {
        topNAlgorithm = new HeapBasedTopNAlgorithm(adapter, query);
      }
    }
    if (queryMetrics != null) {
      queryMetrics.algorithm(topNAlgorithm);
    }

    return new TopNMapFn(query, topNAlgorithm);
  }

  /**
   * {@link PooledTopNAlgorithm} (and {@link AggregateTopNMetricFirstAlgorithm} which utilizes the pooled
   * algorithm) are optimized off-heap algorithms for aggregating dictionary encoded string columns. These algorithms
   * rely on dictionary ids being unique so to aggregate on the dictionary ids directly and defer
   * {@link org.apache.druid.segment.DimensionSelector#lookupName(int)} until as late as possible in query processing.
   *
   * When these conditions are not true, we have an on-heap fall-back algorithm, the {@link HeapBasedTopNAlgorithm}
   * (and {@link TimeExtractionTopNAlgorithm} for a specialized form for long columns) which aggregates on values of
   * selectors.
   */
  private static boolean canUsePooledAlgorithm(
      final TopNAlgorithmSelector selector,
      final TopNQuery query,
      final ColumnCapabilities capabilities
  )
  {
    if (selector.isHasExtractionFn()) {
      // extraction functions can have a many to one mapping, and should use a heap algorithm
      return false;
    }

    if (query.getDimensionSpec().getOutputType() != ValueType.STRING) {
      // non-string output cannot use the pooled algorithm, even if the underlying selector supports it
      return false;
    }

    if (capabilities != null && capabilities.getType() == ValueType.STRING) {
      // string columns must use the on heap algorithm unless they have the following capabilities
      return capabilities.isDictionaryEncoded() && capabilities.areDictionaryValuesUnique().isTrue();
    } else {
      // non-strings are not eligible to use the pooled algorithm, and should use a heap algorithm
      return false;
    }
  }

  /**
   * One-to-one {@link ExtractionFn} may have their execution deferred until as late as possible, since
   * which value is used as the grouping key doesn't particularly matter. For top-n, this method allows the
   * query to be transformed in {@link TopNQueryQueryToolChest#preMergeQueryDecoration} to strip off the
   * {@link ExtractionFn} on the broker, so that a more optimized algorithm (e.g. {@link PooledTopNAlgorithm}) can be
   * chosen for processing segments, and then added back and evaluated against the final merged result sets on the
   * broker via {@link TopNQueryQueryToolChest#postMergeQueryDecoration}.
   */
  public static boolean canApplyExtractionInPost(TopNQuery query)
  {
    return query.getDimensionSpec() != null
           && query.getDimensionSpec().getExtractionFn() != null
           && ExtractionFn.ExtractionType.ONE_TO_ONE.equals(query.getDimensionSpec()
                                                                 .getExtractionFn()
                                                                 .getExtractionType())
           && query.getTopNMetricSpec().canBeOptimizedUnordered();
  }
}
package com.mapswithme.maps.routing;

import android.app.Activity;
import android.content.DialogInterface;
import android.support.annotation.DimenRes;
import android.support.annotation.IntRange;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.FragmentActivity;
import android.support.v7.app.AlertDialog;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;

import java.util.Calendar;
import java.util.concurrent.TimeUnit;

import com.mapswithme.maps.Framework;
import com.mapswithme.maps.MwmApplication;
import com.mapswithme.maps.R;
import com.mapswithme.maps.bookmarks.data.MapObject;
import com.mapswithme.maps.downloader.MapManager;
import com.mapswithme.maps.location.LocationHelper;
import com.mapswithme.util.Config;
import com.mapswithme.util.StringUtils;
import com.mapswithme.util.ThemeSwitcher;
import com.mapswithme.util.UiUtils;
import com.mapswithme.util.Utils;
import com.mapswithme.util.concurrency.UiThread;
import com.mapswithme.util.statistics.AlohaHelper;
import com.mapswithme.util.statistics.Statistics;

/**
 * Singleton state machine driving route planning and turn-by-turn navigation.
 * Owns the start/end points, the current {@link State}/{@link BuildState}, and forwards
 * native routing callbacks to the attached UI {@link Container}.
 * All methods must be called on the UI thread (see class annotation); native callbacks are
 * re-posted via {@link UiThread#run}.
 */
@android.support.annotation.UiThread
public class RoutingController
{
  // Sentinel for "no point-of-interest picker slot is waiting".
  private static final int NO_SLOT = 0;

  private static final String TAG = "RCSTATE";

  // Overall controller mode: idle, planning a route, or actively navigating.
  private enum State
  {
    NONE,
    PREPARE,
    NAVIGATION
  }

  // Lifecycle of the route currently being built by the native core.
  enum BuildState
  {
    NONE,
    BUILDING,
    BUILT,
    ERROR
  }

  /**
   * UI surface the controller talks to. Implemented by the hosting activity/fragment;
   * detached (set to null) while the UI is destroyed or has saved state.
   */
  public interface Container
  {
    FragmentActivity getActivity();
    void showSearch();
    void showRoutePlan(boolean show, @Nullable Runnable completionListener);
    void showNavigation(boolean show);
    void showDownloader(boolean openDownloaded);
    void updateMenu();
    void updatePoints();

    /**
     * @param progress progress to be displayed.
     * @param router selected router type. One of {@link Framework#ROUTER_TYPE_VEHICLE} and {@link Framework#ROUTER_TYPE_PEDESTRIAN}.
     */
    void updateBuildProgress(@IntRange(from = 0, to = 100) int progress,
                             @IntRange(from = Framework.ROUTER_TYPE_VEHICLE, to = Framework.ROUTER_TYPE_PEDESTRIAN) int router);
  }

  private static final RoutingController sInstance = new RoutingController();

  private Container mContainer;
  private Button mStartButton;

  private BuildState mBuildState = BuildState.NONE;
  private State mState = State.NONE;
  private int mWaitingPoiPickSlot = NO_SLOT;

  private MapObject mStartPoint;
  private MapObject mEndPoint;

  private int mLastBuildProgress;
  private int mLastRouterType = Framework.nativeGetLastUsedRouter();

  // True between onSaveState() and restore(); routing events are cached instead of shown.
  private boolean mHasContainerSavedState;
  private boolean mContainsCachedResult;
  private int mLastResultCode;
  private String[] mLastMissingMaps;
  private RoutingInfo mCachedRoutingInfo;

  // Kept as a field so the native side holds a strong reference for the controller's lifetime.
  @SuppressWarnings("FieldCanBeLocal")
  private final Framework.RoutingListener mRoutingListener = new Framework.RoutingListener()
  {
    @Override
    public void onRoutingEvent(final int resultCode, @Nullable final String[] missingMaps)
    {
      Log.d(TAG, "onRoutingEvent(resultCode: " + resultCode + ")");

      // Native callback may arrive off the UI thread; marshal onto it.
      UiThread.run(new Runnable()
      {
        @Override
        public void run()
        {
          // Cache the result first; it is consumed by processRoutingEvent(), possibly
          // later if the container is currently detached or has saved state.
          mLastResultCode = resultCode;
          mLastMissingMaps = missingMaps;
          mContainsCachedResult = true;

          if (mLastResultCode == ResultCodesHelper.NO_ERROR)
          {
            mCachedRoutingInfo = Framework.nativeGetRouteFollowingInfo();
            setBuildState(BuildState.BUILT);
            mLastBuildProgress = 100;
          }

          processRoutingEvent();
        }
      });
    }
  };

  @SuppressWarnings("FieldCanBeLocal")
  private final Framework.RoutingProgressListener mRoutingProgressListener = new Framework.RoutingProgressListener()
  {
    @Override
    public void onRouteBuildingProgress(final float progress)
    {
      UiThread.run(new Runnable()
      {
        @Override
        public void run()
        {
          mLastBuildProgress = (int) progress;
          updateProgress();
        }
      });
    }
  };

  public static RoutingController get()
  {
    return sInstance;
  }

  /**
   * Consumes a cached routing result, if any. No-op while the container is detached
   * or has saved state — the result stays cached until restore().
   */
  private void processRoutingEvent()
  {
    if (!mContainsCachedResult ||
        mContainer == null ||
        mHasContainerSavedState)
      return;

    mContainsCachedResult = false;

    if (mLastResultCode == ResultCodesHelper.NO_ERROR)
    {
      updatePlan();
      return;
    }

    setBuildState(BuildState.ERROR);
    mLastBuildProgress = 0;
    updateProgress();

    RoutingErrorDialogFragment fragment = RoutingErrorDialogFragment.create(mLastResultCode, mLastMissingMaps);
    fragment.show(mContainer.getActivity().getSupportFragmentManager(), RoutingErrorDialogFragment.class.getSimpleName());
  }

  private void setState(State newState)
  {
    Log.d(TAG, "[S] State: " + mState + " -> " + newState + ", BuildState: " + mBuildState);
    mState = newState;

    if (mContainer != null)
      mContainer.updateMenu();
  }

  private void setBuildState(BuildState newState)
  {
    Log.d(TAG, "[B] State: " + mState + ", BuildState: " + mBuildState + " -> " + newState);
    mBuildState = newState;

    // A route built from somewhere other than "my position" must not auto-follow.
    if (mBuildState == BuildState.BUILT && !MapObject.isOfType(MapObject.MY_POSITION, mStartPoint))
      Framework.nativeDisableFollowing();
  }

  private void updateProgress()
  {
    if (mContainer != null)
      mContainer.updateBuildProgress(mLastBuildProgress, mLastRouterType);
  }

  private void showRoutePlan()
  {
    if (mContainer != null)
      mContainer.showRoutePlan(true, new Runnable()
      {
        @Override
        public void run()
        {
          updatePlan();
        }
      });
  }

  public void attach(@NonNull Container container)
  {
    mContainer = container;
  }

  public void initialize()
  {
    Framework.nativeSetRoutingListener(mRoutingListener);
    Framework.nativeSetRouteProgressListener(mRoutingProgressListener);
  }

  public void detach()
  {
    mContainer = null;
    mStartButton = null;
  }

  // NOTE(review): assumes attach() was called before restore() — mContainer is
  // dereferenced without a null check here; confirm the caller guarantees this.
  public void restore()
  {
    mHasContainerSavedState = false;
    if (isPlanning())
      showRoutePlan();

    mContainer.showNavigation(isNavigating());
    mContainer.updateMenu();
    mContainer.updatePoints();
    processRoutingEvent();
  }

  public void onSaveState()
  {
    mHasContainerSavedState = true;
  }

  // Kicks off a native route build between the current start/end points.
  private void build()
  {
    Log.d(TAG, "build");

    mLastBuildProgress = 0;
    setBuildState(BuildState.BUILDING);
    updatePlan();

    Statistics.INSTANCE.trackRouteBuild(Statistics.getPointType(mStartPoint), Statistics.getPointType(mEndPoint));
    org.alohalytics.Statistics.logEvent(AlohaHelper.ROUTING_BUILD,
                                        new String[] {Statistics.EventParam.FROM, Statistics.getPointType(mStartPoint),
                                                      Statistics.EventParam.TO, Statistics.getPointType(mEndPoint)});
    Framework.nativeBuildRoute(mStartPoint.getLat(), mStartPoint.getLon(), mEndPoint.getLat(), mEndPoint.getLon());
  }

  /**
   * Shows the one-time routing disclaimer; on acceptance re-enters prepare() with the same end point.
   */
  private void showDisclaimer(final MapObject endPoint)
  {
    StringBuilder builder = new StringBuilder();
    for (int resId : new int[] { R.string.dialog_routing_disclaimer_priority,
                                 R.string.dialog_routing_disclaimer_precision,
                                 R.string.dialog_routing_disclaimer_recommendations,
                                 R.string.dialog_routing_disclaimer_borders,
                                 R.string.dialog_routing_disclaimer_beware })
      builder.append(MwmApplication.get().getString(resId)).append("\n\n");

    new AlertDialog.Builder(mContainer.getActivity())
        .setTitle(R.string.dialog_routing_disclaimer_title)
        .setMessage(builder.toString())
        .setCancelable(false)
        .setNegativeButton(R.string.cancel, null)
        .setPositiveButton(R.string.ok, new DialogInterface.OnClickListener()
        {
          @Override
          public void onClick(DialogInterface dlg, int which)
          {
            Config.acceptRoutingDisclaimer();
            prepare(endPoint);
          }
        }).show();
  }

  /**
   * Enters route-planning mode. Start point defaults to "my position";
   * if both points are known, the best router type is chosen and the build starts.
   *
   * @param endPoint destination, or {@code null} to start a point-to-point plan from scratch.
   */
  public void prepare(@Nullable MapObject endPoint)
  {
    Log.d(TAG, "prepare (" + (endPoint == null ? "route)" : "p2p)"));

    if (!Config.isRoutingDisclaimerAccepted())
    {
      showDisclaimer(endPoint);
      return;
    }

    cancel();
    mStartPoint = LocationHelper.INSTANCE.getMyPosition();
    mEndPoint = endPoint;
    setState(State.PREPARE);

    if (mStartPoint != null && mEndPoint != null)
      mLastRouterType = Framework.nativeGetBestRouter(mStartPoint.getLat(), mStartPoint.getLon(),
                                                      mEndPoint.getLat(), mEndPoint.getLon());
    Framework.nativeSetRouter(mLastRouterType);

    if (mContainer != null)
      mContainer.showRoutePlan(true, new Runnable()
      {
        @Override
        public void run()
        {
          if (mStartPoint == null || mEndPoint == null)
            updatePlan();
          else
            build();
        }
      });
  }

  /**
   * Starts navigation on the built route. Routes may only be followed from "my position";
   * otherwise the user is offered a rebuild from the current location.
   */
  public void start()
  {
    Log.d(TAG, "start");

    if (!MapObject.isOfType(MapObject.MY_POSITION, mStartPoint))
    {
      Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_START_SUGGEST_REBUILD);
      AlohaHelper.logClick(AlohaHelper.ROUTING_START_SUGGEST_REBUILD);
      suggestRebuildRoute();
      return;
    }

    MapObject my = LocationHelper.INSTANCE.getMyPosition();
    if (my == null)
    {
      // No GPS fix — reuse the routing-event path to surface the error dialog.
      mRoutingListener.onRoutingEvent(ResultCodesHelper.NO_POSITION, null);
      return;
    }

    mStartPoint = my;
    Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_START);
    AlohaHelper.logClick(AlohaHelper.ROUTING_START);
    setState(State.NAVIGATION);
    mContainer.showRoutePlan(false, null);
    mContainer.showNavigation(true);

    ThemeSwitcher.restart();
    Framework.nativeFollowRoute();
    LocationHelper.INSTANCE.restart();
  }

  // Dialog offering to rebuild the route starting from the current location
  // (or to swap points when the end point is already "my position").
  private void suggestRebuildRoute()
  {
    final AlertDialog.Builder builder = new AlertDialog.Builder(mContainer.getActivity())
                                            .setMessage(R.string.p2p_reroute_from_current)
                                            .setCancelable(false)
                                            .setNegativeButton(R.string.cancel, null);

    TextView titleView = (TextView)View.inflate(mContainer.getActivity(), R.layout.dialog_suggest_reroute_title, null);
    titleView.setText(R.string.p2p_only_from_current);
    builder.setCustomTitle(titleView);

    if (MapObject.isOfType(MapObject.MY_POSITION, mEndPoint))
    {
      builder.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener()
      {
        @Override
        public void onClick(DialogInterface dialog, int which)
        {
          swapPoints();
        }
      });
    }
    else
    {
      if (LocationHelper.INSTANCE.getMyPosition() == null)
        builder.setMessage(null).setNegativeButton(null, null);

      builder.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener()
      {
        @Override
        public void onClick(DialogInterface dialog, int which)
        {
          setStartFromMyPosition();
        }
      });
    }

    builder.show();
  }

  private void updatePlan()
  {
    updateProgress();
    updateStartButton();
  }

  private void updateStartButton()
  {
    Log.d(TAG, "updateStartButton" + (mStartButton == null ? ": SKIP" : ""));

    if (mStartButton == null)
      return;

    mStartButton.setEnabled(mState == State.PREPARE && mBuildState == BuildState.BUILT);
    UiUtils.updateAccentButton(mStartButton);
  }

  public void setStartButton(@Nullable Button button)
  {
    Log.d(TAG, "setStartButton");
    mStartButton = button;
    updateStartButton();
  }

  // Resets all routing state and shuts down native routing; UI updates are left to cancel().
  private void cancelInternal()
  {
    Log.d(TAG, "cancelInternal");

    mStartPoint = null;
    mEndPoint = null;
    setPointsInternal();
    mWaitingPoiPickSlot = NO_SLOT;

    setBuildState(BuildState.NONE);
    setState(State.NONE);

    ThemeSwitcher.restart();
    Framework.nativeCloseRouting();
    LocationHelper.INSTANCE.restart();
  }

  /**
   * Cancels planning or navigation, whichever is active.
   *
   * @return {@code true} if something was actually cancelled.
   */
  public boolean cancel()
  {
    if (isPlanning())
    {
      Log.d(TAG, "cancel: planning");

      cancelInternal();
      if (mContainer != null)
        mContainer.showRoutePlan(false, null);
      return true;
    }

    if (isNavigating())
    {
      Log.d(TAG, "cancel: navigating");

      cancelInternal();
      if (mContainer != null)
      {
        mContainer.showNavigation(false);
        mContainer.updateMenu();
      }
      return true;
    }

    Log.d(TAG, "cancel: none");
    return false;
  }

  public boolean cancelPlanning()
  {
    Log.d(TAG, "cancelPlanning");

    if (isPlanning())
    {
      cancel();
      return true;
    }

    return false;
  }

  public boolean isPlanning()
  {
    return (mState == State.PREPARE);
  }

  public boolean isNavigating()
  {
    return (mState == State.NAVIGATION);
  }

  public boolean isBuilding()
  {
    return (mState == State.PREPARE && mBuildState == BuildState.BUILDING);
  }

  public boolean isWaitingPoiPick()
  {
    return (mWaitingPoiPickSlot != NO_SLOT);
  }

  public BuildState getBuildState()
  {
    return mBuildState;
  }

  public MapObject getStartPoint()
  {
    return mStartPoint;
  }

  public MapObject getEndPoint()
  {
    return mEndPoint;
  }

  public RoutingInfo getCachedRoutingInfo()
  {
    return mCachedRoutingInfo;
  }

  // Pushes the current start/end points down to the native core.
  // (0, 0, false) acts as "point unset" on the native side.
  private void setPointsInternal()
  {
    if (mStartPoint == null)
      Framework.nativeSetRouteStartPoint(0.0, 0.0, false);
    else
      Framework.nativeSetRouteStartPoint(mStartPoint.getLat(), mStartPoint.getLon(),
                                         !MapObject.isOfType(MapObject.MY_POSITION, mStartPoint));

    if (mEndPoint == null)
      Framework.nativeSetRouteEndPoint(0.0, 0.0, false);
    else
      Framework.nativeSetRouteEndPoint(mEndPoint.getLat(), mEndPoint.getLon(), true);
  }

  void checkAndBuildRoute()
  {
    if (mContainer != null)
    {
      if (isWaitingPoiPick())
        showRoutePlan();

      mContainer.updatePoints();
    }

    if (mStartPoint != null && mEndPoint != null)
      build();
  }

  private boolean setStartFromMyPosition()
  {
    Log.d(TAG, "setStartFromMyPosition");

    MapObject my = LocationHelper.INSTANCE.getMyPosition();
    if (my == null)
    {
      Log.d(TAG, "setStartFromMyPosition: no my position - skip");

      if (mContainer != null)
        mContainer.updatePoints();

      setPointsInternal();
      return false;
    }

    return setStartPoint(my);
  }

  /**
   * Sets starting point.
   * <ul>
   *   <li>If {@code point} matches ending one and the starting point was set &mdash; swap points.
   *   <li>A point equal to the currently set starting point is skipped.
   * </ul>
   * Route starts to build if both points were set.
   *
   * @return {@code true} if the point was set.
   */
  @SuppressWarnings("Duplicates")
  public boolean setStartPoint(MapObject point)
  {
    Log.d(TAG, "setStartPoint");

    if (MapObject.same(mStartPoint, point))
    {
      Log.d(TAG, "setStartPoint: skip the same starting point");
      return false;
    }

    if (point != null && point.sameAs(mEndPoint))
    {
      if (mStartPoint == null)
      {
        Log.d(TAG, "setStartPoint: skip because starting point is empty");
        return false;
      }

      Log.d(TAG, "setStartPoint: swap with end point");
      mEndPoint = mStartPoint;
    }

    mStartPoint = point;
    setPointsInternal();
    checkAndBuildRoute();
    return true;
  }

  /**
   * Sets ending point.
   * <ul>
   *   <li>If {@code point} is the same as starting point &mdash; swap points if ending point is set, skip otherwise.
   *   <li>Set starting point to MyPosition if it was not set before.
   * </ul>
   * Route starts to build if both points were set.
   *
   * @return {@code true} if the point was set.
   */
  @SuppressWarnings("Duplicates")
  public boolean setEndPoint(MapObject point)
  {
    Log.d(TAG, "setEndPoint");

    if (MapObject.same(mEndPoint, point))
    {
      if (mStartPoint == null)
        return setStartFromMyPosition();

      Log.d(TAG, "setEndPoint: skip the same end point");
      return false;
    }

    if (point != null && point.sameAs(mStartPoint))
    {
      if (mEndPoint == null)
      {
        Log.d(TAG, "setEndPoint: skip because end point is empty");
        return false;
      }

      Log.d(TAG, "setEndPoint: swap with starting point");
      mStartPoint = mEndPoint;
    }

    mEndPoint = point;

    if (mStartPoint == null)
      return setStartFromMyPosition();

    setPointsInternal();
    checkAndBuildRoute();
    return true;
  }

  public void swapPoints()
  {
    Log.d(TAG, "swapPoints");

    MapObject point = mStartPoint;
    mStartPoint = mEndPoint;
    mEndPoint = point;

    Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_SWAP_POINTS);
    AlohaHelper.logClick(AlohaHelper.ROUTING_SWAP_POINTS);
    setPointsInternal();
    checkAndBuildRoute();
  }

  public void setRouterType(int router)
  {
    Log.d(TAG, "setRouterType: " + mLastRouterType + " -> " + router);

    if (router == mLastRouterType)
      return;

    mLastRouterType = router;
    Framework.nativeSetRouter(router);

    if (mStartPoint != null && mEndPoint != null)
      build();
  }

  /**
   * Opens search to pick a POI for the given route-plan slot.
   *
   * @param slotId slot being filled; slot 1 is the start point, anything else the end point
   *               (see onPoiSelectedInternal).
   */
  public void searchPoi(int slotId)
  {
    Log.d(TAG, "searchPoi: " + slotId);
    Statistics.INSTANCE.trackEvent(Statistics.EventName.ROUTING_SEARCH_POINT);
    AlohaHelper.logClick(AlohaHelper.ROUTING_SEARCH_POINT);
    mWaitingPoiPickSlot = slotId;
    mContainer.showSearch();
    mContainer.updateMenu();
  }

  private void onPoiSelectedInternal(@Nullable MapObject point, int slot)
  {
    if (point != null)
    {
      if (slot == 1)
        setStartPoint(point);
      else
        setEndPoint(point);
    }

    if (mContainer == null)
      return;

    mContainer.updateMenu();
    showRoutePlan();
  }

  public void onPoiSelected(@Nullable MapObject point)
  {
    int slot = mWaitingPoiPickSlot;
    mWaitingPoiPickSlot = NO_SLOT;
    onPoiSelectedInternal(point, slot);
    if (mContainer != null)
      mContainer.updatePoints();
  }

  /**
   * Formats a duration as "{m} min" or "{h} h {m} min" with styled unit text.
   */
  public static CharSequence formatRoutingTime(int seconds, @DimenRes int unitsSize)
  {
    long minutes = TimeUnit.SECONDS.toMinutes(seconds) % 60;
    long hours = TimeUnit.SECONDS.toHours(seconds);

    return hours == 0 ? Utils.formatUnitsText(R.dimen.text_size_routing_number, unitsSize, String.valueOf(minutes), "min")
                      : TextUtils.concat(Utils.formatUnitsText(R.dimen.text_size_routing_number, unitsSize, String.valueOf(hours), "h "),
                                         Utils.formatUnitsText(R.dimen.text_size_routing_number, unitsSize, String.valueOf(minutes), "min"));
  }

  // Returns "H:MM" arrival time: now (seconds zeroed) plus the remaining travel time.
  static String formatArrivalTime(int seconds)
  {
    Calendar current = Calendar.getInstance();
    current.set(Calendar.SECOND, 0);
    current.add(Calendar.SECOND, seconds);
    return StringUtils.formatUsingUsLocale("%d:%02d", current.get(Calendar.HOUR_OF_DAY), current.get(Calendar.MINUTE));
  }

  /**
   * Blocks map-data migration while a route is active.
   *
   * @return {@code true} if a "can't migrate now" dialog was shown.
   */
  public boolean checkMigration(Activity activity)
  {
    if (!MapManager.nativeIsLegacyMode())
      return false;

    if (!isNavigating() && !isPlanning())
      return false;

    new AlertDialog.Builder(activity)
        .setTitle(R.string.migrate_title)
        .setMessage(R.string.no_migration_during_navigation)
        .setPositiveButton(android.R.string.ok, null)
        .show();

    return true;
  }
}
/**
 * Copyright 2009-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.javacrumbs.jsonunit.spring.testit;

import net.javacrumbs.jsonunit.core.Option;
import net.javacrumbs.jsonunit.core.listener.Difference;
import net.javacrumbs.jsonunit.core.listener.DifferenceContext;
import net.javacrumbs.jsonunit.core.listener.DifferenceListener;
import net.javacrumbs.jsonunit.spring.testit.demo.SpringConfig;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.test.web.servlet.client.MockMvcWebTestClient;
import org.springframework.web.context.WebApplicationContext;

import static java.math.BigDecimal.valueOf;
import static net.javacrumbs.jsonunit.spring.WebTestClientJsonMatcher.json;
import static net.javacrumbs.jsonunit.spring.testit.demo.ExampleController.CORRECT_JSON;
import static net.javacrumbs.jsonunit.spring.testit.demo.ExampleController.ISO_VALUE;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Integration tests for {@code WebTestClientJsonMatcher}: drives the demo controller through a
 * MockMvc-backed {@link WebTestClient} and verifies both the passing assertions and the exact
 * failure messages produced by the matcher.
 *
 * NOTE: the expected-message string literals below are pinned byte-for-byte against json-unit's
 * output; do not reformat them.
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {SpringConfig.class})
@WebAppConfiguration
class WebTestClientTest {
    @Autowired
    private WebApplicationContext wac;

    // Rebuilt per test from the web application context.
    private WebTestClient client;

    @BeforeEach
    void setup() {
        client = MockMvcWebTestClient.bindToApplicationContext(wac).build();
    }

    @Test
    void shouldPassIfEqualsWithProduces() {
        exec("/sampleProduces").consumeWith(json().isEqualTo(CORRECT_JSON));
    }

    @Test
    void shouldPassIfEqualsWithIsoEncoding() {
        exec("/sampleIso").consumeWith(json().node("result").isEqualTo(ISO_VALUE));
    }

    @Test
    void shouldPassIfEquals() {
        exec().consumeWith(json().isEqualTo(CORRECT_JSON));
    }

    @Test
    void isEqualToShouldFailIfDoesNotEqual() {
        // Also verifies that a registered DifferenceListener is notified about the diff.
        DifferenceListener listener = mock(DifferenceListener.class);
        assertThatThrownBy(() -> exec().consumeWith(json().withDifferenceListener(listener).isEqualTo(CORRECT_JSON.replace("stringValue", "stringValue2"))))
            .hasMessageStartingWith("JSON documents are different:\n" +
                "Different value found in node \"result.string\", expected: <\"stringValue2\"> but was: <\"stringValue\">.\n");

        verify(listener).diff(any(Difference.class), any(DifferenceContext.class));
    }

    @Test
    void isEqualToInNodeFailIfDoesNotEqual() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.string").isEqualTo("stringValue2")))
            .hasMessageStartingWith("JSON documents are different:\n" +
                "Different value found in node \"result.string\", expected: <\"stringValue2\"> but was: <\"stringValue\">.\n");
    }

    @Test
    void isNullShouldPassOnNull() {
        exec().consumeWith(json().node("result.null").isNull());
    }

    @Test
    void isNullShouldFailOnNonNull() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.string").isNull()))
            .hasMessageStartingWith("Node \"result.string\" has invalid type, expected: <a null> but was: <\"stringValue\">.");
    }

    @Test
    void isNullShouldFailOnMissing() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.missing").isNull()))
            .hasMessageStartingWith("Different value found in node \"result.missing\", expected: <node to be present> but was: <missing>.");
    }

    @Test
    void isNotNullShouldPassOnString() {
        exec().consumeWith(json().node("result.string").isNotNull());
    }

    @Test
    void isNotNullShouldFailOnNull() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.null").isNotNull()))
            .hasMessageStartingWith("Node \"result.null\" has invalid type, expected: <not null> but was: <null>.");
    }

    @Test
    void isStringEqualToShouldFailOnNumber() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.array[0]").isStringEqualTo("1")))
            .hasMessageStartingWith("Node \"result.array[0]\" has invalid type, expected: <string> but was: <1>.");
    }

    @Test
    void isTrueShouldPassOnTrue() {
        exec().consumeWith(json().node("result.boolean").isTrue());
    }

    @Test
    void isFalseShouldFailOnTrue() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.boolean").isFalse()))
            .hasMessageStartingWith("JSON documents are different:\n" +
                "Different value found in node \"result.boolean\", expected: <false> but was: <true>.\n");
    }

    @Test
    void isTrueShouldFailOnString() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.string").isTrue()))
            .hasMessageStartingWith("JSON documents are different:\n" +
                "Different value found in node \"result.string\", expected: <true> but was: <\"stringValue\">.\n");
    }

    @Test
    void isStringEqualToShouldPassIfEquals() {
        exec().consumeWith(json().node("result.string").isStringEqualTo("stringValue"));
    }

    @Test
    void isAbsentShouldFailIfNodeExists() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.string").isAbsent()))
            .hasMessageStartingWith("Different value found in node \"result.string\", expected: <node to be absent> but was: <\"stringValue\">.");
    }

    @Test
    void isAbsentShouldPassIfNodeIsAbsent() {
        exec().consumeWith(json().node("result.string2").isAbsent());
    }

    @Test
    void isPresentShouldFailIfNodeIsAbsent() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.string2").isPresent()))
            .hasMessageStartingWith("Different value found in node \"result.string2\", expected: <node to be present> but was: <missing>.");
    }

    @Test
    void isPresentShouldPassIfPresent() {
        exec().consumeWith(json().node("result.string").isPresent());
    }

    @Test
    void isArrayShouldFailOnNotArray() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.string").isArray()))
            .hasMessageStartingWith("Node \"result.string\" has invalid type, expected: <array> but was: <\"stringValue\">.");
    }

    @Test
    void isArrayShouldFailIfNotPresent() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.array2").isArray()))
            .hasMessageStartingWith("Different value found in node \"result.array2\", expected: <array> but was: <missing>.");
    }

    @Test
    void isObjectShouldFailOnArray() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.array").isObject()))
            .hasMessageStartingWith("Node \"result.array\" has invalid type, expected: <object> but was: <[1, 2, 3]>.");
    }

    @Test
    void isStringShouldFailOnArray() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.array").isString()))
            .hasMessageStartingWith("Node \"result.array\" has invalid type, expected: <string> but was: <[1, 2, 3]>.");
    }

    @Test
    void isArrayShouldPassOnArray() {
        exec().consumeWith(json().node("result.array").isArray());
    }

    @Test
    void ignoreShouldWork() {
        exec().consumeWith(json().isEqualTo("{\"result\":\"${json-unit.ignore}\"}"));
    }

    @Test
    void ignoreStringShouldBeModifiable() {
        exec().consumeWith(json().ignoring("##IGNORE##").isEqualTo("{\"result\":\"##IGNORE##\"}"));
    }

    @Test
    void shouldSetTolerance() {
        exec().consumeWith(json().node("result.decimal").withTolerance(0.001).isEqualTo(1));
    }

    @Test
    void settingOptionShouldTakeEffect() {
        exec().consumeWith(json().node("result.array").when(Option.IGNORING_ARRAY_ORDER).isEqualTo(new int[]{3, 2, 1}));
    }

    @Test
    void isNotEqualToShouldFailIfEquals() {
        assertThatThrownBy(() -> exec().consumeWith(json().isNotEqualTo(CORRECT_JSON)))
            .hasMessageStartingWith("JSON is equal.");
    }

    @Test
    void isEqualToShouldFailIfNodeDoesNotEqual() {
        assertThatThrownBy(() -> exec()
            .consumeWith(json().node("result.string").isEqualTo("stringValue2")))
            .hasMessageStartingWith("JSON documents are different:\n" +
                "Different value found in node \"result.string\", expected: <\"stringValue2\"> but was: <\"stringValue\">.\n");
    }

    @Test
    void intValueShouldMatch() {
        exec().consumeWith(json().node("result.array").matches(everyItem(lessThanOrEqualTo(valueOf(4)))));
    }

    @Test
    void intValueShouldFailIfDoesNotMatch() {
        assertThatThrownBy(() -> exec().consumeWith(json().node("result.array").matches(everyItem(lessThanOrEqualTo(valueOf(2))))))
            .hasMessageStartingWith("Node \"result.array\" does not match.\n" +
                "Expected: every item is a value less than or equal to <2>\n" +
                "     but: an item <3> was greater than <2>");
    }

    // Default endpoint exercised by most tests.
    private WebTestClient.BodyContentSpec exec() {
        return exec("/sample");
    }

    // GET the given path with an Accept: application/json header and return the body spec.
    private WebTestClient.BodyContentSpec exec(String path) {
        try {
            return this.client.get().uri(path).accept(MediaType.APPLICATION_JSON).exchange().expectBody();
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }
}
/* * Muhimbi PDF * Convert, Merge, Watermark, Secure and OCR files. * * OpenAPI spec version: 9.15 * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ package com.muhimbi.online.client; import com.squareup.okhttp.Call; import com.squareup.okhttp.Callback; import com.squareup.okhttp.OkHttpClient; import com.squareup.okhttp.Request; import com.squareup.okhttp.Response; import com.squareup.okhttp.RequestBody; import com.squareup.okhttp.FormEncodingBuilder; import com.squareup.okhttp.MultipartBuilder; import com.squareup.okhttp.MediaType; import com.squareup.okhttp.Headers; import com.squareup.okhttp.internal.http.HttpMethod; import com.squareup.okhttp.logging.HttpLoggingInterceptor; import com.squareup.okhttp.logging.HttpLoggingInterceptor.Level; import java.lang.reflect.Type; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Map.Entry; import java.util.HashMap; import java.util.List; import java.util.ArrayList; import java.util.Date; import java.util.TimeZone; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.net.URLEncoder; import java.net.URLConnection; import java.io.File; import java.io.InputStream; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.security.GeneralSecurityException; import java.security.KeyStore; import java.security.SecureRandom; import java.security.cert.Certificate; import java.security.cert.CertificateException; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.text.ParseException; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.KeyManager; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLSession; import 
javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;

import okio.BufferedSink;
import okio.Okio;

import com.muhimbi.online.client.auth.Authentication;
import com.muhimbi.online.client.auth.HttpBasicAuth;
import com.muhimbi.online.client.auth.ApiKeyAuth;
import com.muhimbi.online.client.auth.OAuth;

public class ApiClient {
    // Java specification version the client runs on (e.g. 1.7, 1.8), parsed once at load time.
    public static final double JAVA_VERSION;
    // True when running on Android, detected by probing for android.app.Activity.
    public static final boolean IS_ANDROID;
    // Android SDK level, or 0 when not on Android (or the level cannot be determined).
    public static final int ANDROID_SDK_VERSION;

    static {
        JAVA_VERSION = Double.parseDouble(System.getProperty("java.specification.version"));
        // Detect Android by the presence of a framework class; reflection avoids a
        // compile-time dependency on the Android SDK.
        boolean isAndroid;
        try {
            Class.forName("android.app.Activity");
            isAndroid = true;
        } catch (ClassNotFoundException e) {
            isAndroid = false;
        }
        IS_ANDROID = isAndroid;
        int sdkVersion = 0;
        if (IS_ANDROID) {
            try {
                // Modern Android exposes the SDK level as the int field Build.VERSION.SDK_INT.
                sdkVersion = Class.forName("android.os.Build$VERSION").getField("SDK_INT").getInt(null);
            } catch (Exception e) {
                try {
                    // Fall back to the deprecated String field Build.VERSION.SDK on very old devices.
                    sdkVersion = Integer.parseInt((String) Class.forName("android.os.Build$VERSION").getField("SDK").get(null));
                } catch (Exception e2) { /* SDK level undetectable; leave 0 */ }
            }
        }
        ANDROID_SDK_VERSION = sdkVersion;
    }

    /**
     * The datetime format to be used when <code>lenientDatetimeFormat</code> is enabled.
*/ public static final String LENIENT_DATETIME_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; private String basePath = "https://api.muhimbi.com/api"; private boolean lenientOnJson = false; private boolean debugging = false; private Map<String, String> defaultHeaderMap = new HashMap<String, String>(); private String tempFolderPath = null; private Map<String, Authentication> authentications; private DateFormat dateFormat; private DateFormat datetimeFormat; private boolean lenientDatetimeFormat; private int dateLength; private InputStream sslCaCert; private boolean verifyingSsl; private OkHttpClient httpClient; private JSON json; private HttpLoggingInterceptor loggingInterceptor; /* * Constructor for ApiClient */ public ApiClient() { httpClient = new OkHttpClient(); verifyingSsl = true; json = new JSON(this); /* * Use RFC3339 format for date and datetime. * See http://xml2rfc.ietf.org/public/rfc/html/rfc3339.html#anchor14 */ this.dateFormat = new SimpleDateFormat("yyyy-MM-dd"); // Always use UTC as the default time zone when dealing with date (without time). this.dateFormat.setTimeZone(TimeZone.getTimeZone("UTC")); initDatetimeFormat(); // Be lenient on datetime formats when parsing datetime from string. // See <code>parseDatetime</code>. this.lenientDatetimeFormat = true; // Set default User-Agent. setUserAgent("Swagger-Codegen/9.15.0/java"); // Setup authentications (key: authentication name, value: authentication). authentications = new HashMap<String, Authentication>(); authentications.put("api_key", new ApiKeyAuth("header", "api_key")); authentications.put("oauth2_auth", new OAuth()); // Prevent the authentications from being modified. 
authentications = Collections.unmodifiableMap(authentications); } /** * Get base path * * @return Baes path */ public String getBasePath() { return basePath; } /** * Set base path * * @param basePath Base path of the URL (e.g https://api.muhimbi.com/api * @return An instance of OkHttpClient */ public ApiClient setBasePath(String basePath) { this.basePath = basePath; return this; } /** * Get HTTP client * * @return An instance of OkHttpClient */ public OkHttpClient getHttpClient() { return httpClient; } /** * Set HTTP client * * @param httpClient An instance of OkHttpClient * @return Api Client */ public ApiClient setHttpClient(OkHttpClient httpClient) { this.httpClient = httpClient; return this; } /** * Get JSON * * @return JSON object */ public JSON getJSON() { return json; } /** * Set JSON * * @param json JSON object * @return Api client */ public ApiClient setJSON(JSON json) { this.json = json; return this; } /** * True if isVerifyingSsl flag is on * * @return True if isVerifySsl flag is on */ public boolean isVerifyingSsl() { return verifyingSsl; } /** * Configure whether to verify certificate and hostname when making https requests. * Default to true. * NOTE: Do NOT set to false in production code, otherwise you would face multiple types of cryptographic attacks. * * @param verifyingSsl True to verify TLS/SSL connection * @return ApiClient */ public ApiClient setVerifyingSsl(boolean verifyingSsl) { this.verifyingSsl = verifyingSsl; applySslSettings(); return this; } /** * Get SSL CA cert. * * @return Input stream to the SSL CA cert */ public InputStream getSslCaCert() { return sslCaCert; } /** * Configure the CA certificate to be trusted when making https requests. * Use null to reset to default. 
     *
     * @param sslCaCert input stream for SSL CA cert
     * @return ApiClient
     */
    public ApiClient setSslCaCert(InputStream sslCaCert) {
        this.sslCaCert = sslCaCert;
        applySslSettings();
        return this;
    }

    // Date-only format used by parseDate()/formatDate().
    public DateFormat getDateFormat() {
        return dateFormat;
    }

    // Also records the length of a formatted date so parseDateOrDatetime() can
    // distinguish date strings from (longer) datetime strings.
    public ApiClient setDateFormat(DateFormat dateFormat) {
        this.dateFormat = dateFormat;
        this.dateLength = this.dateFormat.format(new Date()).length();
        return this;
    }

    public DateFormat getDatetimeFormat() {
        return datetimeFormat;
    }

    public ApiClient setDatetimeFormat(DateFormat datetimeFormat) {
        this.datetimeFormat = datetimeFormat;
        return this;
    }

    /**
     * Whether to allow various ISO 8601 datetime formats when parsing a datetime string.
     * @see #parseDatetime(String)
     * @return True if lenientDatetimeFormat flag is set to true
     */
    public boolean isLenientDatetimeFormat() {
        return lenientDatetimeFormat;
    }

    public ApiClient setLenientDatetimeFormat(boolean lenientDatetimeFormat) {
        this.lenientDatetimeFormat = lenientDatetimeFormat;
        return this;
    }

    /**
     * Parse the given date string into Date object.
     * The default <code>dateFormat</code> supports these ISO 8601 date formats:
     *   2015-08-16
     *   2015-8-16
     * @param str String to be parsed
     * @return Date
     */
    public Date parseDate(String str) {
        if (str == null)
            return null;
        try {
            return dateFormat.parse(str);
        } catch (ParseException e) {
            // Wrap as unchecked: an unparsable date is treated as a programming error.
            throw new RuntimeException(e);
        }
    }

    /**
     * Parse the given datetime string into Date object.
     * When lenientDatetimeFormat is enabled, the following ISO 8601 datetime formats are supported:
     *   2015-08-16T08:20:05Z
     *   2015-8-16T8:20:05Z
     *   2015-08-16T08:20:05+00:00
     *   2015-08-16T08:20:05+0000
     *   2015-08-16T08:20:05.376Z
     *   2015-08-16T08:20:05.376+00:00
     *   2015-08-16T08:20:05.376+00
     * Note: The 3-digit milli-seconds is optional. Time zone is required and can be in one of
     *   these formats:
     *   Z (same with +0000)
     *   +08:00 (same with +0800)
     *   -02 (same with -0200)
     *   -0200
     * @see <a href="https://en.wikipedia.org/wiki/ISO_8601">ISO 8601</a>
     * @param str Date time string to be parsed
     * @return Date representation of the string
     */
    public Date parseDatetime(String str) {
        if (str == null)
            return null;

        DateFormat format;
        if (lenientDatetimeFormat) {
            /*
             * When lenientDatetimeFormat is enabled, normalize the date string
             * into <code>LENIENT_DATETIME_FORMAT</code> to support various formats
             * defined by ISO 8601.
             *
             * NOTE: the replacements below are order-sensitive — each rewrites the
             * end of the string (\z anchor) into the shape the next step expects.
             */
            // normalize time zone
            //   trailing "Z": 2015-08-16T08:20:05Z => 2015-08-16T08:20:05+0000
            str = str.replaceAll("[zZ]\\z", "+0000");
            // remove colon in time zone: 2015-08-16T08:20:05+00:00 => 2015-08-16T08:20:05+0000
            str = str.replaceAll("([+-]\\d{2}):(\\d{2})\\z", "$1$2");
            // expand time zone: 2015-08-16T08:20:05+00 => 2015-08-16T08:20:05+0000
            str = str.replaceAll("([+-]\\d{2})\\z", "$100");
            // add milliseconds when missing
            //   2015-08-16T08:20:05+0000 => 2015-08-16T08:20:05.000+0000
            str = str.replaceAll("(:\\d{1,2})([+-]\\d{4})\\z", "$1.000$2");
            format = new SimpleDateFormat(LENIENT_DATETIME_FORMAT);
        } else {
            format = this.datetimeFormat;
        }

        try {
            return format.parse(str);
        } catch (ParseException e) {
            throw new RuntimeException(e);
        }
    }

    /*
     * Parse date or date time in string format into Date object.
     * Strings no longer than a formatted date (see dateLength) are parsed as plain
     * dates; anything longer is parsed as a datetime.
     *
     * @param str Date time string to be parsed
     * @return Date representation of the string
     */
    public Date parseDateOrDatetime(String str) {
        if (str == null)
            return null;
        else if (str.length() <= dateLength)
            return parseDate(str);
        else
            return parseDatetime(str);
    }

    /**
     * Format the given Date object into string (Date format).
     *
     * @param date Date object
     * @return Formatted date in string representation
     */
    public String formatDate(Date date) {
        return dateFormat.format(date);
    }

    /**
     * Format the given Date object into string (Datetime format).
* * @param date Date object * @return Formatted datetime in string representation */ public String formatDatetime(Date date) { return datetimeFormat.format(date); } /** * Get authentications (key: authentication name, value: authentication). * * @return Map of authentication objects */ public Map<String, Authentication> getAuthentications() { return authentications; } /** * Get authentication for the given name. * * @param authName The authentication name * @return The authentication, null if not found */ public Authentication getAuthentication(String authName) { return authentications.get(authName); } /** * Helper method to set username for the first HTTP basic authentication. * * @param username Username */ public void setUsername(String username) { for (Authentication auth : authentications.values()) { if (auth instanceof HttpBasicAuth) { ((HttpBasicAuth) auth).setUsername(username); return; } } throw new RuntimeException("No HTTP basic authentication configured!"); } /** * Helper method to set password for the first HTTP basic authentication. * * @param password Password */ public void setPassword(String password) { for (Authentication auth : authentications.values()) { if (auth instanceof HttpBasicAuth) { ((HttpBasicAuth) auth).setPassword(password); return; } } throw new RuntimeException("No HTTP basic authentication configured!"); } /** * Helper method to set API key value for the first API key authentication. * * @param apiKey API key */ public void setApiKey(String apiKey) { for (Authentication auth : authentications.values()) { if (auth instanceof ApiKeyAuth) { ((ApiKeyAuth) auth).setApiKey(apiKey); return; } } throw new RuntimeException("No API key authentication configured!"); } /** * Helper method to set API key prefix for the first API key authentication. 
     *
     * @param apiKeyPrefix API key prefix
     */
    public void setApiKeyPrefix(String apiKeyPrefix) {
        for (Authentication auth : authentications.values()) {
            if (auth instanceof ApiKeyAuth) {
                // First API key authentication wins.
                ((ApiKeyAuth) auth).setApiKeyPrefix(apiKeyPrefix);
                return;
            }
        }
        throw new RuntimeException("No API key authentication configured!");
    }

    /**
     * Helper method to set access token for the first OAuth2 authentication.
     *
     * @param accessToken Access token
     */
    public void setAccessToken(String accessToken) {
        for (Authentication auth : authentications.values()) {
            if (auth instanceof OAuth) {
                ((OAuth) auth).setAccessToken(accessToken);
                return;
            }
        }
        throw new RuntimeException("No OAuth2 authentication configured!");
    }

    /**
     * Set the User-Agent header's value (by adding to the default header map).
     *
     * @param userAgent HTTP request's user agent
     * @return ApiClient
     */
    public ApiClient setUserAgent(String userAgent) {
        addDefaultHeader("User-Agent", userAgent);
        return this;
    }

    /**
     * Add a default header.
     *
     * @param key The header's key
     * @param value The header's value
     * @return ApiClient
     */
    public ApiClient addDefaultHeader(String key, String value) {
        defaultHeaderMap.put(key, value);
        return this;
    }

    /**
     * @see <a href="https://google-gson.googlecode.com/svn/trunk/gson/docs/javadocs/com/google/gson/stream/JsonReader.html#setLenient(boolean)">setLenient</a>
     *
     * @return True if lenientOnJson is enabled, false otherwise.
     */
    public boolean isLenientOnJson() {
        return lenientOnJson;
    }

    /**
     * Set LenientOnJson
     *
     * @param lenient True to enable lenientOnJson
     * @return ApiClient
     */
    public ApiClient setLenientOnJson(boolean lenient) {
        this.lenientOnJson = lenient;
        return this;
    }

    /**
     * Check whether debugging is enabled for this API client.
     *
     * @return True if debugging is enabled, false otherwise.
     */
    public boolean isDebugging() {
        return debugging;
    }

    /**
     * Enable/disable debugging for this API client.
     *
     * @param debugging To enable (true) or disable (false) debugging
     * @return ApiClient
     */
    public ApiClient setDebugging(boolean debugging) {
        if (debugging != this.debugging) {
            if (debugging) {
                // Attach a BODY-level logging interceptor while debugging is on.
                loggingInterceptor = new HttpLoggingInterceptor();
                loggingInterceptor.setLevel(Level.BODY);
                httpClient.interceptors().add(loggingInterceptor);
            } else {
                // Detach and drop the interceptor when debugging is turned off.
                httpClient.interceptors().remove(loggingInterceptor);
                loggingInterceptor = null;
            }
        }
        this.debugging = debugging;
        return this;
    }

    /**
     * The path of temporary folder used to store downloaded files from endpoints
     * with file response. The default value is <code>null</code>, i.e. using
     * the system's default temporary folder.
     *
     * @see <a href="https://docs.oracle.com/javase/7/docs/api/java/io/File.html#createTempFile">createTempFile</a>
     * @return Temporary folder path
     */
    public String getTempFolderPath() {
        return tempFolderPath;
    }

    /**
     * Set the temporary folder path (for downloading files)
     *
     * @param tempFolderPath Temporary folder path
     * @return ApiClient
     */
    public ApiClient setTempFolderPath(String tempFolderPath) {
        this.tempFolderPath = tempFolderPath;
        return this;
    }

    /**
     * Get connection timeout (in milliseconds).
     *
     * @return Timeout in milliseconds
     */
    public int getConnectTimeout() {
        return httpClient.getConnectTimeout();
    }

    /**
     * Sets the connect timeout (in milliseconds).
     * A value of 0 means no timeout, otherwise values must be between 1 and
     * {@link Integer#MAX_VALUE}.
     *
     * @param connectionTimeout connection timeout in milliseconds
     * @return Api client
     */
    public ApiClient setConnectTimeout(int connectionTimeout) {
        httpClient.setConnectTimeout(connectionTimeout, TimeUnit.MILLISECONDS);
        return this;
    }

    /**
     * Format the given parameter object into string.
* * @param param Parameter * @return String representation of the parameter */ public String parameterToString(Object param) { if (param == null) { return ""; } else if (param instanceof Date) { return formatDatetime((Date) param); } else if (param instanceof Collection) { StringBuilder b = new StringBuilder(); for (Object o : (Collection)param) { if (b.length() > 0) { b.append(","); } b.append(String.valueOf(o)); } return b.toString(); } else { return String.valueOf(param); } } /** * Format to {@code Pair} objects. * * @param collectionFormat collection format (e.g. csv, tsv) * @param name Name * @param value Value * @return A list of Pair objects */ public List<Pair> parameterToPairs(String collectionFormat, String name, Object value){ List<Pair> params = new ArrayList<Pair>(); // preconditions if (name == null || name.isEmpty() || value == null) return params; Collection valueCollection = null; if (value instanceof Collection) { valueCollection = (Collection) value; } else { params.add(new Pair(name, parameterToString(value))); return params; } if (valueCollection.isEmpty()){ return params; } // get the collection format collectionFormat = (collectionFormat == null || collectionFormat.isEmpty() ? "csv" : collectionFormat); // default: csv // create the params based on the collection format if (collectionFormat.equals("multi")) { for (Object item : valueCollection) { params.add(new Pair(name, parameterToString(item))); } return params; } String delimiter = ","; if (collectionFormat.equals("csv")) { delimiter = ","; } else if (collectionFormat.equals("ssv")) { delimiter = " "; } else if (collectionFormat.equals("tsv")) { delimiter = "\t"; } else if (collectionFormat.equals("pipes")) { delimiter = "|"; } StringBuilder sb = new StringBuilder() ; for (Object item : valueCollection) { sb.append(delimiter); sb.append(parameterToString(item)); } params.add(new Pair(name, sb.substring(1))); return params; } /** * Sanitize filename by removing path. * e.g. 
../../sun.gif becomes sun.gif * * @param filename The filename to be sanitized * @return The sanitized filename */ public String sanitizeFilename(String filename) { return filename.replaceAll(".*[/\\\\]", ""); } /** * Check if the given MIME is a JSON MIME. * JSON MIME examples: * application/json * application/json; charset=UTF8 * APPLICATION/JSON * * @param mime MIME (Multipurpose Internet Mail Extensions) * @return True if the given MIME is JSON, false otherwise. */ public boolean isJsonMime(String mime) { return mime != null && mime.matches("(?i)application\\/json(;.*)?"); } /** * Select the Accept header's value from the given accepts array: * if JSON exists in the given array, use it; * otherwise use all of them (joining into a string) * * @param accepts The accepts array to select from * @return The Accept header to use. If the given array is empty, * null will be returned (not to set the Accept header explicitly). */ public String selectHeaderAccept(String[] accepts) { if (accepts.length == 0) { return null; } for (String accept : accepts) { if (isJsonMime(accept)) { return accept; } } return StringUtil.join(accepts, ","); } /** * Select the Content-Type header's value from the given array: * if JSON exists in the given array, use it; * otherwise use the first one of the array. * * @param contentTypes The Content-Type array to select from * @return The Content-Type header to use. If the given array is empty, * JSON will be used. */ public String selectHeaderContentType(String[] contentTypes) { if (contentTypes.length == 0) { return "application/json"; } for (String contentType : contentTypes) { if (isJsonMime(contentType)) { return contentType; } } return contentTypes[0]; } /** * Escape the given string to be used as URL query value. 
     *
     * @param str String to be escaped
     * @return Escaped string
     */
    public String escapeString(String str) {
        try {
            // URLEncoder form-encodes spaces as "+"; convert them to "%20" for URL use.
            return URLEncoder.encode(str, "utf8").replaceAll("\\+", "%20");
        } catch (UnsupportedEncodingException e) {
            // UTF-8 is guaranteed by the JVM spec, so this branch is effectively
            // unreachable; fall back to the unescaped string rather than failing.
            return str;
        }
    }

    /**
     * Deserialize response body to Java object, according to the return type and
     * the Content-Type response header.
     *
     * @param <T> Type
     * @param response HTTP response
     * @param returnType The type of the Java object
     * @return The deserialized Java object
     * @throws ApiException If fail to deserialize response body, i.e. cannot read response body
     *   or the Content-Type of the response is not supported.
     */
    @SuppressWarnings("unchecked")
    public <T> T deserialize(Response response, Type returnType) throws ApiException {
        if (response == null || returnType == null) {
            return null;
        }

        if ("byte[]".equals(returnType.toString())) {
            // Handle binary response (byte array).
            try {
                return (T) response.body().bytes();
            } catch (IOException e) {
                throw new ApiException(e);
            }
        } else if (returnType.equals(File.class)) {
            // Handle file downloading.
            return (T) downloadFileFromResponse(response);
        }

        // Read the whole body as a string for text-based deserialization.
        String respBody;
        try {
            if (response.body() != null)
                respBody = response.body().string();
            else
                respBody = null;
        } catch (IOException e) {
            throw new ApiException(e);
        }

        if (respBody == null || "".equals(respBody)) {
            return null;
        }

        String contentType = response.headers().get("Content-Type");
        if (contentType == null) {
            // ensuring a default content type
            contentType = "application/json";
        }
        if (isJsonMime(contentType)) {
            return json.deserialize(respBody, returnType);
        } else if (returnType.equals(String.class)) {
            // Expecting string, return the raw response body.
            return (T) respBody;
        } else {
            throw new ApiException(
                "Content type \"" + contentType + "\" is not supported for type: " + returnType,
                response.code(),
                response.headers().toMultimap(),
                respBody);
        }
    }

    /**
     * Serialize the given Java object into request body according to the object's
     * class and the request Content-Type.
     *
     * @param obj The Java object
     * @param contentType The request Content-Type
     * @return The serialized request body
     * @throws ApiException If fail to serialize the given object
     */
    public RequestBody serialize(Object obj, String contentType) throws ApiException {
        if (obj instanceof byte[]) {
            // Binary (byte array) body parameter support.
            return RequestBody.create(MediaType.parse(contentType), (byte[]) obj);
        } else if (obj instanceof File) {
            // File body parameter support.
            return RequestBody.create(MediaType.parse(contentType), (File) obj);
        } else if (isJsonMime(contentType)) {
            String content;
            if (obj != null) {
                content = json.serialize(obj);
            } else {
                content = null;
            }
            return RequestBody.create(MediaType.parse(contentType), content);
        } else {
            throw new ApiException("Content type \"" + contentType + "\" is not supported");
        }
    }

    /**
     * Download file from the given response.
* * @param response An instance of the Response object * @throws ApiException If fail to read file content from response and write to disk * @return Downloaded file */ public File downloadFileFromResponse(Response response) throws ApiException { try { File file = prepareDownloadFile(response); BufferedSink sink = Okio.buffer(Okio.sink(file)); sink.writeAll(response.body().source()); sink.close(); return file; } catch (IOException e) { throw new ApiException(e); } } /** * Prepare file for download * * @param response An instance of the Response object * @throws IOException If fail to prepare file for download * @return Prepared file for the download */ public File prepareDownloadFile(Response response) throws IOException { String filename = null; String contentDisposition = response.header("Content-Disposition"); if (contentDisposition != null && !"".equals(contentDisposition)) { // Get filename from the Content-Disposition header. Pattern pattern = Pattern.compile("filename=['\"]?([^'\"\\s]+)['\"]?"); Matcher matcher = pattern.matcher(contentDisposition); if (matcher.find()) { filename = sanitizeFilename(matcher.group(1)); } } String prefix = null; String suffix = null; if (filename == null) { prefix = "download-"; suffix = ""; } else { int pos = filename.lastIndexOf("."); if (pos == -1) { prefix = filename + "-"; } else { prefix = filename.substring(0, pos) + "-"; suffix = filename.substring(pos); } // File.createTempFile requires the prefix to be at least three characters long if (prefix.length() < 3) prefix = "download-"; } if (tempFolderPath == null) return File.createTempFile(prefix, suffix); else return File.createTempFile(prefix, suffix, new File(tempFolderPath)); } /** * {@link #execute(Call, Type)} * * @param <T> Type * @param call An instance of the Call object * @throws ApiException If fail to execute the call * @return ApiResponse&lt;T&gt; */ public <T> ApiResponse<T> execute(Call call) throws ApiException { return execute(call, null); } /** * Execute 
HTTP call and deserialize the HTTP response body into the given return type.
     *
     * @param returnType The return type used to deserialize HTTP response body
     * @param <T> The return type corresponding to (same with) returnType
     * @param call Call
     * @return ApiResponse object containing response status, headers and
     *   data, which is a Java object deserialized from response body and would be null
     *   when returnType is null.
     * @throws ApiException If fail to execute the call
     */
    public <T> ApiResponse<T> execute(Call call, Type returnType) throws ApiException {
        try {
            Response response = call.execute();
            T data = handleResponse(response, returnType);
            return new ApiResponse<T>(response.code(), response.headers().toMultimap(), data);
        } catch (IOException e) {
            throw new ApiException(e);
        }
    }

    /**
     * {@link #executeAsync(Call, Type, ApiCallback)}
     *
     * @param <T> Type
     * @param call An instance of the Call object
     * @param callback ApiCallback&lt;T&gt;
     */
    public <T> void executeAsync(Call call, ApiCallback<T> callback) {
        executeAsync(call, null, callback);
    }

    /**
     * Execute HTTP call asynchronously.
     *
     * @see #execute(Call, Type)
     * @param <T> Type
     * @param call The HTTP call to execute asynchronously
     * @param returnType Return type
     * @param callback ApiCallback
     */
    @SuppressWarnings("unchecked")
    public <T> void executeAsync(Call call, final Type returnType, final ApiCallback<T> callback) {
        call.enqueue(new Callback() {
            @Override
            public void onFailure(Request request, IOException e) {
                // Transport-level failure: no HTTP status is available, so report 0.
                callback.onFailure(new ApiException(e), 0, null);
            }

            @Override
            public void onResponse(Response response) throws IOException {
                T result;
                try {
                    result = (T) handleResponse(response, returnType);
                } catch (ApiException e) {
                    // Deserialization or HTTP-status failure: surface via the callback.
                    callback.onFailure(e, response.code(), response.headers().toMultimap());
                    return;
                }
                callback.onSuccess(result, response.code(), response.headers().toMultimap());
            }
        });
    }

    /**
     * Handle the given response, return the deserialized object when the response is successful.
     *
     * @param <T> Type
     * @param response Response
     * @param returnType Return type
     * @throws ApiException If the response has a unsuccessful status code or
     *   fail to deserialize the response body
     * @return Type
     */
    public <T> T handleResponse(Response response, Type returnType) throws ApiException {
        if (response.isSuccessful()) {
            if (returnType == null || response.code() == 204) {
                // returning null if the returnType is not defined,
                // or the status code is 204 (No Content)
                return null;
            } else {
                return deserialize(response, returnType);
            }
        } else {
            // Unsuccessful status: include the response body (when readable) in the exception.
            String respBody = null;
            if (response.body() != null) {
                try {
                    respBody = response.body().string();
                } catch (IOException e) {
                    throw new ApiException(response.message(), e, response.code(), response.headers().toMultimap());
                }
            }
            throw new ApiException(response.message(), response.code(), response.headers().toMultimap(), respBody);
        }
    }

    /**
     * Build HTTP call with the given options.
     *
     * @param path The sub-path of the HTTP URL
     * @param method The request method, one of "GET", "HEAD", "OPTIONS", "POST", "PUT", "PATCH" and "DELETE"
     * @param queryParams The query parameters
     * @param body The request body object
     * @param headerParams The header parameters
     * @param formParams The form parameters
     * @param authNames The authentications to apply
     * @param progressRequestListener Progress request listener
     * @return The HTTP call
     * @throws ApiException If fail to serialize the request body object
     */
    public Call buildCall(String path, String method, List<Pair> queryParams, Object body, Map<String, String> headerParams, Map<String, Object> formParams, String[] authNames, ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // Auth must run first: it may add query and header parameters.
        updateParamsForAuth(authNames, queryParams, headerParams);

        final String url = buildUrl(path, queryParams);
        final Request.Builder reqBuilder = new Request.Builder().url(url);
        processHeaderParams(headerParams, reqBuilder);

        String contentType = (String) headerParams.get("Content-Type");
        // ensuring a default content type
        if (contentType == null) {
            contentType = "application/json";
        }

        RequestBody reqBody;
        if (!HttpMethod.permitsRequestBody(method)) {
            reqBody = null;
        } else if ("application/x-www-form-urlencoded".equals(contentType)) {
            reqBody = buildRequestBodyFormEncoding(formParams);
        } else if ("multipart/form-data".equals(contentType)) {
            reqBody = buildRequestBodyMultipart(formParams);
        } else if (body == null) {
            if ("DELETE".equals(method)) {
                // allow calling DELETE without sending a request body
                reqBody = null;
            } else {
                // use an empty request body (for POST, PUT and PATCH)
                reqBody = RequestBody.create(MediaType.parse(contentType), "");
            }
        } else {
            reqBody = serialize(body, contentType);
        }

        Request request = null;

        if(progressRequestListener != null && reqBody != null) {
            // Wrap the body so upload progress is reported to the listener.
            ProgressRequestBody progressRequestBody = new ProgressRequestBody(reqBody, progressRequestListener);
            request = reqBuilder.method(method, progressRequestBody).build();
        } else {
            request = reqBuilder.method(method, reqBody).build();
        }

        return httpClient.newCall(request);
    }

    /**
     * Build full URL by concatenating base path, the given sub path and query parameters.
     *
     * @param path The sub path
     * @param queryParams The query parameters
     * @return The full URL
     */
    public String buildUrl(String path, List<Pair> queryParams) {
        final StringBuilder url = new StringBuilder();
        url.append(basePath).append(path);

        if (queryParams != null && !queryParams.isEmpty()) {
            // support (constant) query string in `path`, e.g. "/posts?draft=1"
            String prefix = path.contains("?") ? "&" : "?";
            for (Pair param : queryParams) {
                if (param.getValue() != null) {
                    // prefix is used exactly once (for the first appended parameter).
                    if (prefix != null) {
                        url.append(prefix);
                        prefix = null;
                    } else {
                        url.append("&");
                    }
                    String value = parameterToString(param.getValue());
                    url.append(escapeString(param.getName())).append("=").append(escapeString(value));
                }
            }
        }

        return url.toString();
    }

    /**
     * Set header parameters to the request builder, including default headers.
     *
     * @param headerParams Header parameters in the form of Map
     * @param reqBuilder Request.Builder
     */
    public void processHeaderParams(Map<String, String> headerParams, Request.Builder reqBuilder) {
        for (Entry<String, String> param : headerParams.entrySet()) {
            reqBuilder.header(param.getKey(), parameterToString(param.getValue()));
        }
        // Default headers apply only when the request did not set the same header.
        for (Entry<String, String> header : defaultHeaderMap.entrySet()) {
            if (!headerParams.containsKey(header.getKey())) {
                reqBuilder.header(header.getKey(), parameterToString(header.getValue()));
            }
        }
    }

    /**
     * Update query and header parameters based on authentication settings.
     *
     * @param authNames The authentications to apply
     * @param queryParams List of query parameters
     * @param headerParams Map of header parameters
     */
    public void updateParamsForAuth(String[] authNames, List<Pair> queryParams, Map<String, String> headerParams) {
        for (String authName : authNames) {
            Authentication auth = authentications.get(authName);
            if (auth == null) throw new RuntimeException("Authentication undefined: " + authName);
            auth.applyToParams(queryParams, headerParams);
        }
    }

    /**
     * Build a form-encoding request body with the given form parameters.
     *
     * @param formParams Form parameters in the form of Map
     * @return RequestBody
     */
    public RequestBody buildRequestBodyFormEncoding(Map<String, Object> formParams) {
        FormEncodingBuilder formBuilder = new FormEncodingBuilder();
        for (Entry<String, Object> param : formParams.entrySet()) {
            formBuilder.add(param.getKey(), parameterToString(param.getValue()));
        }
        return formBuilder.build();
    }

    /**
     * Build a multipart (file uploading) request body with the given form parameters,
     * which could contain text fields and file fields.
     *
     * @param formParams Form parameters in the form of Map
     * @return RequestBody
     */
    public RequestBody buildRequestBodyMultipart(Map<String, Object> formParams) {
        MultipartBuilder mpBuilder = new MultipartBuilder().type(MultipartBuilder.FORM);
        for (Entry<String, Object> param : formParams.entrySet()) {
            if (param.getValue() instanceof File) {
                // File field: advertise the original filename and a guessed media type.
                File file = (File) param.getValue();
                Headers partHeaders = Headers.of("Content-Disposition", "form-data; name=\"" + param.getKey() + "\"; filename=\"" + file.getName() + "\"");
                MediaType mediaType = MediaType.parse(guessContentTypeFromFile(file));
                mpBuilder.addPart(partHeaders, RequestBody.create(mediaType, file));
            } else {
                // Plain text field.
                Headers partHeaders = Headers.of("Content-Disposition", "form-data; name=\"" + param.getKey() + "\"");
                mpBuilder.addPart(partHeaders, RequestBody.create(null, parameterToString(param.getValue())));
            }
        }
        return mpBuilder.build();
    }

    /**
     * Guess Content-Type header from the given file (defaults to "application/octet-stream").
     *
     * @param file The given file
     * @return The guessed Content-Type
     */
    public String guessContentTypeFromFile(File file) {
        String contentType = URLConnection.guessContentTypeFromName(file.getName());
        if (contentType == null) {
            return "application/octet-stream";
        } else {
            return contentType;
        }
    }

    /**
     * Initialize datetime format according to the current environment, e.g. Java 1.7 and Android.
     */
    private void initDatetimeFormat() {
        String formatWithTimeZone = null;
        if (IS_ANDROID) {
            if (ANDROID_SDK_VERSION >= 18) {
                // The time zone format "ZZZZZ" is available since Android 4.3 (SDK version 18)
                formatWithTimeZone = "yyyy-MM-dd'T'HH:mm:ss.SSSZZZZZ";
            }
        } else if (JAVA_VERSION >= 1.7) {
            // The time zone format "XXX" is available since Java 1.7
            formatWithTimeZone = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX";
        }
        if (formatWithTimeZone != null) {
            this.datetimeFormat = new SimpleDateFormat(formatWithTimeZone);
            // NOTE: Use the system's default time zone (mainly for datetime formatting).
} else { // Use a common format that works across all systems. this.datetimeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); // Always use the UTC time zone as we are using a constant trailing "Z" here. this.datetimeFormat.setTimeZone(TimeZone.getTimeZone("UTC")); } } /** * Apply SSL related settings to httpClient according to the current values of * verifyingSsl and sslCaCert. */ private void applySslSettings() { try { KeyManager[] keyManagers = null; TrustManager[] trustManagers = null; HostnameVerifier hostnameVerifier = null; if (!verifyingSsl) { TrustManager trustAll = new X509TrustManager() { @Override public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {} @Override public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {} @Override public X509Certificate[] getAcceptedIssuers() { return null; } }; SSLContext sslContext = SSLContext.getInstance("TLS"); trustManagers = new TrustManager[]{ trustAll }; hostnameVerifier = new HostnameVerifier() { @Override public boolean verify(String hostname, SSLSession session) { return true; } }; } else if (sslCaCert != null) { char[] password = null; // Any password will work. CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509"); Collection<? 
extends Certificate> certificates = certificateFactory.generateCertificates(sslCaCert); if (certificates.isEmpty()) { throw new IllegalArgumentException("expected non-empty set of trusted certificates"); } KeyStore caKeyStore = newEmptyKeyStore(password); int index = 0; for (Certificate certificate : certificates) { String certificateAlias = "ca" + Integer.toString(index++); caKeyStore.setCertificateEntry(certificateAlias, certificate); } TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); trustManagerFactory.init(caKeyStore); trustManagers = trustManagerFactory.getTrustManagers(); } if (keyManagers != null || trustManagers != null) { SSLContext sslContext = SSLContext.getInstance("TLS"); sslContext.init(keyManagers, trustManagers, new SecureRandom()); httpClient.setSslSocketFactory(sslContext.getSocketFactory()); } else { httpClient.setSslSocketFactory(null); } httpClient.setHostnameVerifier(hostnameVerifier); } catch (GeneralSecurityException e) { throw new RuntimeException(e); } } private KeyStore newEmptyKeyStore(char[] password) throws GeneralSecurityException { try { KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); keyStore.load(null, password); return keyStore; } catch (IOException e) { throw new AssertionError(e); } } }
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.table.under.glue; import alluxio.grpc.table.BinaryColumnStatsData; import alluxio.grpc.table.BooleanColumnStatsData; import alluxio.grpc.table.ColumnStatisticsData; import alluxio.grpc.table.ColumnStatisticsInfo; import alluxio.grpc.table.Date; import alluxio.grpc.table.DateColumnStatsData; import alluxio.grpc.table.Decimal; import alluxio.grpc.table.DecimalColumnStatsData; import alluxio.grpc.table.DoubleColumnStatsData; import alluxio.grpc.table.LongColumnStatsData; import alluxio.grpc.table.Schema; import alluxio.grpc.table.StringColumnStatsData; import alluxio.grpc.table.layout.hive.HiveBucketProperty; import alluxio.grpc.table.layout.hive.SortingColumn; import alluxio.grpc.table.layout.hive.Storage; import alluxio.grpc.table.layout.hive.StorageFormat; import alluxio.table.common.udb.PathTranslator; import com.amazonaws.services.glue.model.Column; import com.amazonaws.services.glue.model.ColumnStatistics; import com.amazonaws.services.glue.model.Order; import com.amazonaws.services.glue.model.StorageDescriptor; import com.google.protobuf.ByteString; import org.apache.hadoop.hive.common.FileUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; /** * Glue utils. */ public class GlueUtils { private GlueUtils() {} /** * Convert glue field schema to alluxio proto (Glue do not have filedschema api). 
* * @param glueColumns list of glue columns * @return alluxio proto of schema */ public static Schema toProtoSchema(List<Column> glueColumns) { Schema.Builder schemaBuilder = Schema.newBuilder(); schemaBuilder.addAllCols(toProto(glueColumns)); return schemaBuilder.build(); } /** * Convert the Glue FieldSchema to Alluxio FieldSchema. * * @param glueCloumns Glue FiledSchema * @return list of Alluxio FieldSchema */ public static List<alluxio.grpc.table.FieldSchema> toProto(List<Column> glueCloumns) { if (glueCloumns == null) { return Collections.emptyList(); } List<alluxio.grpc.table.FieldSchema> list = new ArrayList<>(); for (Column column:glueCloumns) { alluxio.grpc.table.FieldSchema.Builder builder = alluxio.grpc.table.FieldSchema.newBuilder() .setName(column.getName()) .setType(column.getType()); if (column.getComment() != null) { builder.setComment(column.getComment()); } list.add(builder.build()); } return list; } /** * Convert glue ColumnStatistics to Alluxio ColumnStatisticsInfo. * * @param glueColumnStatistic glue column statistic info * @return Alluxio ColumnStatisticsInfo */ public static ColumnStatisticsInfo toProto(ColumnStatistics glueColumnStatistic) { if (glueColumnStatistic == null) { return ColumnStatisticsInfo.newBuilder().build(); } ColumnStatisticsInfo.Builder columnStatisticsInfoBuilder = ColumnStatisticsInfo.newBuilder(); columnStatisticsInfoBuilder.setColName(glueColumnStatistic.getColumnName()) .setColType(glueColumnStatistic.getColumnType()); if (glueColumnStatistic.getStatisticsData() != null) { com.amazonaws.services.glue.model.ColumnStatisticsData glueColumnStatisticsData = glueColumnStatistic.getStatisticsData(); String columnType = glueColumnStatistic.getStatisticsData().getType(); if (columnType != null) { if (columnType.equals("BOOLEAN") && glueColumnStatisticsData.getBooleanColumnStatisticsData() != null) { com.amazonaws.services.glue.model.BooleanColumnStatisticsData booleanData = 
glueColumnStatisticsData.getBooleanColumnStatisticsData(); if (booleanData != null) { columnStatisticsInfoBuilder.setData( ColumnStatisticsData.newBuilder().setBooleanStats(toProto(booleanData)).build()); } } if (columnType.equals("DATE") && glueColumnStatisticsData.getDateColumnStatisticsData() != null) { com.amazonaws.services.glue.model.DateColumnStatisticsData dateData = glueColumnStatisticsData.getDateColumnStatisticsData(); if (dateData != null) { columnStatisticsInfoBuilder.setData( ColumnStatisticsData.newBuilder().setDateStats(toProto(dateData)).build()); } } if (columnType.equals("DECIMAL") && glueColumnStatisticsData.getDecimalColumnStatisticsData() != null) { com.amazonaws.services.glue.model.DecimalColumnStatisticsData decimalData = glueColumnStatisticsData.getDecimalColumnStatisticsData(); if (decimalData != null) { columnStatisticsInfoBuilder.setData( ColumnStatisticsData.newBuilder().setDecimalStats(toProto(decimalData)).build()); } } if (columnType.equals("DOUBLE") && glueColumnStatisticsData.getDoubleColumnStatisticsData() != null) { com.amazonaws.services.glue.model.DoubleColumnStatisticsData doubleData = glueColumnStatisticsData.getDoubleColumnStatisticsData(); if (doubleData != null) { columnStatisticsInfoBuilder.setData( ColumnStatisticsData.newBuilder().setDoubleStats(toProto(doubleData)).build()); } } if (columnType.equals("LONG") && glueColumnStatisticsData.getLongColumnStatisticsData() != null) { com.amazonaws.services.glue.model.LongColumnStatisticsData longData = glueColumnStatisticsData.getLongColumnStatisticsData(); if (longData != null) { columnStatisticsInfoBuilder.setData( ColumnStatisticsData.newBuilder().setLongStats(toProto(longData)).build()); } } if (columnType.equals("STRING") && glueColumnStatisticsData.getStringColumnStatisticsData() != null) { com.amazonaws.services.glue.model.StringColumnStatisticsData stringData = glueColumnStatisticsData.getStringColumnStatisticsData(); if (stringData != null) { 
columnStatisticsInfoBuilder.setData( ColumnStatisticsData.newBuilder().setStringStats(toProto(stringData)).build()); } } if (columnType.equals("BINARY") && glueColumnStatisticsData.getBinaryColumnStatisticsData() != null) { com.amazonaws.services.glue.model.BinaryColumnStatisticsData binaryData = glueColumnStatisticsData.getBinaryColumnStatisticsData(); if (binaryData != null) { columnStatisticsInfoBuilder.setData( ColumnStatisticsData.newBuilder().setBinaryStats(toProto(binaryData)).build()); } } } } return columnStatisticsInfoBuilder.build(); } private static BooleanColumnStatsData toProto( com.amazonaws.services.glue.model.BooleanColumnStatisticsData booleanData) { BooleanColumnStatsData.Builder builder = BooleanColumnStatsData.newBuilder(); builder.setNumNulls(booleanData.getNumberOfNulls()) .setNumTrues(booleanData.getNumberOfTrues()) .setNumFalses(booleanData.getNumberOfFalses()); return builder.build(); } private static DateColumnStatsData toProto( com.amazonaws.services.glue.model.DateColumnStatisticsData dateData) { DateColumnStatsData.Builder builder = DateColumnStatsData.newBuilder(); builder.setNumNulls(dateData.getNumberOfNulls()) .setNumDistincts(dateData.getNumberOfDistinctValues()); if (dateData.getMaximumValue() != null) { builder.setHighValue(Date.newBuilder() .setDaysSinceEpoch(dateData.getMaximumValue().getTime()).build()); } if (dateData.getMinimumValue() != null) { builder.setLowValue(Date.newBuilder() .setDaysSinceEpoch(dateData.getMinimumValue().getTime()).build()); } return builder.build(); } private static DecimalColumnStatsData toProto( com.amazonaws.services.glue.model.DecimalColumnStatisticsData decimalData) { DecimalColumnStatsData.Builder builder = DecimalColumnStatsData.newBuilder(); builder.setNumNulls(decimalData.getNumberOfNulls()) .setNumDistincts(decimalData.getNumberOfDistinctValues()); if (decimalData.getMaximumValue() != null) { builder.setHighValue(Decimal.newBuilder().setScale(decimalData.getMaximumValue().getScale()) 
.setUnscaled( ByteString.copyFrom(decimalData.getMaximumValue().getUnscaledValue().array()))); } if (decimalData.getMinimumValue() != null) { builder.setLowValue(Decimal.newBuilder().setScale(decimalData.getMinimumValue().getScale()) .setUnscaled( ByteString.copyFrom(decimalData.getMinimumValue().getUnscaledValue().array()))); } return builder.build(); } private static DoubleColumnStatsData toProto( com.amazonaws.services.glue.model.DoubleColumnStatisticsData doubleData) { DoubleColumnStatsData.Builder builder = DoubleColumnStatsData.newBuilder(); builder.setNumNulls(doubleData.getNumberOfNulls()) .setNumDistincts(doubleData.getNumberOfDistinctValues()); if (doubleData.getMaximumValue() != null) { builder.setHighValue(doubleData.getMaximumValue()); } if (doubleData.getMinimumValue() != null) { builder.setLowValue(doubleData.getMinimumValue()); } return builder.build(); } private static LongColumnStatsData toProto( com.amazonaws.services.glue.model.LongColumnStatisticsData longData) { LongColumnStatsData.Builder builder = LongColumnStatsData.newBuilder(); builder.setNumNulls(longData.getNumberOfNulls()) .setNumDistincts(longData.getNumberOfDistinctValues()); if (longData.getMaximumValue() != null) { builder.setHighValue(longData.getMaximumValue()); } if (longData.getMinimumValue() != null) { builder.setLowValue(longData.getMinimumValue()); } return builder.build(); } private static StringColumnStatsData toProto( com.amazonaws.services.glue.model.StringColumnStatisticsData stringData) { StringColumnStatsData.Builder builder = StringColumnStatsData.newBuilder(); builder.setNumNulls(stringData.getNumberOfNulls()) .setNumDistincts(stringData.getNumberOfDistinctValues()); if (stringData.getAverageLength() != null) { builder.setAvgColLen(stringData.getAverageLength()); } if (stringData.getMaximumLength() != null) { builder.setMaxColLen(stringData.getMaximumLength().longValue()); } return builder.build(); } private static BinaryColumnStatsData toProto( 
com.amazonaws.services.glue.model.BinaryColumnStatisticsData binaryData) { BinaryColumnStatsData.Builder builder = BinaryColumnStatsData.newBuilder(); builder.setNumNulls(binaryData.getNumberOfNulls()); if (binaryData.getMaximumLength() != null) { builder.setMaxColLen(binaryData.getMaximumLength()); } if (binaryData.getAverageLength() != null) { builder.setAvgColLen(binaryData.getAverageLength()); } return builder.build(); } /** * Convert the Glue Storage Descriptor and Translator information to Storage. * * @param sd the glue storage descriptor * @param translator the glue translator * @return storage proto * @throws IOException */ public static Storage toProto(StorageDescriptor sd, PathTranslator translator) throws IOException { if (sd == null) { return Storage.getDefaultInstance(); } String serDe = sd.getSerdeInfo() == null ? null : sd.getSerdeInfo().getSerializationLibrary(); Map<String, String> serdeLibMap = sd.getSerdeInfo() == null ? null : sd.getSerdeInfo().getParameters(); StorageFormat.Builder formatBuilder = StorageFormat.newBuilder() .setInputFormat(sd.getInputFormat()) .setOutputFormat(sd.getOutputFormat()); if (serdeLibMap != null) { formatBuilder.putAllSerdelibParameters(serdeLibMap); } if (serDe != null) { formatBuilder.setSerde(serDe); // Check SerDe info } alluxio.grpc.table.layout.hive.Storage.Builder storageBuilder = alluxio.grpc.table.layout.hive.Storage.newBuilder(); List<Order> orderList = sd.getSortColumns(); List<SortingColumn> sortingColumns; if (orderList == null) { sortingColumns = Collections.emptyList(); } else { sortingColumns = orderList.stream().map( order -> SortingColumn.newBuilder().setColumnName(order.getColumn()) .setOrder(order.getSortOrder() == 1 ? 
SortingColumn.SortingOrder.ASCENDING : SortingColumn.SortingOrder.DESCENDING).build()) .collect(Collectors.toList()); } return storageBuilder.setStorageFormat(formatBuilder.build()) .setLocation(translator.toAlluxioPath(sd.getLocation())) .setBucketProperty(HiveBucketProperty.newBuilder().setBucketCount(sd.getNumberOfBuckets()) .addAllBucketedBy(sd.getBucketColumns()).addAllSortedBy(sortingColumns).build()) .setSkewed(sd.getSkewedInfo() != null && (sd.getSkewedInfo().getSkewedColumnNames()) != null && !sd.getSkewedInfo().getSkewedColumnNames().isEmpty()) .putAllSerdeParameters(sd.getParameters()).build(); } /** * Align to hive makePartName, convert glue partition information to alluxio partition name. * * @param columns glue table partition keys * @param partitionValues glue partition values * @return partition name * @throws IOException */ public static String makePartitionName(List<Column> columns, List<String> partitionValues) throws IOException { if ((columns.size() != partitionValues.size()) || columns.size() == 0) { String errorMesg = "Invalid partition key & values; key ["; for (Column column : columns) { errorMesg += (column.getName() + ","); } errorMesg += "], values ["; for (String partitionValue : partitionValues) { errorMesg += (partitionValue + ", "); } throw new IOException(errorMesg + "]"); } List<String> columnNames = new ArrayList<>(); for (Column column : columns) { columnNames.add(column.getName()); } return makePartName(columnNames, partitionValues); } /** * Make partition name for glue, wrapper of hive makePartName. * * @param partCols partition columns * @param vals partition values * @return partition name */ public static String makePartName(List<String> partCols, List<String> vals) { return FileUtils.makePartName(partCols, vals); } }
/* * Copyright 2006-2011 National Institute of Advanced Industrial Science * and Technology (AIST), and contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ow.messaging.tcp; import java.io.IOException; import java.net.InetAddress; import java.net.ServerSocket; import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import ow.messaging.ExtendedMessageHandler; import ow.messaging.InetMessagingAddress; import ow.messaging.Message; import ow.messaging.MessageHandler; import ow.messaging.MessageReceiver; import ow.messaging.MessageSender; import ow.messaging.MessagingAddress; import ow.messaging.Signature; import ow.messaging.upnp.Mapping; import ow.messaging.util.UPnPAddressPortMapper; import ow.stat.MessagingReporter; import ow.stat.StatConfiguration; import ow.stat.StatFactory; import ow.util.concurrent.ExecutorBlockingMode; import ow.util.concurrent.SingletonThreadPoolExecutors; public class TCPMessageReceiver implements MessageReceiver, Runnable { private final static Logger logger = Logger.getLogger("messaging"); private MessagingAddress selfAddr; private ServerSocketChannel servSock; protected TCPMessagingConfiguration config; protected TCPMessagingProvider provider; 
protected ConnectionPool connPool; private Thread receiverThread; private Set<Thread> handlerThreads = Collections.synchronizedSet(new HashSet<Thread>()); private List<MessageHandler> handlerList = new ArrayList<MessageHandler>(); protected boolean extMessageHandlerRegistered = false; private final MessagingReporter msgReporter; private static boolean oomPrinted = false; protected TCPMessageReceiver(InetAddress selfInetAddr, int port, int portRange, TCPMessagingConfiguration config, TCPMessagingProvider provider) throws IOException { this.config = config; this.provider = provider; // prepare a server socket this.servSock = ServerSocketChannel.open(); // prepare local address if (selfInetAddr == null) { selfInetAddr = InetAddress.getLocalHost(); } // bind to the specified address, and then to a local address if failed. ServerSocket s = this.servSock.socket(); s.setReuseAddress(true); // for development this.selfAddr = this.bind(s, selfInetAddr, port, portRange); if (this.selfAddr == null && !selfInetAddr.equals(InetAddress.getLocalHost())) { InetMessagingAddress boundAddr = this.bind(s, InetAddress.getLocalHost(), port, portRange); if (boundAddr != null) { boundAddr.setInetAddress(selfInetAddr); this.selfAddr = boundAddr; } } if (this.selfAddr == null) { String addrPort = selfInetAddr.getHostAddress() + ":" + port + "-" + (port + portRange - 1); logger.log(Level.SEVERE, "Could not bind to " + addrPort + "." 
+ " Specify self hostname with -s option."); throw new IOException("Bind failed: " + addrPort); } this.connPool = new ConnectionPool( config.getConnectionPoolSize(), config.getSenderKeepAliveTime()); StatConfiguration conf = StatFactory.getDefaultConfiguration(); this.msgReporter = StatFactory.getMessagingReporter(conf, this.provider, this.getSender()); // for UPnP Address Port Mapping if (this.config.getDoUPnPNATTraversal()) { String internalAddress = this.selfAddr.getHostAddress(); UPnPAddressPortMapper.start(internalAddress, port, Mapping.Protocol.TCP, "Overlay Weaver", this.provider, config.getUPnPTimeout()); } } private InetMessagingAddress bind( ServerSocket sock, InetAddress inetAddr, int port, int range) { InetMessagingAddress addr = null; boolean bound = false; if (range <= 0) range = 1; for (int i = 0; i < range; i++) { addr = new InetMessagingAddress(inetAddr, port + i); try { sock.bind(addr.getInetSocketAddress()); port = port + i; bound = true; break; } catch (IOException e) { /*ignore*/ } } if (!bound) addr = null; return addr; } public MessagingAddress getSelfAddress() { return this.selfAddr; } public void setSelfAddress(String hostOrIP) { try { MessagingAddress addr = this.provider.getMessagingAddress( hostOrIP, this.selfAddr.getPort()); this.selfAddr.copyFrom(addr); } catch (UnknownHostException e) { logger.log(Level.WARNING, "Could not resolve a hostname: " + hostOrIP); } } public MessagingAddress setSelfAddress(MessagingAddress addr) { MessagingAddress old = this.selfAddr; this.selfAddr = addr; return old; } public int getPort() { return this.selfAddr.getPort(); } public MessagingReporter getMessagingReporter() { return this.msgReporter; } public MessageSender getSender() { // does not share a sender return new TCPMessageSender(this); } public void start() { synchronized (this) { if (receiverThread == null) { receiverThread = new Thread(this); receiverThread.setDaemon(true); receiverThread.setName("TCPMessageReceiver"); // give higher priority 
receiverThread.setPriority(Thread.currentThread().getPriority() + this.config.getReceiverThreadPriority()); receiverThread.start(); } } } public void stop() { synchronized (this) { if (this.receiverThread != null) { this.receiverThread.interrupt(); this.receiverThread = null; } } Thread[] handlerArray = new Thread[this.handlerThreads.size()]; this.handlerThreads.toArray(handlerArray); for (int i = 0; i < handlerArray.length; i++) { handlerArray[i].interrupt(); } this.handlerThreads.clear(); // notify statistics collector this.msgReporter.notifyStatCollectorOfDeletedNode(this.selfAddr); // close all sockets in the connection pool this.connPool.clear(); } public void run() { while (true) { SocketChannel sock = null; try { sock = servSock.accept(); } catch (IOException e) { logger.log(Level.WARNING, "ServerSocket#accept() threw an Exception and the receiver will die."); return; } // invoke a Thread handling an incoming Message Runnable r = new TCPMessageHandler(sock); try { if (this.config.getUseThreadPool()) { SingletonThreadPoolExecutors.getThreadPool( ExecutorBlockingMode.CONCURRENT_NON_BLOCKING, false).submit(r); } else { Thread handlerThread = new Thread(r); handlerThread.setDaemon(false); handlerThreads.add(handlerThread); handlerThread.start(); } } catch (OutOfMemoryError e) { logger.log(Level.SEVERE, "# of threads: " + Thread.activeCount(), e); // synchronized (TCPMessageReceiver.class) { // if (!TCPMessageReceiver.oomPrinted) { // TCPMessageReceiver.oomPrinted = true; // // Thread[] tarray = new Thread[Thread.activeCount()]; // Thread.enumerate(tarray); // for (Thread t: tarray) if (t != null) System.out.println("Th: " + t.getName()); // System.out.flush(); // } // } throw e; } } } public void addHandler(MessageHandler handler) { List<MessageHandler> newHandlerList = new ArrayList<MessageHandler>(); synchronized (this) { newHandlerList.addAll(this.handlerList); // copy newHandlerList.add(handler); this.handlerList = newHandlerList; // substitute } if (handler 
instanceof ExtendedMessageHandler) { this.extMessageHandlerRegistered = true; } } public void removeHandler(MessageHandler handler) { List<MessageHandler> newHandlerList = new ArrayList<MessageHandler>(); synchronized (this) { newHandlerList.addAll(this.handlerList); // copy newHandlerList.remove(handler); this.handlerList = newHandlerList; // substitute } boolean exists = false; for (MessageHandler h: newHandlerList) { if (h instanceof ExtendedMessageHandler) { exists = true; break; } } this.extMessageHandlerRegistered = exists; } private class TCPMessageHandler implements Runnable { SocketChannel sock; TCPMessageHandler(SocketChannel sock) { this.sock = sock; } public void run() { Thread th = Thread.currentThread(); String origName = th.getName(); th.setName("TCPMessageHandler: " + this.sock.socket().getInetAddress()); int times = 0; // # of times a message is received on this socket. while (!Thread.interrupted()) { times++; long timeout = -1L; if (times > 1) timeout = config.getReceiverKeepAliveTime(); Message msg = null; try { msg = Message.decode(this.sock, timeout); } catch (IOException e0) { logger.log(Level.INFO, "No Message could not be decoded (or just closed)."); // close Socket try { this.sock.close(); } catch (IOException e1) {} break; } // check signature byte[] sig = msg.getSignature(); byte[] acceptableSig = TCPMessageReceiver.this.provider.getMessageSignature(); if (!Signature.match(sig, acceptableSig)) continue; // process the received message Message ret = TCPMessageReceiver.this.processMessage(msg); // return a Message (from the last handler) if (ret != null) { logger.log(Level.INFO, "Return a message: " + ret); // set source address ret.setSource(TCPMessageReceiver.this.getSelfAddress()); MessagingAddress src = (msg.getSource() != null ? 
msg.getSource().getMessagingAddress() : null); try { ByteBuffer buf = ret.encode(sock); // notify statistics collector if (src != null) { msgReporter.notifyStatCollectorOfMessageSent(src, ret, buf.remaining()); } } catch (IOException e) { logger.log(Level.WARNING, "Could not return a message (or just closed)."); // close Socket try { sock.close(); } catch (IOException e1) {} // notify statistics collector if (src != null) { msgReporter.notifyStatCollectorOfDeletedNode(src); } break; } } else { logger.log(Level.INFO, "Return no message."); } // post-process TCPMessageReceiver.this.postProcessMessage(msg); } // while (true) handlerThreads.remove(Thread.currentThread()); th.setName(origName); } } protected Message processMessage(Message msg) { // call every handlers List<MessageHandler> currentHandlerList; synchronized (this) { currentHandlerList = handlerList; } Message ret = null; for (MessageHandler handler: currentHandlerList) { try { ret = handler.process(msg); } catch (Throwable e) { logger.log(Level.SEVERE, "A MessageHandler#process() threw an Exception.", e); } } return ret; } protected void postProcessMessage(Message msg) { if (!this.extMessageHandlerRegistered) return; // call every handlers List<MessageHandler> currentHandlerList; synchronized (this) { currentHandlerList = handlerList; } for (MessageHandler handler: currentHandlerList) { if (!(handler instanceof ExtendedMessageHandler)) continue; try { ((ExtendedMessageHandler)handler).postProcess(msg); } catch (Throwable e) { logger.log(Level.SEVERE, "A MessageHandler#postProcess() threw an Exception.", e); } } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ec2.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * Describes a VPC endpoint. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/VpcEndpoint" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class VpcEndpoint implements Serializable, Cloneable { /** * <p> * The ID of the VPC endpoint. * </p> */ private String vpcEndpointId; /** * <p> * The type of endpoint. * </p> */ private String vpcEndpointType; /** * <p> * The ID of the VPC to which the endpoint is associated. * </p> */ private String vpcId; /** * <p> * The name of the service to which the endpoint is associated. * </p> */ private String serviceName; /** * <p> * The state of the VPC endpoint. * </p> */ private String state; /** * <p> * The policy document associated with the endpoint, if applicable. * </p> */ private String policyDocument; /** * <p> * (Gateway endpoint) One or more route tables associated with the endpoint. * </p> */ private com.amazonaws.internal.SdkInternalList<String> routeTableIds; /** * <p> * (Interface endpoint) One or more subnets in which the endpoint is located. * </p> */ private com.amazonaws.internal.SdkInternalList<String> subnetIds; /** * <p> * (Interface endpoint) Information about the security groups that are associated with the network interface. 
* </p> */ private com.amazonaws.internal.SdkInternalList<SecurityGroupIdentifier> groups; /** * <p> * (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. * </p> */ private Boolean privateDnsEnabled; /** * <p> * Indicates whether the VPC endpoint is being managed by its service. * </p> */ private Boolean requesterManaged; /** * <p> * (Interface endpoint) One or more network interfaces for the endpoint. * </p> */ private com.amazonaws.internal.SdkInternalList<String> networkInterfaceIds; /** * <p> * (Interface endpoint) The DNS entries for the endpoint. * </p> */ private com.amazonaws.internal.SdkInternalList<DnsEntry> dnsEntries; /** * <p> * The date and time that the VPC endpoint was created. * </p> */ private java.util.Date creationTimestamp; /** * <p> * Any tags assigned to the VPC endpoint. * </p> */ private com.amazonaws.internal.SdkInternalList<Tag> tags; /** * <p> * The ID of the Amazon Web Services account that owns the VPC endpoint. * </p> */ private String ownerId; /** * <p> * The last error that occurred for VPC endpoint. * </p> */ private LastError lastError; /** * <p> * The ID of the VPC endpoint. * </p> * * @param vpcEndpointId * The ID of the VPC endpoint. */ public void setVpcEndpointId(String vpcEndpointId) { this.vpcEndpointId = vpcEndpointId; } /** * <p> * The ID of the VPC endpoint. * </p> * * @return The ID of the VPC endpoint. */ public String getVpcEndpointId() { return this.vpcEndpointId; } /** * <p> * The ID of the VPC endpoint. * </p> * * @param vpcEndpointId * The ID of the VPC endpoint. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withVpcEndpointId(String vpcEndpointId) { setVpcEndpointId(vpcEndpointId); return this; } /** * <p> * The type of endpoint. * </p> * * @param vpcEndpointType * The type of endpoint. 
* @see VpcEndpointType */ public void setVpcEndpointType(String vpcEndpointType) { this.vpcEndpointType = vpcEndpointType; } /** * <p> * The type of endpoint. * </p> * * @return The type of endpoint. * @see VpcEndpointType */ public String getVpcEndpointType() { return this.vpcEndpointType; } /** * <p> * The type of endpoint. * </p> * * @param vpcEndpointType * The type of endpoint. * @return Returns a reference to this object so that method calls can be chained together. * @see VpcEndpointType */ public VpcEndpoint withVpcEndpointType(String vpcEndpointType) { setVpcEndpointType(vpcEndpointType); return this; } /** * <p> * The type of endpoint. * </p> * * @param vpcEndpointType * The type of endpoint. * @see VpcEndpointType */ public void setVpcEndpointType(VpcEndpointType vpcEndpointType) { withVpcEndpointType(vpcEndpointType); } /** * <p> * The type of endpoint. * </p> * * @param vpcEndpointType * The type of endpoint. * @return Returns a reference to this object so that method calls can be chained together. * @see VpcEndpointType */ public VpcEndpoint withVpcEndpointType(VpcEndpointType vpcEndpointType) { this.vpcEndpointType = vpcEndpointType.toString(); return this; } /** * <p> * The ID of the VPC to which the endpoint is associated. * </p> * * @param vpcId * The ID of the VPC to which the endpoint is associated. */ public void setVpcId(String vpcId) { this.vpcId = vpcId; } /** * <p> * The ID of the VPC to which the endpoint is associated. * </p> * * @return The ID of the VPC to which the endpoint is associated. */ public String getVpcId() { return this.vpcId; } /** * <p> * The ID of the VPC to which the endpoint is associated. * </p> * * @param vpcId * The ID of the VPC to which the endpoint is associated. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withVpcId(String vpcId) { setVpcId(vpcId); return this; } /** * <p> * The name of the service to which the endpoint is associated. 
* </p> * * @param serviceName * The name of the service to which the endpoint is associated. */ public void setServiceName(String serviceName) { this.serviceName = serviceName; } /** * <p> * The name of the service to which the endpoint is associated. * </p> * * @return The name of the service to which the endpoint is associated. */ public String getServiceName() { return this.serviceName; } /** * <p> * The name of the service to which the endpoint is associated. * </p> * * @param serviceName * The name of the service to which the endpoint is associated. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withServiceName(String serviceName) { setServiceName(serviceName); return this; } /** * <p> * The state of the VPC endpoint. * </p> * * @param state * The state of the VPC endpoint. * @see State */ public void setState(String state) { this.state = state; } /** * <p> * The state of the VPC endpoint. * </p> * * @return The state of the VPC endpoint. * @see State */ public String getState() { return this.state; } /** * <p> * The state of the VPC endpoint. * </p> * * @param state * The state of the VPC endpoint. * @return Returns a reference to this object so that method calls can be chained together. * @see State */ public VpcEndpoint withState(String state) { setState(state); return this; } /** * <p> * The state of the VPC endpoint. * </p> * * @param state * The state of the VPC endpoint. * @see State */ public void setState(State state) { withState(state); } /** * <p> * The state of the VPC endpoint. * </p> * * @param state * The state of the VPC endpoint. * @return Returns a reference to this object so that method calls can be chained together. * @see State */ public VpcEndpoint withState(State state) { this.state = state.toString(); return this; } /** * <p> * The policy document associated with the endpoint, if applicable. 
* </p> * * @param policyDocument * The policy document associated with the endpoint, if applicable. */ public void setPolicyDocument(String policyDocument) { this.policyDocument = policyDocument; } /** * <p> * The policy document associated with the endpoint, if applicable. * </p> * * @return The policy document associated with the endpoint, if applicable. */ public String getPolicyDocument() { return this.policyDocument; } /** * <p> * The policy document associated with the endpoint, if applicable. * </p> * * @param policyDocument * The policy document associated with the endpoint, if applicable. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withPolicyDocument(String policyDocument) { setPolicyDocument(policyDocument); return this; } /** * <p> * (Gateway endpoint) One or more route tables associated with the endpoint. * </p> * * @return (Gateway endpoint) One or more route tables associated with the endpoint. */ public java.util.List<String> getRouteTableIds() { if (routeTableIds == null) { routeTableIds = new com.amazonaws.internal.SdkInternalList<String>(); } return routeTableIds; } /** * <p> * (Gateway endpoint) One or more route tables associated with the endpoint. * </p> * * @param routeTableIds * (Gateway endpoint) One or more route tables associated with the endpoint. */ public void setRouteTableIds(java.util.Collection<String> routeTableIds) { if (routeTableIds == null) { this.routeTableIds = null; return; } this.routeTableIds = new com.amazonaws.internal.SdkInternalList<String>(routeTableIds); } /** * <p> * (Gateway endpoint) One or more route tables associated with the endpoint. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setRouteTableIds(java.util.Collection)} or {@link #withRouteTableIds(java.util.Collection)} if you want * to override the existing values. 
* </p> * * @param routeTableIds * (Gateway endpoint) One or more route tables associated with the endpoint. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withRouteTableIds(String... routeTableIds) { if (this.routeTableIds == null) { setRouteTableIds(new com.amazonaws.internal.SdkInternalList<String>(routeTableIds.length)); } for (String ele : routeTableIds) { this.routeTableIds.add(ele); } return this; } /** * <p> * (Gateway endpoint) One or more route tables associated with the endpoint. * </p> * * @param routeTableIds * (Gateway endpoint) One or more route tables associated with the endpoint. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withRouteTableIds(java.util.Collection<String> routeTableIds) { setRouteTableIds(routeTableIds); return this; } /** * <p> * (Interface endpoint) One or more subnets in which the endpoint is located. * </p> * * @return (Interface endpoint) One or more subnets in which the endpoint is located. */ public java.util.List<String> getSubnetIds() { if (subnetIds == null) { subnetIds = new com.amazonaws.internal.SdkInternalList<String>(); } return subnetIds; } /** * <p> * (Interface endpoint) One or more subnets in which the endpoint is located. * </p> * * @param subnetIds * (Interface endpoint) One or more subnets in which the endpoint is located. */ public void setSubnetIds(java.util.Collection<String> subnetIds) { if (subnetIds == null) { this.subnetIds = null; return; } this.subnetIds = new com.amazonaws.internal.SdkInternalList<String>(subnetIds); } /** * <p> * (Interface endpoint) One or more subnets in which the endpoint is located. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setSubnetIds(java.util.Collection)} or {@link #withSubnetIds(java.util.Collection)} if you want to * override the existing values. 
* </p> * * @param subnetIds * (Interface endpoint) One or more subnets in which the endpoint is located. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withSubnetIds(String... subnetIds) { if (this.subnetIds == null) { setSubnetIds(new com.amazonaws.internal.SdkInternalList<String>(subnetIds.length)); } for (String ele : subnetIds) { this.subnetIds.add(ele); } return this; } /** * <p> * (Interface endpoint) One or more subnets in which the endpoint is located. * </p> * * @param subnetIds * (Interface endpoint) One or more subnets in which the endpoint is located. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withSubnetIds(java.util.Collection<String> subnetIds) { setSubnetIds(subnetIds); return this; } /** * <p> * (Interface endpoint) Information about the security groups that are associated with the network interface. * </p> * * @return (Interface endpoint) Information about the security groups that are associated with the network * interface. */ public java.util.List<SecurityGroupIdentifier> getGroups() { if (groups == null) { groups = new com.amazonaws.internal.SdkInternalList<SecurityGroupIdentifier>(); } return groups; } /** * <p> * (Interface endpoint) Information about the security groups that are associated with the network interface. * </p> * * @param groups * (Interface endpoint) Information about the security groups that are associated with the network interface. */ public void setGroups(java.util.Collection<SecurityGroupIdentifier> groups) { if (groups == null) { this.groups = null; return; } this.groups = new com.amazonaws.internal.SdkInternalList<SecurityGroupIdentifier>(groups); } /** * <p> * (Interface endpoint) Information about the security groups that are associated with the network interface. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). 
Use * {@link #setGroups(java.util.Collection)} or {@link #withGroups(java.util.Collection)} if you want to override the * existing values. * </p> * * @param groups * (Interface endpoint) Information about the security groups that are associated with the network interface. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withGroups(SecurityGroupIdentifier... groups) { if (this.groups == null) { setGroups(new com.amazonaws.internal.SdkInternalList<SecurityGroupIdentifier>(groups.length)); } for (SecurityGroupIdentifier ele : groups) { this.groups.add(ele); } return this; } /** * <p> * (Interface endpoint) Information about the security groups that are associated with the network interface. * </p> * * @param groups * (Interface endpoint) Information about the security groups that are associated with the network interface. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withGroups(java.util.Collection<SecurityGroupIdentifier> groups) { setGroups(groups); return this; } /** * <p> * (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. * </p> * * @param privateDnsEnabled * (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. */ public void setPrivateDnsEnabled(Boolean privateDnsEnabled) { this.privateDnsEnabled = privateDnsEnabled; } /** * <p> * (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. * </p> * * @return (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. */ public Boolean getPrivateDnsEnabled() { return this.privateDnsEnabled; } /** * <p> * (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. * </p> * * @param privateDnsEnabled * (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. 
* @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withPrivateDnsEnabled(Boolean privateDnsEnabled) { setPrivateDnsEnabled(privateDnsEnabled); return this; } /** * <p> * (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. * </p> * * @return (Interface endpoint) Indicates whether the VPC is associated with a private hosted zone. */ public Boolean isPrivateDnsEnabled() { return this.privateDnsEnabled; } /** * <p> * Indicates whether the VPC endpoint is being managed by its service. * </p> * * @param requesterManaged * Indicates whether the VPC endpoint is being managed by its service. */ public void setRequesterManaged(Boolean requesterManaged) { this.requesterManaged = requesterManaged; } /** * <p> * Indicates whether the VPC endpoint is being managed by its service. * </p> * * @return Indicates whether the VPC endpoint is being managed by its service. */ public Boolean getRequesterManaged() { return this.requesterManaged; } /** * <p> * Indicates whether the VPC endpoint is being managed by its service. * </p> * * @param requesterManaged * Indicates whether the VPC endpoint is being managed by its service. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withRequesterManaged(Boolean requesterManaged) { setRequesterManaged(requesterManaged); return this; } /** * <p> * Indicates whether the VPC endpoint is being managed by its service. * </p> * * @return Indicates whether the VPC endpoint is being managed by its service. */ public Boolean isRequesterManaged() { return this.requesterManaged; } /** * <p> * (Interface endpoint) One or more network interfaces for the endpoint. * </p> * * @return (Interface endpoint) One or more network interfaces for the endpoint. 
*/ public java.util.List<String> getNetworkInterfaceIds() { if (networkInterfaceIds == null) { networkInterfaceIds = new com.amazonaws.internal.SdkInternalList<String>(); } return networkInterfaceIds; } /** * <p> * (Interface endpoint) One or more network interfaces for the endpoint. * </p> * * @param networkInterfaceIds * (Interface endpoint) One or more network interfaces for the endpoint. */ public void setNetworkInterfaceIds(java.util.Collection<String> networkInterfaceIds) { if (networkInterfaceIds == null) { this.networkInterfaceIds = null; return; } this.networkInterfaceIds = new com.amazonaws.internal.SdkInternalList<String>(networkInterfaceIds); } /** * <p> * (Interface endpoint) One or more network interfaces for the endpoint. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setNetworkInterfaceIds(java.util.Collection)} or {@link #withNetworkInterfaceIds(java.util.Collection)} * if you want to override the existing values. * </p> * * @param networkInterfaceIds * (Interface endpoint) One or more network interfaces for the endpoint. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withNetworkInterfaceIds(String... networkInterfaceIds) { if (this.networkInterfaceIds == null) { setNetworkInterfaceIds(new com.amazonaws.internal.SdkInternalList<String>(networkInterfaceIds.length)); } for (String ele : networkInterfaceIds) { this.networkInterfaceIds.add(ele); } return this; } /** * <p> * (Interface endpoint) One or more network interfaces for the endpoint. * </p> * * @param networkInterfaceIds * (Interface endpoint) One or more network interfaces for the endpoint. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public VpcEndpoint withNetworkInterfaceIds(java.util.Collection<String> networkInterfaceIds) { setNetworkInterfaceIds(networkInterfaceIds); return this; } /** * <p> * (Interface endpoint) The DNS entries for the endpoint. * </p> * * @return (Interface endpoint) The DNS entries for the endpoint. */ public java.util.List<DnsEntry> getDnsEntries() { if (dnsEntries == null) { dnsEntries = new com.amazonaws.internal.SdkInternalList<DnsEntry>(); } return dnsEntries; } /** * <p> * (Interface endpoint) The DNS entries for the endpoint. * </p> * * @param dnsEntries * (Interface endpoint) The DNS entries for the endpoint. */ public void setDnsEntries(java.util.Collection<DnsEntry> dnsEntries) { if (dnsEntries == null) { this.dnsEntries = null; return; } this.dnsEntries = new com.amazonaws.internal.SdkInternalList<DnsEntry>(dnsEntries); } /** * <p> * (Interface endpoint) The DNS entries for the endpoint. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setDnsEntries(java.util.Collection)} or {@link #withDnsEntries(java.util.Collection)} if you want to * override the existing values. * </p> * * @param dnsEntries * (Interface endpoint) The DNS entries for the endpoint. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withDnsEntries(DnsEntry... dnsEntries) { if (this.dnsEntries == null) { setDnsEntries(new com.amazonaws.internal.SdkInternalList<DnsEntry>(dnsEntries.length)); } for (DnsEntry ele : dnsEntries) { this.dnsEntries.add(ele); } return this; } /** * <p> * (Interface endpoint) The DNS entries for the endpoint. * </p> * * @param dnsEntries * (Interface endpoint) The DNS entries for the endpoint. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public VpcEndpoint withDnsEntries(java.util.Collection<DnsEntry> dnsEntries) { setDnsEntries(dnsEntries); return this; } /** * <p> * The date and time that the VPC endpoint was created. * </p> * * @param creationTimestamp * The date and time that the VPC endpoint was created. */ public void setCreationTimestamp(java.util.Date creationTimestamp) { this.creationTimestamp = creationTimestamp; } /** * <p> * The date and time that the VPC endpoint was created. * </p> * * @return The date and time that the VPC endpoint was created. */ public java.util.Date getCreationTimestamp() { return this.creationTimestamp; } /** * <p> * The date and time that the VPC endpoint was created. * </p> * * @param creationTimestamp * The date and time that the VPC endpoint was created. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withCreationTimestamp(java.util.Date creationTimestamp) { setCreationTimestamp(creationTimestamp); return this; } /** * <p> * Any tags assigned to the VPC endpoint. * </p> * * @return Any tags assigned to the VPC endpoint. */ public java.util.List<Tag> getTags() { if (tags == null) { tags = new com.amazonaws.internal.SdkInternalList<Tag>(); } return tags; } /** * <p> * Any tags assigned to the VPC endpoint. * </p> * * @param tags * Any tags assigned to the VPC endpoint. */ public void setTags(java.util.Collection<Tag> tags) { if (tags == null) { this.tags = null; return; } this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags); } /** * <p> * Any tags assigned to the VPC endpoint. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the * existing values. * </p> * * @param tags * Any tags assigned to the VPC endpoint. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public VpcEndpoint withTags(Tag... tags) { if (this.tags == null) { setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length)); } for (Tag ele : tags) { this.tags.add(ele); } return this; } /** * <p> * Any tags assigned to the VPC endpoint. * </p> * * @param tags * Any tags assigned to the VPC endpoint. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withTags(java.util.Collection<Tag> tags) { setTags(tags); return this; } /** * <p> * The ID of the Amazon Web Services account that owns the VPC endpoint. * </p> * * @param ownerId * The ID of the Amazon Web Services account that owns the VPC endpoint. */ public void setOwnerId(String ownerId) { this.ownerId = ownerId; } /** * <p> * The ID of the Amazon Web Services account that owns the VPC endpoint. * </p> * * @return The ID of the Amazon Web Services account that owns the VPC endpoint. */ public String getOwnerId() { return this.ownerId; } /** * <p> * The ID of the Amazon Web Services account that owns the VPC endpoint. * </p> * * @param ownerId * The ID of the Amazon Web Services account that owns the VPC endpoint. * @return Returns a reference to this object so that method calls can be chained together. */ public VpcEndpoint withOwnerId(String ownerId) { setOwnerId(ownerId); return this; } /** * <p> * The last error that occurred for VPC endpoint. * </p> * * @param lastError * The last error that occurred for VPC endpoint. */ public void setLastError(LastError lastError) { this.lastError = lastError; } /** * <p> * The last error that occurred for VPC endpoint. * </p> * * @return The last error that occurred for VPC endpoint. */ public LastError getLastError() { return this.lastError; } /** * <p> * The last error that occurred for VPC endpoint. * </p> * * @param lastError * The last error that occurred for VPC endpoint. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public VpcEndpoint withLastError(LastError lastError) { setLastError(lastError); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getVpcEndpointId() != null) sb.append("VpcEndpointId: ").append(getVpcEndpointId()).append(","); if (getVpcEndpointType() != null) sb.append("VpcEndpointType: ").append(getVpcEndpointType()).append(","); if (getVpcId() != null) sb.append("VpcId: ").append(getVpcId()).append(","); if (getServiceName() != null) sb.append("ServiceName: ").append(getServiceName()).append(","); if (getState() != null) sb.append("State: ").append(getState()).append(","); if (getPolicyDocument() != null) sb.append("PolicyDocument: ").append(getPolicyDocument()).append(","); if (getRouteTableIds() != null) sb.append("RouteTableIds: ").append(getRouteTableIds()).append(","); if (getSubnetIds() != null) sb.append("SubnetIds: ").append(getSubnetIds()).append(","); if (getGroups() != null) sb.append("Groups: ").append(getGroups()).append(","); if (getPrivateDnsEnabled() != null) sb.append("PrivateDnsEnabled: ").append(getPrivateDnsEnabled()).append(","); if (getRequesterManaged() != null) sb.append("RequesterManaged: ").append(getRequesterManaged()).append(","); if (getNetworkInterfaceIds() != null) sb.append("NetworkInterfaceIds: ").append(getNetworkInterfaceIds()).append(","); if (getDnsEntries() != null) sb.append("DnsEntries: ").append(getDnsEntries()).append(","); if (getCreationTimestamp() != null) sb.append("CreationTimestamp: ").append(getCreationTimestamp()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()).append(","); if (getOwnerId() != null) sb.append("OwnerId: ").append(getOwnerId()).append(","); if 
(getLastError() != null) sb.append("LastError: ").append(getLastError()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof VpcEndpoint == false) return false; VpcEndpoint other = (VpcEndpoint) obj; if (other.getVpcEndpointId() == null ^ this.getVpcEndpointId() == null) return false; if (other.getVpcEndpointId() != null && other.getVpcEndpointId().equals(this.getVpcEndpointId()) == false) return false; if (other.getVpcEndpointType() == null ^ this.getVpcEndpointType() == null) return false; if (other.getVpcEndpointType() != null && other.getVpcEndpointType().equals(this.getVpcEndpointType()) == false) return false; if (other.getVpcId() == null ^ this.getVpcId() == null) return false; if (other.getVpcId() != null && other.getVpcId().equals(this.getVpcId()) == false) return false; if (other.getServiceName() == null ^ this.getServiceName() == null) return false; if (other.getServiceName() != null && other.getServiceName().equals(this.getServiceName()) == false) return false; if (other.getState() == null ^ this.getState() == null) return false; if (other.getState() != null && other.getState().equals(this.getState()) == false) return false; if (other.getPolicyDocument() == null ^ this.getPolicyDocument() == null) return false; if (other.getPolicyDocument() != null && other.getPolicyDocument().equals(this.getPolicyDocument()) == false) return false; if (other.getRouteTableIds() == null ^ this.getRouteTableIds() == null) return false; if (other.getRouteTableIds() != null && other.getRouteTableIds().equals(this.getRouteTableIds()) == false) return false; if (other.getSubnetIds() == null ^ this.getSubnetIds() == null) return false; if (other.getSubnetIds() != null && other.getSubnetIds().equals(this.getSubnetIds()) == false) return false; if (other.getGroups() == null ^ this.getGroups() == null) return false; if (other.getGroups() != null && 
other.getGroups().equals(this.getGroups()) == false) return false; if (other.getPrivateDnsEnabled() == null ^ this.getPrivateDnsEnabled() == null) return false; if (other.getPrivateDnsEnabled() != null && other.getPrivateDnsEnabled().equals(this.getPrivateDnsEnabled()) == false) return false; if (other.getRequesterManaged() == null ^ this.getRequesterManaged() == null) return false; if (other.getRequesterManaged() != null && other.getRequesterManaged().equals(this.getRequesterManaged()) == false) return false; if (other.getNetworkInterfaceIds() == null ^ this.getNetworkInterfaceIds() == null) return false; if (other.getNetworkInterfaceIds() != null && other.getNetworkInterfaceIds().equals(this.getNetworkInterfaceIds()) == false) return false; if (other.getDnsEntries() == null ^ this.getDnsEntries() == null) return false; if (other.getDnsEntries() != null && other.getDnsEntries().equals(this.getDnsEntries()) == false) return false; if (other.getCreationTimestamp() == null ^ this.getCreationTimestamp() == null) return false; if (other.getCreationTimestamp() != null && other.getCreationTimestamp().equals(this.getCreationTimestamp()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; if (other.getOwnerId() == null ^ this.getOwnerId() == null) return false; if (other.getOwnerId() != null && other.getOwnerId().equals(this.getOwnerId()) == false) return false; if (other.getLastError() == null ^ this.getLastError() == null) return false; if (other.getLastError() != null && other.getLastError().equals(this.getLastError()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getVpcEndpointId() == null) ? 0 : getVpcEndpointId().hashCode()); hashCode = prime * hashCode + ((getVpcEndpointType() == null) ? 
0 : getVpcEndpointType().hashCode()); hashCode = prime * hashCode + ((getVpcId() == null) ? 0 : getVpcId().hashCode()); hashCode = prime * hashCode + ((getServiceName() == null) ? 0 : getServiceName().hashCode()); hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode()); hashCode = prime * hashCode + ((getPolicyDocument() == null) ? 0 : getPolicyDocument().hashCode()); hashCode = prime * hashCode + ((getRouteTableIds() == null) ? 0 : getRouteTableIds().hashCode()); hashCode = prime * hashCode + ((getSubnetIds() == null) ? 0 : getSubnetIds().hashCode()); hashCode = prime * hashCode + ((getGroups() == null) ? 0 : getGroups().hashCode()); hashCode = prime * hashCode + ((getPrivateDnsEnabled() == null) ? 0 : getPrivateDnsEnabled().hashCode()); hashCode = prime * hashCode + ((getRequesterManaged() == null) ? 0 : getRequesterManaged().hashCode()); hashCode = prime * hashCode + ((getNetworkInterfaceIds() == null) ? 0 : getNetworkInterfaceIds().hashCode()); hashCode = prime * hashCode + ((getDnsEntries() == null) ? 0 : getDnsEntries().hashCode()); hashCode = prime * hashCode + ((getCreationTimestamp() == null) ? 0 : getCreationTimestamp().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode()); hashCode = prime * hashCode + ((getOwnerId() == null) ? 0 : getOwnerId().hashCode()); hashCode = prime * hashCode + ((getLastError() == null) ? 0 : getLastError().hashCode()); return hashCode; } @Override public VpcEndpoint clone() { try { return (VpcEndpoint) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/*
 * The MIT License
 *
 * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package hudson.util;

import hudson.Extension;
import hudson.ExtensionList;
import hudson.model.Describable;
import hudson.model.Descriptor;
import hudson.model.Descriptor.FormException;
import jenkins.model.Jenkins;
import net.sf.json.JSONException;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.Stapler;

import java.util.AbstractList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

import edu.umd.cs.findbugs.annotations.CheckForNull;

/**
 * A list of {@link Descriptor}s that operates in one of two modes.
 *
 * <p>
 * Before Hudson 1.286 this class held {@link Descriptor}s directly. Since 1.286 an
 * instance is either in <em>legacy</em> mode or in <em>extension-backed</em> mode:
 *
 * <ul>
 * <li><b>Legacy mode</b> keeps everything in a local list, disconnected from the 1.286
 * extension mechanism. This is needed when the list is owned by a pre-1.286 plugin
 * where the element type {@code T} is unknown to this class. In this mode
 * {@link #legacy} is non-null and {@link #type} is null.</li>
 *
 * <li><b>Extension-backed mode</b> stores the descriptors in an {@link ExtensionList}
 * (see {@link jenkins.model.Jenkins#getDescriptorList(Class)}) and this class is just a
 * view over it. Descriptors registered automatically and those registered by hand are
 * then visible through both {@link DescriptorList} and {@link ExtensionList}. In this
 * mode {@link #legacy} is null and {@link #type} is non-null.</li>
 * </ul>
 *
 * <p>
 * Only a limited number of plugins define extension points, so the expectation is that
 * the dual behavior can eventually be removed, and — once nothing uses
 * {@link DescriptorList} any more — this class can be deleted entirely.
 *
 * @author Kohsuke Kawaguchi
 * @since 1.161
 */
public final class DescriptorList<T extends Describable<T>> extends AbstractList<Descriptor<T>> {

    /** Extension-point type in extension-backed mode; {@code null} in legacy mode. */
    private final Class<T> type;

    /** Local storage in legacy mode; {@code null} in extension-backed mode. */
    private final CopyOnWriteArrayList<Descriptor<T>> legacy;

    /**
     * Creates a legacy-mode list that is disconnected from {@link ExtensionList}.
     *
     * @deprecated
     *      As of 1.286. Use {@link #DescriptorList(Class)} instead.
     */
    @Deprecated
    public DescriptorList(Descriptor<T>... descriptors) {
        this.type = null;
        this.legacy = new CopyOnWriteArrayList<>(descriptors);
    }

    /**
     * Creates an extension-backed list that is a view over an {@link ExtensionList}.
     */
    public DescriptorList(Class<T> type) {
        this.type = type;
        this.legacy = null;
    }

    @Override
    public Descriptor<T> get(int index) {
        return store().get(index);
    }

    @Override
    public int size() {
        return store().size();
    }

    @Override
    public Iterator<Descriptor<T>> iterator() {
        return store().iterator();
    }

    /**
     * @deprecated
     *      As of 1.286. Put {@link Extension} on your descriptor to have it auto-registered,
     *      instead of registering a descriptor manually.
     */
    @Override
    @Deprecated
    public boolean add(Descriptor<T> d) {
        return store().add(d);
    }

    /**
     * @deprecated
     *      As of 1.286. Put {@link Extension} on your descriptor to have it auto-registered,
     *      instead of registering a descriptor manually.
     */
    @Override
    @Deprecated
    public void add(int index, Descriptor<T> element) {
        // Neither backing store supports positional insertion, so the index is ignored.
        add(element);
    }

    @Override
    public boolean remove(Object o) {
        return store().remove(o);
    }

    /**
     * Resolves the actual backing store. This single dispatch point is what gives
     * {@link DescriptorList} its dual-mode nature.
     */
    private List<Descriptor<T>> store() {
        return type == null ? legacy : Jenkins.get().getDescriptorList(type);
    }

    /**
     * Creates a new {@link Describable} instance from the structured form submission
     * posted by a radio button group.
     *
     * @param config Submitted configuration for the radio list
     * @return New instance, or {@code null} if nothing was selected in the radio list or
     *         if the value is filtered by a {@link hudson.model.DescriptorVisibilityFilter}
     * @throws FormException Data submission error
     */
    @CheckForNull
    public T newInstanceFromRadioList(JSONObject config) throws FormException {
        if (config.isNullObject()) {
            // No radio button was selected.
            return null;
        }
        int selected = config.getInt("value");
        return get(selected).newInstance(Stapler.getCurrentRequest(), config);
    }

    /**
     * Creates a new {@link Describable} instance from the structured form submission
     * posted by a radio button group.
     *
     * @param parent JSON object that contains the configuration entry for the radio list
     * @param name Name of the configuration entry for the radio list
     * @return New instance, or {@code null} if nothing was selected in the radio list or
     *         if the value is filtered by a {@link hudson.model.DescriptorVisibilityFilter}
     * @throws FormException Data submission error
     */
    @CheckForNull
    public T newInstanceFromRadioList(JSONObject parent, String name) throws FormException {
        try {
            return newInstanceFromRadioList(parent.getJSONObject(name));
        } catch (JSONException e) {
            throw new FormException(e, name);
        }
    }

    /**
     * Finds a descriptor by its {@link Descriptor#getId()}.
     *
     * @param id Descriptor ID
     * @return The matching descriptor, or {@code null} if none is found.
     */
    @CheckForNull
    public Descriptor<T> findByName(String id) {
        for (Descriptor<T> candidate : this) {
            if (candidate.getId().equals(id)) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * No-op method used to force the class initialization of the given class; that
     * initialization is in turn expected to register its descriptor into the
     * {@link DescriptorList}.
     *
     * <p>
     * This works around a class-initialization ordering problem: a {@link DescriptorList}
     * is often defined in a base class, and when it tries to populate itself by listing
     * descriptors of known subclasses, those may not have been initialized yet.
     *
     * @since 1.162
     */
    public void load(Class<? extends Describable> c) {
        try {
            Class.forName(c.getName(), true, c.getClassLoader());
        } catch (ClassNotFoundException e) {
            // Cannot happen: we started from a live Class object, so its name must resolve.
            throw new AssertionError(e);
        }
    }

    /**
     * Finds the descriptor that has the matching fully-qualified class name.
     *
     * @deprecated Underspecified what the parameter is. {@link Descriptor#getId}? A {@link Describable} class name?
     */
    @Deprecated
    @CheckForNull
    public Descriptor<T> find(String fqcn) {
        return Descriptor.find(this, fqcn);
    }
}
/*
 * Copyright 2004-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.compass.core.lucene.engine;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Locale;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.HitCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.spans.SpanQuery;
import org.compass.core.CompassQuery.SortDirection;
import org.compass.core.CompassQuery.SortImplicitType;
import org.compass.core.CompassQuery.SortPropertyType;
import org.compass.core.engine.SearchEngine;
import org.compass.core.engine.SearchEngineException;
import org.compass.core.engine.SearchEngineHits;
import org.compass.core.engine.SearchEngineQuery;
import org.compass.core.engine.SearchEngineQueryFilter;
import org.compass.core.lucene.engine.queryparser.QueryHolder;
import org.compass.core.lucene.search.CountHitCollector;

/**
 * Lucene-backed implementation of {@link SearchEngineQuery}. Wraps a Lucene
 * {@link Query} together with optional sorting, sub-index/alias narrowing and a
 * {@link Filter}, and knows how to execute itself against a {@link SearchEngine}.
 *
 * @author kimchy
 */
public class LuceneSearchEngineQuery implements SearchEngineQuery, Cloneable {

    /**
     * Span-query variant that also exposes the underlying Lucene {@link SpanQuery}.
     */
    public static class LuceneSearchEngineSpanQuery extends LuceneSearchEngineQuery implements SearchEngineSpanQuery {

        private SpanQuery spanQuery;

        public LuceneSearchEngineSpanQuery(LuceneSearchEngineFactory searchEngineFactory, SpanQuery query) {
            super(searchEngineFactory, query);
            this.spanQuery = query;
        }

        public SpanQuery toSpanQuery() {
            return spanQuery;
        }
    }

    private final LuceneSearchEngineFactory searchEngineFactory;

    // Not final: clone() replaces it with a private copy (see clone() below).
    private ArrayList<SortField> sortFields = new ArrayList<SortField>();

    private String[] subIndexes;

    private String[] aliases;

    private LuceneSearchEngineQueryFilter filter;

    // The query as originally supplied; setAliases(null) restores it.
    private Query origQuery;

    // The effective query, possibly wrapped with an alias restriction.
    private Query query;

    private String defaultSearchProperty;

    private boolean rewrite;

    private boolean suggested;

    public LuceneSearchEngineQuery(LuceneSearchEngineFactory searchEngineFactory, Query query) {
        this(searchEngineFactory, new QueryHolder(query));
    }

    public LuceneSearchEngineQuery(LuceneSearchEngineFactory searchEngineFactory, QueryHolder query) {
        this(searchEngineFactory, query, searchEngineFactory.getLuceneSettings().getDefaultSearchPropery());
    }

    public LuceneSearchEngineQuery(LuceneSearchEngineFactory searchEngineFactory, QueryHolder query,
                                   String defaultSearchProperty) {
        this.searchEngineFactory = searchEngineFactory;
        this.query = query.getQuery();
        this.origQuery = query.getQuery();
        this.suggested = query.isSuggested();
        this.defaultSearchProperty = defaultSearchProperty;
    }

    public SearchEngineQuery addSort(String propertyName) {
        sortFields.add(new SortField(propertyName));
        return this;
    }

    public SearchEngineQuery addSort(String propertyName, SortDirection direction) {
        sortFields.add(new SortField(propertyName, getSortReverse(direction)));
        return this;
    }

    public SearchEngineQuery addSort(String propertyName, SortPropertyType type) {
        sortFields.add(new SortField(propertyName, getSortType(type)));
        return this;
    }

    public SearchEngineQuery addSort(String propertyName, SortPropertyType type, SortDirection direction) {
        sortFields.add(new SortField(propertyName, getSortType(type), getSortReverse(direction)));
        return this;
    }

    public SearchEngineQuery addSort(SortImplicitType implicitType) {
        sortFields.add(new SortField(null, getImplicitSortField(implicitType)));
        return this;
    }

    public SearchEngineQuery addSort(SortImplicitType implicitType, SortDirection direction) {
        sortFields.add(new SortField(null, getImplicitSortField(implicitType), getSortReverse(direction)));
        return this;
    }

    public SearchEngineQuery addSort(String propertyName, Locale locale, SortDirection direction) {
        sortFields.add(new SortField(propertyName, locale, getSortReverse(direction)));
        return this;
    }

    public SearchEngineQuery addSort(String propertyName, Locale locale) {
        sortFields.add(new SortField(propertyName, locale));
        return this;
    }

    public SearchEngineQuery addSort(SortField sortField) {
        sortFields.add(sortField);
        return this;
    }

    /**
     * Builds the Lucene {@link Sort} from the accumulated sort fields.
     *
     * @return the {@link Sort}, or {@code null} if no sorting was requested
     */
    public Sort getSort() {
        if (sortFields.isEmpty()) {
            return null;
        }
        SortField[] sortFieldsArr = sortFields.toArray(new SortField[sortFields.size()]);
        return new Sort(sortFieldsArr);
    }

    private int getImplicitSortField(SortImplicitType implicitType) {
        switch (implicitType) {
            case DOC:
                return SortField.DOC;
            case SCORE:
                return SortField.SCORE;
            default:
                throw new IllegalArgumentException("Failed to create lucene implicit type for [" + implicitType + "]");
        }
    }

    private boolean getSortReverse(SortDirection direction) {
        return direction == SortDirection.REVERSE;
    }

    private int getSortType(SortPropertyType type) {
        switch (type) {
            case AUTO:
                return SortField.AUTO;
            case BYTE:
                return SortField.BYTE;
            case DOUBLE:
                return SortField.DOUBLE;
            case FLOAT:
                return SortField.FLOAT;
            case INT:
                return SortField.INT;
            case LONG:
                return SortField.LONG;
            case STRING:
                return SortField.STRING;
            default:
                throw new IllegalArgumentException("Failed to convert type [" + type + "]");
        }
    }

    public SearchEngineHits hits(SearchEngine searchEngine) {
        return ((LuceneSearchEngine) searchEngine).find(this);
    }

    public long count(SearchEngine searchEngine) {
        return count(searchEngine, 0.0f);
    }

    /**
     * Counts the hits of this query, considering only hits scoring at least
     * {@code minimumScore}.
     */
    public long count(SearchEngine searchEngine, float minimumScore) {
        CountHitCollector countHitCollector = new CountHitCollector(minimumScore);
        try {
            collect(searchEngine, countHitCollector);
            return countHitCollector.getTotalHits();
        } catch (SearchEngineException e) {
            throw new SearchEngineException("Failed to count query [" + query + "]", e);
        }
    }

    /**
     * Runs this query (restricted to the configured sub-indexes/aliases and filter)
     * and feeds every hit to the given collector. A missing index is treated as
     * zero hits.
     */
    public void collect(SearchEngine searchEngine, HitCollector hitCollector) {
        LuceneSearchEngineInternalSearch internalSearch =
                (LuceneSearchEngineInternalSearch) searchEngine.internalSearch(getSubIndexes(), getAliases());
        try {
            if (internalSearch.getSearcher() == null) {
                // no index
                return;
            }
            internalSearch.getSearcher().search(getQuery(), getLuceneFilter(), hitCollector);
        } catch (IOException e) {
            throw new SearchEngineException("Failed to collect hits for query [" + query + "]", e);
        }
    }

    public SearchEngineQuery setBoost(float boost) {
        query.setBoost(boost);
        return this;
    }

    public SearchEngineQuery setSubIndexes(String[] subindexes) {
        this.subIndexes = subindexes;
        return this;
    }

    public String[] getSubIndexes() {
        return this.subIndexes;
    }

    /**
     * Restricts the query to the given aliases by AND-ing the original query
     * with an OR over alias terms. Passing {@code null} removes any previous
     * restriction and restores the original query.
     */
    public SearchEngineQuery setAliases(String[] aliases) {
        if (aliases == null) {
            query = origQuery;
            return this;
        }

        String aliasProperty = searchEngineFactory.getLuceneSettings().getAliasProperty();
        BooleanQuery boolQuery2 = new BooleanQuery();
        for (String alias : aliases) {
            boolQuery2.add(new TermQuery(new Term(aliasProperty, alias)), BooleanClause.Occur.SHOULD);
        }
        BooleanQuery boolQuery = new BooleanQuery();
        boolQuery.add(origQuery, BooleanClause.Occur.MUST);
        boolQuery.add(boolQuery2, BooleanClause.Occur.MUST);
        this.query = boolQuery;

        this.aliases = aliases;

        return this;
    }

    public String[] getAliases() {
        return this.aliases;
    }

    public SearchEngineQuery setFilter(SearchEngineQueryFilter filter) {
        this.filter = (LuceneSearchEngineQueryFilter) filter;
        return this;
    }

    public LuceneSearchEngineQueryFilter getFilter() {
        return this.filter;
    }

    public Filter getLuceneFilter() {
        if (filter == null) {
            return null;
        }
        return filter.getFilter();
    }

    public SearchEngineQuery rewrite() {
        this.rewrite = true;
        return this;
    }

    public boolean isRewrite() {
        return this.rewrite;
    }

    public boolean isSuggested() {
        return this.suggested;
    }

    public Query getOriginalQuery() {
        return this.origQuery;
    }

    public Query getQuery() {
        return this.query;
    }

    public String toString() {
        if (query == null) {
            return "<null>";
        }
        // remove the "zzz-all:" prefix
        return query.toString(defaultSearchProperty);
    }

    /**
     * Clones this query. {@code super.clone()} is shallow, which would leave the
     * clone sharing the mutable {@code sortFields} list with the original (so
     * {@code addSort} on one would corrupt the other); deep-copy it here.
     */
    public Object clone() throws CloneNotSupportedException {
        LuceneSearchEngineQuery copy = (LuceneSearchEngineQuery) super.clone();
        copy.sortFields = new ArrayList<SortField>(sortFields);
        return copy;
    }

    // breaks encapsulation, but we need it
    public void setQuery(Query query) {
        this.query = query;
        this.origQuery = query;
    }

    public void setSuggested(boolean suggested) {
        this.suggested = suggested;
    }
}