gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.apple;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.packages.NativeProvider;
import com.google.devtools.build.lib.packages.Provider;
import com.google.devtools.build.lib.packages.StarlarkInfo;
import com.google.devtools.build.lib.packages.StructImpl;
import com.google.devtools.build.lib.skylarkbuildapi.apple.ApplePlatformApi;
import com.google.devtools.build.lib.skylarkbuildapi.apple.ApplePlatformTypeApi;
import com.google.devtools.build.lib.syntax.Location;
import com.google.devtools.build.lib.syntax.Printer;
import java.util.HashMap;
import java.util.Locale;
import javax.annotation.Nullable;

/** An enum that can be used to distinguish between various apple platforms. */
@Immutable
public enum ApplePlatform implements ApplePlatformApi {
  IOS_DEVICE("ios_device", "iPhoneOS", PlatformType.IOS, true),
  IOS_SIMULATOR("ios_simulator", "iPhoneSimulator", PlatformType.IOS, false),
  MACOS("macos", "MacOSX", PlatformType.MACOS, true),
  TVOS_DEVICE("tvos_device", "AppleTVOS", PlatformType.TVOS, true),
  TVOS_SIMULATOR("tvos_simulator", "AppleTVSimulator", PlatformType.TVOS, false),
  WATCHOS_DEVICE("watchos_device", "WatchOS", PlatformType.WATCHOS, true),
  WATCHOS_SIMULATOR("watchos_simulator", "WatchSimulator", PlatformType.WATCHOS, false);

  // --target_cpu values that map to each platform. A cpu value is the platform-type
  // prefix followed by an architecture, e.g. "ios_arm64".
  private static final ImmutableSet<String> IOS_SIMULATOR_TARGET_CPUS =
      ImmutableSet.of("ios_x86_64", "ios_i386");
  private static final ImmutableSet<String> IOS_DEVICE_TARGET_CPUS =
      ImmutableSet.of("ios_armv6", "ios_arm64", "ios_armv7", "ios_armv7s", "ios_arm64e");
  private static final ImmutableSet<String> WATCHOS_SIMULATOR_TARGET_CPUS =
      ImmutableSet.of("watchos_i386", "watchos_x86_64");
  private static final ImmutableSet<String> WATCHOS_DEVICE_TARGET_CPUS =
      ImmutableSet.of("watchos_armv7k", "watchos_arm64_32");
  private static final ImmutableSet<String> TVOS_SIMULATOR_TARGET_CPUS =
      ImmutableSet.of("tvos_x86_64");
  private static final ImmutableSet<String> TVOS_DEVICE_TARGET_CPUS =
      ImmutableSet.of("tvos_arm64");

  // "darwin" is included because that's currently the default when on macOS, and
  // migrating it would be a breaking change more details:
  // https://github.com/bazelbuild/bazel/pull/7062
  private static final ImmutableSet<String> MACOS_TARGET_CPUS =
      ImmutableSet.of("darwin_x86_64", "darwin");

  // The subset of cpu values that are known 32-bit architectures; see is32Bit().
  private static final ImmutableSet<String> BIT_32_TARGET_CPUS =
      ImmutableSet.of("ios_i386", "ios_armv7", "ios_armv7s", "watchos_i386", "watchos_armv7k");

  /** Key used to access this platform as a field of the apple_common.platform struct. */
  private final String starlarkKey;

  /** Platform name as it appears in Xcode plists, e.g. "iPhoneOS". */
  private final String nameInPlist;

  private final PlatformType platformType;
  private final boolean isDevice;

  ApplePlatform(
      String starlarkKey, String nameInPlist, PlatformType platformType, boolean isDevice) {
    // Both names end up in user-visible structures; fail fast on null (previously only
    // nameInPlist was checked — checking both keeps the constructor consistent).
    this.starlarkKey = Preconditions.checkNotNull(starlarkKey);
    this.nameInPlist = Preconditions.checkNotNull(nameInPlist);
    this.platformType = platformType;
    this.isDevice = isDevice;
  }

  @Override
  public boolean isImmutable() {
    return true; // immutable and Starlark-hashable
  }

  @Override
  public PlatformType getType() {
    return platformType;
  }

  @Override
  public boolean isDevice() {
    return isDevice;
  }

  @Override
  public String getNameInPlist() {
    return nameInPlist;
  }

  /**
   * Returns the name of the "platform" as it appears in the plist when it appears in
   * all-lowercase.
   */
  public String getLowerCaseNameInPlist() {
    return nameInPlist.toLowerCase(Locale.US);
  }

  /**
   * Returns the platform for the given target cpu, or {@code null} if the cpu does not
   * correspond to any known Apple platform.
   */
  @Nullable
  private static ApplePlatform forTargetCpuNullable(String targetCpu) {
    if (IOS_SIMULATOR_TARGET_CPUS.contains(targetCpu)) {
      return IOS_SIMULATOR;
    } else if (IOS_DEVICE_TARGET_CPUS.contains(targetCpu)) {
      return IOS_DEVICE;
    } else if (WATCHOS_SIMULATOR_TARGET_CPUS.contains(targetCpu)) {
      return WATCHOS_SIMULATOR;
    } else if (WATCHOS_DEVICE_TARGET_CPUS.contains(targetCpu)) {
      return WATCHOS_DEVICE;
    } else if (TVOS_SIMULATOR_TARGET_CPUS.contains(targetCpu)) {
      return TVOS_SIMULATOR;
    } else if (TVOS_DEVICE_TARGET_CPUS.contains(targetCpu)) {
      return TVOS_DEVICE;
    } else if (MACOS_TARGET_CPUS.contains(targetCpu)) {
      return MACOS;
    } else {
      return null;
    }
  }

  /**
   * Returns true if the platform for the given target cpu and platform type is a known 32-bit
   * architecture.
   *
   * @param platformType platform type that the given cpu value is implied for
   * @param arch architecture representation, such as 'arm64'
   */
  public static boolean is32Bit(PlatformType platformType, String arch) {
    return BIT_32_TARGET_CPUS.contains(cpuStringForTarget(platformType, arch));
  }

  /**
   * Returns the platform cpu string for the given target cpu and platform type.
   *
   * @param platformType platform type that the given cpu value is implied for
   * @param arch architecture representation, such as 'arm64'
   */
  public static String cpuStringForTarget(PlatformType platformType, String arch) {
    switch (platformType) {
      case MACOS:
        // macOS cpus use the legacy "darwin" prefix rather than "macos".
        return String.format("darwin_%s", arch);
      default:
        return String.format("%s_%s", platformType.toString(), arch);
    }
  }

  /**
   * Returns the platform for the given target cpu and platform type.
   *
   * @param platformType platform type that the given cpu value is implied for
   * @param arch architecture representation, such as 'arm64'
   * @throws IllegalArgumentException if there is no valid apple platform for the given target cpu
   */
  public static ApplePlatform forTarget(PlatformType platformType, String arch) {
    return forTargetCpu(cpuStringForTarget(platformType, arch));
  }

  /**
   * Returns the platform for the given target cpu.
   *
   * @param targetCpu cpu value with platform type prefix, such as 'ios_arm64'
   * @throws IllegalArgumentException if there is no valid apple platform for the given target cpu
   */
  public static ApplePlatform forTargetCpu(String targetCpu) {
    ApplePlatform platform = forTargetCpuNullable(targetCpu);
    if (platform != null) {
      return platform;
    } else {
      throw new IllegalArgumentException(
          "No supported apple platform registered for target cpu " + targetCpu);
    }
  }

  /** Returns true if the given target cpu is an apple platform. */
  public static boolean isApplePlatform(String targetCpu) {
    return forTargetCpuNullable(targetCpu) != null;
  }

  /** Returns a Starlark struct that contains the instances of this enum. */
  public static StructImpl getStarlarkStruct() {
    Provider constructor = new NativeProvider<StructImpl>(StructImpl.class, "platforms") {};
    HashMap<String, Object> fields = new HashMap<>();
    for (ApplePlatform type : values()) {
      fields.put(type.starlarkKey, type);
    }
    return StarlarkInfo.create(constructor, fields, Location.BUILTIN);
  }

  @Override
  public void repr(Printer printer) {
    printer.append(toString());
  }

  /** Exception indicating an unknown or unsupported Apple platform type. */
  public static class UnsupportedPlatformTypeException extends Exception {
    public UnsupportedPlatformTypeException(String msg) {
      super(msg);
    }
  }

  /**
   * Value used to describe Apple platform "type". A {@link ApplePlatform} is implied from a
   * platform type (for example, watchOS) together with a cpu value (for example, armv7).
   */
  // TODO(cparsons): Use these values in static retrieval methods in this class.
  @Immutable
  public enum PlatformType implements ApplePlatformTypeApi {
    IOS("ios"),
    WATCHOS("watchos"),
    TVOS("tvos"),
    MACOS("macos");

    /**
     * The key used to access the enum value as a field in the Starlark apple_common.platform_type
     * struct.
     */
    private final String starlarkKey;

    PlatformType(String starlarkKey) {
      this.starlarkKey = starlarkKey;
    }

    @Override
    public boolean isImmutable() {
      return true; // immutable and Starlark-hashable
    }

    @Override
    public String toString() {
      // Use a fixed locale: this value feeds cpuStringForTarget() and Starlark output, so it
      // must not vary with the JVM default locale (e.g. Turkish dotless-i would turn "IOS"
      // into "\u0131os"). Locale.US matches getLowerCaseNameInPlist() above.
      return name().toLowerCase(Locale.US);
    }

    /**
     * Returns the {@link PlatformType} with given name (case insensitive).
     *
     * @throws UnsupportedPlatformTypeException if the name does not match a valid platform type.
     */
    public static PlatformType fromString(String name) throws UnsupportedPlatformTypeException {
      for (PlatformType platformType : PlatformType.values()) {
        if (name.equalsIgnoreCase(platformType.toString())) {
          return platformType;
        }
      }
      throw new UnsupportedPlatformTypeException(
          String.format("Unsupported platform type \"%s\"", name));
    }

    /** Returns a Starlark struct that contains the instances of this enum. */
    public static StructImpl getStarlarkStruct() {
      Provider constructor = new NativeProvider<StructImpl>(StructImpl.class, "platform_types") {};
      HashMap<String, Object> fields = new HashMap<>();
      for (PlatformType type : values()) {
        fields.put(type.starlarkKey, type);
      }
      return StarlarkInfo.create(constructor, fields, Location.BUILTIN);
    }

    @Override
    public void repr(Printer printer) {
      printer.append(toString());
    }
  }
}
/*
 * This file is part of Industrial Foregoing.
 *
 * Copyright 2019, Buuz135
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of
 * this software and associated documentation files (the "Software"), to deal in the
 * Software without restriction, including without limitation the rights to use, copy,
 * modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
 * and to permit persons to whom the Software is furnished to do so, subject to the
 * following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
package com.buuz135.industrial.jei;

import com.buuz135.industrial.block.generator.tile.BioReactorTile;
import com.buuz135.industrial.jei.category.BioReactorRecipeCategory;
import com.buuz135.industrial.jei.category.DissolutionChamberCategory;
import com.buuz135.industrial.jei.category.FluidExtractorCategory;
import com.buuz135.industrial.jei.fluiddictionary.FluidDictionaryCategory;
import com.buuz135.industrial.jei.laser.LaserRecipeCategory;
import com.buuz135.industrial.jei.machineproduce.MachineProduceCategory;
import com.buuz135.industrial.jei.ore.OreFermenterCategory;
import com.buuz135.industrial.jei.ore.OreSieveCategory;
import com.buuz135.industrial.jei.ore.OreWasherCategory;
import com.buuz135.industrial.jei.petrifiedgen.PetrifiedBurnTimeCategory;
import com.buuz135.industrial.jei.sludge.SludgeRefinerRecipeCategory;
import com.buuz135.industrial.module.ModuleCore;
import com.buuz135.industrial.module.ModuleGenerator;
import com.buuz135.industrial.module.ModuleTool;
import com.buuz135.industrial.recipe.DissolutionChamberRecipe;
import com.buuz135.industrial.recipe.FluidExtractorRecipe;
import com.buuz135.industrial.utils.Reference;
import com.hrznstudio.titanium.util.RecipeUtil;
import mezz.jei.api.IModPlugin;
import mezz.jei.api.JeiPlugin;
import mezz.jei.api.registration.*;
import mezz.jei.api.runtime.IJeiRuntime;
import mezz.jei.api.runtime.IRecipesGui;
import net.minecraft.client.Minecraft;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.tags.Tag;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.fluids.FluidStack;
import java.util.ArrayList;
import java.util.List;

/**
 * JEI (Just Enough Items) integration entry point for Industrial Foregoing.
 *
 * <p>Discovered by JEI via the {@code @JeiPlugin} annotation. Registers recipe categories,
 * recipes, and recipe catalysts for the Fluid Extractor, Dissolution Chamber and Bioreactor.
 * Most other machines' categories are currently disabled (commented out) pending their port
 * to this Minecraft version — their fields are kept so re-enabling is a local change.
 */
@JeiPlugin
public class JEICustomPlugin implements IModPlugin {

    // Cached JEI runtime GUI; currently never assigned (see onRuntimeAvailable).
    private static IRecipesGui recipesGui;

    // Category instances; only those assigned in registerCategories are active.
    private SludgeRefinerRecipeCategory sludgeRefinerRecipeCategory;
    private BioReactorRecipeCategory bioReactorRecipeCategory;
    private BioReactorRecipeCategory proteinReactorRecipeCategory;
    private LaserRecipeCategory laserRecipeCategory;
    private MachineProduceCategory machineProduceCategory;
    private PetrifiedBurnTimeCategory petrifiedBurnTimeCategory;
    private FluidDictionaryCategory fluidDictionaryCategory;
    private FluidExtractorCategory fluidExtractorCategory;
    private OreWasherCategory oreWasherCategory;
    private OreFermenterCategory oreFermenterCategory;
    private OreSieveCategory oreSieveCategory;
    private DissolutionChamberCategory dissolutionChamberJEICategory;

    /**
     * Intended to open JEI's "uses" view for the given stack; currently a no-op because the
     * runtime hooks in {@link #onRuntimeAvailable} are disabled.
     */
    public static void showUses(ItemStack stack) {
        //if (recipesGui != null && recipeRegistry != null)
        //    recipesGui.show(recipeRegistry.createFocus(IFocus.Mode.INPUT, stack));
    }

    @Override
    public void registerItemSubtypes(ISubtypeRegistration registration) {
        // The Infinity Drill stores tier/charge in NBT, so each NBT variant gets its own JEI entry.
        registration.useNbtForSubtypes(ModuleTool.INFINITY_DRILL);
    }

    @Override
    public void registerIngredients(IModIngredientRegistration registry) {
        // No custom ingredient types are registered by this mod.
    }

    @Override
    public void registerCategories(IRecipeCategoryRegistration registry) {
        // Disabled categories below are kept for reference until their machines are ported.
        // if (BlockRegistry.sludgeRefinerBlock.isEnabled()) {
        //     sludgeRefinerRecipeCategory = new SludgeRefinerRecipeCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(sludgeRefinerRecipeCategory);
        // }
        bioReactorRecipeCategory = new BioReactorRecipeCategory(registry.getJeiHelpers().getGuiHelper(), "Bioreactor accepted items");
        registry.addRecipeCategories(bioReactorRecipeCategory);
        // if (BlockRegistry.proteinReactorBlock.isEnabled()) {
        //     proteinReactorRecipeCategory = new ReactorRecipeCategory(registry.getJeiHelpers().getGuiHelper(), "Protein reactor accepted items");
        //     registry.addRecipeCategories(proteinReactorRecipeCategory);
        // }
        // if (BlockRegistry.laserBaseBlock.isEnabled() || BlockRegistry.laserDrillBlock.isEnabled()) {
        //     laserRecipeCategory = new LaserRecipeCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(laserRecipeCategory);
        // }
        // machineProduceCategory = new MachineProduceCategory(registry.getJeiHelpers().getGuiHelper());
        // registry.addRecipeCategories(machineProduceCategory);
        // if (BlockRegistry.petrifiedFuelGeneratorBlock.isEnabled()) {
        //     petrifiedBurnTimeCategory = new PetrifiedBurnTimeCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(petrifiedBurnTimeCategory);
        // }
        // if (BlockRegistry.fluidDictionaryConverterBlock.isEnabled() && !FluidDictionaryEntry.FLUID_DICTIONARY_RECIPES.isEmpty()) {
        //     fluidDictionaryCategory = new FluidDictionaryCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(fluidDictionaryCategory);
        // }
        // if (BlockRegistry.materialStoneWorkFactoryBlock.isEnabled()) {
        //     stoneWorkCategory = new StoneWorkCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(stoneWorkCategory);
        // }
        // if (BlockRegistry.treeFluidExtractorBlock.isEnabled()) {
        fluidExtractorCategory = new FluidExtractorCategory(registry.getJeiHelpers().getGuiHelper());
        registry.addRecipeCategories(fluidExtractorCategory);
        // }
        // if (CustomConfiguration.enableBookEntriesInJEI) {
        //     manualCategory = new ManualCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(manualCategory);
        // }
        // if (BlockRegistry.oreWasherBlock.isEnabled()) {
        //     oreWasherCategory = new OreWasherCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(oreWasherCategory);
        // }
        // if (BlockRegistry.oreFermenterBlock.isEnabled()) {
        //     oreFermenterCategory = new OreFermenterCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(oreFermenterCategory);
        // }
        // if (BlockRegistry.oreSieveBlock.isEnabled()) {
        //     oreSieveCategory = new OreSieveCategory(registry.getJeiHelpers().getGuiHelper());
        //     registry.addRecipeCategories(oreSieveCategory);
        // }
        dissolutionChamberJEICategory = new DissolutionChamberCategory(registry.getJeiHelpers().getGuiHelper());
        registry.addRecipeCategories(dissolutionChamberJEICategory);
    }

    @Override
    public void registerRecipes(IRecipeRegistration registration) {
        // Pull datapack recipes from the client world's recipe manager for each active category.
        registration.addRecipes(RecipeUtil.getRecipes(Minecraft.getInstance().world, FluidExtractorRecipe.SERIALIZER.getRecipeType()), fluidExtractorCategory.getUid());
        registration.addRecipes(RecipeUtil.getRecipes(Minecraft.getInstance().world, DissolutionChamberRecipe.SERIALIZER.getRecipeType()), dissolutionChamberJEICategory.getUid());
        registration.addRecipes(generateBioreactorRecipes(), bioReactorRecipeCategory.getUid());
    }

    /**
     * Builds one synthetic recipe per item accepted by the Bioreactor: each item in each tag of
     * {@code BioReactorTile.VALID} yields 80 mb of Biofuel.
     */
    private List<BioReactorRecipeCategory.ReactorRecipeWrapper> generateBioreactorRecipes() {
        List<BioReactorRecipeCategory.ReactorRecipeWrapper> recipes = new ArrayList<>();
        for (Tag<Item> itemTag : BioReactorTile.VALID) {
            for (Item item : itemTag.getAllElements()) {
                recipes.add(new BioReactorRecipeCategory.ReactorRecipeWrapper(new ItemStack(item), new FluidStack(ModuleCore.BIOFUEL.getSourceFluid(), 80)));
            }
        }
        return recipes;
    }

    // NOTE(review): a very large commented-out 1.12-era register(IModRegistry) implementation was
    // removed here. It referenced registries and helpers that no longer exist in this codebase
    // (BlockRegistry, FluidsRegistry, ItemRegistry, OreDictionary, VanillaRecipeCategoryUid, ...).
    // Recover it from version control if any of those machines are ported and need their JEI
    // registration logic as a reference.

    @Override
    public void registerRecipeCatalysts(IRecipeCatalystRegistration registration) {
        // Machines shown as the "crafting station" icon for each recipe category.
        registration.addRecipeCatalyst(new ItemStack(ModuleCore.FLUID_EXTRACTOR), FluidExtractorCategory.ID);
        registration.addRecipeCatalyst(new ItemStack(ModuleCore.DISSOLUTION_CHAMBER), DissolutionChamberCategory.ID);
        registration.addRecipeCatalyst(new ItemStack(ModuleGenerator.BIOREACTOR), BioReactorRecipeCategory.ID);
    }

    @Override
    public void onRuntimeAvailable(IJeiRuntime jeiRuntime) {
        // Disabled: would cache the runtime GUI so showUses(...) could work.
        //recipesGui = jeiRuntime.getRecipesGui();
        //recipeRegistry = jeiRuntime.getRecipeRegistry();
    }

    @Override
    public ResourceLocation getPluginUid() {
        return new ResourceLocation(Reference.MOD_ID, "default");
    }
}
/*
 * Copyright (c) 1997, 2005, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
 * (C) Copyright Taligent, Inc. 1996 - 1997, All Rights Reserved
 * (C) Copyright IBM Corp. 1996 - 1998, All Rights Reserved
 *
 * The original version of this source code and documentation is
 * copyrighted and owned by Taligent, Inc., a wholly-owned subsidiary
 * of IBM. These materials are provided under terms of a License
 * Agreement between Taligent and Sun. This technology is protected
 * by multiple US and International patents.
 *
 * This notice and attribution to Taligent may not be removed.
 * Taligent is a registered trademark of Taligent, Inc.
 *
 */

package java.awt.font;

import java.awt.Font;

import java.text.AttributedCharacterIterator;
import java.text.AttributedString;
import java.text.Bidi;
import java.text.BreakIterator;
import java.text.CharacterIterator;
import java.awt.font.FontRenderContext;
import java.util.Hashtable;
import java.util.Map;

import sun.font.AttributeValues;
import sun.font.BidiUtils;
import sun.font.TextLineComponent;
import sun.font.TextLabelFactory;
import sun.font.FontResolver;

/**
 * The <code>TextMeasurer</code> class provides the primitive operations
 * needed for line break: measuring up to a given advance, determining the
 * advance of a range of characters, and generating a
 * <code>TextLayout</code> for a range of characters. It also provides
 * methods for incremental editing of paragraphs.
 * <p>
 * A <code>TextMeasurer</code> object is constructed with an
 * {@link java.text.AttributedCharacterIterator AttributedCharacterIterator}
 * representing a single paragraph of text.  The value returned by the
 * {@link AttributedCharacterIterator#getBeginIndex() getBeginIndex}
 * method of <code>AttributedCharacterIterator</code>
 * defines the absolute index of the first character.  The value
 * returned by the
 * {@link AttributedCharacterIterator#getEndIndex() getEndIndex}
 * method of <code>AttributedCharacterIterator</code> defines the index
 * past the last character.  These values define the range of indexes to
 * use in calls to the <code>TextMeasurer</code>.  For example, calls to
 * get the advance of a range of text or the line break of a range of text
 * must use indexes between the beginning and end index values.  Calls to
 * {@link #insertChar(java.text.AttributedCharacterIterator, int) insertChar}
 * and
 * {@link #deleteChar(java.text.AttributedCharacterIterator, int) deleteChar}
 * reset the <code>TextMeasurer</code> to use the beginning index and end
 * index of the <code>AttributedCharacterIterator</code> passed in those calls.
 * <p>
 * Most clients will use the more convenient <code>LineBreakMeasurer</code>,
 * which implements the standard line break policy (placing as many words
 * as will fit on each line).
 *
 * @author John Raley
 * @see LineBreakMeasurer
 * @since 1.3
 */

public final class TextMeasurer implements Cloneable {

    // Number of lines to format to.
    private static float EST_LINES = (float) 2.1;

    /*
    static {
        String s = System.getProperty("estLines");
        if (s != null) {
            try {
                Float f = new Float(s);
                EST_LINES = f.floatValue();
            }
            catch(NumberFormatException e) {
            }
        }
        //System.out.println("EST_LINES="+EST_LINES);
    }
    */

    private FontRenderContext fFrc;

    // Absolute index of the paragraph's first character (the iterator's begin index).
    private int fStart;

    // characters in source text
    private char[] fChars;

    // Bidi for this paragraph
    private Bidi fBidi;

    // Levels array for chars in this paragraph - needed to reorder
    // trailing counterdirectional whitespace
    private byte[] fLevels;

    // line components in logical order
    private TextLineComponent[] fComponents;

    // index where components begin
    private int fComponentStart;

    // index where components end
    private int fComponentLimit;

    private boolean haveLayoutWindow;

    // used to find valid starting points for line components
    private BreakIterator fLineBreak = null;
    private CharArrayIterator charIter = null;
    int layoutCount = 0;
    int layoutCharCount = 0;

    // paragraph, with resolved fonts and styles
    private StyledParagraph fParagraph;

    // paragraph data - same across all layouts
    private boolean fIsDirectionLTR;
    private byte fBaseline;
    private float[] fBaselineOffsets;
    private float fJustifyRatio = 1;

    /**
     * Constructs a <code>TextMeasurer</code> from the source text.
     * The source text should be a single entire paragraph.
     * @param text the source paragraph.  Cannot be null.
     * @param frc the information about a graphics device which is needed
     *       to measure the text correctly.  Cannot be null.
     */
    public TextMeasurer(AttributedCharacterIterator text, FontRenderContext frc) {

        fFrc = frc;
        initAll(text);
    }

    // Shallow clone plus a copy of the components array so the clone can
    // regenerate components independently of the original.
    protected Object clone() {
        TextMeasurer other;
        try {
            other = (TextMeasurer) super.clone();
        }
        catch(CloneNotSupportedException e) {
            throw new Error();
        }
        if (fComponents != null) {
            other.fComponents = (TextLineComponent[]) fComponents.clone();
        }
        return other;
    }

    // Discard the cached line components; they will be regenerated lazily.
    private void invalidateComponents() {
        fComponentStart = fComponentLimit = fChars.length;
        fComponents = null;
        haveLayoutWindow = false;
    }

    /**
     * Initialize state, including fChars array, direction, and
     * fBidi.
     */
    private void initAll(AttributedCharacterIterator text) {

        fStart = text.getBeginIndex();

        // extract chars
        fChars = new char[text.getEndIndex() - fStart];

        int n = 0;
        for (char c = text.first(); c != text.DONE; c = text.next()) {
            fChars[n++] = c;
        }

        text.first();

        fBidi = new Bidi(text);
        if (fBidi.isLeftToRight()) {
            // Pure LTR paragraphs don't need bidi reordering; null signals that fast path.
            fBidi = null;
        }

        text.first();
        Map paragraphAttrs = text.getAttributes();
        NumericShaper shaper = AttributeValues.getNumericShaping(paragraphAttrs);
        if (shaper != null) {
            // Numeric shaping mutates fChars in place (e.g. European -> Arabic-Indic digits).
            shaper.shape(fChars, 0, fChars.length);
        }

        fParagraph = new StyledParagraph(text, fChars);

        // set paragraph attributes
        {
            // If there's an embedded graphic at the start of the
            // paragraph, look for the first non-graphic character
            // and use it and its font to initialize the paragraph.
            // If not, use the first graphic to initialize.

            fJustifyRatio = AttributeValues.getJustification(paragraphAttrs);

            boolean haveFont = TextLine.advanceToFirstFont(text);

            if (haveFont) {
                Font defaultFont = TextLine.getFontAtCurrentPos(text);
                int charsStart = text.getIndex() - text.getBeginIndex();
                LineMetrics lm = defaultFont.getLineMetrics(fChars, charsStart, charsStart+1, fFrc);
                fBaseline = (byte) lm.getBaselineIndex();
                fBaselineOffsets = lm.getBaselineOffsets();
            }
            else {
                // hmmm what to do here?  Just try to supply reasonable
                // values I guess.

                GraphicAttribute graphic = (GraphicAttribute)
                                paragraphAttrs.get(TextAttribute.CHAR_REPLACEMENT);
                fBaseline = TextLayout.getBaselineFromGraphic(graphic);
                Font dummyFont = new Font(new Hashtable(5, (float)0.9));
                LineMetrics lm = dummyFont.getLineMetrics(" ", 0, 1, fFrc);
                fBaselineOffsets = lm.getBaselineOffsets();
            }

            fBaselineOffsets = TextLine.getNormalizedOffsets(fBaselineOffsets, fBaseline);
        }

        invalidateComponents();
    }

    /**
     * Generate components for the paragraph.  fChars, fBidi should have been
     * initialized already.
     */
    private void generateComponents(int startingAt, int endingAt) {

        if (collectStats) {
            formattedChars += (endingAt-startingAt);
        }
        int layoutFlags = 0; // no extra info yet, bidi determines run and line direction
        TextLabelFactory factory = new TextLabelFactory(fFrc, fChars, fBidi, layoutFlags);

        int[] charsLtoV = null;

        if (fBidi != null) {
            fLevels = BidiUtils.getLevels(fBidi);
            int[] charsVtoL = BidiUtils.createVisualToLogicalMap(fLevels);
            charsLtoV = BidiUtils.createInverseMap(charsVtoL);
            fIsDirectionLTR = fBidi.baseIsLeftToRight();
        }
        else {
            fLevels = null;
            fIsDirectionLTR = true;
        }

        try {
            fComponents = TextLine.getComponents(
                fParagraph, fChars, startingAt, endingAt, charsLtoV, fLevels, factory);
        }
        catch(IllegalArgumentException e) {
            // Debug aid: dump the range that failed before rethrowing.
            System.out.println("startingAt="+startingAt+"; endingAt="+endingAt);
            System.out.println("fComponentLimit="+fComponentLimit);
            throw e;
        }

        fComponentStart = startingAt;
        fComponentLimit = endingAt;
        //debugFormatCount += (endingAt-startingAt);
    }

    private int calcLineBreak(final int pos, final float maxAdvance) {

        // either of these statements removes the bug:
        //generateComponents(0, fChars.length);
        //generateComponents(pos, fChars.length);

        int startPos = pos;
        float width = maxAdvance;

        int tlcIndex;
        int tlcStart = fComponentStart;

        // Locate the component containing startPos; tlcStart tracks its first char index.
        for (tlcIndex = 0; tlcIndex < fComponents.length; tlcIndex++) {
            int gaLimit = tlcStart + fComponents[tlcIndex].getNumCharacters();
            if (gaLimit > startPos) {
                break;
            }
            else {
                tlcStart = gaLimit;
            }
        }

        //
tlcStart is now the start of the tlc at tlcIndex for (; tlcIndex < fComponents.length; tlcIndex++) { TextLineComponent tlc = fComponents[tlcIndex]; int numCharsInGa = tlc.getNumCharacters(); int lineBreak = tlc.getLineBreakIndex(startPos - tlcStart, width); if (lineBreak == numCharsInGa && tlcIndex < fComponents.length) { width -= tlc.getAdvanceBetween(startPos - tlcStart, lineBreak); tlcStart += numCharsInGa; startPos = tlcStart; } else { return tlcStart + lineBreak; } } if (fComponentLimit < fChars.length) { // format more text and try again //if (haveLayoutWindow) { // outOfWindow++; //} generateComponents(pos, fChars.length); return calcLineBreak(pos, maxAdvance); } return fChars.length; } /** * According to the Unicode Bidirectional Behavior specification * (Unicode Standard 2.0, section 3.11), whitespace at the ends * of lines which would naturally flow against the base direction * must be made to flow with the line direction, and moved to the * end of the line. This method returns the start of the sequence * of trailing whitespace characters to move to the end of a * line taken from the given range. */ private int trailingCdWhitespaceStart(int startPos, int limitPos) { if (fLevels != null) { // Back up over counterdirectional whitespace final byte baseLevel = (byte) (fIsDirectionLTR? 0 : 1); for (int cdWsStart = limitPos; --cdWsStart >= startPos;) { if ((fLevels[cdWsStart] % 2) == baseLevel || Character.getDirectionality(fChars[cdWsStart]) != Character.DIRECTIONALITY_WHITESPACE) { return ++cdWsStart; } } } return startPos; } private TextLineComponent[] makeComponentsOnRange(int startPos, int limitPos) { // sigh I really hate to do this here since it's part of the // bidi algorithm. 
// cdWsStart is the start of the trailing counterdirectional // whitespace final int cdWsStart = trailingCdWhitespaceStart(startPos, limitPos); int tlcIndex; int tlcStart = fComponentStart; for (tlcIndex = 0; tlcIndex < fComponents.length; tlcIndex++) { int gaLimit = tlcStart + fComponents[tlcIndex].getNumCharacters(); if (gaLimit > startPos) { break; } else { tlcStart = gaLimit; } } // tlcStart is now the start of the tlc at tlcIndex int componentCount; { boolean split = false; int compStart = tlcStart; int lim=tlcIndex; for (boolean cont=true; cont; lim++) { int gaLimit = compStart + fComponents[lim].getNumCharacters(); if (cdWsStart > Math.max(compStart, startPos) && cdWsStart < Math.min(gaLimit, limitPos)) { split = true; } if (gaLimit >= limitPos) { cont=false; } else { compStart = gaLimit; } } componentCount = lim-tlcIndex; if (split) { componentCount++; } } TextLineComponent[] components = new TextLineComponent[componentCount]; int newCompIndex = 0; int linePos = startPos; int breakPt = cdWsStart; int subsetFlag; if (breakPt == startPos) { subsetFlag = fIsDirectionLTR? TextLineComponent.LEFT_TO_RIGHT : TextLineComponent.RIGHT_TO_LEFT; breakPt = limitPos; } else { subsetFlag = TextLineComponent.UNCHANGED; } while (linePos < limitPos) { int compLength = fComponents[tlcIndex].getNumCharacters(); int tlcLimit = tlcStart + compLength; int start = Math.max(linePos, tlcStart); int limit = Math.min(breakPt, tlcLimit); components[newCompIndex++] = fComponents[tlcIndex].getSubset( start-tlcStart, limit-tlcStart, subsetFlag); linePos += (limit-start); if (linePos == breakPt) { breakPt = limitPos; subsetFlag = fIsDirectionLTR? 
TextLineComponent.LEFT_TO_RIGHT : TextLineComponent.RIGHT_TO_LEFT; } if (linePos == tlcLimit) { tlcIndex++; tlcStart = tlcLimit; } } return components; } private TextLine makeTextLineOnRange(int startPos, int limitPos) { int[] charsLtoV = null; byte[] charLevels = null; if (fBidi != null) { Bidi lineBidi = fBidi.createLineBidi(startPos, limitPos); charLevels = BidiUtils.getLevels(lineBidi); int[] charsVtoL = BidiUtils.createVisualToLogicalMap(charLevels); charsLtoV = BidiUtils.createInverseMap(charsVtoL); } TextLineComponent[] components = makeComponentsOnRange(startPos, limitPos); return new TextLine(fFrc, components, fBaselineOffsets, fChars, startPos, limitPos, charsLtoV, charLevels, fIsDirectionLTR); } private void ensureComponents(int start, int limit) { if (start < fComponentStart || limit > fComponentLimit) { generateComponents(start, limit); } } private void makeLayoutWindow(int localStart) { int compStart = localStart; int compLimit = fChars.length; // If we've already gone past the layout window, format to end of paragraph if (layoutCount > 0 && !haveLayoutWindow) { float avgLineLength = Math.max(layoutCharCount / layoutCount, 1); compLimit = Math.min(localStart + (int)(avgLineLength*EST_LINES), fChars.length); } if (localStart > 0 || compLimit < fChars.length) { if (charIter == null) { charIter = new CharArrayIterator(fChars); } else { charIter.reset(fChars); } if (fLineBreak == null) { fLineBreak = BreakIterator.getLineInstance(); } fLineBreak.setText(charIter); if (localStart > 0) { if (!fLineBreak.isBoundary(localStart)) { compStart = fLineBreak.preceding(localStart); } } if (compLimit < fChars.length) { if (!fLineBreak.isBoundary(compLimit)) { compLimit = fLineBreak.following(compLimit); } } } ensureComponents(compStart, compLimit); haveLayoutWindow = true; } /** * Returns the index of the first character which will not fit on * on a line beginning at <code>start</code> and possible * measuring up to <code>maxAdvance</code> in graphical width. 
* * @param start the character index at which to start measuring. * <code>start</code> is an absolute index, not relative to the * start of the paragraph * @param maxAdvance the graphical width in which the line must fit * @return the index after the last character that will fit * on a line beginning at <code>start</code>, which is not longer * than <code>maxAdvance</code> in graphical width * @throws IllegalArgumentException if <code>start</code> is * less than the beginning of the paragraph. */ public int getLineBreakIndex(int start, float maxAdvance) { int localStart = start - fStart; if (!haveLayoutWindow || localStart < fComponentStart || localStart >= fComponentLimit) { makeLayoutWindow(localStart); } return calcLineBreak(localStart, maxAdvance) + fStart; } /** * Returns the graphical width of a line beginning at <code>start</code> * and including characters up to <code>limit</code>. * <code>start</code> and <code>limit</code> are absolute indices, * not relative to the start of the paragraph. * * @param start the character index at which to start measuring * @param limit the character index at which to stop measuring * @return the graphical width of a line beginning at <code>start</code> * and including characters up to <code>limit</code> * @throws IndexOutOfBoundsException if <code>limit</code> is less * than <code>start</code> * @throws IllegalArgumentException if <code>start</code> or * <code>limit</code> is not between the beginning of * the paragraph and the end of the paragraph. */ public float getAdvanceBetween(int start, int limit) { int localStart = start - fStart; int localLimit = limit - fStart; ensureComponents(localStart, localLimit); TextLine line = makeTextLineOnRange(localStart, localLimit); return line.getMetrics().advance; // could cache line in case getLayout is called with same start, limit } /** * Returns a <code>TextLayout</code> on the given character range. 
* * @param start the index of the first character * @param limit the index after the last character. Must be greater * than <code>start</code> * @return a <code>TextLayout</code> for the characters beginning at * <code>start</code> up to (but not including) <code>limit</code> * @throws IndexOutOfBoundsException if <code>limit</code> is less * than <code>start</code> * @throws IllegalArgumentException if <code>start</code> or * <code>limit</code> is not between the beginning of * the paragraph and the end of the paragraph. */ public TextLayout getLayout(int start, int limit) { int localStart = start - fStart; int localLimit = limit - fStart; ensureComponents(localStart, localLimit); TextLine textLine = makeTextLineOnRange(localStart, localLimit); if (localLimit < fChars.length) { layoutCharCount += limit-start; layoutCount++; } return new TextLayout(textLine, fBaseline, fBaselineOffsets, fJustifyRatio); } private int formattedChars = 0; private static boolean wantStats = false;/*"true".equals(System.getProperty("collectStats"));*/ private boolean collectStats = false; private void printStats() { System.out.println("formattedChars: " + formattedChars); //formattedChars = 0; collectStats = false; } /** * Updates the <code>TextMeasurer</code> after a single character has * been inserted * into the paragraph currently represented by this * <code>TextMeasurer</code>. After this call, this * <code>TextMeasurer</code> is equivalent to a new * <code>TextMeasurer</code> created from the text; however, it will * usually be more efficient to update an existing * <code>TextMeasurer</code> than to create a new one from scratch. * * @param newParagraph the text of the paragraph after performing * the insertion. Cannot be null. * @param insertPos the position in the text where the character was * inserted. Must not be less than the start of * <code>newParagraph</code>, and must be less than the end of * <code>newParagraph</code>. 
* @throws IndexOutOfBoundsException if <code>insertPos</code> is less * than the start of <code>newParagraph</code> or greater than * or equal to the end of <code>newParagraph</code> * @throws NullPointerException if <code>newParagraph</code> is * <code>null</code> */ public void insertChar(AttributedCharacterIterator newParagraph, int insertPos) { if (collectStats) { printStats(); } if (wantStats) { collectStats = true; } fStart = newParagraph.getBeginIndex(); int end = newParagraph.getEndIndex(); if (end - fStart != fChars.length+1) { initAll(newParagraph); } char[] newChars = new char[end-fStart]; int newCharIndex = insertPos - fStart; System.arraycopy(fChars, 0, newChars, 0, newCharIndex); char newChar = newParagraph.setIndex(insertPos); newChars[newCharIndex] = newChar; System.arraycopy(fChars, newCharIndex, newChars, newCharIndex+1, end-insertPos-1); fChars = newChars; if (fBidi != null || Bidi.requiresBidi(newChars, newCharIndex, newCharIndex + 1) || newParagraph.getAttribute(TextAttribute.BIDI_EMBEDDING) != null) { fBidi = new Bidi(newParagraph); if (fBidi.isLeftToRight()) { fBidi = null; } } fParagraph = StyledParagraph.insertChar(newParagraph, fChars, insertPos, fParagraph); invalidateComponents(); } /** * Updates the <code>TextMeasurer</code> after a single character has * been deleted * from the paragraph currently represented by this * <code>TextMeasurer</code>. After this call, this * <code>TextMeasurer</code> is equivalent to a new <code>TextMeasurer</code> * created from the text; however, it will usually be more efficient * to update an existing <code>TextMeasurer</code> than to create a new one * from scratch. * * @param newParagraph the text of the paragraph after performing * the deletion. Cannot be null. * @param deletePos the position in the text where the character was removed. * Must not be less than * the start of <code>newParagraph</code>, and must not be greater than the * end of <code>newParagraph</code>. 
* @throws IndexOutOfBoundsException if <code>deletePos</code> is * less than the start of <code>newParagraph</code> or greater * than the end of <code>newParagraph</code> * @throws NullPointerException if <code>newParagraph</code> is * <code>null</code> */ public void deleteChar(AttributedCharacterIterator newParagraph, int deletePos) { fStart = newParagraph.getBeginIndex(); int end = newParagraph.getEndIndex(); if (end - fStart != fChars.length-1) { initAll(newParagraph); } char[] newChars = new char[end-fStart]; int changedIndex = deletePos-fStart; System.arraycopy(fChars, 0, newChars, 0, deletePos-fStart); System.arraycopy(fChars, changedIndex+1, newChars, changedIndex, end-deletePos); fChars = newChars; if (fBidi != null) { fBidi = new Bidi(newParagraph); if (fBidi.isLeftToRight()) { fBidi = null; } } fParagraph = StyledParagraph.deleteChar(newParagraph, fChars, deletePos, fParagraph); invalidateComponents(); } /** * NOTE: This method is only for LineBreakMeasurer's use. It is package- * private because it returns internal data. */ char[] getChars() { return fChars; } }
package com.alexvasilkov.gestures.animation; import android.graphics.Matrix; import android.graphics.RectF; import android.support.annotation.FloatRange; import android.support.annotation.NonNull; import android.util.Log; import android.view.View; import android.widget.ImageView; import android.widget.ImageView.ScaleType; import com.alexvasilkov.gestures.GestureController; import com.alexvasilkov.gestures.GestureControllerForPager; import com.alexvasilkov.gestures.Settings; import com.alexvasilkov.gestures.State; import com.alexvasilkov.gestures.StateController; import com.alexvasilkov.gestures.internal.AnimationEngine; import com.alexvasilkov.gestures.internal.FloatScroller; import com.alexvasilkov.gestures.internal.GestureDebug; import com.alexvasilkov.gestures.views.GestureImageView; import com.alexvasilkov.gestures.views.interfaces.ClipView; import com.alexvasilkov.gestures.views.interfaces.GestureView; import java.util.ArrayList; import java.util.List; /** * Helper class to animate views from one position on screen to another. * <p/> * Animation can be performed from any view (e.g. {@link ImageView}) to any gestures controlled * view implementing {@link GestureView} (e.g. {@link GestureImageView}). * <p/> * Note, that initial and final views should have same aspect ratio for correct animation. * In case of {@link ImageView} initial and final images should have same aspect, but actual views * can have different aspects (e.g. animating from square thumb view with scale type * {@link ScaleType#CENTER_CROP} to rectangular full image view). * <p/> * To use this class first create an instance and than call {@link #enter(View, boolean)}.<br/> * Alternatively you can manually pass initial view position using * {@link #enter(ViewPosition, boolean)} method. 
<br/> * To exit back to initial view call {@link #exit(boolean)} method.<br/> * You can listen for position changes using * {@link #addPositionUpdateListener(PositionUpdateListener)}.<br/> * If initial view was changed you should call {@link #update(View)} method to update to new view. * You can also manually update initial view position using {@link #update(ViewPosition)} method. */ public class ViewPositionAnimator { private static final String TAG = "ViewPositionAnimator"; private static final Matrix TMP_MATRIX = new Matrix(); private final List<PositionUpdateListener> mListeners = new ArrayList<>(); private final List<PositionUpdateListener> mListenersToRemove = new ArrayList<>(); private boolean mIteratingListeners; private long mDuration = FloatScroller.DEFAULT_DURATION; private final FloatScroller mStateScroller = new FloatScroller(); private final AnimationEngine mAnimationEngine; private final GestureController mToController; private final ClipView mToClipView; private final State mFromState = new State(), mToState = new State(); private float mFromPivotX, mFromPivotY, mToPivotX, mToPivotY; private final RectF mFromClip = new RectF(), mToClip = new RectF(); private final RectF mClipRect = new RectF(); private ViewPosition mFromPos, mToPos; private View mFromView; private boolean mOrigRestrictBoundsFlag; private boolean mIsActivated = false; private float mPositionState = 0f; private boolean mIsLeaving = true; // Leaving by default private boolean mIsAnimating = false; private boolean mApplyingPositionState; private boolean mApplyingPositionStateScheduled; private boolean mIsFromUpdated, mIsToUpdated; // Marks that update for 'From' or 'To' is needed private final ViewPositionHolder mFromPosHolder = new ViewPositionHolder(); private final ViewPositionHolder mToPosHolder = new ViewPositionHolder(); private final ViewPositionHolder.OnViewPositionChangeListener mFromPositionListener = new ViewPositionHolder.OnViewPositionChangeListener() { @Override public 
void onViewPositionChanged(@NonNull ViewPosition position) { if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "'From' view position updated: " + position.pack()); } mFromPos = position; requestUpdateFromState(); applyPositionState(); } }; public ViewPositionAnimator(@NonNull GestureView to) { if (!(to instanceof View)) { throw new IllegalArgumentException("Argument 'to' should be an instance of View"); } View toView = (View) to; mToClipView = to instanceof ClipView ? (ClipView) to : null; mAnimationEngine = new LocalAnimationEngine(toView); mToController = to.getController(); mToController.addOnStateChangeListener(new GestureController.OnStateChangeListener() { @Override public void onStateChanged(State state) { // No-op } @Override public void onStateReset(State oldState, State newState) { if (!mIsActivated) { return; } if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "State reset in listener: " + newState); } resetToState(); applyPositionState(); } }); mToPosHolder.init(toView, new ViewPositionHolder.OnViewPositionChangeListener() { @Override public void onViewPositionChanged(@NonNull ViewPosition position) { if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "'To' view position updated: " + position.pack()); } mToPos = position; requestUpdateToState(); requestUpdateFromState(); // Depends on 'to' position applyPositionState(); } }); } /** * Starts 'enter' animation from {@code from} view to {@code to}. * <p/> * Note, if {@code from} view was changed (i.e. during list adapter refresh) you should * update to new view using {@link #update(View)} method. */ public void enter(@NonNull View from, boolean withAnimation) { if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Entering from view, with animation = " + withAnimation); } enterInternal(withAnimation); updateInternal(from); } /** * Starts 'enter' animation from {@code from} position to {@code to} view. * <p/> * Note, if {@code from} view position was changed (i.e. 
during list adapter refresh) you * should * update to new view using {@link #update(ViewPosition)} method. */ public void enter(@NonNull ViewPosition fromPos, boolean withAnimation) { if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Entering from view position, with animation = " + withAnimation); } enterInternal(withAnimation); updateInternal(fromPos); } /** * Updates initial view in case it was changed. You should not call this method if view stays * the same since animator should automatically detect view position changes. */ public void update(@NonNull View from) { if (mFromView == null) { throw new IllegalStateException("Animation was not started using " + "enter(View, boolean) method, cannot update 'from' view"); } if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Updating view"); } updateInternal(from); } /** * Updates position of initial view in case it was changed. */ public void update(@NonNull ViewPosition fromPos) { if (mFromPos == null) { throw new IllegalStateException("Animation was not started using " + "enter(ViewPosition, boolean) method, cannot update 'from' position"); } if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Updating view position: " + fromPos.pack()); } updateInternal(fromPos); } /** * Starts 'exit' animation from {@code to} view back to {@code from} */ public void exit(boolean withAnimation) { if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Exiting, with animation = " + withAnimation); } if (!mIsActivated) { throw new IllegalStateException("You should call enter(...) before calling exit(...)"); } if (!mIsAnimating) { resetToState(); // Only resetting if not animating } // Starting animation from current position or applying initial state without animation setState(withAnimation ? mPositionState : 0f, true, withAnimation); } private void enterInternal(boolean withAnimation) { mIsActivated = true; // Starting animation from initial position or applying final state without animation setState(withAnimation ? 
0f : 1f, false, withAnimation); } private void updateInternal(@NonNull View from) { if (!mIsActivated) { throw new IllegalStateException( "You should call enter(...) before calling update(...)"); } cleanup(); resetToState(); mFromView = from; mFromPosHolder.init(from, mFromPositionListener); from.setVisibility(View.INVISIBLE); // We don't want to have duplicate view during animation } private void updateInternal(@NonNull ViewPosition fromPos) { if (!mIsActivated) { throw new IllegalStateException( "You should call enter(...) before calling update(...)"); } cleanup(); resetToState(); mFromPos = fromPos; } private void cleanup() { if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Cleaning up"); } if (mFromView != null) { mFromView.setVisibility(View.VISIBLE); // Switching back to visible } if (mToClipView != null) { mToClipView.clipView(null); } mFromPosHolder.clear(); mFromView = null; mFromPos = null; mIsFromUpdated = mIsToUpdated = false; } /** * Adds listener to the set of position updates listeners that will be notified during * any position changes. */ public void addPositionUpdateListener(PositionUpdateListener listener) { mListeners.add(listener); mListenersToRemove.remove(listener); } /** * Removes listener added by {@link #addPositionUpdateListener(PositionUpdateListener)}. * <p/> * Note, this method may be called inside listener's callback without throwing * {@link IndexOutOfBoundsException}. 
*/ public void removePositionUpdateListener(PositionUpdateListener listener) { if (mIteratingListeners) { mListenersToRemove.add(listener); } else { mListeners.remove(listener); } } private void ensurePositionUpdateListenersRemoved() { mListeners.removeAll(mListenersToRemove); mListenersToRemove.clear(); } @SuppressWarnings("unused") // Public API public long getDuration() { return mDuration; } @SuppressWarnings("unused") // Public API public void setDuration(long duration) { mDuration = duration; } /** * @return Current position state within range {@code [0, 1]}, where {@code 0} is for * initial (from) position and {@code 1} is for final (to) position. */ public float getPositionState() { return mPositionState; } /** * @return Whether animator is in leaving state. Means that animation direction is * from final (to) position back to initial (from) position. */ public boolean isLeaving() { return mIsLeaving; } /** * Stops current animation and sets position state to particular values. * <p/> * Note, that once animator reaches {@code state = 0f} and {@code isLeaving = true} * it will cleanup all internal stuff. So you will need to call {@link #enter(View, boolean)} * or {@link #enter(ViewPosition, boolean)} again in order to continue using animator. */ public void setState(@FloatRange(from = 0f, to = 1f) float state, boolean isLeaving, boolean isAnimating) { stopAnimation(); mPositionState = state; mIsLeaving = isLeaving; if (isAnimating) { startAnimationInternal(); } applyPositionState(); } /** * Whether view position animation is in progress or not. */ public boolean isAnimating() { return mIsAnimating; } /** * Starts animation from current position state ({@link #getPositionState()}) and in current * direction ({@link #isLeaving()}). */ private void startAnimationInternal() { stopAnimation(); float durationFraction = mIsLeaving ? mPositionState : 1f - mPositionState; mStateScroller.startScroll(mPositionState, mIsLeaving ? 
0f : 1f); mStateScroller.setDuration((long) (mDuration * durationFraction)); mAnimationEngine.start(); onAnimationStarted(); } /** * Stops current animation, if any. */ public void stopAnimation() { mStateScroller.forceFinished(); onAnimationStopped(); } private void applyPositionState() { if (mApplyingPositionState) { // Excluding possible nested calls, scheduling sequential call instead mApplyingPositionStateScheduled = true; return; } mApplyingPositionState = true; // We do not need to update while 'to' view is fully visible or fully closed boolean paused = mIsLeaving ? mPositionState == 0f : mPositionState == 1f; mFromPosHolder.pause(paused); mToPosHolder.pause(paused); // Perform state updates if needed if (!mIsToUpdated) { updateToState(); } if (!mIsFromUpdated) { updateFromState(); } if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Applying state: " + mPositionState + " / " + mIsLeaving + ", 'to' ready = " + mIsToUpdated + ", 'from' ready = " + mIsFromUpdated); } if (mIsToUpdated && mIsFromUpdated) { State state = mToController.getState(); StateController.interpolate(state, mFromState, mFromPivotX, mFromPivotY, mToState, mToPivotX, mToPivotY, mPositionState); mToController.updateState(); interpolate(mClipRect, mFromClip, mToClip, mPositionState); if (mToClipView != null) { boolean skipClip = mPositionState == 1f || (mPositionState == 0f && mIsLeaving); mToClipView.clipView(skipClip ? 
null : mClipRect); } } mIteratingListeners = true; for (int i = 0, size = mListeners.size(); i < size; i++) { if (mApplyingPositionStateScheduled) { break; // No need to call listeners anymore } mListeners.get(i).onPositionUpdate(mPositionState, mIsLeaving); } mIteratingListeners = false; ensurePositionUpdateListenersRemoved(); if (mPositionState == 0f && mIsLeaving) { cleanup(); mIsActivated = false; mToController.resetState(); // Switching to initial state } mApplyingPositionState = false; if (mApplyingPositionStateScheduled) { mApplyingPositionStateScheduled = false; applyPositionState(); } } private void onAnimationStarted() { if (mIsAnimating) { return; } mIsAnimating = true; if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Animation started"); } // Saving bounds restrictions states mOrigRestrictBoundsFlag = mToController.getSettings().isRestrictBounds(); // Disabling bounds restrictions & any gestures mToController.getSettings().setRestrictBounds(false).disableGestures(); // Stopping all currently playing animations mToController.stopAllAnimations(); // Disabling ViewPager scroll if (mToController instanceof GestureControllerForPager) { ((GestureControllerForPager) mToController).disableViewPager(true); } } private void onAnimationStopped() { if (!mIsAnimating) { return; } mIsAnimating = false; if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "Animation stopped"); } // Restoring original settings mToController.getSettings().setRestrictBounds(mOrigRestrictBoundsFlag).enableGestures(); mToController.updateState(); // Enabling ViewPager scroll if (mToController instanceof GestureControllerForPager) { ((GestureControllerForPager) mToController).disableViewPager(false); } } private void resetToState() { if (GestureDebug.isDebugAnimator()) { Log.d(TAG, "State reset internal: " + mToController.getState()); } mToState.set(mToController.getState()); requestUpdateToState(); requestUpdateFromState(); } private void requestUpdateToState() { mIsToUpdated = false; } 
private void requestUpdateFromState() {
    // Marks the cached 'from' state dirty; recomputed lazily in updateFromState().
    mIsFromUpdated = false;
}

/**
 * Computes the 'to' (final) clip rect and pivot point from the controller's
 * current state, if the image size is already known.
 * No-op until the controller's settings report a valid image size.
 */
private void updateToState() {
    if (mIsToUpdated) {
        return;
    }
    Settings settings = mToController == null ? null : mToController.getSettings();
    if (mToPos == null || settings == null || !settings.hasImageSize()) {
        return;
    }
    // Computing 'To' clip by getting current 'To' image rect in 'To' view coordinates
    // (including view paddings which are not part of viewport)
    mToClip.set(0, 0, settings.getImageW(), settings.getImageH());
    mToState.get(TMP_MATRIX);
    TMP_MATRIX.mapRect(mToClip); // image rect -> view coordinates via state matrix
    mToPivotX = mToClip.centerX();
    mToPivotY = mToClip.centerY();
    // Shift by view padding (viewport excludes padding, view rect does not).
    int paddingLeft = mToPos.viewport.left - mToPos.view.left;
    int paddingTop = mToPos.viewport.top - mToPos.view.top;
    mToClip.offset(paddingLeft, paddingTop);
    mIsToUpdated = true;
    if (GestureDebug.isDebugAnimator()) {
        Log.d(TAG, "'To' state updated");
    }
}

/**
 * Computes the 'from' (initial) state, pivot and clip rect, expressed in the
 * 'to' view's coordinate space. Requires both 'from' and 'to' positions and a
 * known image size; otherwise leaves the state marked as not updated.
 */
private void updateFromState() {
    if (mIsFromUpdated) {
        return;
    }
    Settings settings = mToController == null ? null : mToController.getSettings();
    if (mToPos == null || mFromPos == null || settings == null || !settings.hasImageSize()) {
        return;
    }
    // Computing starting zoom level: scale the full image so it covers the 'from' image rect.
    float w = settings.getImageW();
    float h = settings.getImageH();
    float zoomW = w == 0f ? 1f : mFromPos.image.width() / w;
    float zoomH = h == 0f ? 1f : mFromPos.image.height() / h;
    float zoom = Math.max(zoomW, zoomH);
    // Computing 'From' image in 'To' viewport coordinates.
    // If 'To' image have different aspect ratio it will be centered within the 'From' image.
    float x = mFromPos.image.centerX() - 0.5f * w * zoom - mToPos.viewport.left;
    float y = mFromPos.image.centerY() - 0.5f * h * zoom - mToPos.viewport.top;
    mFromState.set(x, y, zoom, 0f);
    mFromPivotX = mFromPos.image.centerX() - mToPos.viewport.left;
    mFromPivotY = mFromPos.image.centerY() - mToPos.viewport.top;
    // 'From' clip is a 'From' view rect in coordinates of 'To' view.
    mFromClip.set(0, 0, mFromPos.view.width(), mFromPos.view.height());
    float left = mFromPos.view.left - mToPos.view.left;
    float top = mFromPos.view.top - mToPos.view.top;
    mFromClip.offset(left, top);
    mIsFromUpdated = true;
    if (GestureDebug.isDebugAnimator()) {
        Log.d(TAG, "'From' state updated");
    }
}

/**
 * Interpolates from start rect to the end rect by given factor (from 0 to 1),
 * storing result into out rect.
 */
private static void interpolate(RectF out, RectF start, RectF end, float factor) {
    out.left = StateController.interpolate(start.left, end.left, factor);
    out.top = StateController.interpolate(start.top, end.top, factor);
    out.right = StateController.interpolate(start.right, end.right, factor);
    out.bottom = StateController.interpolate(start.bottom, end.bottom, factor);
}

/**
 * Drives the transition: on each frame advances the scroller, pushes the new
 * position through applyPositionState(), and stops the animation machinery
 * once the scroller finishes.
 */
private class LocalAnimationEngine extends AnimationEngine {
    public LocalAnimationEngine(@NonNull View view) {
        super(view);
    }

    @Override
    public boolean onStep() {
        if (!mStateScroller.isFinished()) {
            mStateScroller.computeScroll();
            mPositionState = mStateScroller.getCurr();
            applyPositionState();
            if (mStateScroller.isFinished()) {
                onAnimationStopped();
            }
            return true; // keep stepping
        }
        return false; // animation done, stop the engine
    }
}

public interface PositionUpdateListener {
    /**
     * @param state Position state within range {@code [0, 1]}, where {@code 0} is for
     * initial (from) position and {@code 1} is for final (to) position.
     * @param isLeaving {@code false} if transitioning from initial to final position
     * (entering) or {@code true} for reverse transition.
     */
    void onPositionUpdate(float state, boolean isLeaving);
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.el.lang;

import java.io.StringReader;
import java.lang.reflect.Method;
import java.security.AccessController;
import java.security.PrivilegedAction;

import jakarta.el.ELContext;
import jakarta.el.ELException;
import jakarta.el.FunctionMapper;
import jakarta.el.MethodExpression;
import jakarta.el.ValueExpression;
import jakarta.el.VariableMapper;

import org.apache.el.MethodExpressionImpl;
import org.apache.el.MethodExpressionLiteral;
import org.apache.el.ValueExpressionImpl;
import org.apache.el.parser.AstDeferredExpression;
import org.apache.el.parser.AstDynamicExpression;
import org.apache.el.parser.AstFunction;
import org.apache.el.parser.AstIdentifier;
import org.apache.el.parser.AstLiteralExpression;
import org.apache.el.parser.AstValue;
import org.apache.el.parser.ELParser;
import org.apache.el.parser.Node;
import org.apache.el.parser.NodeVisitor;
import org.apache.el.util.ConcurrentCache;
import org.apache.el.util.MessageFactory;

/**
 * Builds {@link ValueExpression} and {@link MethodExpression} instances from EL
 * source strings. Parsed AST roots are cached by expression string and parser
 * instances are pooled, since both parsing and parser construction are costly.
 *
 * @author Jacob Hookom [jacob@hookom.net]
 */
public final class ExpressionBuilder implements NodeVisitor {

    // Pool of reusable ELParser instances (see SynchronizedStack below).
    private static final SynchronizedStack<ELParser> parserCache = new SynchronizedStack<>();

    private static final int CACHE_SIZE;
    private static final String CACHE_SIZE_PROP =
        "org.apache.el.ExpressionBuilder.CACHE_SIZE";

    static {
        // Read the cache size system property, going through a privileged
        // action when a SecurityManager is installed. Default is 5000 entries.
        String cacheSizeStr;
        if (System.getSecurityManager() == null) {
            cacheSizeStr = System.getProperty(CACHE_SIZE_PROP, "5000");
        } else {
            cacheSizeStr = AccessController.doPrivileged(
                    (PrivilegedAction<String>) () -> System.getProperty(
                            CACHE_SIZE_PROP, "5000"));
        }
        CACHE_SIZE = Integer.parseInt(cacheSizeStr);
    }

    // Expression string -> parsed AST root. Shared across all builder instances.
    private static final ConcurrentCache<String, Node> expressionCache =
            new ConcurrentCache<>(CACHE_SIZE);

    // Per-build mappers; start as Factory wrappers and are replaced by the
    // snapshot the factory produces once prepare() has visited the tree.
    private FunctionMapper fnMapper;

    private VariableMapper varMapper;

    private final String expression;

    /**
     * @param expression the EL source text to build from
     * @param ctx        context whose function/variable mappers are captured
     *                   (each may be null)
     */
    public ExpressionBuilder(String expression, ELContext ctx) throws ELException {
        this.expression = expression;

        FunctionMapper ctxFn = ctx.getFunctionMapper();
        VariableMapper ctxVar = ctx.getVariableMapper();

        if (ctxFn != null) {
            this.fnMapper = new FunctionMapperFactory(ctxFn);
        }
        if (ctxVar != null) {
            this.varMapper = new VariableMapperFactory(ctxVar);
        }
    }

    /**
     * Parses (or retrieves from cache) the AST for the given expression.
     *
     * @throws ELException if expr is null or cannot be parsed
     */
    public static final Node createNode(String expr) throws ELException {
        Node n = createNodeInternal(expr);
        return n;
    }

    private static final Node createNodeInternal(String expr) throws ELException {
        if (expr == null) {
            throw new ELException(MessageFactory.get("error.null"));
        }

        Node n = expressionCache.get(expr);
        if (n == null) {
            ELParser parser = parserCache.pop();
            try {
                if (parser == null) {
                    parser = new ELParser(new StringReader(expr));
                } else {
                    // Reuse a pooled parser rather than constructing a new one.
                    parser.ReInit(new StringReader(expr));
                }
                n = parser.CompositeExpression();

                // validate composite expression
                int numChildren = n.jjtGetNumChildren();
                if (numChildren == 1) {
                    n = n.jjtGetChild(0);
                } else {
                    // Mixing ${...} and #{...} in one composite expression is
                    // not allowed; literals may appear alongside either kind.
                    Class<?> type = null;
                    Node child = null;
                    for (int i = 0; i < numChildren; i++) {
                        child = n.jjtGetChild(i);
                        if (child instanceof AstLiteralExpression) {
                            continue;
                        }
                        if (type == null) {
                            type = child.getClass();
                        } else {
                            if (!type.equals(child.getClass())) {
                                throw new ELException(MessageFactory.get(
                                        "error.mixed", expr));
                            }
                        }
                    }
                }

                // Unwrap the deferred/dynamic wrapper to its single child.
                if (n instanceof AstDeferredExpression
                        || n instanceof AstDynamicExpression) {
                    n = n.jjtGetChild(0);
                }
                expressionCache.put(expr, n);
            } catch (Exception e) {
                throw new ELException(
                        MessageFactory.get("error.parseFail", expr), e);
            } finally {
                if (parser != null) {
                    // Return the parser to the pool for reuse.
                    parserCache.push(parser);
                }
            }
        }
        return n;
    }

    /**
     * Visits the tree to resolve functions/variables, then freezes the
     * factory mappers into their immutable snapshots.
     */
    private void prepare(Node node) throws ELException {
        try {
            node.accept(this);
        } catch (Exception e) {
            if (e instanceof ELException) {
                throw (ELException) e;
            } else {
                throw (new ELException(e));
            }
        }
        if (this.fnMapper instanceof FunctionMapperFactory) {
            this.fnMapper = ((FunctionMapperFactory) this.fnMapper).create();
        }
        if (this.varMapper instanceof VariableMapperFactory) {
            this.varMapper = ((VariableMapperFactory) this.varMapper).create();
        }
    }

    // Parses this builder's expression and prepares it for evaluation.
    private Node build() throws ELException {
        Node n = createNodeInternal(this.expression);
        this.prepare(n);
        if (n instanceof AstDeferredExpression
                || n instanceof AstDynamicExpression) {
            n = n.jjtGetChild(0);
        }
        return n;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.sun.el.parser.NodeVisitor#visit(com.sun.el.parser.Node)
     */
    @Override
    public void visit(Node node) throws ELException {
        if (node instanceof AstFunction) {
            AstFunction funcNode = (AstFunction) node;
            Method m = null;
            if (this.fnMapper != null) {
                m = fnMapper.resolveFunction(funcNode.getPrefix(), funcNode
                        .getLocalName());
            }
            // References to variables that refer to lambda expressions will be
            // parsed as functions. This is handled at runtime but at this point
            // need to treat it as a variable rather than a function.
            if (m == null && this.varMapper != null
                    && funcNode.getPrefix().length() == 0) {
                this.varMapper.resolveVariable(funcNode.getLocalName());
                return;
            }
            if (this.fnMapper == null) {
                throw new ELException(MessageFactory.get("error.fnMapper.null"));
            }
            if (m == null) {
                throw new ELException(MessageFactory.get(
                        "error.fnMapper.method", funcNode.getOutputName()));
            }
            // Validate the argument count; for varargs, all but the trailing
            // varargs parameter must be supplied.
            int methodParameterCount = m.getParameterTypes().length;
            // AstFunction->MethodParameters->Parameters()
            int inputParameterCount = node.jjtGetChild(0).jjtGetNumChildren();
            if (m.isVarArgs() && inputParameterCount < methodParameterCount - 1
                    || !m.isVarArgs() && inputParameterCount != methodParameterCount) {
                throw new ELException(MessageFactory.get(
                        "error.fnMapper.paramcount", funcNode.getOutputName(),
                        "" + methodParameterCount,
                        "" + node.jjtGetChild(0).jjtGetNumChildren()));
            }
        } else if (node instanceof AstIdentifier && this.varMapper != null) {
            String variable = node.getImage();
            // simply capture it
            this.varMapper.resolveVariable(variable);
        }
    }

    /** Builds a ValueExpression coerced to the given expected type. */
    public ValueExpression createValueExpression(Class<?> expectedType)
            throws ELException {
        Node n = this.build();
        return new ValueExpressionImpl(this.expression, n, this.fnMapper,
                this.varMapper, expectedType);
    }

    /**
     * Builds a MethodExpression.
     *
     * @throws NullPointerException if no parameter types are given and the
     *         expression does not itself provide parameters
     * @throws ELException if the parsed node cannot represent a method call
     */
    public MethodExpression createMethodExpression(Class<?> expectedReturnType,
            Class<?>[] expectedParamTypes) throws ELException {
        Node n = this.build();
        if (!n.isParametersProvided() && expectedParamTypes == null) {
            throw new NullPointerException(MessageFactory
                    .get("error.method.nullParms"));
        }
        if (n instanceof AstValue || n instanceof AstIdentifier) {
            return new MethodExpressionImpl(expression, n, this.fnMapper,
                    this.varMapper, expectedReturnType, expectedParamTypes);
        } else if (n instanceof AstLiteralExpression) {
            return new MethodExpressionLiteral(expression, expectedReturnType,
                    expectedParamTypes);
        } else {
            throw new ELException(MessageFactory.get("error.invalidMethodExpression",
                    expression));
        }
    }

    /*
     * Copied from org.apache.tomcat.util.collections.SynchronizedStack since
     * we don't want the EL implementation to depend on the JAR where that
     * class resides.
     */
    private static class SynchronizedStack<T> {

        public static final int DEFAULT_SIZE = 128;
        private static final int DEFAULT_LIMIT = -1;

        // Current capacity of the backing array.
        private int size;
        // Maximum capacity; -1 means unbounded.
        private final int limit;

        /*
         * Points to the next available object in the stack
         */
        private int index = -1;

        private Object[] stack;

        public SynchronizedStack() {
            this(DEFAULT_SIZE, DEFAULT_LIMIT);
        }

        public SynchronizedStack(int size, int limit) {
            this.size = size;
            this.limit = limit;
            stack = new Object[size];
        }

        // Returns false (and drops the object) when the stack is at its limit.
        public synchronized boolean push(T obj) {
            index++;
            if (index == size) {
                if (limit == -1 || size < limit) {
                    expand();
                } else {
                    index--;
                    return false;
                }
            }
            stack[index] = obj;
            return true;
        }

        // Returns null when empty; clears the slot so the object can be GC'd.
        @SuppressWarnings("unchecked")
        public synchronized T pop() {
            if (index == -1) {
                return null;
            }
            T result = (T) stack[index];
            stack[index--] = null;
            return result;
        }

        private void expand() {
            int newSize = size * 2;
            if (limit != -1 && newSize > limit) {
                newSize = limit;
            }
            Object[] newStack = new Object[newSize];
            System.arraycopy(stack, 0, newStack, 0, size);
            // This is the only point where garbage is created by throwing away the
            // old array. Note it is only the array, not the contents, that becomes
            // garbage.
            stack = newStack;
            size = newSize;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.coprocessor; import static org.apache.phoenix.query.QueryConstants.AGG_TIMESTAMP; import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN; import static org.apache.phoenix.query.QueryConstants.SINGLE_COLUMN_FAMILY; import static org.apache.phoenix.query.QueryConstants.UNGROUPED_AGG_ROW_KEY; import static org.apache.phoenix.util.ScanUtil.isDummy; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import org.apache.phoenix.thirdparty.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.regionserver.Region; import 
org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.query.HBaseFactoryProvider;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.hbase.index.parallel.Task;
import org.apache.phoenix.hbase.index.parallel.TaskBatch;
import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
import org.apache.phoenix.index.GlobalIndexChecker;
import org.apache.phoenix.mapreduce.index.IndexTool;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.util.PhoenixKeyValueUtil;
import org.apache.phoenix.util.ServerUtil;
import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * This is a data table region scanner which scans data table rows locally. From the data table rows, expected
 * index table mutations are generated. These expected index mutations are used for both rebuilding index table
 * rows and also verifying them. The HBase client available to region servers is used to update or verify index
 * table rows.
*/
public class IndexRebuildRegionScanner extends GlobalIndexRegionScanner {

    private static final Logger LOGGER = LoggerFactory.getLogger(IndexRebuildRegionScanner.class);

    // Test-only switches: skip the actual rebuild, or force it to fail.
    private static boolean ignoreIndexRebuildForTesting = false;
    private static boolean throwExceptionForRebuild = false;

    public static void setIgnoreIndexRebuildForTesting(boolean ignore) {
        ignoreIndexRebuildForTesting = ignore;
    }

    public static void setThrowExceptionForRebuild(boolean throwException) {
        throwExceptionForRebuild = throwException;
    }

    // Result code for the single-row (read-repair) rebuild path; one of
    // GlobalIndexChecker.RebuildReturnCode's values.
    private int singleRowRebuildReturnCode;

    /**
     * In single-row read-repair mode (scan carries INDEX_ROW_KEY) the page size
     * is forced to one row; otherwise the index table's region end keys are
     * fetched so rebuild/verify work can later be partitioned per region.
     */
    @VisibleForTesting
    public IndexRebuildRegionScanner(final RegionScanner innerScanner, final Region region, final Scan scan,
                                     final RegionCoprocessorEnvironment env,
                                     final UngroupedAggregateRegionObserver ungroupedAggregateRegionObserver)
            throws IOException {
        super(innerScanner, region, scan, env, ungroupedAggregateRegionObserver);
        indexHTable = hTableFactory.getTable(new ImmutableBytesPtr(indexMaintainer.getIndexTableName()));
        indexTableTTL = indexHTable.getTableDescriptor().getColumnFamilies()[0].getTimeToLive();
        indexRowKeyforReadRepair = scan.getAttribute(BaseScannerRegionObserver.INDEX_ROW_KEY);
        if (indexRowKeyforReadRepair != null) {
            setReturnCodeForSingleRowRebuild();
            pageSizeInRows = 1;
            return;
        }
        try (org.apache.hadoop.hbase.client.Connection connection =
                     HBaseFactoryProvider.getHConnectionFactory().createConnection(env.getConfiguration())) {
            regionEndKeys = connection.getRegionLocator(indexHTable.getName()).getEndKeys();
        }
    }

    private void setReturnCodeForSingleRowRebuild() throws IOException {
        try (RegionScanner scanner = region.getScanner(scan)) {
            List<Cell> row = new ArrayList<>();
            scanner.next(row);
            // Check if the data table row we have just scanned matches with the index row key.
            // If not, there is no need to build the index row from this data table row,
            // and just return zero row count.
            if (row.isEmpty()) {
                singleRowRebuildReturnCode = GlobalIndexChecker.RebuildReturnCode.NO_DATA_ROW.getValue();
            } else {
                Put put = new Put(CellUtil.cloneRow(row.get(0)));
                for (Cell cell : row) {
                    put.add(cell);
                }
                if (checkIndexRow(indexRowKeyforReadRepair, put)) {
                    singleRowRebuildReturnCode = GlobalIndexChecker.RebuildReturnCode.INDEX_ROW_EXISTS.getValue();
                } else {
                    singleRowRebuildReturnCode = GlobalIndexChecker.RebuildReturnCode.NO_INDEX_ROW.getValue();
                }
            }
        }
    }

    // True iff the index row key built from this data row equals the requested key.
    private boolean checkIndexRow(final byte[] indexRowKey, final Put put) throws IOException {
        byte[] builtIndexRowKey = getIndexRowKey(indexMaintainer, put);
        if (Bytes.compareTo(builtIndexRowKey, 0, builtIndexRowKey.length,
                indexRowKey, 0, indexRowKey.length) != 0) {
            return false;
        }
        return true;
    }

    // Batches mutations to the index table after checking the region is still open.
    protected void commitBatch(List<Mutation> indexUpdates) throws IOException, InterruptedException {
        ungroupedAggregateRegionObserver.checkForRegionClosingOrSplitting();
        indexHTable.batch(indexUpdates, null);
    }

    /**
     * Applies the expected index mutations (and deletions) to the index table.
     * Test hooks may skip the rebuild entirely or force an IOException.
     */
    protected void rebuildIndexRows(Map<byte[], List<Mutation>> indexMutationMap,
                                    List<Mutation> indexRowsToBeDeleted,
                                    IndexToolVerificationResult verificationResult) throws IOException {
        if (ignoreIndexRebuildForTesting) {
            return;
        }
        if (throwExceptionForRebuild) {
            throw new IOException("Exception for testing. Something happened");
        }
        updateIndexRows(indexMutationMap, indexRowsToBeDeleted, verificationResult);
    }

    /**
     * Scans the index table for the rows named in the expected-mutation map and
     * converts each result into the list of mutations the index actually holds.
     * When the raw filter is unsupported, rows outside the expected set are
     * skipped client-side.
     */
    private Map<byte[], List<Mutation>> populateActualIndexMutationMap(
            Map<byte[], List<Mutation>> expectedIndexMutationMap) throws IOException {
        Map<byte[], List<Mutation>> actualIndexMutationMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
        Scan indexScan = prepareIndexScan(expectedIndexMutationMap);
        try (ResultScanner resultScanner = indexHTable.getScanner(indexScan)) {
            for (Result result = resultScanner.next(); (result != null); result = resultScanner.next()) {
                if (!isRawFilterSupported && !expectedIndexMutationMap.containsKey(result.getRow())) {
                    continue;
                }
                ungroupedAggregateRegionObserver.checkForRegionClosingOrSplitting();
                List<Mutation> mutationList = prepareActualIndexMutations(result);
                actualIndexMutationMap.put(result.getRow(), mutationList);
            }
        } catch (Throwable t) {
            ServerUtil.throwIOException(indexHTable.getName().toString(), t);
        }
        return actualIndexMutationMap;
    }

    /**
     * Executes one verify-type policy over the expected mutations:
     * NONE rebuilds only; ONLY verifies only; BEFORE verifies then rebuilds the
     * failures; AFTER rebuilds then verifies; BOTH verifies, rebuilds the
     * failures, then re-verifies what was rebuilt.
     */
    private void rebuildAndOrVerifyIndexRows(Map<byte[], List<Mutation>> expectedIndexMutationMap,
                                             Set<byte[]> mostRecentIndexRowKeys,
                                             IndexToolVerificationResult verificationResult) throws IOException {
        List<Mutation> indexRowsToBeDeleted = new ArrayList<>();
        if (verifyType == IndexTool.IndexVerifyType.NONE) {
            rebuildIndexRows(expectedIndexMutationMap, indexRowsToBeDeleted, verificationResult);
            return;
        }
        if (verifyType == IndexTool.IndexVerifyType.ONLY) {
            Map<byte[], List<Mutation>> actualIndexMutationMap = populateActualIndexMutationMap(expectedIndexMutationMap);
            verifyIndexRows(actualIndexMutationMap, expectedIndexMutationMap, mostRecentIndexRowKeys,
                    Collections.EMPTY_LIST, verificationResult.getBefore(), true);
            return;
        }
        if (verifyType == IndexTool.IndexVerifyType.BEFORE) {
            Map<byte[], List<Mutation>> actualIndexMutationMap = populateActualIndexMutationMap(expectedIndexMutationMap);
            verifyIndexRows(actualIndexMutationMap, expectedIndexMutationMap, mostRecentIndexRowKeys,
                    indexRowsToBeDeleted, verificationResult.getBefore(), true);
            // verifyIndexRows leaves only the failed rows in the map; rebuild those.
            if (!expectedIndexMutationMap.isEmpty() || !indexRowsToBeDeleted.isEmpty()) {
                rebuildIndexRows(expectedIndexMutationMap, indexRowsToBeDeleted, verificationResult);
            }
            return;
        }
        if (verifyType == IndexTool.IndexVerifyType.AFTER) {
            rebuildIndexRows(expectedIndexMutationMap, Collections.EMPTY_LIST, verificationResult);
            Map<byte[], List<Mutation>> actualIndexMutationMap = populateActualIndexMutationMap(expectedIndexMutationMap);
            verifyIndexRows(actualIndexMutationMap, expectedIndexMutationMap, mostRecentIndexRowKeys,
                    Collections.EMPTY_LIST, verificationResult.getAfter(), false);
            return;
        }
        if (verifyType == IndexTool.IndexVerifyType.BOTH) {
            Map<byte[], List<Mutation>> actualIndexMutationMap = populateActualIndexMutationMap(expectedIndexMutationMap);
            verifyIndexRows(actualIndexMutationMap,expectedIndexMutationMap, mostRecentIndexRowKeys,
                    indexRowsToBeDeleted, verificationResult.getBefore(), true);
            if (!expectedIndexMutationMap.isEmpty() || !indexRowsToBeDeleted.isEmpty()) {
                rebuildIndexRows(expectedIndexMutationMap, indexRowsToBeDeleted, verificationResult);
            }
            if (!expectedIndexMutationMap.isEmpty()) {
                actualIndexMutationMap = populateActualIndexMutationMap(expectedIndexMutationMap);
                verifyIndexRows(actualIndexMutationMap, expectedIndexMutationMap, mostRecentIndexRowKeys,
                        Collections.EMPTY_LIST, verificationResult.getAfter(), false);
            }
        }
    }

    // Wraps one per-task mutation map into a Task for the thread pool.
    private void addRebuildAndOrVerifyTask(TaskBatch<Boolean> tasks,
                                           final Map<byte[], List<Mutation>> indexMutationMap,
                                           final Set<byte[]> mostRecentIndexRowKeys,
                                           final IndexToolVerificationResult verificationResult) {
        tasks.add(new Task<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                try {
                    //in HBase 1.x we could check if the coproc environment was closed or aborted,
                    //but in HBase 2.x the coproc environment can't check region server services
                    if (Thread.currentThread().isInterrupted()) {
                        exceptionMessage = "Pool closed, not attempting to rebuild and/or verify index rows! " +
                                indexHTable.getName();
                        throw new IOException(exceptionMessage);
                    }
                    rebuildAndOrVerifyIndexRows(indexMutationMap, mostRecentIndexRowKeys, verificationResult);
                } catch (Exception e) {
                    throw e;
                }
                return Boolean.TRUE;
            }
        });
    }

    /**
     * Splits the sorted expected-mutation map into per-task maps, starting a
     * new map whenever maxMapSize rows is reached or the next key crosses an
     * index-region boundary (so each task's keys stay within one index region).
     */
    public static List<Map<byte[], List<Mutation>>> getPerTaskIndexMutationMaps(
            TreeMap<byte[], List<Mutation>> indexMutationMap, byte[][] endKeys, int maxMapSize) {
        List<Map<byte[], List<Mutation>>> mapList = new ArrayList<>();
        int regionCount = endKeys.length;
        int regionIndex = 0;
        byte[] indexKey = indexMutationMap.firstKey();
        Map<byte[], List<Mutation>> perTaskIndexMutationMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
        mapList.add(perTaskIndexMutationMap);
        // Find the region including the first index key
        while (regionIndex < regionCount - 1 &&
                Bytes.BYTES_COMPARATOR.compare(indexKey, endKeys[regionIndex]) > 0) {
            regionIndex++;
        }
        for (Map.Entry<byte[], List<Mutation>> entry: indexMutationMap.entrySet()) {
            indexKey = entry.getKey();
            if (perTaskIndexMutationMap.size() == maxMapSize ||
                    (regionIndex < regionCount - 1 &&
                            Bytes.BYTES_COMPARATOR.compare(indexKey, endKeys[regionIndex]) > 0)) {
                perTaskIndexMutationMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
                mapList.add(perTaskIndexMutationMap);
                // Find the region including indexKey
                while (regionIndex < regionCount - 1 &&
                        Bytes.BYTES_COMPARATOR.compare(indexKey, endKeys[regionIndex]) > 0) {
                    regionIndex++;
                }
            }
            perTaskIndexMutationMap.put(indexKey, entry.getValue());
        }
        return mapList;
    }

    /**
     * Partitions the mutation map into per-task chunks, runs rebuild/verify
     * tasks in parallel, and aggregates their verification results.
     */
    private void verifyAndOrRebuildIndex(Map<byte[], List<Mutation>> indexMutationMap,
                                         Set<byte[]> mostRecentIndexRowKeys) throws IOException {
        if (indexMutationMap.size() == 0) {
            return;
        }
        List<Map<byte[], List<Mutation>>> mapList =
                getPerTaskIndexMutationMaps((TreeMap)indexMutationMap, regionEndKeys, rowCountPerTask);
        int taskCount = mapList.size();
        TaskBatch<Boolean> tasks = new TaskBatch<>(taskCount);
        List<IndexToolVerificationResult> verificationResultList = new ArrayList<>(taskCount);
        for (int i = 0; i < taskCount; i++) {
            IndexToolVerificationResult perTaskVerificationResult = new IndexToolVerificationResult(scan);
            verificationResultList.add(perTaskVerificationResult);
            addRebuildAndOrVerifyTask(tasks, mapList.get(i), mostRecentIndexRowKeys, perTaskVerificationResult);
        }
        submitTasks(tasks);
        if (verify) {
            for (IndexToolVerificationResult result : verificationResultList) {
                verificationResult.add(result);
            }
        }
    }

    /**
     * Scans a page of data rows under the region read lock, generates the
     * expected index mutations for them, rebuilds and/or verifies those index
     * rows, and emits a single aggregate cell carrying the row count (or the
     * read-repair return code in single-row mode).
     */
    @Override
    public boolean next(List<Cell> results) throws IOException {
        if (indexRowKeyforReadRepair != null &&
                singleRowRebuildReturnCode == GlobalIndexChecker.RebuildReturnCode.NO_DATA_ROW.getValue()) {
            byte[] rowCountBytes = PLong.INSTANCE.toBytes(Long.valueOf(singleRowRebuildReturnCode));
            final Cell aggKeyValue = PhoenixKeyValueUtil.newKeyValue(UNGROUPED_AGG_ROW_KEY, SINGLE_COLUMN_FAMILY,
                    SINGLE_COLUMN, AGG_TIMESTAMP, rowCountBytes, 0, rowCountBytes.length);
            results.add(aggKeyValue);
            return false;
        }
        Map<byte[], List<Mutation>> indexMutationMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
        Set<byte[]> mostRecentIndexRowKeys = new TreeSet<>(Bytes.BYTES_COMPARATOR);
        Cell lastCell = null;
        int dataRowCount = 0;
        int indexMutationCount = 0;
        region.startRegionOperation();
        RegionScanner localScanner = null;
        try {
            localScanner = getLocalScanner();
            if (localScanner == null) {
                return false;
            }
            synchronized (localScanner) {
                if (!shouldVerify()) {
                    skipped = true;
                    return false;
                }
                do {
                    /*
                        If region is closing and there are large number of rows being verified/rebuilt with IndexTool,
                        not having this check will impact/delay the region closing -- affecting the availability
                        as this method holds the read lock on the region.
                    * */
                    ungroupedAggregateRegionObserver.checkForRegionClosingOrSplitting();
                    List<Cell> row = new ArrayList<>();
                    hasMore = localScanner.nextRaw(row);
                    if (!row.isEmpty()) {
                        lastCell = row.get(0); // lastCell is any cell from the last visited row
                        if (isDummy(row)) {
                            break;
                        }
                        // Fold the row's cells into one Put and one Delete.
                        Put put = null;
                        Delete del = null;
                        for (Cell cell : row) {
                            if (cell.getType().equals(Cell.Type.Put)) {
                                if (familyMap != null && !isColumnIncluded(cell)) {
                                    continue;
                                }
                                if (put == null) {
                                    put = new Put(CellUtil.cloneRow(cell));
                                }
                                put.add(cell);
                            } else {
                                if (del == null) {
                                    del = new Delete(CellUtil.cloneRow(cell));
                                }
                                del.add(cell);
                            }
                        }
                        if (put == null && del == null) {
                            continue;
                        }
                        indexMutationCount += prepareIndexMutations(put, del, indexMutationMap, mostRecentIndexRowKeys);
                        dataRowCount++;
                    }
                } while (hasMore && indexMutationCount < pageSizeInRows && dataRowCount < pageSizeInRows);
                if (!indexMutationMap.isEmpty()) {
                    if (indexRowKeyforReadRepair != null) {
                        rebuildIndexRows(indexMutationMap, Collections.EMPTY_LIST, verificationResult);
                    } else {
                        verifyAndOrRebuildIndex(indexMutationMap, mostRecentIndexRowKeys);
                    }
                }
                if (verify) {
                    verificationResult.setScannedDataRowCount(verificationResult.getScannedDataRowCount() + dataRowCount);
                }
            }
        } catch (Throwable e) {
            LOGGER.error("Exception in IndexRebuildRegionScanner for region "
                    + region.getRegionInfo().getRegionNameAsString(), e);
            this.shouldRetry = true;
            throw e;
        } finally {
            region.closeRegionOperation();
            if (localScanner!=null && localScanner!=innerScanner) {
                localScanner.close();
            }
        }
        if (indexRowKeyforReadRepair != null) {
            dataRowCount = singleRowRebuildReturnCode;
        }
        if (minTimestamp != 0) {
            nextStartKey = ByteUtil.calculateTheClosestNextRowKeyForPrefix(CellUtil.cloneRow(lastCell));
        }
        byte[] rowCountBytes = PLong.INSTANCE.toBytes(Long.valueOf(dataRowCount));
        final Cell aggKeyValue;
        if (lastCell == null) {
            aggKeyValue = PhoenixKeyValueUtil.newKeyValue(UNGROUPED_AGG_ROW_KEY, SINGLE_COLUMN_FAMILY,
                    SINGLE_COLUMN, AGG_TIMESTAMP, rowCountBytes, 0, rowCountBytes.length);
        } else {
            aggKeyValue = PhoenixKeyValueUtil.newKeyValue(CellUtil.cloneRow(lastCell), SINGLE_COLUMN_FAMILY,
                    SINGLE_COLUMN, AGG_TIMESTAMP, rowCountBytes, 0, rowCountBytes.length);
        }
        results.add(aggKeyValue);
        return hasMore || hasMoreIncr;
    }
}
/* * Copyright (c) 2009, PostgreSQL Global Development Group * See the LICENSE file in the project root for more information. */ package org.postgresql.test.xa; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; import org.postgresql.test.TestUtil; import org.postgresql.test.jdbc2.optional.BaseDataSourceTest; import org.postgresql.xa.PGXADataSource; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Arrays; import java.util.Random; import javax.sql.XAConnection; import javax.sql.XADataSource; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; import javax.transaction.xa.Xid; public class XADataSourceTest { private XADataSource xaDs; private Connection dbConn; private boolean connIsSuper; private XAConnection xaconn; private XAResource xaRes; private Connection conn; public XADataSourceTest() { xaDs = new PGXADataSource(); BaseDataSourceTest.setupDataSource((PGXADataSource) xaDs); } @Before public void setUp() throws Exception { dbConn = TestUtil.openDB(); assumeTrue(isPreparedTransactionEnabled(dbConn)); // Check if we're operating as a superuser; some tests require it. 
Statement st = dbConn.createStatement();
    // SHOW returns exactly one row with one column.
    ResultSet rs = st.executeQuery("SHOW is_superuser;");
    rs.next();
    connIsSuper = rs.getBoolean(1);
    st.close();

    TestUtil.createTable(dbConn, "testxa1", "foo int");
    TestUtil.createTable(dbConn, "testxa2", "foo int primary key");
    TestUtil.createTable(dbConn, "testxa3", "foo int references testxa2(foo) deferrable");

    clearAllPrepared();

    xaconn = xaDs.getXAConnection();
    xaRes = xaconn.getXAResource();
    conn = xaconn.getConnection();
  }

  /**
   * Reports whether the server can hold prepared (two-phase) transactions.
   *
   * @param connection connection to query
   * @return true when {@code max_prepared_transactions > 0}
   */
  private static boolean isPreparedTransactionEnabled(Connection connection) throws SQLException {
    Statement stmt = connection.createStatement();
    ResultSet rs = stmt.executeQuery("SHOW max_prepared_transactions");
    rs.next();
    int mpt = rs.getInt(1);
    rs.close();
    stmt.close();
    return mpt > 0;
  }

  @After
  public void tearDown() throws SQLException {
    try {
      xaconn.close();
    } catch (Exception ignored) {
      // Best effort: several tests close the XAConnection themselves.
    }

    clearAllPrepared();

    // Drop in reverse creation order: testxa3 references testxa2.
    TestUtil.dropTable(dbConn, "testxa3");
    TestUtil.dropTable(dbConn, "testxa2");
    TestUtil.dropTable(dbConn, "testxa1");
    TestUtil.closeDB(dbConn);
  }

  /**
   * Rolls back every prepared transaction belonging to the current database, so a test cannot be
   * polluted by leftovers from an earlier run.
   */
  private void clearAllPrepared() throws SQLException {
    Statement st = dbConn.createStatement();
    try {
      ResultSet rs = st.executeQuery(
          "SELECT x.gid, x.owner = current_user "
              + "FROM pg_prepared_xacts x "
              + "WHERE x.database = current_database()");

      Statement st2 = dbConn.createStatement();
      while (rs.next()) {
        // TODO: This should really use org.junit.Assume once we move to JUnit 4
        assertTrue("Only prepared xacts owned by current user may be present in db",
            rs.getBoolean(2));
        st2.executeUpdate("ROLLBACK PREPARED '" + rs.getString(1) + "'");
      }
      st2.close();
    } finally {
      st.close();
    }
  }

  /**
   * Test {@link Xid} implementation. The gtrid is random except for its first five bytes, which
   * carry the discriminator {@code i}, so {@code CustomXid(n)} instances with different {@code n}
   * compare unequal while the random tail keeps xids unique between test runs.
   */
  static class CustomXid implements Xid {
    private static Random rand = new Random(System.currentTimeMillis());
    byte[] gtrid = new byte[Xid.MAXGTRIDSIZE];
    byte[] bqual = new byte[Xid.MAXBQUALSIZE];

    CustomXid(int i) {
      rand.nextBytes(gtrid);
      gtrid[0] = (byte) i;
      gtrid[1] = (byte) i;
      gtrid[2] = (byte) i;
      gtrid[3] = (byte) i;
      gtrid[4] = (byte) i;
      bqual[0] = 4;
      bqual[1] = 5;
      bqual[2] = 6;
    }

    @Override
    public int getFormatId() {
      return 0;
    }

    @Override
    public byte[] getGlobalTransactionId() {
      return gtrid;
    }

    @Override
    public byte[] getBranchQualifier() {
      return bqual;
    }

    @Override
    public boolean equals(Object o) {
      if (!(o instanceof Xid)) {
        return false;
      }

      Xid other = (Xid) o;
      if (other.getFormatId() != this.getFormatId()) {
        return false;
      }
      if (!Arrays.equals(other.getBranchQualifier(), this.getBranchQualifier())) {
        return false;
      }
      if (!Arrays.equals(other.getGlobalTransactionId(), this.getGlobalTransactionId())) {
        return false;
      }
      return true;
    }

    @Override
    public int hashCode() {
      final int prime = 31;
      int result = 1;
      result = prime * result + Arrays.hashCode(getBranchQualifier());
      result = prime * result + getFormatId();
      result = prime * result + Arrays.hashCode(getGlobalTransactionId());
      return result;
    }
  }

  /*
   * Check that the equals method works for the connection wrapper returned by
   * PGXAConnection.getConnection().
   */
  @Test
  public void testWrapperEquals() throws Exception {
    assertTrue("Wrappers should be equal", conn.equals(conn));
    assertFalse("Wrapper should be unequal to null", conn.equals(null));
    assertFalse("Wrapper should be unequal to unrelated object",
        conn.equals("dummy string object"));
  }

  /** Basic one-phase commit: start, work, end, commit(onePhase=true). */
  @Test
  public void testOnePhase() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    conn.createStatement().executeQuery("SELECT * FROM testxa1");
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.commit(xid, true);
  }

  /** Basic two-phase commit: start, work, end, prepare, commit(onePhase=false). */
  @Test
  public void testTwoPhaseCommit() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    conn.createStatement().executeQuery("SELECT * FROM testxa1");
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    xaRes.commit(xid, false);
  }

  /**
   * Closing the logical connection before end/commit must not abort the XA transaction; the
   * inserted row must be visible after commit.
   */
  @Test
  public void testCloseBeforeCommit() throws Exception {
    Xid xid = new CustomXid(5);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    assertEquals(1, conn.createStatement().executeUpdate("INSERT INTO testxa1 VALUES (1)"));
    conn.close();
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.commit(xid, true);

    ResultSet rs = dbConn.createStatement().executeQuery("SELECT foo FROM testxa1");
    assertTrue(rs.next());
    assertEquals(1, rs.getInt(1));
  }

  /**
   * A prepared xid must show up in recover(TMSTARTRSCAN) and disappear again after rollback.
   */
  @Test
  public void testRecover() throws Exception {
    Xid xid = new CustomXid(12345);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    conn.createStatement().executeQuery("SELECT * FROM testxa1");
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);

    {
      Xid[] recoveredXidArray = xaRes.recover(XAResource.TMSTARTRSCAN);

      boolean recoveredXid = false;
      for (Xid aRecoveredXidArray : recoveredXidArray) {
        if (xid.equals(aRecoveredXidArray)) {
          recoveredXid = true;
          break;
        }
      }

      assertTrue("Did not recover prepared xid", recoveredXid);
      assertEquals(0, xaRes.recover(XAResource.TMNOFLAGS).length);
    }

    xaRes.rollback(xid);

    {
      Xid[] recoveredXidArray = xaRes.recover(XAResource.TMSTARTRSCAN);

      boolean recoveredXid = false;
      for (Xid aRecoveredXidArray : recoveredXidArray) {
        // Fixed: the original compared xaRes (an XAResource) against the recovered Xid,
        // which is always false, so the assertFalse below passed vacuously.
        if (xid.equals(aRecoveredXidArray)) {
          recoveredXid = true;
          break;
        }
      }

      assertFalse("Recovered rolled back xid", recoveredXid);
    }
  }

  /** Two-phase rollback: start, end, prepare, rollback. */
  @Test
  public void testRollback() throws XAException {
    Xid xid = new CustomXid(3);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    xaRes.rollback(xid);
  }

  /** One-phase rollback of a transaction that was never prepared. */
  @Test
  public void testRollbackWithoutPrepare() throws XAException {
    Xid xid = new CustomXid(4);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.rollback(xid);
  }

  /**
   * Verifies autocommit is false exactly while an XA transaction is associated with the
   * connection and restored afterwards, and that close+getConnection keeps the XA transaction.
   */
  @Test
  public void testAutoCommit() throws Exception {
    Xid xid = new CustomXid(6);

    // When not in an XA transaction, autocommit should be true
    // per normal JDBC rules.
    assertTrue(conn.getAutoCommit());

    // When in an XA transaction, autocommit should be false
    xaRes.start(xid, XAResource.TMNOFLAGS);
    assertFalse(conn.getAutoCommit());
    xaRes.end(xid, XAResource.TMSUCCESS);
    assertFalse(conn.getAutoCommit());
    xaRes.commit(xid, true);
    assertTrue(conn.getAutoCommit());

    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    assertTrue(conn.getAutoCommit());
    xaRes.commit(xid, false);
    assertTrue(conn.getAutoCommit());

    // Check that autocommit is reset to true after a 1-phase rollback
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.rollback(xid);
    assertTrue(conn.getAutoCommit());

    // Check that autocommit is reset to true after a 2-phase rollback
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    xaRes.rollback(xid);
    assertTrue(conn.getAutoCommit());

    // Check that autoCommit is set correctly after a getConnection-call
    conn = xaconn.getConnection();
    assertTrue(conn.getAutoCommit());

    xaRes.start(xid, XAResource.TMNOFLAGS);

    conn.createStatement().executeQuery("SELECT * FROM testxa1");

    java.sql.Timestamp ts1 = getTransactionTimestamp(conn);

    conn.close();
    conn = xaconn.getConnection();
    assertFalse(conn.getAutoCommit());

    java.sql.Timestamp ts2 = getTransactionTimestamp(conn);

    /*
     * Check that we're still in the same transaction. close+getConnection() should not rollback the
     * XA-transaction implicitly.
     */
    assertEquals(ts1, ts2);

    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    xaRes.rollback(xid);
    assertTrue(conn.getAutoCommit());
  }

  /**
   * <p>Get the time the current transaction was started from the server.</p>
   *
   * <p>This can be used to check that transaction doesn't get committed/ rolled back
   * inadvertently, by calling this once before and after the suspected piece of code, and check
   * that they match. It's a bit iffy, conceivably you might get the same timestamp anyway if the
   * suspected piece of code runs fast enough, and/or the server clock is very coarse grained. But
   * it'll do for testing purposes.</p>
   */
  private static java.sql.Timestamp getTransactionTimestamp(Connection conn) throws SQLException {
    ResultSet rs = conn.createStatement().executeQuery("SELECT now()");
    rs.next();
    return rs.getTimestamp(1);
  }

  /** end(TMSUCCESS) followed by start(TMJOIN) must resume the same transaction. */
  @Test
  public void testEndThenJoin() throws XAException {
    Xid xid = new CustomXid(5);

    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.start(xid, XAResource.TMJOIN);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.commit(xid, true);
  }

  /** Commit/rollback must restore whatever autocommit mode the connection had before start(). */
  @Test
  public void testRestoreOfAutoCommit() throws Exception {
    conn.setAutoCommit(false);

    Xid xid = new CustomXid(14);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.commit(xid, true);

    assertFalse(
        "XaResource should have restored connection autocommit mode after commit or rollback to the initial state.",
        conn.getAutoCommit());

    // Test true case
    conn.setAutoCommit(true);

    xid = new CustomXid(15);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.commit(xid, true);

    assertTrue(
        "XaResource should have restored connection autocommit mode after commit or rollback to the initial state.",
        conn.getAutoCommit());
  }

  /** Like {@link #testRestoreOfAutoCommit()}, but with an end/start(TMJOIN) cycle in between. */
  @Test
  public void testRestoreOfAutoCommitEndThenJoin() throws Exception {
    // Test with TMJOIN
    conn.setAutoCommit(true);

    Xid xid = new CustomXid(16);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.start(xid, XAResource.TMJOIN);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.commit(xid, true);

    assertTrue(
        "XaResource should have restored connection autocommit mode after start(TMNOFLAGS) end() start(TMJOIN) and then commit or rollback to the initial state.",
        conn.getAutoCommit());
  }

  /**
   * Test how the driver responds to rolling back a transaction that has already been rolled back.
   * Check the driver reports the xid does not exist. The db knows the fact. ERROR: prepared
   * transaction with identifier "blah" does not exist
   */
  @Test
  public void testRepeatedRolledBack() throws Exception {
    Xid xid = new CustomXid(654321);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    // tm crash
    xaRes.recover(XAResource.TMSTARTRSCAN);
    xaRes.rollback(xid);
    try {
      xaRes.rollback(xid);
      fail("Rollback was successful");
    } catch (XAException xae) {
      // Fixed: expected value goes first in assertEquals(message, expected, actual).
      assertEquals("Checking the errorCode is XAER_NOTA indicating the "
          + "xid does not exist.", XAException.XAER_NOTA, xae.errorCode);
    }
  }

  /**
   * Invoking prepare on already prepared {@link Xid} causes {@link XAException} being thrown
   * with error code {@link XAException#XAER_PROTO}.
   */
  @Test
  public void testPreparingPreparedXid() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    try {
      xaRes.prepare(xid);
      fail("Prepare is expected to fail with XAER_PROTO as xid was already prepared");
    } catch (XAException xae) {
      assertEquals("Prepare call on already prepared xid " + xid + " expects XAER_PROTO",
          XAException.XAER_PROTO, xae.errorCode);
    } finally {
      xaRes.rollback(xid);
    }
  }

  /**
   * Invoking commit on already committed {@link Xid} causes {@link XAException} being thrown
   * with error code {@link XAException#XAER_NOTA}.
   */
  @Test
  public void testCommitingCommittedXid() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    xaRes.commit(xid, false);
    try {
      xaRes.commit(xid, false);
      fail("Commit is expected to fail with XAER_NOTA as xid was already committed");
    } catch (XAException xae) {
      assertEquals("Commit call on already committed xid " + xid + " expects XAER_NOTA",
          XAException.XAER_NOTA, xae.errorCode);
    }
  }

  /**
   * Invoking commit on {@link Xid} committed by different connection.
   * That different connection could be for example transaction manager recovery.
   */
  @Test
  public void testCommitByDifferentConnection() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);

    // Fixed resource leak: close the XAConnection actually used for the commit; the original
    // opened (and closed) a brand-new connection in the finally block instead.
    XAConnection secondXaConn = null;
    try {
      PGXADataSource secondDs = new PGXADataSource();
      BaseDataSourceTest.setupDataSource(secondDs);
      secondXaConn = secondDs.getXAConnection();
      XAResource secondXaRes = secondXaConn.getXAResource();
      secondXaRes.recover(XAResource.TMSTARTRSCAN | XAResource.TMENDRSCAN);
      secondXaRes.commit(xid, false);
    } finally {
      if (secondXaConn != null) {
        secondXaConn.close();
      }
    }

    try {
      xaRes.commit(xid, false);
      fail("Commit is expected to fail with XAER_RMERR as somebody else already committed");
    } catch (XAException xae) {
      assertEquals("Commit call on already committed xid " + xid + " expects XAER_RMERR",
          XAException.XAER_RMERR, xae.errorCode);
    }
  }

  /**
   * Invoking rollback on {@link Xid} rolled-back by different connection.
   * That different connection could be for example transaction manager recovery.
   */
  @Test
  public void testRollbackByDifferentConnection() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);

    // Fixed resource leak: close the XAConnection actually used for the rollback (see
    // testCommitByDifferentConnection).
    XAConnection secondXaConn = null;
    try {
      PGXADataSource secondDs = new PGXADataSource();
      BaseDataSourceTest.setupDataSource(secondDs);
      secondXaConn = secondDs.getXAConnection();
      XAResource secondXaRes = secondXaConn.getXAResource();
      secondXaRes.recover(XAResource.TMSTARTRSCAN | XAResource.TMENDRSCAN);
      secondXaRes.rollback(xid);
    } finally {
      if (secondXaConn != null) {
        secondXaConn.close();
      }
    }

    try {
      xaRes.rollback(xid);
      fail("Rollback is expected to fail with XAER_RMERR as somebody else already rolled-back");
    } catch (XAException xae) {
      assertEquals("Rollback call on already rolled-back xid " + xid + " expects XAER_RMERR",
          XAException.XAER_RMERR, xae.errorCode);
    }
  }

  /**
   * One-phase commit of prepared {@link Xid} should throw exception.
   */
  @Test
  public void testOnePhaseCommitOfPrepared() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    try {
      xaRes.commit(xid, true);
      fail("One-phase commit is expected to fail with XAER_PROTO when called on prepared xid");
    } catch (XAException xae) {
      assertEquals("One-phase commit of prepared xid " + xid + " expects XAER_PROTO",
          XAException.XAER_PROTO, xae.errorCode);
    }
  }

  /**
   * Invoking one-phase commit on already one-phase committed {@link Xid} causes
   * {@link XAException} being thrown with error code {@link XAException#XAER_NOTA}.
   */
  @Test
  public void testOnePhaseCommitingCommittedXid() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.commit(xid, true);
    try {
      xaRes.commit(xid, true);
      fail("One-phase commit is expected to fail with XAER_NOTA as xid was already committed");
    } catch (XAException xae) {
      assertEquals("One-phase commit call on already committed xid " + xid + " expects XAER_NOTA",
          XAException.XAER_NOTA, xae.errorCode);
    }
  }

  /**
   * When unknown xid is tried to be prepared the expected {@link XAException#errorCode}
   * is {@link XAException#XAER_NOTA}.
   */
  @Test
  public void testPrepareUnknownXid() throws Exception {
    Xid xid = new CustomXid(1);
    try {
      xaRes.prepare(xid);
      fail("Prepare is expected to fail with XAER_NOTA as used unknown xid");
    } catch (XAException xae) {
      assertEquals("Prepare call on unknown xid " + xid + " expects XAER_NOTA",
          XAException.XAER_NOTA, xae.errorCode);
    }
  }

  /**
   * When unknown xid is tried to be committed the expected {@link XAException#errorCode}
   * is {@link XAException#XAER_NOTA}.
   */
  @Test
  public void testCommitUnknownXid() throws Exception {
    Xid xid = new CustomXid(1);
    Xid unknownXid = new CustomXid(42);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    try {
      xaRes.commit(unknownXid, false);
      fail("Commit is expected to fail with XAER_NOTA as used unknown xid");
    } catch (XAException xae) {
      assertEquals("Commit call on unknown xid " + unknownXid + " expects XAER_NOTA",
          XAException.XAER_NOTA, xae.errorCode);
    } finally {
      xaRes.rollback(xid);
    }
  }

  /**
   * When unknown xid is tried to be committed with one-phase commit optimization
   * the expected {@link XAException#errorCode} is {@link XAException#XAER_NOTA}.
   */
  @Test
  public void testOnePhaseCommitUnknownXid() throws Exception {
    Xid xid = new CustomXid(1);
    Xid unknownXid = new CustomXid(42);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    try {
      xaRes.commit(unknownXid, true);
      fail("One-phase commit is expected to fail with XAER_NOTA as used unknown xid");
    } catch (XAException xae) {
      assertEquals("Commit call on unknown xid " + unknownXid + " expects XAER_NOTA",
          XAException.XAER_NOTA, xae.errorCode);
    } finally {
      xaRes.rollback(xid);
    }
  }

  /**
   * When unknown xid is tried to be rolled-back the expected {@link XAException#errorCode}
   * is {@link XAException#XAER_NOTA}.
   */
  @Test
  public void testRollbackUnknownXid() throws Exception {
    Xid xid = new CustomXid(1);
    Xid unknownXid = new CustomXid(42);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);
    try {
      xaRes.rollback(unknownXid);
      fail("Rollback is expected to fail as used unknown xid");
    } catch (XAException xae) {
      assertEquals("Commit call on unknown xid " + unknownXid + " expects XAER_NOTA",
          XAException.XAER_NOTA, xae.errorCode);
    } finally {
      xaRes.rollback(xid);
    }
  }

  /**
   * When trying to commit xid which was already removed by arbitrary action of database.
   * Resource manager can't expect state of the {@link Xid}.
   */
  @Test
  public void testDatabaseRemovesPreparedBeforeCommit() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);

    clearAllPrepared();

    try {
      xaRes.commit(xid, false);
      fail("Commit is expected to fail as committed xid was removed before");
    } catch (XAException xae) {
      assertEquals("Commit call on xid " + xid + " not known to DB expects XAER_RMERR",
          XAException.XAER_RMERR, xae.errorCode);
    }
  }

  /**
   * When trying to rollback xid which was already removed by arbitrary action of database.
   * Resource manager can't expect state of the {@link Xid}.
   */
  @Test
  public void testDatabaseRemovesPreparedBeforeRollback() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);

    clearAllPrepared();

    try {
      xaRes.rollback(xid);
      fail("Rollback is expected to fail as committed xid was removed before");
    } catch (XAException xae) {
      assertEquals("Rollback call on xid " + xid + " not known to DB expects XAER_RMERR",
          XAException.XAER_RMERR, xae.errorCode);
    }
  }

  /**
   * When trying to commit and connection issue happens then
   * {@link XAException} error code {@link XAException#XAER_RMFAIL} is expected.
   */
  @Test
  public void testNetworkIssueOnCommit() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);

    xaconn.close();

    try {
      xaRes.commit(xid, false);
      fail("Commit is expected to fail as connection was closed");
    } catch (XAException xae) {
      assertEquals("Commit call on closed connection expects XAER_RMFAIL",
          XAException.XAER_RMFAIL, xae.errorCode);
    }
  }

  /**
   * When trying to one-phase commit and connection issue happens then
   * {@link XAException} error code {@link XAException#XAER_RMFAIL} is expected.
   */
  @Test
  public void testNetworkIssueOnOnePhaseCommit() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);

    xaconn.close();

    try {
      xaRes.commit(xid, true);
      fail("One-phase commit is expected to fail as connection was closed");
    } catch (XAException xae) {
      assertEquals("One-phase commit call on closed connection expects XAER_RMFAIL",
          XAException.XAER_RMFAIL, xae.errorCode);
    }
  }

  /**
   * When trying to rollback and connection issue happens then
   * {@link XAException} error code {@link XAException#XAER_RMFAIL} is expected.
   */
  @Test
  public void testNetworkIssueOnRollback() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    xaRes.end(xid, XAResource.TMSUCCESS);
    xaRes.prepare(xid);

    xaconn.close();

    try {
      xaRes.rollback(xid);
      fail("Rollback is expected to fail as connection was closed");
    } catch (XAException xae) {
      assertEquals("Rollback call on closed connection expects XAER_RMFAIL",
          XAException.XAER_RMFAIL, xae.errorCode);
    }
  }

  /**
   * When using deferred constraints a constraint violation can occur on prepare. This has to be
   * mapped to the correct XA Error Code
   */
  @Test
  public void testMappingOfConstraintViolations() throws Exception {
    Xid xid = new CustomXid(1);
    xaRes.start(xid, XAResource.TMNOFLAGS);
    assertEquals(0, conn.createStatement().executeUpdate("SET CONSTRAINTS ALL DEFERRED"));
    assertEquals(1, conn.createStatement().executeUpdate("INSERT INTO testxa3 VALUES (4)"));
    xaRes.end(xid, XAResource.TMSUCCESS);

    try {
      xaRes.prepare(xid);
      fail("Prepare is expected to fail as an integrity violation occurred");
    } catch (XAException xae) {
      assertEquals("Prepare call with deferred constraints violations expects XA_RBINTEGRITY",
          XAException.XA_RBINTEGRITY, xae.errorCode);
    }
  }

  /*
   * We don't support transaction interleaving. public void testInterleaving1() throws Exception {
   * Xid xid1 = new CustomXid(1); Xid xid2 = new CustomXid(2);
   *
   * xaRes.start(xid1, XAResource.TMNOFLAGS); conn.createStatement().executeUpdate(
   * "UPDATE testxa1 SET foo = 'ccc'"); xaRes.end(xid1, XAResource.TMSUCCESS);
   *
   * xaRes.start(xid2, XAResource.TMNOFLAGS); conn.createStatement().executeUpdate(
   * "UPDATE testxa2 SET foo = 'bbb'");
   *
   * xaRes.commit(xid1, true);
   *
   * xaRes.end(xid2, XAResource.TMSUCCESS);
   *
   * xaRes.commit(xid2, true);
   *
   * } public void testInterleaving2() throws Exception { Xid xid1 = new CustomXid(1); Xid xid2 =
   * new CustomXid(2); Xid xid3 = new CustomXid(3);
   *
   * xaRes.start(xid1, XAResource.TMNOFLAGS); conn.createStatement().executeUpdate(
   * "UPDATE testxa1 SET foo = 'aa'"); xaRes.end(xid1, XAResource.TMSUCCESS);
   *
   * xaRes.start(xid2, XAResource.TMNOFLAGS); conn.createStatement().executeUpdate(
   * "UPDATE testxa2 SET foo = 'bb'"); xaRes.end(xid2, XAResource.TMSUCCESS);
   *
   * xaRes.start(xid3, XAResource.TMNOFLAGS); conn.createStatement().executeUpdate(
   * "UPDATE testxa3 SET foo = 'cc'"); xaRes.end(xid3, XAResource.TMSUCCESS);
   *
   * xaRes.commit(xid1, true); xaRes.commit(xid2, true); xaRes.commit(xid3, true); }
   */
}
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edu.wpi.first.wpilibj.templates.subsystems; import edu.wpi.first.wpilibj.Servo; import edu.wpi.first.wpilibj.command.Subsystem; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import edu.wpi.first.wpilibj.templates.FrameMath; /** * * @author Brinton */ public abstract class Rod extends Subsystem { /** @param dTapeAngle angle of tape to frame in degrees while climbing * returned by getClimbTapeAngle */ protected float dTapeAngle; /** * Servo travels 360 degrees in 72 seconds 50 degrees per second or 1 degree * per minute The angular range is about 270 degrees as servo input goes * from 0 to 1 which translates into about 0.0037 servo units per period. * This is a maximum velocity of 50 degrees per second, way too fast. * Propose slowing it down by a factor of 4, so the incremental adjustment * is about .0009. A reasonable error value is 1 degree say about 0.005 PID * is possible, if the setpoint is not dynamic, but unnecessary, I think * * @param maxServoVelocity is maximum servo velocity per above */ protected float maxServoVelocity; protected float servoError; protected float servoErrorMultiplier; int pulleyNumber; Servo servo; SmartDashboard smartdashboard; private int count; private int setCount; /** * this is a constructor * Initialzes */ public Rod(String name) { super(name); maxServoVelocity = .006f ; servoError = .005f; servoErrorMultiplier=1.2f; count=0; setCount=0; } /** * * @param flr true if climbing from floor */ public void adjustAngleClimbing(boolean flr) { //get tapelength from Pulley boolean floor = flr; float currTapeLen = (float)this.getTapeLength(); float dTpAn = FrameMath.getClimbTapeAngle(floor, currTapeLen, pulleyNumber); float rTpAn=(float)Math.toRadians(dTpAn); float dServVal =FrameMath.calcServoFromAngle(false,(float)rTpAn,(float)currTapeLen, pulleyNumber); // Sets the servo by position given the current tape length 
this.setRodServoValue(dServVal); } /** * * @param serVeloc a number from zero to 1 representing fraction of maximum * servo velocity. Gives value of incremental change in servo value per cycle * @param dTaAn goal angle of tape to frame in degrees */ public void setRodAngleFree(float serVeloc, float dTaAn) { //need current frameangle and tapelength for next calculations float currTapeLen =this.getTapeLength(); // calculates servo value to achieve target angle at current tape length float dServVal = FrameMath.calcServoFromAngle(true, (float)Math.toRadians(dTaAn), currTapeLen,pulleyNumber); // makes a small incremental change to servo value chasing goal angle setRodServoVelocity(serVeloc, dServVal, servoError); } /** * This function reports whether the servo has reached its target value. * * @param target * @return has servo hit target within error of 20% above servoError .005 * about 1.5 degrees */ public boolean isServoFinished(float target) { SmartDashboard.putNumber("Rod isServofinished" +"Math.abs(servo.get() - target)", Math.abs(servo.get() - target)); SmartDashboard.putNumber("Rod isServofinished" +"Math.abs(servo.get() - target)", Math.abs(servo.get() - target)); return (Math.abs(servo.get() - target) < Math.abs(servoErrorMultiplier * servoError)); } // To DO add other rods // servo angle controlled by joystick /** * * @return the value of the servo from 0 to 1 */ public float getRodServo() { return (float)servo.get(); } /** * * @return */ public abstract float getTapeLength() ; /** * * @param val */ public void setRodServoValue(float val) { // setCount =setCount+1; // SmartDashboard.putNumber("setRodServValue" // +"setCount", setCount); SmartDashboard.putNumber("setRodServValue " +"val", val); // if (setCount==3){ servo.set(val); // setCount=0; // } float checkval =(float)servo.get(); SmartDashboard.putNumber("setRodServValue " +"checkval", checkval); } /** * This resets servo value in small increments which effectively slows it * down. 
It deals with two problems in setting the servos that control the * rod angle. 1.Fast jerky servo movements- it slows the servo down 2.The * servo goes to its target position immediately and would therefore pass * any test I can think of for it being finished. In a parallel that * involved the tape length, the rod angle command would be "finished" based * on a tape length that was either not final or not current. If we pick the * final tape length as the input, the initial angle will be very different * from the final and movements will be very jerky. If we pick the current * value of the tape length as the input, the outcome will be even worse, * since the tape angle command will be finished at a tape length that is * not the final target. Meanwhile, the pulley would continue for several * more seconds command to its final tape length, This approach is to * increment the servo value by a small amount each cycle, chasing the * current tape length. The Servo travels 360 degrees in 72 seconds 50 * degrees per second or 1 degree per minute The angular range is about 270 * degrees as servo input goes from 0 to 1 which translates into about * 0.0037 servo units per period. This is a maximum velocity of 50 degrees * per second, way too fast. Propose slowing it down by a factor of 4, so * the incremental adjustment is about .0009. 
A reasonable tolerance value is * 1.5 degree say about 0.005 PWM is possible, if the setpoint is not * dynamic, but unnecessary, I think * * @param serVel * @param goalVal * @param tolerance * @return new value for the rod servo that is a small increment to its * previous value */ public void setRodServoVelocity(float serVel, float goalVal, float tolerance){ float curVal = (float)servo.get(); count = count +1 ; SmartDashboard.putNumber("setRodServoVelocity" +"curVal", curVal); SmartDashboard.putNumber("setRodServoVelocity" +"count", count); SmartDashboard.putNumber("setRodServoVelocity" +"serVel", serVel); SmartDashboard.putNumber("setRodServoVelocity" +"goalVal", goalVal); SmartDashboard.putNumber("setRodServoVelocity" +"tolerance", tolerance); // Check servo velocity out of bounds if (serVel > 1f) { serVel = 1.f; } if (serVel < 0.1f) { serVel = 0.1f; } float increVal = serVel * maxServoVelocity; SmartDashboard.putNumber("setRodServoVelocity" +"maxServoVelocity", maxServoVelocity); // if we are geeting so close to the target value that the increment // exceeds the tolerance, cut the increment in half. 
while (increVal > Math.abs(goalVal-curVal)) { increVal = .5f * (float)Math.abs((float)goalVal-(float)curVal); } SmartDashboard.putNumber("setRodServoVelocity" +"increVal", increVal); SmartDashboard.putNumber("setRodServoVelocity" +" curVal + increVal", curVal + increVal); SmartDashboard.putNumber("setRodServoVelocity" +"(Math.abs(goalVal - (curVal + increVal)", Math.abs(goalVal - (curVal + increVal))); if (Math.abs(curVal - goalVal) < tolerance) { setRodServoValue(goalVal); return; } if ((goalVal > curVal) & ((Math.abs(goalVal - (curVal + increVal))) > tolerance)) { setRodServoValue(curVal + increVal); return; } if (Math.abs(goalVal - (curVal + increVal)) <= tolerance) { setRodServoValue(goalVal); return; } if ((goalVal < curVal) & (Math.abs(goalVal - (curVal - increVal)) > tolerance)) { setRodServoValue(curVal - increVal); return; } if (Math.abs(goalVal - (curVal - increVal)) <= tolerance) { setRodServoValue(goalVal); } } // set default joystick public void initDefaultCommand() { } }
/* * Copyright 2008-2013 Hippo B.V. (http://www.onehippo.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hippoecm.repository.jackrabbit; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; import java.util.UUID; import java.util.WeakHashMap; import javax.jcr.InvalidItemStateException; import javax.jcr.ItemNotFoundException; import javax.jcr.NamespaceException; import javax.jcr.ReferentialIntegrityException; import javax.jcr.RepositoryException; import org.apache.jackrabbit.core.HierarchyManager; import org.apache.jackrabbit.core.id.ItemId; import org.apache.jackrabbit.core.id.NodeId; import org.apache.jackrabbit.core.id.PropertyId; import org.apache.jackrabbit.core.nodetype.NodeTypeRegistry; import org.apache.jackrabbit.core.observation.EventStateCollectionFactory; import org.apache.jackrabbit.core.security.AccessManager; import org.apache.jackrabbit.core.state.ChangeLog; import org.apache.jackrabbit.core.state.ChildNodeEntry; import org.apache.jackrabbit.core.state.ItemState; import org.apache.jackrabbit.core.state.ItemStateCacheFactory; import org.apache.jackrabbit.core.state.ItemStateException; import org.apache.jackrabbit.core.state.ItemStateListener; import org.apache.jackrabbit.core.state.ItemStateManager; import 
org.apache.jackrabbit.core.state.NoSuchItemStateException;
import org.apache.jackrabbit.core.state.NodeReferences;
import org.apache.jackrabbit.core.state.NodeState;
import org.apache.jackrabbit.core.state.PropertyState;
import org.apache.jackrabbit.core.state.SharedItemStateManager;
import org.apache.jackrabbit.core.state.StaleItemStateException;
import org.apache.jackrabbit.core.state.XAItemStateManager;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.Path;
import org.apache.jackrabbit.spi.commons.conversion.IllegalNameException;
import org.apache.jackrabbit.spi.commons.conversion.MalformedPathException;
import org.hippoecm.repository.FacetedNavigationEngine;
import org.hippoecm.repository.FacetedNavigationEngine.Context;
import org.hippoecm.repository.FacetedNavigationEngine.Query;
import org.hippoecm.repository.Modules;
import org.hippoecm.repository.api.HippoNodeType;
import org.hippoecm.repository.dataprovider.DataProviderContext;
import org.hippoecm.repository.dataprovider.DataProviderModule;
import org.hippoecm.repository.dataprovider.HippoNodeId;
import org.hippoecm.repository.dataprovider.HippoVirtualProvider;
import org.hippoecm.repository.dataprovider.ParameterizedNodeId;
import org.hippoecm.repository.dataprovider.StateProviderContext;
import org.hippoecm.repository.security.HippoAccessManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Session-local item state manager that layers Hippo's "virtual" and
 * "external" nodes on top of Jackrabbit's transactional item state
 * management ({@link XAItemStateManager}).
 *
 * <p>Virtual nodes (identified by a {@link HippoNodeId}) exist only for the
 * lifetime of a session and are materialized on demand by registered
 * {@link HippoVirtualProvider}s; external nodes are regular persisted nodes
 * whose child entries are (re)computed by a provider keyed on the node type.
 * The {@link FilteredChangeLog} inner class strips those derived states out
 * of a change log before it is persisted, so only genuine user changes reach
 * the shared state manager.</p>
 *
 * <p>NOTE(review): the {@code editFakeMode}/{@code editRealMode} pair lets
 * internal population code enter Jackrabbit's edit mode without the session
 * appearing to be in a transient-change state; the exact save/restore order
 * of these flags is behavior-critical — do not reorder.</p>
 */
public class HippoLocalItemStateManager extends XAItemStateManager implements DataProviderContext, HandleListener {

    protected final Logger log = LoggerFactory.getLogger(HippoLocalItemStateManager.class);

    /** Mask pattern indicating a regular, non-virtual JCR item */
    static final int ITEM_TYPE_REGULAR = 0x00;

    /** Mask pattern indicating an externally defined node, patterns can
     * be OR-ed to indicate both external and virtual nodes.
     */
    static final int ITEM_TYPE_EXTERNAL = 0x01;

    /** Mask pattern indicating a virtual node, patterns can be OR-ed to
     * indicate both external and virtual nodes.
     */
    static final int ITEM_TYPE_VIRTUAL = 0x02;

    private NodeTypeRegistry ntReg;
    private org.apache.jackrabbit.core.SessionImpl session;
    private HierarchyManager hierMgr;
    private FacetedNavigationEngine<Query, Context> facetedEngine;
    private FacetedNavigationEngine.Context facetedContext;
    // Change log wrapper built in update(ChangeLog); consumed by update()/repopulate().
    private HippoLocalItemStateManager.FilteredChangeLog filteredChangeLog = null;
    // When true, refresh() suppresses the actual persist in update(ChangeLog).
    private boolean noUpdateChangeLog = false;
    // Providers keyed by module (class) name.
    private Map<String, HippoVirtualProvider> virtualProviders;
    // Providers keyed by the node type they populate.
    private Map<Name, HippoVirtualProvider> virtualNodeNames;
    // Property names that are provider-generated (hence virtual).
    private Set<Name> virtualPropertyNames;
    // External node states populated during this session (candidates for repopulation).
    private Set<ItemState> virtualStates = new HashSet<ItemState>();
    // Ids of external states that were locally modified (kept in filtered change logs).
    private Set<ItemId> modifiedExternals = new HashSet<ItemId>();
    // Cache of already-populated virtual node states, keyed by their HippoNodeId.
    private Map<NodeId, ItemState> virtualNodes = new HashMap<NodeId, ItemState>();
    // Externals deleted in this session; weak so entries vanish with their ids.
    private Map<ItemId, Object> deletedExternals = new WeakHashMap<ItemId, Object>();
    private NodeId rootNodeId;
    // Virtual layer master switch (constructor argument); never changes afterwards.
    private final boolean virtualLayerEnabled;
    // Nesting counter for temporary disables; layer is active only at count 0.
    private int virtualLayerEnabledCount = 0;
    private boolean virtualLayerRefreshing = true;
    private boolean parameterizedView = false;
    // Context of the ParameterizedNodeId currently being resolved, if any.
    private StateProviderContext currentContext = null;
    private static Modules<DataProviderModule> dataProviderModules = null;
    // See class comment: fake mode hides internal edits from inEditMode().
    private boolean editFakeMode = false;
    private boolean editRealMode = false;
    private AccessManager accessManager;
    // Resolved qualified name of hippo:handle; used by isHandle().
    private Name handleNodeName;

    /**
     * Creates the state manager; providers are registered later in
     * {@link #initialize}.
     *
     * @param enabled whether the virtual layer is available at all for this session
     */
    public HippoLocalItemStateManager(SharedItemStateManager sharedStateMgr, EventStateCollectionFactory factory,
                                     ItemStateCacheFactory cacheFactory, String attributeName, NodeTypeRegistry ntReg,
                                     boolean enabled, NodeId rootNodeId) {
        super(sharedStateMgr, factory, attributeName, cacheFactory);
        this.ntReg = ntReg;
        virtualLayerEnabled = enabled;
        this.rootNodeId = rootNodeId;
        virtualProviders = new HashMap<String, HippoVirtualProvider>();
        virtualNodeNames = new HashMap<Name, HippoVirtualProvider>();
        virtualPropertyNames = new HashSet<Name>();
    }

    /** @return whether the virtual layer is globally enabled and not temporarily suspended */
    public boolean isEnabled() {
        return virtualLayerEnabled && virtualLayerEnabledCount == 0;
    }

    /**
     * Suspends or resumes the virtual layer.  Calls nest: each
     * {@code setEnabled(false)} must be balanced by a {@code setEnabled(true)}.
     */
    public void setEnabled(boolean enabled) {
        if (enabled) {
            --virtualLayerEnabledCount;
        } else {
            ++virtualLayerEnabledCount;
        }
    }

    /** Controls whether {@link FilteredChangeLog#invalidate()} does full external refresh. */
    public void setRefreshing(boolean enabled) {
        virtualLayerRefreshing = enabled;
    }

    public NodeTypeRegistry getNodeTypeRegistry() {
        return ntReg;
    }

    public HierarchyManager getHierarchyManager() {
        return hierMgr;
    }

    public FacetedNavigationEngine<FacetedNavigationEngine.Query, Context> getFacetedEngine() {
        return facetedEngine;
    }

    public FacetedNavigationEngine.Context getFacetedContext() {
        return facetedContext;
    }

    /** Registers a provider that populates nodes of the given node type. */
    public void registerProvider(Name nodeTypeName, HippoVirtualProvider provider) {
        virtualNodeNames.put(nodeTypeName, provider);
    }

    /** Marks a property name as provider-generated (virtual). */
    public void registerProviderProperty(Name propName) {
        virtualPropertyNames.add(propName);
    }

    /** Registers a provider under its module (class) name. */
    public void registerProvider(String moduleName, HippoVirtualProvider provider) {
        virtualProviders.put(moduleName, provider);
    }

    public HippoVirtualProvider lookupProvider(String moduleName) {
        return virtualProviders.get(moduleName);
    }

    public HippoVirtualProvider lookupProvider(Name nodeTypeName) {
        return virtualNodeNames.get(nodeTypeName);
    }

    // Sequence for session-local virtual node UUIDs; see generateUuid below.
    private long virtualNodeIdLsb = 1L;
    private final long virtualNodeIdMsb = NodeId.valueOf("cafeface-0000-0000-0000-000000000000").getMostSignificantBits();

    /*
     * There are alternative implementations possible here.  The default
     * implementation, that would be similar to Jackrabbit, is simply to return
     * UUID.randomUUID(); however this can be slow at times.  Another
     * implementation is to use a global AtomicLong and return
     * "new UUID(known-start-value, AtomicLong.getAndIncrement())".  However an
     * atomic long isn't needed since these nodes only live during a single
     * session.  Therefore a simple long is sufficient since no concurrent
     * access is allowed.
     */
    public UUID generateUuid(StateProviderContext context, NodeId canonical) {
        return new UUID(virtualNodeIdMsb, virtualNodeIdLsb++);
    }

    public Name getQName(String name) throws IllegalNameException, NamespaceException {
        return session.getQName(name);
    }

    public Path getQPath(String path) throws MalformedPathException, IllegalNameException, NamespaceException {
        return session.getQPath(path);
    }

    // Lazily loads the shared set of data provider modules once per JVM;
    // returns a copy so callers can iterate without holding the lock.
    // NOTE(review): the copy is created via the raw Modules constructor.
    private static synchronized Modules<DataProviderModule> getDataProviderModules(ClassLoader loader) {
        if (dataProviderModules == null) {
            dataProviderModules = new Modules<DataProviderModule>(loader, DataProviderModule.class);
        }
        return new Modules(dataProviderModules);
    }

    /**
     * Binds this manager to a session, discovers the provider modules and
     * initializes them.  Must be called before the virtual layer is used.
     */
    void initialize(org.apache.jackrabbit.core.SessionImpl session,
                    FacetedNavigationEngine<Query, Context> facetedEngine,
                    FacetedNavigationEngine.Context facetedContext) throws IllegalNameException, NamespaceException {
        this.session = session;
        this.accessManager = session.getAccessManager();
        this.hierMgr = session.getHierarchyManager();
        this.facetedEngine = facetedEngine;
        this.facetedContext = facetedContext;
        this.handleNodeName = session.getQName(HippoNodeType.NT_HANDLE);
        LinkedHashSet<DataProviderModule> providerInstances = new LinkedHashSet<DataProviderModule>();
        if (virtualLayerEnabled) {
            Modules<DataProviderModule> modules = getDataProviderModules(getClass().getClassLoader());
            for (DataProviderModule module : modules) {
                log.info("Provider module " + module.toString());
                providerInstances.add(module);
            }
        }
        // First register every provider under its class name, so that
        // provider.initialize() below can look up sibling providers.
        for (DataProviderModule provider : providerInstances) {
            if (provider instanceof HippoVirtualProvider) {
                registerProvider(provider.getClass().getName(), (HippoVirtualProvider)provider);
            }
        }
        for (DataProviderModule provider : providerInstances) {
            try {
                provider.initialize(this);
            } catch (RepositoryException ex) {
                log.error("cannot initialize virtual provider " + provider.getClass().getName() + ": " + ex.getMessage(), ex);
            }
        }
    }

    @Override
    public void dispose() {
        if (facetedEngine != null) {
            facetedEngine.unprepare(facetedContext);
        }
        super.dispose();
    }

    /**
     * Enters edit mode.  A "fake" edit (internal population) does not set
     * {@code editRealMode}, so {@link #inEditMode()} keeps reporting false to
     * outside callers while providers store derived states.
     */
    @Override
    public synchronized void edit() throws IllegalStateException {
        if (!editFakeMode)
            editRealMode = true;
        boolean editPreviousMode = editFakeMode;
        editFakeMode = false;
        if (super.inEditMode()) {
            editFakeMode = editPreviousMode;
            return;
        }
        editFakeMode = editPreviousMode;
        super.edit();
    }

    /** Reports edit mode as seen by callers; fake (internal) edits are hidden. */
    @Override
    public boolean inEditMode() {
        if (editFakeMode)
            return false;
        return editRealMode;
    }

    /** Drops all session-local derived state bookkeeping. */
    void clearChangeLog() {
        virtualStates.clear();
        virtualNodes.clear();
        filteredChangeLog = null;
        modifiedExternals.clear();
    }

    /**
     * Persists a change log after filtering out virtual/derived states, and
     * records which external states were deleted in the process.
     */
    @Override
    protected void update(ChangeLog changeLog) throws ReferentialIntegrityException, StaleItemStateException, ItemStateException {
        filteredChangeLog = new FilteredChangeLog(changeLog);
        virtualStates.clear();
        virtualNodes.clear();
        filteredChangeLog.invalidate();
        if (!noUpdateChangeLog) {
            super.update(filteredChangeLog);
        }
        modifiedExternals.clear();
        deletedExternals.putAll(filteredChangeLog.deletedExternals);
    }

    /**
     * Saves pending changes, then (under a fake edit) repopulates the external
     * states that survived the save so derived views stay current.
     */
    @Override
    public void update() throws ReferentialIntegrityException, StaleItemStateException, ItemStateException, IllegalStateException {
        super.update();
        editRealMode = false;
        try {
            editFakeMode = true;
            edit();
            FilteredChangeLog tempChangeLog = filteredChangeLog;
            filteredChangeLog = null;
            parameterizedView = false;
            if (tempChangeLog != null) {
                tempChangeLog.repopulate();
            }
        } finally {
            editFakeMode = false;
        }
    }

    /**
     * Refreshes the derived layer without persisting anything: runs the
     * update pipeline with the actual store call suppressed.
     */
    void refresh() throws ReferentialIntegrityException, StaleItemStateException, ItemStateException {
        if (!inEditMode()) {
            edit();
        }
        noUpdateChangeLog = true;
        update();
        noUpdateChangeLog = false;
        editRealMode = false;
    }

    /**
     * Returns the shared (canonical, un-virtualized) state for an item, or
     * {@code null} when the current session may not read it.
     */
    public ItemState getCanonicalItemState(ItemId id) throws NoSuchItemStateException, ItemStateException {
        try {
            if (!accessManager.isGranted(id, AccessManager.READ)) {
                return null;
            }
        } catch (RepositoryException ex) {
            return null;
        }
        return super.getItemState(id);
    }

    /**
     * Resolves an item state, populating virtual nodes and the child entries
     * of external nodes on demand.  Population happens under a fake edit so
     * the session does not appear to have transient changes.
     */
    @Override
    public ItemState getItemState(ItemId id) throws NoSuchItemStateException, ItemStateException {
        currentContext = null;
        ItemState state;
        boolean editPreviousMode = editFakeMode;
        editFakeMode = true;
        try {
            // Unwrap a parameterized id into its base id plus a provider context.
            if (id instanceof ParameterizedNodeId) {
                currentContext = new StateProviderContext(((ParameterizedNodeId)id).getParameterString());
                id = ((ParameterizedNodeId)id).getUnparameterizedNodeId();
                parameterizedView = true;
            }
            state = super.getItemState(id);
            // Externals deleted in this session are returned as-is, never repopulated.
            if (deletedExternals.containsKey(id))
                return state;
            if (id instanceof HippoNodeId) {
                if (!virtualNodes.containsKey(id)) {
                    edit();
                    NodeState nodeState = (NodeState)state;
                    if (isEnabled()) {
                        nodeState = ((HippoNodeId)id).populate(currentContext, nodeState);
                        Name nodeTypeName = nodeState.getNodeTypeName();
                        if (virtualNodeNames.containsKey(nodeTypeName) && !virtualStates.contains(state)) {
                            int type = isVirtual(nodeState);
                            // EXTERNAL && VIRTUAL: already populated once; clear child
                            // entries before the provider populates it again.
                            if ((type & ITEM_TYPE_EXTERNAL) != 0 && (type & ITEM_TYPE_VIRTUAL) != 0) {
                                nodeState.removeAllChildNodeEntries();
                            }
                            nodeState = ((HippoNodeId)id).populate(virtualNodeNames.get(nodeTypeName), nodeState);
                        }
                        virtualNodes.put((HippoNodeId)id, nodeState);
                        forceStore(nodeState);
                    }
                    return nodeState;
                }
            } else if (state instanceof NodeState) {
                NodeState nodeState = (NodeState)state;
                Name nodeTypeName = nodeState.getNodeTypeName();
                if (virtualNodeNames.containsKey(nodeTypeName) && !virtualStates.contains(state)) {
                    edit();
                    int type = isVirtual(nodeState);
                    if ((type & ITEM_TYPE_EXTERNAL) != 0) {
                        nodeState.removeAllChildNodeEntries();
                    }
                    try {
                        if (virtualLayerEnabled) {
                            if (id instanceof ParameterizedNodeId) {
                                if (isEnabled()) {
                                    state = virtualNodeNames.get(nodeTypeName).populate(new StateProviderContext(((ParameterizedNodeId)id).getParameterString()), nodeState);
                                    parameterizedView = true;
                                }
                            } else if (id instanceof HippoNodeId) {
                                if (isEnabled()) {
                                    state = ((HippoNodeId)id).populate(virtualNodeNames.get(nodeTypeName), nodeState);
                                }
                            } else {
                                if (isEnabled()) {
                                    state = virtualNodeNames.get(nodeTypeName).populate(currentContext, nodeState);
                                } else {
                                    // Layer suspended: populate anyway, but strip the
                                    // derived children so they are not exposed.
                                    state = virtualNodeNames.get(nodeTypeName).populate(currentContext, nodeState);
                                    ((NodeState)state).removeAllChildNodeEntries();
                                }
                            }
                        } else {
                            log.error("Populating while virtual layer disabled", new Exception());
                        }
                        virtualStates.add(state);
                        forceStore(state);
                        return nodeState;
                    } catch(InvalidItemStateException ex) {
                        log.debug("InvalidItemStateException for nodeTypeName '"+nodeTypeName+"'. ", ex);
                        return nodeState;
                    } catch (RepositoryException ex) {
                        log.error(ex.getClass().getName() + ": " + ex.getMessage(), ex);
                        throw new ItemStateException("Failed to populate node state", ex);
                    }
                }
            }
        } catch(InvalidItemStateException ex) {
            throw new ItemStateException("Source location has changed", ex);
        } finally {
            currentContext = null;
            editFakeMode = editPreviousMode;
        }
        return state;
    }

    /**
     * Virtual ids (and properties of virtual nodes) always claim to exist,
     * since they can be materialized on demand.
     */
    @Override
    public boolean hasItemState(ItemId id) {
        if (id instanceof HippoNodeId || id instanceof ParameterizedNodeId) {
            return true;
        } else if (id instanceof PropertyId && ((PropertyId)id).getParentId() instanceof HippoNodeId) {
            return true;
        }
        return super.hasItemState(id);
    }

    /**
     * Resolves a node state, populating virtual nodes on demand and
     * reordering handle children by readability for regular nodes.
     */
    @Override
    public NodeState getNodeState(NodeId id) throws NoSuchItemStateException, ItemStateException {
        NodeState state = null;
        if (!(id instanceof HippoNodeId)) {
            try {
                state = super.getNodeState(id);
            } catch (NoSuchItemStateException ex) {
                // A ParameterizedNodeId may not exist in the shared store yet.
                if (!(id instanceof ParameterizedNodeId)) {
                    throw ex;
                }
            }
        }
        if (virtualNodes.containsKey(id)) {
            state = (NodeState)virtualNodes.get(id);
        } else if (state == null && id instanceof HippoNodeId) {
            boolean editPreviousMode = editFakeMode;
            editFakeMode = true;
            NodeState nodeState;
            try {
                edit();
                if (isEnabled()) {
                    nodeState = ((HippoNodeId)id).populate(currentContext);
                    if (nodeState == null) {
                        throw new NoSuchItemStateException("Populating node failed");
                    }
                } else {
                    // Layer disabled: create a bare placeholder instead.
                    nodeState = populate((HippoNodeId)id);
                }
                virtualNodes.put((HippoNodeId)id, nodeState);
                forceStore(nodeState);
                Name nodeTypeName = nodeState.getNodeTypeName();
                if (virtualNodeNames.containsKey(nodeTypeName)) {
                    int type = isVirtual(nodeState);
                    /*
                     * If a node is EXTERNAL && VIRTUAL, we are dealing with an already populated nodestate.
                     * Since the parent EXTERNAL node can impose new constraints, like an inherited filter, we
                     * first need to remove all the childNodeEntries, and then populate it again
                     */
                    if ((type & ITEM_TYPE_EXTERNAL) != 0 && (type & ITEM_TYPE_VIRTUAL) != 0) {
                        nodeState.removeAllChildNodeEntries();
                    }
                    try {
                        state = ((HippoNodeId)id).populate(virtualNodeNames.get(nodeTypeName), nodeState);
                    } catch(InvalidItemStateException ex) {
                        throw new ItemStateException("Node has been modified", ex);
                    }
                }
            } finally {
                editFakeMode = editPreviousMode;
            }
            return nodeState;
        } else if (isHandle(state)) {
            reorderHandleChildNodeEntries(state);
        }
        return state;
    }

    /**
     * Moves child entries of a handle that the session may NOT read behind
     * the readable ones, preserving relative order within each group, and
     * re-sets the child entry list so the hierarchy manager cache refreshes.
     */
    private void reorderHandleChildNodeEntries(final NodeState state) {
        if (accessManager == null) {
            return;
        }
        // returns a copy of the list
        List<ChildNodeEntry> cnes = state.getChildNodeEntries();
        LinkedList<ChildNodeEntry> updatedList = new LinkedList<ChildNodeEntry>();
        int readableIndex = 0;
        for (ChildNodeEntry current : cnes) {
            boolean added = false;
            // if there is a same-name-sibling with a bigger index, check authorization
            // there is no need to check last one, because it's already last
            int index = current.getIndex();
            ChildNodeEntry next = state.getChildNodeEntry(current.getName(), index + 1);
            if (next != null) {
                try {
                    // this is SNS number 2, so check previous one,
                    if (!accessManager.isGranted(current.getId(), AccessManager.READ)) {
                        updatedList.addLast(current);
                        added = true;
                    }
                } catch (ItemNotFoundException t) {
                    log.error("Unable to order documents below handle " + state.getId(), t);
                } catch (RepositoryException t) {
                    log.error("Unable to determine access rights for " + current.getId());
                }
            }
            if (!added) {
                updatedList.add(readableIndex, current);
                readableIndex++;
            }
        }
        // always invoke {@link NodeState#setChildNodeEntries} (even when there are no changes)
        // so that the hierarchy manager cache is verified and updated.
        state.setChildNodeEntries(updatedList);
    }

    /** Properties of virtual nodes never live in the shared ISM; fail fast. */
    @Override
    public PropertyState getPropertyState(PropertyId id) throws NoSuchItemStateException, ItemStateException {
        if (id.getParentId() instanceof HippoNodeId) {
            throw new NoSuchItemStateException("Property of a virtual node cannot be retrieved from shared ISM");
        }
        return super.getPropertyState(id);
    }

    /**
     * Fallback population when the virtual layer is disabled: creates an
     * empty placeholder state typed like the root node.
     */
    private NodeState populate(HippoNodeId nodeId) throws NoSuchItemStateException, ItemStateException {
        try {
            NodeState dereference = getNodeState(rootNodeId);
            NodeState state = createNew(nodeId, dereference.getNodeTypeName(), nodeId.parentId);
            state.setNodeTypeName(dereference.getNodeTypeName());
            return state;
        } catch(RepositoryException ex) {
            throw new NoSuchItemStateException(ex.getMessage(), ex);
        }
    }

    /**
     * True when the item is purely session-local (a virtual node, or a
     * property of one).  Lookup failures are treated as "virtual" so such
     * items are filtered out rather than persisted.
     */
    boolean isPureVirtual(ItemId id) {
        if (id.denotesNode()) {
            if (id instanceof HippoNodeId) {
                return true;
            }
        } else {
            try {
                PropertyState propState = (PropertyState)getItemState(id);
                return (propState.getParentId() instanceof HippoNodeId);
            } catch (NoSuchItemStateException ex) {
                return true;
            } catch (ItemStateException ex) {
                return true;
            }
        }
        return false;
    }

    /**
     * Classifies a state as a bit mask of {@link #ITEM_TYPE_VIRTUAL} and/or
     * {@link #ITEM_TYPE_EXTERNAL} ({@link #ITEM_TYPE_REGULAR} when neither).
     */
    int isVirtual(ItemState state) {
        if (state.isNode()) {
            int type = ITEM_TYPE_REGULAR;
            if (state.getId() instanceof HippoNodeId) {
                type |= ITEM_TYPE_VIRTUAL;
            }
            if (virtualNodeNames.containsKey(((NodeState)state).getNodeTypeName())) {
                type |= ITEM_TYPE_EXTERNAL;
            }
            return type;
        } else {
            /* it is possible to do a check on type name of the property
             * using Name name = ((PropertyState)state).getName().toString().equals(...)
             * to check and return whether a property is virtual.
             *
             * FIXME: this would be better if these properties would not be
             * named for all node types, but bound to a specific node type
             * for which there is already a provider defined.
             */
            PropertyState propState = (PropertyState)state;
            if (propState.getPropertyId() instanceof HippoPropertyId) {
                return ITEM_TYPE_VIRTUAL;
            } else if (virtualPropertyNames.contains(propState.getName())) {
                return ITEM_TYPE_VIRTUAL;
            } else if (propState.getParentId() instanceof HippoNodeId) {
                return ITEM_TYPE_VIRTUAL;
            } else {
                return ITEM_TYPE_REGULAR;
            }
        }
    }

    /** True when the state is a node of type hippo:handle. */
    boolean isHandle(ItemState state) {
        if (handleNodeName != null && state.isNode()) {
            return handleNodeName.equals(((NodeState) state).getNodeTypeName());
        }
        return false;
    }

    /**
     * Change log decorator that hides virtual states and un-modified external
     * states from the wrapped (persisted) log, while remembering deleted
     * externals so they are never repopulated.
     */
    class FilteredChangeLog extends ChangeLog {

        private ChangeLog upstream;
        // Externals deleted via this change log; merged into the manager's map after update.
        Map<ItemId, Object> deletedExternals = new HashMap<ItemId, Object>();

        FilteredChangeLog(ChangeLog changelog) {
            upstream = changelog;
        }

        /**
         * Discards derived (external) state from the local cache so it is
         * recomputed after the save; when refreshing is disabled only the
         * modified externals are invalidated.
         */
        void invalidate() {
            if (!virtualLayerRefreshing) {
                for (ItemState state : upstream.modifiedStates()) {
                    if ((isVirtual(state) & ITEM_TYPE_EXTERNAL) != 0) {
                        forceUpdate(state);
                    }
                }
                return;
            }
            // Snapshot the upstream iterables first: forceUpdate below mutates
            // the underlying state sets.
            List<ItemState> deletedStates = new LinkedList<ItemState>();
            for (ItemState state : upstream.deletedStates()) {
                deletedStates.add(state);
            }
            List<ItemState> addedStates = new LinkedList<ItemState>();
            for (ItemState state : upstream.addedStates()) {
                addedStates.add(state);
            }
            List<ItemState> modifiedStates = new LinkedList<ItemState>();
            for (ItemState state : upstream.modifiedStates()) {
                modifiedStates.add(state);
            }
            for (ItemState state : deletedStates) {
                if ((isVirtual(state) & ITEM_TYPE_EXTERNAL) != 0) {
                    deletedExternals.put(state.getId(), null);
                    ((NodeState)state).removeAllChildNodeEntries();
                    forceUpdate(state);
                }
            }
            for (ItemState state : addedStates) {
                if ((isVirtual(state) & ITEM_TYPE_VIRTUAL) != 0) {
                    if (state.isNode()) {
                        NodeState nodeState = (NodeState)state;
                        try {
                            // Detach the virtual child from its (persisted) parent.
                            NodeState parentNodeState = (NodeState)get(nodeState.getParentId());
                            if (parentNodeState != null) {
                                parentNodeState.removeChildNodeEntry(nodeState.getNodeId());
                                forceUpdate(nodeState);
                            }
                        } catch (NoSuchItemStateException ex) {
                            // parent already gone; nothing to detach
                        }
                    } else {
                        forceUpdate(state);
                    }
                } else if ((isVirtual(state) & ITEM_TYPE_EXTERNAL) != 0) {
                    if (!deletedExternals.containsKey(state.getId())
                            && !HippoLocalItemStateManager.this.deletedExternals.containsKey(state.getId())) {
                        ((NodeState)state).removeAllChildNodeEntries();
                        forceUpdate((NodeState)state);
                    }
                }
            }
            for (ItemState state : modifiedStates) {
                if ((isVirtual(state) & ITEM_TYPE_EXTERNAL) != 0) {
                    if (!deletedExternals.containsKey(state.getId())
                            && !HippoLocalItemStateManager.this.deletedExternals.containsKey(state.getId())) {
                        forceUpdate((NodeState)state);
                        ((NodeState)state).removeAllChildNodeEntries();
                    }
                }
            }
        }

        /** Re-runs the providers for surviving external states after a save. */
        private void repopulate() {
            // only repopulate ITEM_TYPE_EXTERNAL, not state that are ITEM_TYPE_EXTERNAL && ITEM_TYPE_VIRTUAL
            for (Iterator iter = new HashSet<ItemState>(virtualStates).iterator(); iter.hasNext(); ) {
                ItemState state = (ItemState)iter.next();
                if (((isVirtual(state) & ITEM_TYPE_EXTERNAL)) != 0
                        && ((isVirtual(state) & ITEM_TYPE_VIRTUAL) == 0)
                        && !deleted(state.getId())
                        && !deletedExternals.containsKey(state.getId())
                        && !HippoLocalItemStateManager.this.deletedExternals.containsKey(state.getId())) {
                    try {
                        if (state.getId() instanceof ParameterizedNodeId) {
                            virtualNodeNames.get(((NodeState)state).getNodeTypeName()).populate(new StateProviderContext(((ParameterizedNodeId)state.getId()).getParameterString()), (NodeState)state);
                            parameterizedView = true;
                        } else if (state.getId() instanceof HippoNodeId) {
                            ((HippoNodeId)state.getId()).populate(virtualNodeNames.get(((NodeState)state).getNodeTypeName()), (NodeState)state);
                        } else {
                            virtualNodeNames.get(((NodeState)state).getNodeTypeName()).populate(null, (NodeState)state);
                        }
                    } catch (InvalidItemStateException ex) {
                        // Stale source; informational only.
                        log.info(ex.getClass().getName() + ": " + ex.getMessage(), ex);
                    } catch (ItemStateException ex) {
                        log.error(ex.getClass().getName() + ": " + ex.getMessage(), ex);
                    } catch (RepositoryException ex) {
                        log.error(ex.getClass().getName() + ": " + ex.getMessage(), ex);
                    }
                }
            }
        }

        // --- plain delegation to the wrapped change log -------------------

        @Override
        public void added(ItemState state) {
            upstream.added(state);
        }

        @Override
        public void modified(ItemState state) {
            upstream.modified(state);
        }

        @Override
        public void deleted(ItemState state) {
            upstream.deleted(state);
        }

        @Override
        public void modified(NodeReferences refs) {
            upstream.modified(refs);
        }

        @Override
        public boolean isModified(ItemId id) {
            return upstream.isModified(id);
        }

        @Override
        public ItemState get(ItemId id) throws NoSuchItemStateException {
            return upstream.get(id);
        }

        // has()/deleted() additionally hide externals deleted earlier in the session.
        @Override
        public boolean has(ItemId id) {
            return upstream.has(id) && !HippoLocalItemStateManager.this.deletedExternals.containsKey(id);
        }

        @Override
        public boolean deleted(ItemId id) {
            return upstream.deleted(id) && !HippoLocalItemStateManager.this.deletedExternals.containsKey(id);
        }

        @Override
        public NodeReferences getReferencesTo(NodeId id) {
            return upstream.getReferencesTo(id);
        }

        // The state/reference views are filtered so derived items never leak out.
        @Override
        public Iterable<ItemState> addedStates() {
            return new FilteredStateIterator(upstream.addedStates(), false);
        }

        @Override
        public Iterable<ItemState> modifiedStates() {
            return new FilteredStateIterator(upstream.modifiedStates(), true);
        }

        @Override
        public Iterable<ItemState> deletedStates() {
            return new FilteredStateIterator(upstream.deletedStates(), false);
        }

        @Override
        public Iterable<NodeReferences> modifiedRefs() {
            return new FilteredReferencesIterator(upstream.modifiedRefs());
        }

        @Override
        public void merge(ChangeLog other) {
            upstream.merge(other);
        }

        @Override
        public void push() {
            upstream.push();
        }

        @Override
        public void persisted() {
            upstream.persisted();
        }

        @Override
        public void reset() {
            upstream.reset();
        }

        @Override
        public void disconnect() {
            upstream.disconnect();
        }

        @Override
        public void undo(ItemStateManager parent) {
            upstream.undo(parent);
        }

        @Override
        public String toString() {
            return upstream.toString();
        }

        /**
         * Iterates over item states, skipping pure virtual states, states of
         * deleted externals and — for the modified view — externals that were
         * not actually modified locally.
         */
        class FilteredStateIterator implements Iterable<ItemState> {

            Iterable<ItemState> actualIterable;
            // Look-ahead slot shared between hasNext() and next().
            ItemState current;
            // True when iterating the "modified" view (extra external filtering).
            boolean modified;

            FilteredStateIterator(Iterable<ItemState> actualIterable, boolean modified) {
                this.actualIterable = actualIterable;
                current = null;
                this.modified = modified;
            }

            public Iterator<ItemState> iterator() {
                final Iterator<ItemState> actualIterator = actualIterable.iterator();
                return new Iterator<ItemState>() {
                    public boolean hasNext() {
                        while (current == null) {
                            if (!actualIterator.hasNext())
                                return false;
                            current = (ItemState)actualIterator.next();
                            if (needsSkip(current)) {
                                current = null;
                            }
                        }
                        return true;
                    }

                    public boolean needsSkip(ItemState current) {
                        if (HippoLocalItemStateManager.this.deletedExternals.containsKey(current.getId())) {
                            return true;
                        }
                        if ((isVirtual(current) & ITEM_TYPE_VIRTUAL) != 0) {
                            // Keep properties whose parent external was locally modified.
                            if (!current.isNode()) {
                                PropertyState propState = (PropertyState)current;
                                if (modifiedExternals.contains(propState.getParentId())) {
                                    return false;
                                }
                            }
                            return true;
                        }
                        if (modified) {
                            if ((isVirtual(current) & ITEM_TYPE_EXTERNAL) != 0) {
                                return !modifiedExternals.contains(current.getId());
                            }
                        }
                        return false;
                    }

                    public ItemState next() throws NoSuchElementException {
                        while (current == null) {
                            if (!actualIterator.hasNext()) {
                                throw new NoSuchElementException();
                            }
                            current = actualIterator.next();
                            if (needsSkip(current)) {
                                current = null;
                            }
                        }
                        ItemState rtValue = current;
                        current = null;
                        return rtValue;
                    }

                    public void remove() throws UnsupportedOperationException, IllegalStateException {
                        actualIterator.remove();
                    }
                };
            }
        }

        /**
         * Iterates over node references, skipping references targeting pure
         * virtual nodes and stripping references held BY virtual properties.
         */
        class FilteredReferencesIterator implements Iterable<NodeReferences> {

            Iterable<NodeReferences> actualIterable;
            // Look-ahead slot shared between hasNext() and next().
            NodeReferences current;

            FilteredReferencesIterator(Iterable<NodeReferences> actualIterable) {
                this.actualIterable = actualIterable;
                current = null;
            }

            public Iterator<NodeReferences> iterator() {
                final Iterator<NodeReferences> actualIterator = actualIterable.iterator();
                return new Iterator<NodeReferences>() {
                    public boolean hasNext() {
                        while (current == null) {
                            if (!actualIterator.hasNext())
                                return false;
                            current = (NodeReferences)actualIterator.next();
                            if (needsSkip(current)) {
                                current = null;
                            }
                        }
                        return (current != null);
                    }

                    public boolean needsSkip(NodeReferences current) {
                        return isPureVirtual(current.getTargetId());
                    }

                    public NodeReferences next() throws NoSuchElementException {
                        NodeReferences rtValue = null;
                        while (current == null) {
                            if (!actualIterator.hasNext()) {
                                throw new NoSuchElementException();
                            }
                            current = (NodeReferences)actualIterator.next();
                            if (needsSkip(current)) {
                                current = null;
                            }
                        }
                        // Rebuild the references minus those held by virtual properties.
                        rtValue = new NodeReferences(current.getTargetId());
                        for (PropertyId propId : (List<PropertyId>)current.getReferences()) {
                            if (!isPureVirtual(propId)) {
                                rtValue.addReference(propId);
                            }
                        }
                        current = null;
                        if (rtValue == null)
                            throw new NoSuchElementException();
                        return rtValue;
                    }

                    public void remove() throws UnsupportedOperationException, IllegalStateException {
                        actualIterator.remove();
                    }
                };
            }
        }
    }

    /**
     * Callback when a handle changed in the shared store: reorder its locally
     * cached children, or else drop the stale hierarchy manager cache entry.
     */
    @Override
    public void handleModified(final NodeState sharedState) {
        NodeId handleId = sharedState.getNodeId();
        ItemState localState = cache.retrieve(handleId);
        if (localState != null) {
            reorderHandleChildNodeEntries((NodeState) localState);
        } else {
            clearHierarchyManagerCacheForHandle(sharedState);
        }
    }

    // Wraps the shared state in a throwaway NodeState and signals a replace,
    // which evicts the handle from the hierarchy manager cache.
    private void clearHierarchyManagerCacheForHandle(final NodeState sharedState) {
        final NodeId handleId = sharedState.getNodeId();
        final Name nodeTypeName = sharedState.getNodeTypeName();
        final NodeId parentId = sharedState.getParentId();
        NodeState wrappedState = new NodeState(handleId, nodeTypeName, parentId, ItemState.STATUS_EXISTING, false);
        nodesReplaced(wrappedState);
    }

    /**
     * Forwards modifications of foreign states to the access manager (which
     * implements ItemStateListener) unless the state is locally cached.
     */
    @Override
    public void stateModified(final ItemState modified) {
        super.stateModified(modified);
        if (accessManager != null && modified.getContainer() != this && !cache.isCached(modified.getId())) {
            ((ItemStateListener) accessManager).stateModified(modified);
        }
    }

    /** Records destroyed external states so they are never repopulated. */
    @Override
    public void stateDestroyed(ItemState destroyed) {
        if (destroyed.getContainer() != this) {
            if ((isVirtual(destroyed) & ITEM_TYPE_EXTERNAL) != 0) {
                deletedExternals.put(destroyed.getId(), null);
            }
        }
        super.stateDestroyed(destroyed);
    }

    // Invalidates a derived state so it gets recomputed on next access.
    private void forceUpdate(ItemState state) {
        stateDiscarded(state);
    }

    /** Stores a state, tracking locally modified externals for the filter. */
    @Override
    public void store(ItemState state) {
        if ((isVirtual(state) & ITEM_TYPE_EXTERNAL) != 0) {
            modifiedExternals.add(state.getId());
        }
        super.store(state);
    }

    // Store without marking the state as a locally modified external
    // (used when providers populate derived state).
    private void forceStore(ItemState state) {
        super.store(state);
    }
}
import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTabbedPane; import javax.swing.JTextArea; import javax.swing.JTextField; import javax.swing.JTree; import javax.swing.ScrollPaneConstants; import javax.swing.ScrollPaneLayout; public class SeaPortProgram extends JFrame { // Object of world class to read file and create data structures World world; // Text area displaying the text in TEXT tab JTextArea jtextArea; // Text Area displaying the search result in SEARCH tab JTextArea searchResult; // ScrollPane for TEXT and TREE tab JScrollPane jsp, jspTree; // Parent panel for JOB tab (populated dynamically) JPanel jobPanel; /** * Constructor for JFrame constructor to create components */ SeaPortProgram() { /** * JFRAME initialization and creation */ // Set title for JFrame setTitle("Sea Port"); // Set size for JFrame setSize(1200, 600); setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); // Set layout for JFrame setLayout(new BorderLayout()); // Create buttons for Read and Display JButton jbRead = new JButton("Read"); JButton jbDisplay = new JButton("Display"); // Create Panel for Buttons at the top JPanel buttonPanel = new JPanel(); buttonPanel.setLayout(new FlowLayout()); buttonPanel.add(jbRead); buttonPanel.add(jbDisplay); // Implement action listener for read button jbRead.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { eventHandlerReadButton(); } }); // Implement action listener for display button jbDisplay.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { 
eventHandlerDisplayButton(); } }); /** * GUI Element JTEXTAREA for TEXT TAB (Display of read data) */ // Creating text area for Text TAB in the GUI jtextArea = new JTextArea("No Data read yet!"); // Scroll pane for the text area jsp = new JScrollPane(jtextArea); /** * PANEL FOR SEARCH TAB */ JPanel searchPanelParent = new JPanel(new BorderLayout()); JPanel searchPanel = new JPanel(new FlowLayout(FlowLayout.CENTER, 50, 50)); JTextField searchText = new JTextField(" "); searchText.setSize(100, 20); // Labels for search attribute String labels[] = { "Name", "Index", "Skills" }; JComboBox comboBox1 = new JComboBox(labels); JButton searchButton = new JButton("Search"); searchResult = new JTextArea(""); // Add textfield for search text searchPanel.add(searchText); // Add combo box with search attributes searchPanel.add(comboBox1); // Adding search button searchPanel.add(searchButton); // Adding the search panel to search parent panel searchPanelParent.add(searchPanel, BorderLayout.NORTH); // Implement SEARCH button handler searchButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { // Call search event handler with search text, and selected // attribute eventHandlerSearchButton(searchText.getText(), (String) comboBox1.getItemAt(comboBox1.getSelectedIndex())); } }); // Panel for Results from SEARCH operation JPanel searchResultPanel = new JPanel(); searchResultPanel.add(searchResult); // Adding scrollpane to the panel JScrollPane jsp2 = new JScrollPane(searchResultPanel); searchPanelParent.add(jsp2, BorderLayout.CENTER); /** * PANEL FOR SORT TAB */ JPanel sortPanel = new JPanel(new FlowLayout(FlowLayout.CENTER, 50, 50)); JLabel selectFieldLabel = new JLabel("Select Field to Sort By: "); // Creating combo box with options to sort by String fields[] = { "Ships in Port's Que", "Ships in Port", "Person", "Dock", "Job" }; JComboBox fieldText = new JComboBox(fields); fieldText.setSelectedIndex(2); JLabel selectAttributeLabel = 
new JLabel("Select attribute to Sort by: "); // Create combo box with default Sorting attribute i.e. name JComboBox attrText = new JComboBox(); attrText.addItem("Name"); // Implement combo box change listener for Sort Field selection fieldText.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { // Get the selected Field to sort by String item = (String) fieldText.getSelectedItem(); if (item.equals("Ships in Port's Que") || item.equals("Ships in Port")) { // If Ships sort is selected, then Sorting can be done by // various // attributes as mentioned below. Default option is Name for // other // fields attrText.removeAllItems(); // Attribute - 1 - weight, 2- length, 3-width, 4- draft, 5 // -Name String[] arr = { "Weight", "Length", "Width", "Draft", "Name", "All (in sequence)" }; for (int i = 0; i < arr.length; i++) { attrText.addItem(arr[i]); } } else { // Sort by name only for other fields attrText.removeAllItems(); attrText.addItem("Name"); } } }); // Add Buttong for SORT JButton sortButton = new JButton("Sort Data"); // Implement Buttong listener for SORT button sortButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { // Call event handler method with Selected field to sort and // attribute // by which sorting need to be done eventHandlerSortButton((String) fieldText.getSelectedItem(), attrText.getSelectedIndex()); } }); // Add GUI elements to the SORT panel sortPanel.add(selectFieldLabel); sortPanel.add(fieldText); sortPanel.add(selectAttributeLabel); sortPanel.add(attrText); sortPanel.add(sortButton); /** * PANEL FOR JOB TAB */ // This will be populated dynamically when jobs are created, will be // passed // as a parameter to the World class jobPanel = new JPanel(new GridLayout(20,20)); JScrollPane scrollPane = new JScrollPane(jobPanel, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED,ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED); /** * GUI element JTREE for TREE TAB */ // 
Creating Tree for TREE tab in the GUI jspTree = new JScrollPane(new JLabel("No data read yet")); /** * TABS for all the Panels/GUI elements for program features */ JTabbedPane tabbedPane = new JTabbedPane(); // Add all the tabs to the tabbedPane tabbedPane.addTab("Text", null, jsp, "Display all the text"); tabbedPane.addTab("Search", null, searchPanelParent, "Search data"); tabbedPane.addTab("Sort", null, sortPanel, "Sort Data"); tabbedPane.addTab("Tree", null, jspTree, "Tree display"); tabbedPane.addTab("Jobs", null, scrollPane, "Jobs"); // Add button panel to the JFRAME add(buttonPanel, BorderLayout.NORTH); // Add TabbedPane to the JFRAME add(tabbedPane, BorderLayout.CENTER); // make jframe visible setVisible(true); } /** * Method to handle SORT feature of the program * * @param field * Field to SORT the data by * @param attribute * Attribute for Field to SORT by */ public void eventHandlerSortButton(String field, int attribute) { // Check if data is uploaded yet, if yes, SORT else return if (world != null) { // Call method to sort the data world.sortData(field, attribute); // Refresh Display data - emulate display button handler eventHandlerDisplayButton(); } } /** * Method to handle SEARCH button click/selection * * @param searchText * Data text to search for * @param selType * Type of data to search for (name, index,skills) */ public void eventHandlerSearchButton(String searchText, String selType) { // Set search result to searchResult text area searchText = searchText.trim(); // Check if data is uploaded yet, if yes, SEARCH, else return message // no data uploaded if (world != null) { // If no search data exists, display message to user if (searchText.equals("")) { searchResult.setText("Please enter target value!"); } else { // Call method to search the data searchResult.setText(world.searchData(searchText, selType)); } } else searchResult.setText("No data uploaded yet"); } /** * Method to upload and read the input file */ public void eventHandlerReadButton() 
{ jobPanel.removeAll(); // Create object of World class to upload data // Pass jobPanel i.e. panel for jobs that is populated dynamically world = new World(jobPanel); } /** * Method to display file contents as TEXT in TEXT tab and TREE in TREE tab */ public void eventHandlerDisplayButton() { // Check if data is uploaded, if yes, display that in the tabs if (world != null) { // Creating a Tree with the data using below method JTree tree = new JTree(world.createNodes("World")); // Add TREE to TREE TAB jspTree.setViewportView(tree); // Display uploaded text in the TEXT TAB jtextArea.setText(world.getAllData()); } else { // If no data read, display the same to user jtextArea.setText("No data uploaded yet"); } } /** * Main method of the program * * @param args * Argument for the program */ public static void main(String args[]) { // Create object of the class to start the program SeaPortProgram newObj = new SeaPortProgram(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.authorization; import java.nio.charset.StandardCharsets; import java.util.Collections; import java.util.HashSet; import java.util.Objects; import java.util.Set; import java.util.UUID; /** * A group that users can belong to. 
*/ public class Group { // TODO rename to UserGroup private final String identifier; private final String name; private final Set<String> users; private Group(final Builder builder) { this.identifier = builder.identifier; this.name = builder.name; this.users = Collections.unmodifiableSet(new HashSet<>(builder.users)); if (this.identifier == null || this.identifier.trim().isEmpty()) { throw new IllegalArgumentException("Identifier can not be null or empty"); } if (this.name == null || this.name.trim().isEmpty()) { throw new IllegalArgumentException("Name can not be null or empty"); } } /** * @return the identifier of the group */ public String getIdentifier() { return identifier; } /** * @return the name of the group */ public String getName() { return name; } /** * @return an unmodifiable set of user identifiers that belong to this group */ public Set<String> getUsers() { return users; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Group other = (Group) obj; return Objects.equals(this.identifier, other.identifier); } @Override public int hashCode() { return Objects.hashCode(this.identifier); } @Override public String toString() { return String.format("identifier[%s], name[%s]", getIdentifier(), getName()); } /** * Builder for creating Groups. */ public static class Builder { private String identifier; private String name; private Set<String> users = new HashSet<>(); private final boolean fromGroup; public Builder() { this.fromGroup = false; } /** * Initializes the builder with the state of the provided group. When using this constructor * the identifier field of the builder can not be changed and will result in an IllegalStateException * if attempting to do so. 
* * @param other the existing access policy to initialize from */ public Builder(final Group other) { if (other == null) { throw new IllegalArgumentException("Provided group can not be null"); } this.identifier = other.getIdentifier(); this.name = other.getName(); this.users.clear(); this.users.addAll(other.getUsers()); this.fromGroup = true; } /** * Sets the identifier of the builder. * * @param identifier the identifier * @return the builder * @throws IllegalStateException if this method is called when this builder was constructed from an existing Group */ public Builder identifier(final String identifier) { if (fromGroup) { throw new IllegalStateException( "Identifier can not be changed when initialized from an existing group"); } this.identifier = identifier; return this; } /** * Sets the identifier of the builder to a random UUID. * * @return the builder * @throws IllegalStateException if this method is called when this builder was constructed from an existing Group */ public Builder identifierGenerateRandom() { if (fromGroup) { throw new IllegalStateException( "Identifier can not be changed when initialized from an existing group"); } this.identifier = UUID.randomUUID().toString(); return this; } /** * Sets the identifier of the builder with a UUID generated from the specified seed string. * * @return the builder * @throws IllegalStateException if this method is called when this builder was constructed from an existing Group */ public Builder identifierGenerateFromSeed(final String seed) { if (fromGroup) { throw new IllegalStateException( "Identifier can not be changed when initialized from an existing group"); } if (seed == null) { throw new IllegalArgumentException("Cannot seed the group identifier with a null value."); } this.identifier = UUID.nameUUIDFromBytes(seed.getBytes(StandardCharsets.UTF_8)).toString(); return this; } /** * Sets the name of the builder. 
* * @param name the name * @return the builder */ public Builder name(final String name) { this.name = name; return this; } /** * Adds all users from the provided set to the builder's set of users. * * @param users a set of users to add * @return the builder */ public Builder addUsers(final Set<String> users) { if (users != null) { this.users.addAll(users); } return this; } /** * Adds the given user to the builder's set of users. * * @param user the user to add * @return the builder */ public Builder addUser(final String user) { if (user != null) { this.users.add(user); } return this; } /** * Removes the given user from the builder's set of users. * * @param user the user to remove * @return the builder */ public Builder removeUser(final String user) { if (user != null) { this.users.remove(user); } return this; } /** * Removes all users from the provided set from the builder's set of users. * * @param users the users to remove * @return the builder */ public Builder removeUsers(final Set<String> users) { if (users != null) { this.users.removeAll(users); } return this; } /** * Clears the builder's set of users so that users is non-null with size 0. * * @return the builder */ public Builder clearUsers() { this.users.clear(); return this; } /** * @return a new Group constructed from the state of the builder */ public Group build() { return new Group(this); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.markup.html.form; import org.apache.wicket.Component; import org.apache.wicket.MarkupContainer; import org.apache.wicket.markup.ComponentTag; import org.apache.wicket.markup.MarkupStream; import org.apache.wicket.markup.WicketTag; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.AutoLabelResolver.AutoLabel; import org.apache.wicket.markup.html.internal.ResponseBufferZone; import org.apache.wicket.markup.parser.XmlTag; import org.apache.wicket.markup.parser.filter.WicketTagIdentifier; import org.apache.wicket.markup.resolver.IComponentResolver; import org.apache.wicket.model.IModel; import org.apache.wicket.model.LoadableDetachableModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.StringResourceModel; import org.apache.wicket.request.cycle.RequestCycle; import org.apache.wicket.util.string.Strings; /** * Resolver that provides the <code>{@literal <wicket:label>}</code> tag, which will output a * FormComponent's {@link FormComponent#getLabel() label} without requiring a manual extra component * such as {@link Label} or {@link FormComponentLabel}. 
*
 * <code>{@literal <wicket:label>}</code> can be used
 * <ul>
 * <li>together with <code>{@literal <label wicket:for="...">}</code>:
 *
 * <pre>
 * {@literal
 * <label wicket:for="myFormComponent">some other markup, optionally<wicket:label/></label>
 * }
 * </pre>
 *
 * </li>
 * <li>standalone, with a <code>for</code> attribute:
 *
 * <pre>
 * {@literal
 * <wicket:label for="myFormComponent"/>
 * }
 * </pre>
 *
 * </li>
 * </ul>
 * <p>
 * It also supports both input and output:
 * <ul>
 * <li>If the FormComponent has a label model, the <code>{@literal <wicket:label>}</code> tag will
 * be replaced by the contents of that label.</li>
 * <li>If the FormComponent's label model is null, it can be picked up from
 * <code>{@literal <wicket:label>}</code>:
 * <ul>
 * <li><code>{@literal <wicket:label>}</code> can contain some raw markup, like this:
 *
 * <pre>
 * {@literal
 * <wicket:label>I will become the component's label!</wicket:label>
 * }
 * </pre>
 *
 * </li>
 * <li>Or it can be a message pulled from resources, similar to
 * <code>{@literal <wicket:message/>}</code>:
 *
 * <pre>
 * {@literal
 * <wicket:label key="messagekey"/>
 * }
 * </pre>
 *
 * </li>
 * </ul>
 * </li>
 * </ul>
 *
 * @author Carl-Eric Menzel <cmenzel@wicketbuch.de>
 * @author igor
 */
public class AutoLabelTextResolver implements IComponentResolver
{
    static
    {
        // make the markup parser treat <wicket:label> as a well-known wicket tag
        WicketTagIdentifier.registerWellKnownTagName("label");
    }

    /**
     * This is inserted by the resolver to render the label. Wraps the component being
     * labeled and renders only its body (the surrounding tag is suppressed).
     */
    private static class TextLabel extends WebMarkupContainer
    {
        // the component whose label text this TextLabel renders
        private final Component labeled;

        public TextLabel(String id, Component labeled)
        {
            super(id);
            this.labeled = labeled;
            setRenderBodyOnly(true);
        }

        @Override
        protected void onComponentTag(final ComponentTag tag)
        {
            // force an open tag so we always get a body to replace, even for <wicket:label/>
            if (tag.isOpenClose())
            {
                tag.setType(XmlTag.TagType.OPEN);
            }
            super.onComponentTag(tag);
        }

        @Override
        public void onComponentTagBody(final MarkupStream markupStream, final ComponentTag openTag)
        {
            // try and find some form of label content...
            IModel<String> labelModel = findLabelContent(markupStream, openTag);

            // print the label text
            replaceComponentTagBody(markupStream, openTag, labelModel != null
                ? labelModel.getObject() : "");

            // store the label text in FormComponent's label model so its available to errors
            if (labelModel != null)
            {
                if (labeled instanceof FormComponent)
                {
                    FormComponent<?> fc = (FormComponent<?>)labeled;
                    fc.setLabel(labelModel);
                }
                else
                {
                    // if we can't hand off the labelmodel to a component, we have to detach it
                    labelModel.detach();
                }
            }
        }

        /**
         * Searches for label content in priority order: the labeled component's own label
         * provider, the FormComponent's default label resource, the tag's "key" attribute,
         * and finally the raw tag body. Returns null when none yields non-empty text.
         */
        private IModel<String> findLabelContent(final MarkupStream markupStream,
            final ComponentTag tag)
        {
            if (labeled instanceof ILabelProvider)
            {
                // check if the labeled component is a label provider
                // NOTE(review): unchecked cast — assumes the provider's label model is a
                // String model; confirm against ILabelProvider usage elsewhere
                ILabelProvider<String> provider = (ILabelProvider<String>)labeled;
                if (provider.getLabel() != null)
                {
                    if (!Strings.isEmpty(provider.getLabel().getObject()))
                    {
                        return provider.getLabel();
                    }
                }
            }

            // check if the labeled component is a form component
            if (labeled instanceof FormComponent)
            {
                final FormComponent<?> formComponent = (FormComponent<?>)labeled;
                String text = formComponent.getDefaultLabel("wicket:unknown");
                // "wicket:unknown" is the sentinel default — only use the text if a real
                // resource was found
                if (!"wicket:unknown".equals(text) && !Strings.isEmpty(text))
                {
                    // loadable model so the resource is re-resolved (e.g. per locale) on each request
                    return new LoadableDetachableModel<String>()
                    {
                        @Override
                        protected String load()
                        {
                            return formComponent.getDefaultLabel("wicket:unknown");
                        }
                    };
                }
            }

            // check if wicket:label tag has a message key
            {
                String resourceKey = tag.getAttribute("key");
                if (resourceKey != null)
                {
                    String text = labeled.getString(resourceKey);
                    if (!Strings.isEmpty(text))
                    {
                        return new StringResourceModel(resourceKey, labeled, null);
                    }
                }
            }

            // as last resort use the tag body
            {
                // render the tag body into a buffer instead of the live response
                String text = new ResponseBufferZone(RequestCycle.get(), markupStream)
                {
                    @Override
                    protected void executeInsideBufferedZone()
                    {
                        TextLabel.super.onComponentTagBody(markupStream, tag);
                    }
                }.execute().toString();
                if (!Strings.isEmpty(text))
                {
                    return Model.of(text);
                }
            }
            return null;
        }
    }

    @Override
    public Component resolve(MarkupContainer container, MarkupStream markupStream,
        ComponentTag tag)
    {
        if (tag instanceof WicketTag && "label".equals(((WicketTag)tag).getName()))
        {
            // We need to find a FormComponent...
            Component related = null;

            // ...which could be explicitly specified...
            if (tag.getAttribute("for") != null)
            {
                Component component = AutoLabelResolver.findRelatedComponent(container,
                    tag.getAttribute("for"));
                related = component;
            }

            if (related == null)
            {
                // ...or available through an AutoLabel, either directly above us...
                if (container instanceof AutoLabel)
                {
                    related = ((AutoLabel)container).getRelatedComponent();
                }
                if (related == null)
                {
                    // ...or perhaps further up...
                    AutoLabel autoLabel = container.findParent(AutoLabel.class);
                    if (autoLabel != null)
                    {
                        related = autoLabel.getRelatedComponent();
                    }
                }
            }

            if (related == null)
            {
                // ...or it might just not be available.
                throw new IllegalStateException("no related component found for <wicket:label>");
            }
            else
            {
                // ...found the form component, so we can return our label.
                return new TextLabel("label" + container.getPage().getAutoIndex(), related);
            }
        }
        return null;
    }
}
/** * <copyright> * </copyright> * * $Id$ */ package org.wso2.developerstudio.eclipse.gmf.esb.provider; import java.util.Collection; import java.util.List; import org.eclipse.emf.common.notify.AdapterFactory; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.IEditingDomainItemProvider; import org.eclipse.emf.edit.provider.IItemLabelProvider; import org.eclipse.emf.edit.provider.IItemPropertyDescriptor; import org.eclipse.emf.edit.provider.IItemPropertySource; import org.eclipse.emf.edit.provider.IStructuredItemContentProvider; import org.eclipse.emf.edit.provider.ITreeItemContentProvider; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; import org.eclipse.emf.edit.provider.ViewerNotification; import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory; import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage; import org.wso2.developerstudio.eclipse.gmf.esb.SpringMediator; /** * This is the item provider adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.SpringMediator} object. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public class SpringMediatorItemProvider extends MediatorItemProvider { /** * This constructs an instance from a factory and a notifier. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public SpringMediatorItemProvider(AdapterFactory adapterFactory) { super(adapterFactory); } /** * This returns the property descriptors for the adapted class. 
* <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated NOT
 */
public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
    // Hand-edited (@generated NOT): rebuilds the descriptor list on every call
    // instead of caching it — presumably so descriptors reflect the current
    // object state; TODO confirm against the generated base implementation.
    if (itemPropertyDescriptors != null) {
        itemPropertyDescriptors.clear();
    }
    super.getPropertyDescriptors(object);

    addBeanNamePropertyDescriptor(object);
    addConfigurationKeyPropertyDescriptor(object);
    addDescriptionPropertyDescriptor(object);
    return itemPropertyDescriptors;
}

/**
 * This adds a property descriptor for the Bean Name feature.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected void addBeanNamePropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
        (createItemPropertyDescriptor
            (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
             getResourceLocator(),
             getString("_UI_SpringMediator_beanName_feature"),
             getString("_UI_PropertyDescriptor_description", "_UI_SpringMediator_beanName_feature", "_UI_SpringMediator_type"),
             EsbPackage.Literals.SPRING_MEDIATOR__BEAN_NAME,
             true,
             false,
             false,
             ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
             null,
             null));
}

/**
 * This adds a property descriptor for the Configuration Key feature.
 */
protected void addConfigurationKeyPropertyDescriptor(Object object) {
    itemPropertyDescriptors.add
        (createItemPropertyDescriptor
            (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
             getResourceLocator(),
             getString("_UI_SpringMediator_configurationKey_feature"),
             getString("_UI_PropertyDescriptor_description", "_UI_SpringMediator_configurationKey_feature", "_UI_SpringMediator_type"),
             EsbPackage.Literals.SPRING_MEDIATOR__CONFIGURATION_KEY,
             true,
             false,
             false,
             null,
             null,
             null));
}

/**
 * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
 * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
 * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
    // lazily populated once and cached in the inherited childrenFeatures field
    if (childrenFeatures == null) {
        super.getChildrenFeatures(object);
        childrenFeatures.add(EsbPackage.Literals.SPRING_MEDIATOR__CONFIGURATION_KEY);
        childrenFeatures.add(EsbPackage.Literals.SPRING_MEDIATOR__INPUT_CONNECTOR);
        childrenFeatures.add(EsbPackage.Literals.SPRING_MEDIATOR__OUTPUT_CONNECTOR);
    }
    return childrenFeatures;
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected EStructuralFeature getChildFeature(Object object, Object child) {
    // Check the type of the specified child object and return the proper feature to use for
    // adding (see {@link AddCommand}) it as a child.
    return super.getChildFeature(object, child);
}

/**
 * This returns SpringMediator.gif.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object getImage(Object object) {
    return overlayImage(object, getResourceLocator().getImage("full/obj16/SpringMediator"));
}

/**
 * This returns the label text for the adapted class.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public String getText(Object object) {
    // label is the mediator's bean name; fall back to the plain type name when unset
    String label = ((SpringMediator)object).getBeanName();
    return label == null || label.length() == 0 ?
        getString("_UI_SpringMediator_type") :
        getString("_UI_SpringMediator_type") + " " + label;
}

/**
 * This handles model notifications by calling {@link #updateChildren} to update any cached
 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void notifyChanged(Notification notification) {
    updateChildren(notification);

    switch (notification.getFeatureID(SpringMediator.class)) {
        case EsbPackage.SPRING_MEDIATOR__BEAN_NAME:
            // attribute change: refresh labels only (content=false, label=true)
            fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
            return;
        case EsbPackage.SPRING_MEDIATOR__CONFIGURATION_KEY:
        case EsbPackage.SPRING_MEDIATOR__INPUT_CONNECTOR:
        case EsbPackage.SPRING_MEDIATOR__OUTPUT_CONNECTOR:
            // structural change: refresh content but not labels (content=true, label=false)
            fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
            return;
    }
    super.notifyChanged(notification);
}

/**
 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
 * that can be created under this object.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
    super.collectNewChildDescriptors(newChildDescriptors, object);

    newChildDescriptors.add
        (createChildParameter
            (EsbPackage.Literals.SPRING_MEDIATOR__CONFIGURATION_KEY,
             EsbFactory.eINSTANCE.createRegistryKeyProperty()));

    newChildDescriptors.add
        (createChildParameter
            (EsbPackage.Literals.SPRING_MEDIATOR__INPUT_CONNECTOR,
             EsbFactory.eINSTANCE.createSpringMediatorInputConnector()));

    newChildDescriptors.add
        (createChildParameter
            (EsbPackage.Literals.SPRING_MEDIATOR__OUTPUT_CONNECTOR,
             EsbFactory.eINSTANCE.createSpringMediatorOutputConnector()));
}
}
package net.minecraft.block; import java.util.List; import java.util.Random; import net.minecraft.block.material.MapColor; import net.minecraft.block.material.Material; import net.minecraft.block.properties.IProperty; import net.minecraft.block.state.BlockState; import net.minecraft.block.state.IBlockState; import net.minecraft.creativetab.CreativeTabs; import net.minecraft.enchantment.EnchantmentHelper; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityLivingBase; import net.minecraft.entity.item.EntityItem; import net.minecraft.entity.item.EntityXPOrb; import net.minecraft.entity.player.Player; import net.minecraft.item.Item; import net.minecraft.item.ItemBlock; import net.minecraft.item.ItemStack; import net.minecraft.stats.StatList; import net.minecraft.tileentity.TileEntity; import net.minecraft.util.AxisAlignedBB; import net.minecraft.util.BlockPos; import net.minecraft.util.EnumFacing; import net.minecraft.util.MovingObjectPosition; import net.minecraft.util.ObjectIntIdentityMap; import net.minecraft.util.RegistryNamespacedDefaultedByKey; import net.minecraft.util.ResourceLocation; import net.minecraft.util.StatCollector; import net.minecraft.util.Vec3; import net.minecraft.world.Explosion; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; public class Block { /** ResourceLocation for the Air block */ private static final ResourceLocation AIR_ID = new ResourceLocation("air"); public static final RegistryNamespacedDefaultedByKey<ResourceLocation, Block> blockRegistry = new RegistryNamespacedDefaultedByKey(AIR_ID); public static final ObjectIntIdentityMap<IBlockState> BLOCK_STATE_IDS = new ObjectIntIdentityMap(); private CreativeTabs displayOnCreativeTab; public static final Block.SoundType soundTypeStone = new Block.SoundType("stone", 1.0F, 1.0F); /** the wood sound type */ public static final Block.SoundType soundTypeWood = new Block.SoundType("wood", 1.0F, 1.0F); /** the gravel sound type */ public static final 
Block.SoundType soundTypeGravel = new Block.SoundType("gravel", 1.0F, 1.0F); public static final Block.SoundType soundTypeGrass = new Block.SoundType("grass", 1.0F, 1.0F); public static final Block.SoundType soundTypePiston = new Block.SoundType("stone", 1.0F, 1.0F); public static final Block.SoundType soundTypeMetal = new Block.SoundType("stone", 1.0F, 1.5F); public static final Block.SoundType soundTypeGlass = new Block.SoundType("stone", 1.0F, 1.0F) { public String getBreakSound() { return "dig.glass"; } public String getPlaceSound() { return "step.stone"; } }; public static final Block.SoundType soundTypeCloth = new Block.SoundType("cloth", 1.0F, 1.0F); public static final Block.SoundType soundTypeSand = new Block.SoundType("sand", 1.0F, 1.0F); public static final Block.SoundType soundTypeSnow = new Block.SoundType("snow", 1.0F, 1.0F); public static final Block.SoundType soundTypeLadder = new Block.SoundType("ladder", 1.0F, 1.0F) { public String getBreakSound() { return "dig.wood"; } }; public static final Block.SoundType soundTypeAnvil = new Block.SoundType("anvil", 0.3F, 1.0F) { public String getBreakSound() { return "dig.stone"; } public String getPlaceSound() { return "random.anvil_land"; } }; public static final Block.SoundType SLIME_SOUND = new Block.SoundType("slime", 1.0F, 1.0F) { public String getBreakSound() { return "mob.slime.big"; } public String getPlaceSound() { return "mob.slime.big"; } public String getStepSound() { return "mob.slime.small"; } }; protected boolean fullBlock; /** How much light is subtracted for going through this block */ protected int lightOpacity; protected boolean translucent; /** Amount of light emitted */ protected int lightValue; /** * Flag if block should use the brightest neighbor light value as its own */ protected boolean useNeighborBrightness; /** Indicates how many hits it takes to break a block. 
*/ protected float blockHardness; /** Indicates how much this block can resist explosions */ protected float blockResistance; protected boolean enableStats; /** * Flags whether or not this block is of a type that needs random ticking. Ref-counted by ExtendedBlockStorage in * order to broadly cull a chunk from the random chunk update list for efficiency's sake. */ protected boolean needsRandomTick; /** true if the Block contains a Tile Entity */ protected boolean isBlockContainer; protected double minX; protected double minY; protected double minZ; protected double maxX; protected double maxY; protected double maxZ; /** Sound of stepping on the block */ public Block.SoundType stepSound; public float blockParticleGravity; protected final Material blockMaterial; protected final MapColor field_181083_K; /** * Determines how much velocity is maintained while moving on top of this block */ public float slipperiness; protected final BlockState blockState; private IBlockState defaultBlockState; private String unlocalizedName; public static int getIdFromBlock(Block blockIn) { return blockRegistry.getIDForObject(blockIn); } /** * Get a unique ID for the given BlockState, containing both BlockID and metadata */ public static int getStateId(IBlockState state) { Block block = state.getBlock(); return getIdFromBlock(block) + (block.getMetaFromState(state) << 12); } public static Block getBlockById(int id) { return (Block)blockRegistry.getObjectById(id); } /** * Get a BlockState by it's ID (see getStateId) */ public static IBlockState getStateById(int id) { int i = id & 4095; int j = id >> 12 & 15; return getBlockById(i).getStateFromMeta(j); } public static Block getBlockFromItem(Item itemIn) { return itemIn instanceof ItemBlock ? 
((ItemBlock)itemIn).getBlock() : null; } public static Block getBlockFromName(String name) { ResourceLocation resourcelocation = new ResourceLocation(name); if (blockRegistry.containsKey(resourcelocation)) { return (Block)blockRegistry.getObject(resourcelocation); } else { try { return (Block)blockRegistry.getObjectById(Integer.parseInt(name)); } catch (NumberFormatException var3) { return null; } } } public boolean isFullBlock() { return this.fullBlock; } public int getLightOpacity() { return this.lightOpacity; } public int getLightValue() { return this.lightValue; } /** * Should block use the brightest neighbor light value as its own */ public boolean getUseNeighborBrightness() { return this.useNeighborBrightness; } /** * Get a material of block */ public Material getMaterial() { return this.blockMaterial; } /** * Get the MapColor for this Block and the given BlockState */ public MapColor getMapColor(IBlockState state) { return this.field_181083_K; } /** * Convert the given metadata into a BlockState for this Block */ public IBlockState getStateFromMeta(int meta) { return this.getDefaultState(); } /** * Convert the BlockState into the correct metadata value */ public int getMetaFromState(IBlockState state) { if (state != null && !state.getPropertyNames().isEmpty()) { throw new IllegalArgumentException("Don\'t know how to convert " + state + " back into data..."); } else { return 0; } } /** * Get the actual Block state of this Block at the given position. This applies properties not visible in the * metadata, such as fence connections. 
*/ public IBlockState getActualState(IBlockState state, IBlockAccess worldIn, BlockPos pos) { return state; } public Block(Material p_i46399_1_, MapColor p_i46399_2_) { this.enableStats = true; this.stepSound = soundTypeStone; this.blockParticleGravity = 1.0F; this.slipperiness = 0.6F; this.blockMaterial = p_i46399_1_; this.field_181083_K = p_i46399_2_; this.setBlockBounds(0.0F, 0.0F, 0.0F, 1.0F, 1.0F, 1.0F); this.fullBlock = this.isOpaqueCube(); this.lightOpacity = this.isOpaqueCube() ? 255 : 0; this.translucent = !p_i46399_1_.blocksLight(); this.blockState = this.createBlockState(); this.setDefaultState(this.blockState.getBaseState()); } protected Block(Material materialIn) { this(materialIn, materialIn.getMaterialMapColor()); } /** * Sets the footstep sound for the block. Returns the object for convenience in constructing. */ protected Block setStepSound(Block.SoundType sound) { this.stepSound = sound; return this; } /** * Sets how much light is blocked going through this block. Returns the object for convenience in constructing. */ protected Block setLightOpacity(int opacity) { this.lightOpacity = opacity; return this; } /** * Sets the light value that the block emits. Returns resulting block instance for constructing convenience. Args: * level */ protected Block setLightLevel(float value) { this.lightValue = (int)(15.0F * value); return this; } /** * Sets the the blocks resistance to explosions. Returns the object for convenience in constructing. */ protected Block setResistance(float resistance) { this.blockResistance = resistance * 3.0F; return this; } /** * Indicate if a material is a normal solid opaque cube */ public boolean isBlockNormalCube() { return this.blockMaterial.blocksMovement() && this.isFullCube(); } /** * Used for nearly all game logic (non-rendering) purposes. Use Forge-provided isNormalCube(IBlockAccess, BlockPos) * instead. 
*/ public boolean isNormalCube() { return this.blockMaterial.isOpaque() && this.isFullCube() && !this.canProvidePower(); } public boolean isVisuallyOpaque() { return this.blockMaterial.blocksMovement() && this.isFullCube(); } public boolean isFullCube() { return true; } public boolean isPassable(IBlockAccess worldIn, BlockPos pos) { return !this.blockMaterial.blocksMovement(); } /** * The type of render function called. 3 for standard block models, 2 for TESR's, 1 for liquids, -1 is no render */ public int getRenderType() { return 3; } /** * Whether this Block can be replaced directly by other blocks (true for e.g. tall grass) */ public boolean isReplaceable(World worldIn, BlockPos pos) { return false; } /** * Sets how many hits it takes to break a block. */ protected Block setHardness(float hardness) { this.blockHardness = hardness; if (this.blockResistance < hardness * 5.0F) { this.blockResistance = hardness * 5.0F; } return this; } protected Block setBlockUnbreakable() { this.setHardness(-1.0F); return this; } public float getBlockHardness(World worldIn, BlockPos pos) { return this.blockHardness; } /** * Sets whether this block type will receive random update ticks */ protected Block setTickRandomly(boolean shouldTick) { this.needsRandomTick = shouldTick; return this; } /** * Returns whether or not this block is of a type that needs random ticking. Called for ref-counting purposes by * ExtendedBlockStorage in order to broadly cull a chunk from the random chunk update list for efficiency's sake. 
*/ public boolean getTickRandomly() { return this.needsRandomTick; } public boolean hasTileEntity() { return this.isBlockContainer; } protected final void setBlockBounds(float minX, float minY, float minZ, float maxX, float maxY, float maxZ) { this.minX = (double)minX; this.minY = (double)minY; this.minZ = (double)minZ; this.maxX = (double)maxX; this.maxY = (double)maxY; this.maxZ = (double)maxZ; } /** * Whether this Block is solid on the given Side */ public boolean isBlockSolid(IBlockAccess worldIn, BlockPos pos, EnumFacing side) { return worldIn.getBlockState(pos).getBlock().getMaterial().isSolid(); } /** * Add all collision boxes of this Block to the list that intersect with the given mask. */ public void addCollisionBoxesToList(World worldIn, BlockPos pos, IBlockState state, AxisAlignedBB mask, List<AxisAlignedBB> list, Entity collidingEntity) { AxisAlignedBB axisalignedbb = this.getCollisionBoundingBox(worldIn, pos, state); if (axisalignedbb != null && mask.intersectsWith(axisalignedbb)) { list.add(axisalignedbb); } } public AxisAlignedBB getCollisionBoundingBox(World worldIn, BlockPos pos, IBlockState state) { return new AxisAlignedBB((double)pos.getX() + this.minX, (double)pos.getY() + this.minY, (double)pos.getZ() + this.minZ, (double)pos.getX() + this.maxX, (double)pos.getY() + this.maxY, (double)pos.getZ() + this.maxZ); } /** * Used to determine ambient occlusion and culling when rebuilding chunks for render */ public boolean isOpaqueCube() { return true; } public boolean canCollideCheck(IBlockState state, boolean hitIfLiquid) { return this.isCollidable(); } /** * Returns if this block is collidable (only used by Fire). Args: x, y, z */ public boolean isCollidable() { return true; } /** * Called randomly when setTickRandomly is set to true (used by e.g. crops to grow, etc.) 
*/
public void randomTick(World worldIn, BlockPos pos, IBlockState state, Random random) {
    // A random tick is, by default, just an ordinary scheduled tick.
    this.updateTick(worldIn, pos, state, random);
}

public void updateTick(World worldIn, BlockPos pos, IBlockState state, Random rand) {
}

/**
 * Called when a player destroys this Block
 */
public void onBlockDestroyedByPlayer(World worldIn, BlockPos pos, IBlockState state) {
}

/**
 * Called when a neighboring block changes.
 */
public void onNeighborBlockChange(World worldIn, BlockPos pos, IBlockState state, Block neighborBlock) {
}

/**
 * How many world ticks before ticking
 */
public int tickRate(World worldIn) {
    return 10;
}

public void onBlockAdded(World worldIn, BlockPos pos, IBlockState state) {
}

public void breakBlock(World worldIn, BlockPos pos, IBlockState state) {
}

/**
 * Returns the quantity of items to drop on block destruction.
 */
public int quantityDropped(Random random) {
    return 1;
}

/**
 * Get the Item that this Block should drop when harvested.
 */
public Item getItemDropped(IBlockState state, Random rand, int fortune) {
    return Item.getItemFromBlock(this);
}

/**
 * Get the hardness of this Block relative to the ability of the given player
 */
public float getPlayerRelativeBlockHardness(Player playerIn, World worldIn, BlockPos pos) {
    float hardness = this.getBlockHardness(worldIn, pos);
    if (hardness < 0.0F) {
        // Negative hardness means unbreakable: no mining progress at all.
        return 0.0F;
    }
    // Harvestable blocks use the faster divisor (30), non-harvestable the slower one (100).
    float divisor = playerIn.canHarvestBlock(this) ? 30.0F : 100.0F;
    return playerIn.getToolDigEfficiency(this) / hardness / divisor;
}

/**
 * Spawn this Block's drops into the World as EntityItems
 */
public final void dropBlockAsItem(World worldIn, BlockPos pos, IBlockState state, int fortune) {
    this.dropBlockAsItemWithChance(worldIn, pos, state, 1.0F, fortune);
}

/**
 * Spawns this Block's drops into the World as EntityItems.
*/
public void dropBlockAsItemWithChance(World worldIn, BlockPos pos, IBlockState state, float chance, int fortune) {
    if (worldIn.isRemote) {
        return; // Drops are spawned only on the server side.
    }
    int count = this.quantityDroppedWithBonus(fortune, worldIn.rand);
    for (int n = 0; n < count; ++n) {
        // Each potential drop is rolled independently against the supplied chance.
        if (worldIn.rand.nextFloat() > chance) {
            continue;
        }
        Item item = this.getItemDropped(state, worldIn.rand, fortune);
        if (item != null) {
            spawnAsEntity(worldIn, pos, new ItemStack(item, 1, this.damageDropped(state)));
        }
    }
}

/**
 * Spawns the given ItemStack as an EntityItem into the World at the given position
 */
public static void spawnAsEntity(World worldIn, BlockPos pos, ItemStack stack) {
    if (worldIn.isRemote || !worldIn.getGameRules().getBoolean("doTileDrops")) {
        return;
    }
    // Jitter the spawn point inside the block so drops don't stack perfectly.
    float spread = 0.5F;
    double dx = (double)(worldIn.rand.nextFloat() * spread) + (double)(1.0F - spread) * 0.5D;
    double dy = (double)(worldIn.rand.nextFloat() * spread) + (double)(1.0F - spread) * 0.5D;
    double dz = (double)(worldIn.rand.nextFloat() * spread) + (double)(1.0F - spread) * 0.5D;
    EntityItem drop = new EntityItem(worldIn, (double)pos.getX() + dx, (double)pos.getY() + dy, (double)pos.getZ() + dz, stack);
    drop.setDefaultPickupDelay();
    worldIn.spawnEntityInWorld(drop);
}

/**
 * Spawns the given amount of experience into the World as XP orb entities
 */
protected void dropXpOnBlockBreak(World worldIn, BlockPos pos, int amount) {
    if (!worldIn.isRemote) {
        // Spawn orbs at the centre of the block.
        double cx = (double)pos.getX() + 0.5D;
        double cy = (double)pos.getY() + 0.5D;
        double cz = (double)pos.getZ() + 0.5D;
        while (amount > 0) {
            // Split the total into valid orb denominations.
            int orb = EntityXPOrb.getXPSplit(amount);
            amount -= orb;
            worldIn.spawnEntityInWorld(new EntityXPOrb(worldIn, cx, cy, cz, orb));
        }
    }
}

/**
 * Gets the metadata of the item this Block can drop. This method is called when the block gets destroyed. It
 * returns the metadata of the dropped item based on the old metadata of the block.
 */
public int damageDropped(IBlockState state) {
    return 0;
}

/**
 * Returns how much this block can resist explosions from the passed in entity.
*/
public float getExplosionResistance(Entity exploder) {
    // blockResistance is stored pre-scaled; callers receive it divided by 5.
    return this.blockResistance / 5.0F;
}

/**
 * Ray traces through the blocks collision from start vector to end vector returning a ray trace hit.
 */
public MovingObjectPosition collisionRayTrace(World worldIn, BlockPos pos, Vec3 start, Vec3 end) {
    this.setBlockBoundsBasedOnState(worldIn, pos);
    // Work in block-local coordinates: translate the ray so 'pos' is the origin.
    start = start.addVector((double)(-pos.getX()), (double)(-pos.getY()), (double)(-pos.getZ()));
    end = end.addVector((double)(-pos.getX()), (double)(-pos.getY()), (double)(-pos.getZ()));
    // Intersect the ray with each of the six bounding-box planes.
    Vec3 vec3 = start.getIntermediateWithXValue(end, this.minX);
    Vec3 vec31 = start.getIntermediateWithXValue(end, this.maxX);
    Vec3 vec32 = start.getIntermediateWithYValue(end, this.minY);
    Vec3 vec33 = start.getIntermediateWithYValue(end, this.maxY);
    Vec3 vec34 = start.getIntermediateWithZValue(end, this.minZ);
    Vec3 vec35 = start.getIntermediateWithZValue(end, this.maxZ);

    // Discard plane hits that fall outside the corresponding face rectangle.
    if (!this.isVecInsideYZBounds(vec3)) {
        vec3 = null;
    }

    if (!this.isVecInsideYZBounds(vec31)) {
        vec31 = null;
    }

    if (!this.isVecInsideXZBounds(vec32)) {
        vec32 = null;
    }

    if (!this.isVecInsideXZBounds(vec33)) {
        vec33 = null;
    }

    if (!this.isVecInsideXYBounds(vec34)) {
        vec34 = null;
    }

    if (!this.isVecInsideXYBounds(vec35)) {
        vec35 = null;
    }

    // Keep the surviving hit that is closest to the ray start.
    Vec3 vec36 = null;

    if (vec3 != null && (vec36 == null || start.squareDistanceTo(vec3) < start.squareDistanceTo(vec36))) {
        vec36 = vec3;
    }

    if (vec31 != null && (vec36 == null || start.squareDistanceTo(vec31) < start.squareDistanceTo(vec36))) {
        vec36 = vec31;
    }

    if (vec32 != null && (vec36 == null || start.squareDistanceTo(vec32) < start.squareDistanceTo(vec36))) {
        vec36 = vec32;
    }

    if (vec33 != null && (vec36 == null || start.squareDistanceTo(vec33) < start.squareDistanceTo(vec36))) {
        vec36 = vec33;
    }

    if (vec34 != null && (vec36 == null || start.squareDistanceTo(vec34) < start.squareDistanceTo(vec36))) {
        vec36 = vec34;
    }

    if (vec35 != null && (vec36 == null || start.squareDistanceTo(vec35) < start.squareDistanceTo(vec36))) {
        vec36 = vec35;
    }

    if (vec36 == null) {
        return null;
    } else {
        // Derive the hit face from which plane candidate won.
        EnumFacing enumfacing = null;

        if (vec36 == vec3) {
            enumfacing = EnumFacing.WEST;
        }

        if (vec36 == vec31) {
            enumfacing = EnumFacing.EAST;
        }

        if (vec36 == vec32) {
            enumfacing = EnumFacing.DOWN;
        }

        if (vec36 == vec33) {
            enumfacing = EnumFacing.UP;
        }

        if (vec36 == vec34) {
            enumfacing = EnumFacing.NORTH;
        }

        if (vec36 == vec35) {
            enumfacing = EnumFacing.SOUTH;
        }

        // Translate the hit point back into world coordinates.
        return new MovingObjectPosition(vec36.addVector((double)pos.getX(), (double)pos.getY(), (double)pos.getZ()), enumfacing, pos);
    }
}

/**
 * Checks if a vector is within the Y and Z bounds of the block.
 */
private boolean isVecInsideYZBounds(Vec3 point) {
    return point == null ? false : point.yCoord >= this.minY && point.yCoord <= this.maxY && point.zCoord >= this.minZ && point.zCoord <= this.maxZ;
}

/**
 * Checks if a vector is within the X and Z bounds of the block.
 */
private boolean isVecInsideXZBounds(Vec3 point) {
    return point == null ? false : point.xCoord >= this.minX && point.xCoord <= this.maxX && point.zCoord >= this.minZ && point.zCoord <= this.maxZ;
}

/**
 * Checks if a vector is within the X and Y bounds of the block.
 */
private boolean isVecInsideXYBounds(Vec3 point) {
    return point == null ? false : point.xCoord >= this.minX && point.xCoord <= this.maxX && point.yCoord >= this.minY && point.yCoord <= this.maxY;
}

/**
 * Called when this Block is destroyed by an Explosion
 */
public void onBlockDestroyedByExplosion(World worldIn, BlockPos pos, Explosion explosionIn) {
}

public boolean canReplace(World worldIn, BlockPos pos, EnumFacing side, ItemStack stack) {
    return this.canPlaceBlockOnSide(worldIn, pos, side);
}

/**
 * Check whether this Block can be placed on the given side
 */
public boolean canPlaceBlockOnSide(World worldIn, BlockPos pos, EnumFacing side) {
    return this.canPlaceBlockAt(worldIn, pos);
}

public boolean canPlaceBlockAt(World worldIn, BlockPos pos) {
    // Placement is allowed only where the current block's material is replaceable.
    return worldIn.getBlockState(pos).getBlock().blockMaterial.isReplaceable();
}

public boolean onBlockActivated(World worldIn, BlockPos pos, IBlockState state, Player playerIn, EnumFacing side, float hitX, float hitY, float hitZ) {
    return false;
}

/**
 * Triggered whenever an entity collides with this block (enters into the block)
 */
public void onEntityCollidedWithBlock(World worldIn, BlockPos pos, Entity entityIn) {
}

/**
 * Called by ItemBlocks just before a block is actually set in the world, to allow for adjustments to the
 * IBlockstate
 */
public IBlockState onBlockPlaced(World worldIn, BlockPos pos, EnumFacing facing, float hitX, float hitY, float hitZ, int meta, EntityLivingBase placer) {
    return this.getStateFromMeta(meta);
}

public void onBlockClicked(World worldIn, BlockPos pos, Player playerIn) {
}

public Vec3 modifyAcceleration(World worldIn, BlockPos pos, Entity entityIn, Vec3 motion) {
    return motion;
}

public void setBlockBoundsBasedOnState(IBlockAccess worldIn, BlockPos pos) {
}

/**
 * returns the block boundaries minX value
 */
public final double getBlockBoundsMinX() {
    return this.minX;
}

/**
 * returns the block boundaries maxX value
 */
public final double getBlockBoundsMaxX() {
    return this.maxX;
}

/**
 * returns the block boundaries minY value
 */
public final double getBlockBoundsMinY() {
    return this.minY;
}

/**
 * returns the block boundaries maxY value
 */
public final double getBlockBoundsMaxY() {
    return this.maxY;
}

/**
 * returns the block boundaries minZ value
 */
public final double getBlockBoundsMinZ() {
    return this.minZ;
}

/**
 * returns the block boundaries maxZ value
 */
public final double getBlockBoundsMaxZ() {
    return this.maxZ;
}

public int getWeakPower(IBlockAccess worldIn, BlockPos pos, IBlockState state, EnumFacing side) {
    return 0;
}

/**
 * Can this block provide power. Only wire currently seems to have this change based on its state.
 */
public boolean canProvidePower() {
    return false;
}

/**
 * Called When an Entity Collided with the Block
 */
public void onEntityCollidedWithBlock(World worldIn, BlockPos pos, IBlockState state, Entity entityIn) {
}

public int getStrongPower(IBlockAccess worldIn, BlockPos pos, IBlockState state, EnumFacing side) {
    return 0;
}

/**
 * Sets the block's bounds for rendering it as an item
 */
public void setBlockBoundsForItemRender() {
}

public void harvestBlock(World worldIn, Player player, BlockPos pos, IBlockState state, TileEntity te) {
    player.triggerAchievement(StatList.mineBlockStatArray[getIdFromBlock(this)]);
    player.addExhaustion(0.025F);

    if (this.canSilkHarvest() && EnchantmentHelper.getSilkTouchModifier(player)) {
        // Silk touch: drop the block itself rather than its normal drops.
        ItemStack itemstack = this.createStackedBlock(state);

        if (itemstack != null) {
            spawnAsEntity(worldIn, pos, itemstack);
        }
    } else {
        int i = EnchantmentHelper.getFortuneModifier(player);
        this.dropBlockAsItem(worldIn, pos, state, i);
    }
}

protected boolean canSilkHarvest() {
    return this.isFullCube() && !this.isBlockContainer;
}

protected ItemStack createStackedBlock(IBlockState state) {
    int i = 0;
    Item item = Item.getItemFromBlock(this);

    // Only blocks whose item form has subtypes keep their metadata in the stack.
    if (item != null && item.getHasSubtypes()) {
        i = this.getMetaFromState(state);
    }

    return new ItemStack(item, 1, i);
}

/**
 * Get the quantity dropped based on the given fortune level
 */
public int quantityDroppedWithBonus(int fortune, Random random) {
    return this.quantityDropped(random);
}

/**
 * Called by ItemBlocks after a block is set in the world, to allow post-place logic
 */
public void onBlockPlacedBy(World worldIn, BlockPos pos, IBlockState state, EntityLivingBase placer, ItemStack stack) {
}

public boolean func_181623_g() {
    // True when the material is neither solid nor liquid (obfuscated name; exact purpose not visible here).
    return !this.blockMaterial.isSolid() && !this.blockMaterial.isLiquid();
}

public Block setUnlocalizedName(String name) {
    this.unlocalizedName = name;
    return this;
}

/**
 * Gets the localized name of this block. Used for the statistics page.
 */
public String getLocalizedName() {
    return StatCollector.translateToLocal(this.getUnlocalizedName() + ".name");
}

/**
 * Returns the unlocalized name of the block with "tile." appended to the front.
 */
public String getUnlocalizedName() {
    return "tile." + this.unlocalizedName;
}

/**
 * Called on both Client and Server when World#addBlockEvent is called
 */
public boolean onBlockEventReceived(World worldIn, BlockPos pos, IBlockState state, int eventID, int eventParam) {
    return false;
}

/**
 * Return the state of blocks statistics flags - if the block is counted for mined and placed.
 */
public boolean getEnableStats() {
    return this.enableStats;
}

protected Block disableStats() {
    this.enableStats = false;
    return this;
}

public int getMobilityFlag() {
    return this.blockMaterial.getMaterialMobility();
}

/**
 * Block's chance to react to a living entity falling on it.
 */
public void onFallenUpon(World worldIn, BlockPos pos, Entity entityIn, float fallDistance) {
    entityIn.fall(fallDistance, 1.0F);
}

/**
 * Called when an Entity lands on this Block.
This method *must* update motionY because the entity will not do that * on its own */ public void onLanded(World worldIn, Entity entityIn) { entityIn.motionY = 0.0D; } public int getDamageValue(World worldIn, BlockPos pos) { return this.damageDropped(worldIn.getBlockState(pos)); } public Block setCreativeTab(CreativeTabs tab) { this.displayOnCreativeTab = tab; return this; } public void onBlockHarvested(World worldIn, BlockPos pos, IBlockState state, Player player) { } /** * Called similar to random ticks, but only when it is raining. */ public void fillWithRain(World worldIn, BlockPos pos) { } public boolean requiresUpdates() { return true; } /** * Return whether this block can drop from an explosion. */ public boolean canDropFromExplosion(Explosion explosionIn) { return true; } public boolean isAssociatedBlock(Block other) { return this == other; } public static boolean isEqualTo(Block blockIn, Block other) { return blockIn != null && other != null ? (blockIn == other ? true : blockIn.isAssociatedBlock(other)) : false; } public boolean hasComparatorInputOverride() { return false; } public int getComparatorInputOverride(World worldIn, BlockPos pos) { return 0; } protected BlockState createBlockState() { return new BlockState(this, new IProperty[0]); } public BlockState getBlockState() { return this.blockState; } protected final void setDefaultState(IBlockState state) { this.defaultBlockState = state; } public final IBlockState getDefaultState() { return this.defaultBlockState; } public String toString() { return "Block{" + blockRegistry.getNameForObject(this) + "}"; } public static void registerBlocks() { registerBlock(0, AIR_ID, (new BlockAir()).setUnlocalizedName("air")); registerBlock(1, "stone", (new BlockStone()).setHardness(1.5F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("stone")); registerBlock(2, "grass", (new BlockGrass()).setHardness(0.6F).setStepSound(soundTypeGrass).setUnlocalizedName("grass")); registerBlock(3, "dirt", (new 
BlockDirt()).setHardness(0.5F).setStepSound(soundTypeGravel).setUnlocalizedName("dirt")); Block block = (new Block(Material.rock)).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("stonebrick").setCreativeTab(CreativeTabs.tabBlock); registerBlock(4, "cobblestone", block); Block block1 = (new BlockPlanks()).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("wood"); registerBlock(5, "planks", block1); registerBlock(6, "sapling", (new BlockSapling()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("sapling")); registerBlock(7, "bedrock", (new Block(Material.rock)).setBlockUnbreakable().setResistance(6000000.0F).setStepSound(soundTypePiston).setUnlocalizedName("bedrock").disableStats().setCreativeTab(CreativeTabs.tabBlock)); registerBlock(8, "flowing_water", (new BlockDynamicLiquid(Material.water)).setHardness(100.0F).setLightOpacity(3).setUnlocalizedName("water").disableStats()); registerBlock(9, "water", (new BlockStaticLiquid(Material.water)).setHardness(100.0F).setLightOpacity(3).setUnlocalizedName("water").disableStats()); registerBlock(10, "flowing_lava", (new BlockDynamicLiquid(Material.lava)).setHardness(100.0F).setLightLevel(1.0F).setUnlocalizedName("lava").disableStats()); registerBlock(11, "lava", (new BlockStaticLiquid(Material.lava)).setHardness(100.0F).setLightLevel(1.0F).setUnlocalizedName("lava").disableStats()); registerBlock(12, "sand", (new BlockSand()).setHardness(0.5F).setStepSound(soundTypeSand).setUnlocalizedName("sand")); registerBlock(13, "gravel", (new BlockGravel()).setHardness(0.6F).setStepSound(soundTypeGravel).setUnlocalizedName("gravel")); registerBlock(14, "gold_ore", (new BlockOre()).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("oreGold")); registerBlock(15, "iron_ore", (new BlockOre()).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("oreIron")); registerBlock(16, 
"coal_ore", (new BlockOre()).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("oreCoal")); registerBlock(17, "log", (new BlockOldLog()).setUnlocalizedName("log")); registerBlock(18, "leaves", (new BlockOldLeaf()).setUnlocalizedName("leaves")); registerBlock(19, "sponge", (new BlockSponge()).setHardness(0.6F).setStepSound(soundTypeGrass).setUnlocalizedName("sponge")); registerBlock(20, "glass", (new BlockGlass(Material.glass, false)).setHardness(0.3F).setStepSound(soundTypeGlass).setUnlocalizedName("glass")); registerBlock(21, "lapis_ore", (new BlockOre()).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("oreLapis")); registerBlock(22, "lapis_block", (new Block(Material.iron, MapColor.lapisColor)).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("blockLapis").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(23, "dispenser", (new BlockDispenser()).setHardness(3.5F).setStepSound(soundTypePiston).setUnlocalizedName("dispenser")); Block block2 = (new BlockSandStone()).setStepSound(soundTypePiston).setHardness(0.8F).setUnlocalizedName("sandStone"); registerBlock(24, "sandstone", block2); registerBlock(25, "noteblock", (new BlockNote()).setHardness(0.8F).setUnlocalizedName("musicBlock")); registerBlock(26, "bed", (new BlockBed()).setStepSound(soundTypeWood).setHardness(0.2F).setUnlocalizedName("bed").disableStats()); registerBlock(27, "golden_rail", (new BlockRailPowered()).setHardness(0.7F).setStepSound(soundTypeMetal).setUnlocalizedName("goldenRail")); registerBlock(28, "detector_rail", (new BlockRailDetector()).setHardness(0.7F).setStepSound(soundTypeMetal).setUnlocalizedName("detectorRail")); registerBlock(29, "sticky_piston", (new BlockPistonBase(true)).setUnlocalizedName("pistonStickyBase")); registerBlock(30, "web", (new BlockWeb()).setLightOpacity(1).setHardness(4.0F).setUnlocalizedName("web")); registerBlock(31, "tallgrass", (new 
BlockTallGrass()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("tallgrass")); registerBlock(32, "deadbush", (new BlockDeadBush()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("deadbush")); registerBlock(33, "piston", (new BlockPistonBase(false)).setUnlocalizedName("pistonBase")); registerBlock(34, "piston_head", (new BlockPistonExtension()).setUnlocalizedName("pistonBase")); registerBlock(35, "wool", (new BlockColored(Material.cloth)).setHardness(0.8F).setStepSound(soundTypeCloth).setUnlocalizedName("cloth")); registerBlock(36, "piston_extension", new BlockPistonMoving()); registerBlock(37, "yellow_flower", (new BlockYellowFlower()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("flower1")); registerBlock(38, "red_flower", (new BlockRedFlower()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("flower2")); Block block3 = (new BlockMushroom()).setHardness(0.0F).setStepSound(soundTypeGrass).setLightLevel(0.125F).setUnlocalizedName("mushroom"); registerBlock(39, "brown_mushroom", block3); Block block4 = (new BlockMushroom()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("mushroom"); registerBlock(40, "red_mushroom", block4); registerBlock(41, "gold_block", (new Block(Material.iron, MapColor.goldColor)).setHardness(3.0F).setResistance(10.0F).setStepSound(soundTypeMetal).setUnlocalizedName("blockGold").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(42, "iron_block", (new Block(Material.iron, MapColor.ironColor)).setHardness(5.0F).setResistance(10.0F).setStepSound(soundTypeMetal).setUnlocalizedName("blockIron").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(43, "double_stone_slab", (new BlockDoubleStoneSlab()).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("stoneSlab")); registerBlock(44, "stone_slab", (new 
BlockHalfStoneSlab()).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("stoneSlab")); Block block5 = (new Block(Material.rock, MapColor.redColor)).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("brick").setCreativeTab(CreativeTabs.tabBlock); registerBlock(45, "brick_block", block5); registerBlock(46, "tnt", (new BlockTNT()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("tnt")); registerBlock(47, "bookshelf", (new BlockBookshelf()).setHardness(1.5F).setStepSound(soundTypeWood).setUnlocalizedName("bookshelf")); registerBlock(48, "mossy_cobblestone", (new Block(Material.rock)).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("stoneMoss").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(49, "obsidian", (new BlockObsidian()).setHardness(50.0F).setResistance(2000.0F).setStepSound(soundTypePiston).setUnlocalizedName("obsidian")); registerBlock(50, "torch", (new BlockTorch()).setHardness(0.0F).setLightLevel(0.9375F).setStepSound(soundTypeWood).setUnlocalizedName("torch")); registerBlock(51, "fire", (new BlockFire()).setHardness(0.0F).setLightLevel(1.0F).setStepSound(soundTypeCloth).setUnlocalizedName("fire").disableStats()); registerBlock(52, "mob_spawner", (new BlockMobSpawner()).setHardness(5.0F).setStepSound(soundTypeMetal).setUnlocalizedName("mobSpawner").disableStats()); registerBlock(53, "oak_stairs", (new BlockStairs(block1.getDefaultState().withProperty(BlockPlanks.VARIANT, BlockPlanks.EnumType.OAK))).setUnlocalizedName("stairsWood")); registerBlock(54, "chest", (new BlockChest(0)).setHardness(2.5F).setStepSound(soundTypeWood).setUnlocalizedName("chest")); registerBlock(55, "redstone_wire", (new BlockRedstoneWire()).setHardness(0.0F).setStepSound(soundTypeStone).setUnlocalizedName("redstoneDust").disableStats()); registerBlock(56, "diamond_ore", (new 
BlockOre()).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("oreDiamond")); registerBlock(57, "diamond_block", (new Block(Material.iron, MapColor.diamondColor)).setHardness(5.0F).setResistance(10.0F).setStepSound(soundTypeMetal).setUnlocalizedName("blockDiamond").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(58, "crafting_table", (new BlockWorkbench()).setHardness(2.5F).setStepSound(soundTypeWood).setUnlocalizedName("workbench")); registerBlock(59, "wheat", (new BlockCrops()).setUnlocalizedName("crops")); Block block6 = (new BlockFarmland()).setHardness(0.6F).setStepSound(soundTypeGravel).setUnlocalizedName("farmland"); registerBlock(60, "farmland", block6); registerBlock(61, "furnace", (new BlockFurnace(false)).setHardness(3.5F).setStepSound(soundTypePiston).setUnlocalizedName("furnace").setCreativeTab(CreativeTabs.tabDecorations)); registerBlock(62, "lit_furnace", (new BlockFurnace(true)).setHardness(3.5F).setStepSound(soundTypePiston).setLightLevel(0.875F).setUnlocalizedName("furnace")); registerBlock(63, "standing_sign", (new BlockStandingSign()).setHardness(1.0F).setStepSound(soundTypeWood).setUnlocalizedName("sign").disableStats()); registerBlock(64, "wooden_door", (new BlockDoor(Material.wood)).setHardness(3.0F).setStepSound(soundTypeWood).setUnlocalizedName("doorOak").disableStats()); registerBlock(65, "ladder", (new BlockLadder()).setHardness(0.4F).setStepSound(soundTypeLadder).setUnlocalizedName("ladder")); registerBlock(66, "rail", (new BlockRail()).setHardness(0.7F).setStepSound(soundTypeMetal).setUnlocalizedName("rail")); registerBlock(67, "stone_stairs", (new BlockStairs(block.getDefaultState())).setUnlocalizedName("stairsStone")); registerBlock(68, "wall_sign", (new BlockWallSign()).setHardness(1.0F).setStepSound(soundTypeWood).setUnlocalizedName("sign").disableStats()); registerBlock(69, "lever", (new BlockLever()).setHardness(0.5F).setStepSound(soundTypeWood).setUnlocalizedName("lever")); 
registerBlock(70, "stone_pressure_plate", (new BlockPressurePlate(Material.rock, BlockPressurePlate.Sensitivity.MOBS)).setHardness(0.5F).setStepSound(soundTypePiston).setUnlocalizedName("pressurePlateStone")); registerBlock(71, "iron_door", (new BlockDoor(Material.iron)).setHardness(5.0F).setStepSound(soundTypeMetal).setUnlocalizedName("doorIron").disableStats()); registerBlock(72, "wooden_pressure_plate", (new BlockPressurePlate(Material.wood, BlockPressurePlate.Sensitivity.EVERYTHING)).setHardness(0.5F).setStepSound(soundTypeWood).setUnlocalizedName("pressurePlateWood")); registerBlock(73, "redstone_ore", (new BlockRedstoneOre(false)).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("oreRedstone").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(74, "lit_redstone_ore", (new BlockRedstoneOre(true)).setLightLevel(0.625F).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("oreRedstone")); registerBlock(75, "unlit_redstone_torch", (new BlockRedstoneTorch(false)).setHardness(0.0F).setStepSound(soundTypeWood).setUnlocalizedName("notGate")); registerBlock(76, "redstone_torch", (new BlockRedstoneTorch(true)).setHardness(0.0F).setLightLevel(0.5F).setStepSound(soundTypeWood).setUnlocalizedName("notGate").setCreativeTab(CreativeTabs.tabRedstone)); registerBlock(77, "stone_button", (new BlockButtonStone()).setHardness(0.5F).setStepSound(soundTypePiston).setUnlocalizedName("button")); registerBlock(78, "snow_layer", (new BlockSnow()).setHardness(0.1F).setStepSound(soundTypeSnow).setUnlocalizedName("snow").setLightOpacity(0)); registerBlock(79, "ice", (new BlockIce()).setHardness(0.5F).setLightOpacity(3).setStepSound(soundTypeGlass).setUnlocalizedName("ice")); registerBlock(80, "snow", (new BlockSnowBlock()).setHardness(0.2F).setStepSound(soundTypeSnow).setUnlocalizedName("snow")); registerBlock(81, "cactus", (new 
BlockCactus()).setHardness(0.4F).setStepSound(soundTypeCloth).setUnlocalizedName("cactus")); registerBlock(82, "clay", (new BlockClay()).setHardness(0.6F).setStepSound(soundTypeGravel).setUnlocalizedName("clay")); registerBlock(83, "reeds", (new BlockReed()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("reeds").disableStats()); registerBlock(84, "jukebox", (new BlockJukebox()).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("jukebox")); registerBlock(85, "fence", (new BlockFence(Material.wood, BlockPlanks.EnumType.OAK.func_181070_c())).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("fence")); Block block7 = (new BlockPumpkin()).setHardness(1.0F).setStepSound(soundTypeWood).setUnlocalizedName("pumpkin"); registerBlock(86, "pumpkin", block7); registerBlock(87, "netherrack", (new BlockNetherrack()).setHardness(0.4F).setStepSound(soundTypePiston).setUnlocalizedName("hellrock")); registerBlock(88, "soul_sand", (new BlockSoulSand()).setHardness(0.5F).setStepSound(soundTypeSand).setUnlocalizedName("hellsand")); registerBlock(89, "glowstone", (new BlockGlowstone(Material.glass)).setHardness(0.3F).setStepSound(soundTypeGlass).setLightLevel(1.0F).setUnlocalizedName("lightgem")); registerBlock(90, "portal", (new BlockPortal()).setHardness(-1.0F).setStepSound(soundTypeGlass).setLightLevel(0.75F).setUnlocalizedName("portal")); registerBlock(91, "lit_pumpkin", (new BlockPumpkin()).setHardness(1.0F).setStepSound(soundTypeWood).setLightLevel(1.0F).setUnlocalizedName("litpumpkin")); registerBlock(92, "cake", (new BlockCake()).setHardness(0.5F).setStepSound(soundTypeCloth).setUnlocalizedName("cake").disableStats()); registerBlock(93, "unpowered_repeater", (new BlockRedstoneRepeater(false)).setHardness(0.0F).setStepSound(soundTypeWood).setUnlocalizedName("diode").disableStats()); registerBlock(94, "powered_repeater", (new 
BlockRedstoneRepeater(true)).setHardness(0.0F).setStepSound(soundTypeWood).setUnlocalizedName("diode").disableStats()); registerBlock(95, "stained_glass", (new BlockStainedGlass(Material.glass)).setHardness(0.3F).setStepSound(soundTypeGlass).setUnlocalizedName("stainedGlass")); registerBlock(96, "trapdoor", (new BlockTrapDoor(Material.wood)).setHardness(3.0F).setStepSound(soundTypeWood).setUnlocalizedName("trapdoor").disableStats()); registerBlock(97, "monster_egg", (new BlockSilverfish()).setHardness(0.75F).setUnlocalizedName("monsterStoneEgg")); Block block8 = (new BlockStoneBrick()).setHardness(1.5F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("stonebricksmooth"); registerBlock(98, "stonebrick", block8); registerBlock(99, "brown_mushroom_block", (new BlockHugeMushroom(Material.wood, MapColor.dirtColor, block3)).setHardness(0.2F).setStepSound(soundTypeWood).setUnlocalizedName("mushroom")); registerBlock(100, "red_mushroom_block", (new BlockHugeMushroom(Material.wood, MapColor.redColor, block4)).setHardness(0.2F).setStepSound(soundTypeWood).setUnlocalizedName("mushroom")); registerBlock(101, "iron_bars", (new BlockPane(Material.iron, true)).setHardness(5.0F).setResistance(10.0F).setStepSound(soundTypeMetal).setUnlocalizedName("fenceIron")); registerBlock(102, "glass_pane", (new BlockPane(Material.glass, false)).setHardness(0.3F).setStepSound(soundTypeGlass).setUnlocalizedName("thinGlass")); Block block9 = (new BlockMelon()).setHardness(1.0F).setStepSound(soundTypeWood).setUnlocalizedName("melon"); registerBlock(103, "melon_block", block9); registerBlock(104, "pumpkin_stem", (new BlockStem(block7)).setHardness(0.0F).setStepSound(soundTypeWood).setUnlocalizedName("pumpkinStem")); registerBlock(105, "melon_stem", (new BlockStem(block9)).setHardness(0.0F).setStepSound(soundTypeWood).setUnlocalizedName("pumpkinStem")); registerBlock(106, "vine", (new BlockVine()).setHardness(0.2F).setStepSound(soundTypeGrass).setUnlocalizedName("vine")); 
registerBlock(107, "fence_gate", (new BlockFenceGate(BlockPlanks.EnumType.OAK)).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("fenceGate")); registerBlock(108, "brick_stairs", (new BlockStairs(block5.getDefaultState())).setUnlocalizedName("stairsBrick")); registerBlock(109, "stone_brick_stairs", (new BlockStairs(block8.getDefaultState().withProperty(BlockStoneBrick.VARIANT, BlockStoneBrick.EnumType.DEFAULT))).setUnlocalizedName("stairsStoneBrickSmooth")); registerBlock(110, "mycelium", (new BlockMycelium()).setHardness(0.6F).setStepSound(soundTypeGrass).setUnlocalizedName("mycel")); registerBlock(111, "waterlily", (new BlockLilyPad()).setHardness(0.0F).setStepSound(soundTypeGrass).setUnlocalizedName("waterlily")); Block block10 = (new BlockNetherBrick()).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("netherBrick").setCreativeTab(CreativeTabs.tabBlock); registerBlock(112, "nether_brick", block10); registerBlock(113, "nether_brick_fence", (new BlockFence(Material.rock, MapColor.netherrackColor)).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("netherFence")); registerBlock(114, "nether_brick_stairs", (new BlockStairs(block10.getDefaultState())).setUnlocalizedName("stairsNetherBrick")); registerBlock(115, "nether_wart", (new BlockNetherWart()).setUnlocalizedName("netherStalk")); registerBlock(116, "enchanting_table", (new BlockEnchantmentTable()).setHardness(5.0F).setResistance(2000.0F).setUnlocalizedName("enchantmentTable")); registerBlock(117, "brewing_stand", (new BlockBrewingStand()).setHardness(0.5F).setLightLevel(0.125F).setUnlocalizedName("brewingStand")); registerBlock(118, "cauldron", (new BlockCauldron()).setHardness(2.0F).setUnlocalizedName("cauldron")); registerBlock(119, "end_portal", (new BlockEndPortal(Material.portal)).setHardness(-1.0F).setResistance(6000000.0F)); registerBlock(120, "end_portal_frame", (new 
BlockEndPortalFrame()).setStepSound(soundTypeGlass).setLightLevel(0.125F).setHardness(-1.0F).setUnlocalizedName("endPortalFrame").setResistance(6000000.0F).setCreativeTab(CreativeTabs.tabDecorations)); registerBlock(121, "end_stone", (new Block(Material.rock, MapColor.sandColor)).setHardness(3.0F).setResistance(15.0F).setStepSound(soundTypePiston).setUnlocalizedName("whiteStone").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(122, "dragon_egg", (new BlockDragonEgg()).setHardness(3.0F).setResistance(15.0F).setStepSound(soundTypePiston).setLightLevel(0.125F).setUnlocalizedName("dragonEgg")); registerBlock(123, "redstone_lamp", (new BlockRedstoneLight(false)).setHardness(0.3F).setStepSound(soundTypeGlass).setUnlocalizedName("redstoneLight").setCreativeTab(CreativeTabs.tabRedstone)); registerBlock(124, "lit_redstone_lamp", (new BlockRedstoneLight(true)).setHardness(0.3F).setStepSound(soundTypeGlass).setUnlocalizedName("redstoneLight")); registerBlock(125, "double_wooden_slab", (new BlockDoubleWoodSlab()).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("woodSlab")); registerBlock(126, "wooden_slab", (new BlockHalfWoodSlab()).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("woodSlab")); registerBlock(127, "cocoa", (new BlockCocoa()).setHardness(0.2F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("cocoa")); registerBlock(128, "sandstone_stairs", (new BlockStairs(block2.getDefaultState().withProperty(BlockSandStone.TYPE, BlockSandStone.EnumType.SMOOTH))).setUnlocalizedName("stairsSandStone")); registerBlock(129, "emerald_ore", (new BlockOre()).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("oreEmerald")); registerBlock(130, "ender_chest", (new BlockEnderChest()).setHardness(22.5F).setResistance(1000.0F).setStepSound(soundTypePiston).setUnlocalizedName("enderChest").setLightLevel(0.5F)); registerBlock(131, "tripwire_hook", (new 
BlockTripWireHook()).setUnlocalizedName("tripWireSource")); registerBlock(132, "tripwire", (new BlockTripWire()).setUnlocalizedName("tripWire")); registerBlock(133, "emerald_block", (new Block(Material.iron, MapColor.emeraldColor)).setHardness(5.0F).setResistance(10.0F).setStepSound(soundTypeMetal).setUnlocalizedName("blockEmerald").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(134, "spruce_stairs", (new BlockStairs(block1.getDefaultState().withProperty(BlockPlanks.VARIANT, BlockPlanks.EnumType.SPRUCE))).setUnlocalizedName("stairsWoodSpruce")); registerBlock(135, "birch_stairs", (new BlockStairs(block1.getDefaultState().withProperty(BlockPlanks.VARIANT, BlockPlanks.EnumType.BIRCH))).setUnlocalizedName("stairsWoodBirch")); registerBlock(136, "jungle_stairs", (new BlockStairs(block1.getDefaultState().withProperty(BlockPlanks.VARIANT, BlockPlanks.EnumType.JUNGLE))).setUnlocalizedName("stairsWoodJungle")); registerBlock(137, "command_block", (new BlockCommandBlock()).setBlockUnbreakable().setResistance(6000000.0F).setUnlocalizedName("commandBlock")); registerBlock(138, "beacon", (new BlockBeacon()).setUnlocalizedName("beacon").setLightLevel(1.0F)); registerBlock(139, "cobblestone_wall", (new BlockWall(block)).setUnlocalizedName("cobbleWall")); registerBlock(140, "flower_pot", (new BlockFlowerPot()).setHardness(0.0F).setStepSound(soundTypeStone).setUnlocalizedName("flowerPot")); registerBlock(141, "carrots", (new BlockCarrot()).setUnlocalizedName("carrots")); registerBlock(142, "potatoes", (new BlockPotato()).setUnlocalizedName("potatoes")); registerBlock(143, "wooden_button", (new BlockButtonWood()).setHardness(0.5F).setStepSound(soundTypeWood).setUnlocalizedName("button")); registerBlock(144, "skull", (new BlockSkull()).setHardness(1.0F).setStepSound(soundTypePiston).setUnlocalizedName("skull")); registerBlock(145, "anvil", (new BlockAnvil()).setHardness(5.0F).setStepSound(soundTypeAnvil).setResistance(2000.0F).setUnlocalizedName("anvil")); registerBlock(146, 
"trapped_chest", (new BlockChest(1)).setHardness(2.5F).setStepSound(soundTypeWood).setUnlocalizedName("chestTrap")); registerBlock(147, "light_weighted_pressure_plate", (new BlockPressurePlateWeighted(Material.iron, 15, MapColor.goldColor)).setHardness(0.5F).setStepSound(soundTypeWood).setUnlocalizedName("weightedPlate_light")); registerBlock(148, "heavy_weighted_pressure_plate", (new BlockPressurePlateWeighted(Material.iron, 150)).setHardness(0.5F).setStepSound(soundTypeWood).setUnlocalizedName("weightedPlate_heavy")); registerBlock(149, "unpowered_comparator", (new BlockRedstoneComparator(false)).setHardness(0.0F).setStepSound(soundTypeWood).setUnlocalizedName("comparator").disableStats()); registerBlock(150, "powered_comparator", (new BlockRedstoneComparator(true)).setHardness(0.0F).setLightLevel(0.625F).setStepSound(soundTypeWood).setUnlocalizedName("comparator").disableStats()); registerBlock(151, "daylight_detector", new BlockDaylightDetector(false)); registerBlock(152, "redstone_block", (new BlockCompressedPowered(Material.iron, MapColor.tntColor)).setHardness(5.0F).setResistance(10.0F).setStepSound(soundTypeMetal).setUnlocalizedName("blockRedstone").setCreativeTab(CreativeTabs.tabRedstone)); registerBlock(153, "quartz_ore", (new BlockOre(MapColor.netherrackColor)).setHardness(3.0F).setResistance(5.0F).setStepSound(soundTypePiston).setUnlocalizedName("netherquartz")); registerBlock(154, "hopper", (new BlockHopper()).setHardness(3.0F).setResistance(8.0F).setStepSound(soundTypeMetal).setUnlocalizedName("hopper")); Block block11 = (new BlockQuartz()).setStepSound(soundTypePiston).setHardness(0.8F).setUnlocalizedName("quartzBlock"); registerBlock(155, "quartz_block", block11); registerBlock(156, "quartz_stairs", (new BlockStairs(block11.getDefaultState().withProperty(BlockQuartz.VARIANT, BlockQuartz.EnumType.DEFAULT))).setUnlocalizedName("stairsQuartz")); registerBlock(157, "activator_rail", (new 
BlockRailPowered()).setHardness(0.7F).setStepSound(soundTypeMetal).setUnlocalizedName("activatorRail")); registerBlock(158, "dropper", (new BlockDropper()).setHardness(3.5F).setStepSound(soundTypePiston).setUnlocalizedName("dropper")); registerBlock(159, "stained_hardened_clay", (new BlockColored(Material.rock)).setHardness(1.25F).setResistance(7.0F).setStepSound(soundTypePiston).setUnlocalizedName("clayHardenedStained")); registerBlock(160, "stained_glass_pane", (new BlockStainedGlassPane()).setHardness(0.3F).setStepSound(soundTypeGlass).setUnlocalizedName("thinStainedGlass")); registerBlock(161, "leaves2", (new BlockNewLeaf()).setUnlocalizedName("leaves")); registerBlock(162, "log2", (new BlockNewLog()).setUnlocalizedName("log")); registerBlock(163, "acacia_stairs", (new BlockStairs(block1.getDefaultState().withProperty(BlockPlanks.VARIANT, BlockPlanks.EnumType.ACACIA))).setUnlocalizedName("stairsWoodAcacia")); registerBlock(164, "dark_oak_stairs", (new BlockStairs(block1.getDefaultState().withProperty(BlockPlanks.VARIANT, BlockPlanks.EnumType.DARK_OAK))).setUnlocalizedName("stairsWoodDarkOak")); registerBlock(165, "slime", (new BlockSlime()).setUnlocalizedName("slime").setStepSound(SLIME_SOUND)); registerBlock(166, "barrier", (new BlockBarrier()).setUnlocalizedName("barrier")); registerBlock(167, "iron_trapdoor", (new BlockTrapDoor(Material.iron)).setHardness(5.0F).setStepSound(soundTypeMetal).setUnlocalizedName("ironTrapdoor").disableStats()); registerBlock(168, "prismarine", (new BlockPrismarine()).setHardness(1.5F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("prismarine")); registerBlock(169, "sea_lantern", (new BlockSeaLantern(Material.glass)).setHardness(0.3F).setStepSound(soundTypeGlass).setLightLevel(1.0F).setUnlocalizedName("seaLantern")); registerBlock(170, "hay_block", (new BlockHay()).setHardness(0.5F).setStepSound(soundTypeGrass).setUnlocalizedName("hayBlock").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(171, 
"carpet", (new BlockCarpet()).setHardness(0.1F).setStepSound(soundTypeCloth).setUnlocalizedName("woolCarpet").setLightOpacity(0)); registerBlock(172, "hardened_clay", (new BlockHardenedClay()).setHardness(1.25F).setResistance(7.0F).setStepSound(soundTypePiston).setUnlocalizedName("clayHardened")); registerBlock(173, "coal_block", (new Block(Material.rock, MapColor.blackColor)).setHardness(5.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("blockCoal").setCreativeTab(CreativeTabs.tabBlock)); registerBlock(174, "packed_ice", (new BlockPackedIce()).setHardness(0.5F).setStepSound(soundTypeGlass).setUnlocalizedName("icePacked")); registerBlock(175, "double_plant", new BlockDoublePlant()); registerBlock(176, "standing_banner", (new BlockBanner.BlockBannerStanding()).setHardness(1.0F).setStepSound(soundTypeWood).setUnlocalizedName("banner").disableStats()); registerBlock(177, "wall_banner", (new BlockBanner.BlockBannerHanging()).setHardness(1.0F).setStepSound(soundTypeWood).setUnlocalizedName("banner").disableStats()); registerBlock(178, "daylight_detector_inverted", new BlockDaylightDetector(true)); Block block12 = (new BlockRedSandstone()).setStepSound(soundTypePiston).setHardness(0.8F).setUnlocalizedName("redSandStone"); registerBlock(179, "red_sandstone", block12); registerBlock(180, "red_sandstone_stairs", (new BlockStairs(block12.getDefaultState().withProperty(BlockRedSandstone.TYPE, BlockRedSandstone.EnumType.SMOOTH))).setUnlocalizedName("stairsRedSandStone")); registerBlock(181, "double_stone_slab2", (new BlockDoubleStoneSlabNew()).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("stoneSlab2")); registerBlock(182, "stone_slab2", (new BlockHalfStoneSlabNew()).setHardness(2.0F).setResistance(10.0F).setStepSound(soundTypePiston).setUnlocalizedName("stoneSlab2")); registerBlock(183, "spruce_fence_gate", (new 
BlockFenceGate(BlockPlanks.EnumType.SPRUCE)).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("spruceFenceGate")); registerBlock(184, "birch_fence_gate", (new BlockFenceGate(BlockPlanks.EnumType.BIRCH)).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("birchFenceGate")); registerBlock(185, "jungle_fence_gate", (new BlockFenceGate(BlockPlanks.EnumType.JUNGLE)).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("jungleFenceGate")); registerBlock(186, "dark_oak_fence_gate", (new BlockFenceGate(BlockPlanks.EnumType.DARK_OAK)).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("darkOakFenceGate")); registerBlock(187, "acacia_fence_gate", (new BlockFenceGate(BlockPlanks.EnumType.ACACIA)).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("acaciaFenceGate")); registerBlock(188, "spruce_fence", (new BlockFence(Material.wood, BlockPlanks.EnumType.SPRUCE.func_181070_c())).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("spruceFence")); registerBlock(189, "birch_fence", (new BlockFence(Material.wood, BlockPlanks.EnumType.BIRCH.func_181070_c())).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("birchFence")); registerBlock(190, "jungle_fence", (new BlockFence(Material.wood, BlockPlanks.EnumType.JUNGLE.func_181070_c())).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("jungleFence")); registerBlock(191, "dark_oak_fence", (new BlockFence(Material.wood, BlockPlanks.EnumType.DARK_OAK.func_181070_c())).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("darkOakFence")); registerBlock(192, "acacia_fence", (new BlockFence(Material.wood, 
// (continued) completes the acacia_fence registration begun on the previous line.
BlockPlanks.EnumType.ACACIA.func_181070_c())).setHardness(2.0F).setResistance(5.0F).setStepSound(soundTypeWood).setUnlocalizedName("acaciaFence"));
// Wooden door variants (ids 193-197): hardness 3.0, stats disabled (the item form is tracked instead).
registerBlock(193, "spruce_door", (new BlockDoor(Material.wood)).setHardness(3.0F).setStepSound(soundTypeWood).setUnlocalizedName("doorSpruce").disableStats());
registerBlock(194, "birch_door", (new BlockDoor(Material.wood)).setHardness(3.0F).setStepSound(soundTypeWood).setUnlocalizedName("doorBirch").disableStats());
registerBlock(195, "jungle_door", (new BlockDoor(Material.wood)).setHardness(3.0F).setStepSound(soundTypeWood).setUnlocalizedName("doorJungle").disableStats());
registerBlock(196, "acacia_door", (new BlockDoor(Material.wood)).setHardness(3.0F).setStepSound(soundTypeWood).setUnlocalizedName("doorAcacia").disableStats());
registerBlock(197, "dark_oak_door", (new BlockDoor(Material.wood)).setHardness(3.0F).setStepSound(soundTypeWood).setUnlocalizedName("doorDarkOak").disableStats());

// Fail fast if any id/key pair was registered inconsistently.
blockRegistry.validateKey();

// First pass: decide per block whether rendering samples neighbor brightness.
// Stairs, slabs, translucent blocks and fully light-transparent blocks all need it.
for (Block block13 : blockRegistry)
{
    if (block13.blockMaterial == Material.air)
    {
        block13.useNeighborBrightness = false;
    }
    else
    {
        boolean flag = false;
        boolean flag1 = block13 instanceof BlockStairs;
        boolean flag2 = block13 instanceof BlockSlab;
        // NOTE(review): block6 is declared earlier in this method, outside this view — confirm which block it refers to.
        boolean flag3 = block13 == block6;
        boolean flag4 = block13.translucent;
        boolean flag5 = block13.lightOpacity == 0;

        if (flag1 || flag2 || flag3 || flag4 || flag5)
        {
            flag = true;
        }

        block13.useNeighborBrightness = flag;
    }
}

// Second pass: assign every valid block state a packed integer id: (registry id << 4) | metadata.
for (Block block14 : blockRegistry)
{
    for (IBlockState iblockstate : block14.getBlockState().getValidStates())
    {
        int i = blockRegistry.getIDForObject(block14) << 4 | block14.getMetaFromState(iblockstate);
        BLOCK_STATE_IDS.put(iblockstate, i);
    }
}
}

/**
 * Registers a block in the block registry under the given numeric id and
 * resource-location key.
 */
private static void registerBlock(int id, ResourceLocation textualID, Block block_)
{
    blockRegistry.register(id, textualID, block_);
}

/**
 * Convenience overload: wraps the plain string key in a {@link ResourceLocation}
 * before registering.
 */
private static void registerBlock(int id, String textualID, Block block_)
{
    registerBlock(id, new ResourceLocation(textualID), block_);
}

public static class
SoundType
{
    /** Base resource name from which the dig/step sound paths are derived. */
    public final String soundName;

    /** Playback volume for sounds of this type. */
    public final float volume;

    /** Playback pitch ("frequency") for sounds of this type. */
    public final float frequency;

    public SoundType(String baseName, float soundVolume, float soundPitch)
    {
        this.soundName = baseName;
        this.volume = soundVolume;
        this.frequency = soundPitch;
    }

    public float getVolume()
    {
        return volume;
    }

    public float getFrequency()
    {
        return frequency;
    }

    /** Sound resource played when the block is broken: {@code "dig.<name>"}. */
    public String getBreakSound()
    {
        return "dig." + soundName;
    }

    /** Sound resource played when the block is stepped on: {@code "step.<name>"}. */
    public String getStepSound()
    {
        return "step." + soundName;
    }

    /** Placement sound; identical to the break sound. */
    public String getPlaceSound()
    {
        return getBreakSound();
    }
}
}
/*
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jitsi.impl.neomedia.codec.audio.opus;

import java.awt.*;
import java.util.*;

import javax.media.*;
import javax.media.format.*;

import net.sf.fmj.media.*;

import org.jitsi.impl.neomedia.codec.*;
import org.jitsi.impl.neomedia.jmfext.media.renderer.audio.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.libjitsi.*;
import org.jitsi.service.neomedia.codec.*;
import org.jitsi.service.neomedia.control.*;
import org.jitsi.util.*;

/**
 * Implements an Opus encoder. Buffers incoming 16-bit linear PCM into
 * fixed-size frames and feeds them to the native libopus encoder through the
 * {@code Opus} JNI wrapper, producing Opus RTP payloads.
 *
 * @author Boris Grozev
 * @author Lyubomir Marinov
 */
public class JNIEncoder
    extends AbstractCodec2
    implements FormatParametersAwareCodec, PacketLossAwareEncoder
{
    /**
     * The <tt>Logger</tt> used by the <tt>JNIEncoder</tt> class and its
     * instances for logging output.
     */
    private static final Logger logger = Logger.getLogger(JNIEncoder.class);

    /**
     * The list of <tt>Format</tt>s of audio data supported as input by
     * <tt>JNIEncoder</tt> instances. Populated in the static initializer
     * below, one mono 16-bit LINEAR format per supported sample rate.
     */
    private static final Format[] SUPPORTED_INPUT_FORMATS;

    /**
     * The list of sample rates of audio data supported as input by
     * <tt>JNIEncoder</tt> instances.
     * <p>
     * The implementation does support 8, 12, 16, 24 and 48kHz but the lower
     * sample rates are not listed to prevent FMJ from defaulting to them.
     * </p>
     */
    static final double[] SUPPORTED_INPUT_SAMPLE_RATES = new double[] { 48000 };

    /**
     * The list of <tt>Format</tt>s of audio data supported as output by
     * <tt>JNIEncoder</tt> instances: Opus RTP at 48kHz, 2 channels.
     */
    private static final Format[] SUPPORTED_OUTPUT_FORMATS
        = new Format[]
        {
            new AudioFormat(
                    Constants.OPUS_RTP,
                    48000,
                    /* sampleSizeInBits */ Format.NOT_SPECIFIED,
                    2,
                    /* endian */ Format.NOT_SPECIFIED,
                    /* signed */ Format.NOT_SPECIFIED,
                    /* frameSizeInBits */ Format.NOT_SPECIFIED,
                    /* frameRate */ Format.NOT_SPECIFIED,
                    Format.byteArray)
        };

    /**
     * Sets the supported input formats.
     */
    static
    {
        /*
         * If the Opus class or its supporting JNI library are not functional,
         * it is too late to discover the fact in #doOpen() because a JNIEncoder
         * instance has already been initialized and it has already signaled
         * that the Opus codec is supported.
         */
        Opus.assertOpusIsFunctional();

        int supportedInputCount = SUPPORTED_INPUT_SAMPLE_RATES.length;

        SUPPORTED_INPUT_FORMATS = new Format[supportedInputCount];
//        SUPPORTED_INPUT_FORMATS = new Format[supportedInputCount*2];
        for (int i = 0; i < supportedInputCount; i++)
        {
            SUPPORTED_INPUT_FORMATS[i]
                = new AudioFormat(
                        AudioFormat.LINEAR,
                        SUPPORTED_INPUT_SAMPLE_RATES[i],
                        16,
                        1,
                        AbstractAudioRenderer.NATIVE_AUDIO_FORMAT_ENDIAN,
                        AudioFormat.SIGNED,
                        /* frameSizeInBits */ Format.NOT_SPECIFIED,
                        /* frameRate */ Format.NOT_SPECIFIED,
                        Format.byteArray);
        }

        /*
         * Using stereo input formats leads to problems (at least when used with
         * pulse audio). It is unclear whether they are rooted in this encoder
         * or somewhere else in the code. So stereo input formats are disabled
         * until we make sure that they work properly.
         */
//        for (int i = 0; i < supportedInputCount; i++)
//        {
//            SUPPORTED_INPUT_FORMATS[i+supportedInputCount]
//                = new AudioFormat(
//                        AudioFormat.LINEAR,
//                        SUPPORTED_INPUT_SAMPLE_RATES[i],
//                        16,
//                        2,
//                        AbstractAudioRenderer.NATIVE_AUDIO_FORMAT_ENDIAN,
//                        AudioFormat.SIGNED,
//                        /* frameSizeInBits */ Format.NOT_SPECIFIED,
//                        /* frameRate */ Format.NOT_SPECIFIED,
//                        Format.byteArray);
//        }
    }

    /**
     * Codec audio bandwidth, obtained from configuration.
     */
    private int bandwidth;

    /**
     * The bitrate in bits per second obtained from the configuration and set on
     * {@link #encoder}.
     */
    private int bitrate;

    /**
     * Number of channels to use, default to 1.
     * (Stereo input is currently disabled; see the static initializer.)
     */
    private int channels = 1;

    /**
     * Complexity setting, obtained from configuration.
     */
    private int complexity;

    /**
     * The pointer to the native OpusEncoder structure. 0 means "not opened".
     */
    private long encoder = 0;

    /**
     * The size in bytes of an audio frame input by this instance. Automatically
     * calculated, based on {@link #frameSizeInMillis} and the
     * <tt>inputFormat</tt> of this instance.
     */
    private int frameSizeInBytes;

    /**
     * The size/duration in milliseconds of an audio frame output by this
     * instance. The possible values are: 2.5, 5, 10, 20, 40 and 60. The default
     * value is 20.
     */
    private final int frameSizeInMillis = 20;

    /**
     * The size in samples per channel of an audio frame input by this instance.
     * Automatically calculated, based on {@link #frameSizeInMillis} and the
     * <tt>inputFormat</tt> of this instance.
     */
    private int frameSizeInSamplesPerChannel;

    /**
     * The minimum expected packet loss percentage to set to the encoder.
     */
    private int minPacketLoss = 0;

    /**
     * The bytes from an input <tt>Buffer</tt> from a previous call to
     * {@link #process(Buffer, Buffer)} that this <tt>Codec</tt> didn't process
     * because the total number of bytes was less than {@link #inputFrameSize()}
     * need to be prepended to a subsequent input <tt>Buffer</tt> in order to
     * process a total of {@link #inputFrameSize()} bytes.
     */
    private byte[] prevIn = null;

    /**
     * The length of the audio data in {@link #prevIn}.
     */
    private int prevInLength = 0;

    /**
     * Whether to use DTX, obtained from configuration.
     */
    private boolean useDtx;

    /**
     * Whether to use FEC, obtained from configuration.
     */
    private boolean useFec;

    /**
     * Initializes a new <tt>JNIEncoder</tt> instance.
     */
    public JNIEncoder()
    {
        super("Opus JNI Encoder", AudioFormat.class, SUPPORTED_OUTPUT_FORMATS);

        inputFormats = SUPPORTED_INPUT_FORMATS;

        // Expose this codec as its own (UI-less) Control; see
        // getControlComponent().
        addControl(this);
    }

    /**
     * Destroys the native encoder, if one was created.
     *
     * @see AbstractCodec2#doClose()
     */
    @Override
    protected void doClose()
    {
        if (encoder != 0)
        {
            Opus.encoder_destroy(encoder);
            encoder = 0;
        }
    }

    /**
     * Opens this <tt>Codec</tt> and acquires the resources that it needs to
     * operate. A call to {@link PlugIn#open()} on this instance will result in
     * a call to <tt>doOpen</tt> only if {@link AbstractCodec#opened} is
     * <tt>false</tt>. All required input and/or output formats are assumed to
     * have been set on this <tt>Codec</tt> before <tt>doOpen</tt> is called.
     *
     * @throws ResourceUnavailableException if any of the resources that this
     * <tt>Codec</tt> needs to operate cannot be acquired
     * @see AbstractCodec2#doOpen()
     */
    @Override
    protected void doOpen()
        throws ResourceUnavailableException
    {
        AudioFormat inputFormat = (AudioFormat) getInputFormat();
        int sampleRate = (int) inputFormat.getSampleRate();

        channels = inputFormat.getChannels();
        encoder = Opus.encoder_create(sampleRate, channels);
        if (encoder == 0)
            throw new ResourceUnavailableException("opus_encoder_create()");

        //Set encoder options according to user configuration
        ConfigurationService cfg = LibJitsi.getConfigurationService();

        String bandwidthStr
            = cfg.getString(Constants.PROP_OPUS_BANDWIDTH, "auto");

        bandwidth = Opus.OPUS_AUTO;
        if("fb".equals(bandwidthStr))
            bandwidth = Opus.BANDWIDTH_FULLBAND;
        else if("swb".equals(bandwidthStr))
            bandwidth = Opus.BANDWIDTH_SUPERWIDEBAND;
        else if("wb".equals(bandwidthStr))
            bandwidth = Opus.BANDWIDTH_WIDEBAND;
        else if("mb".equals(bandwidthStr))
            bandwidth = Opus.BANDWIDTH_MEDIUMBAND;
        else if("nb".equals(bandwidthStr))
            bandwidth = Opus.BANDWIDTH_NARROWBAND;
        Opus.encoder_set_bandwidth(encoder, bandwidth);

        bitrate
            = 1000 /* configuration is in kilobits per second */
                * cfg.getInt(Constants.PROP_OPUS_BITRATE, 32);
        // Clamp to the range libopus accepts.
        if(bitrate < 500)
            bitrate = 500;
        else if(bitrate > 512000)
            bitrate = 512000;
        Opus.encoder_set_bitrate(encoder, bitrate);

        complexity = cfg.getInt(Constants.PROP_OPUS_COMPLEXITY, 0);
        // 0 means "leave the library default"; only override when configured.
        if (complexity != 0)
            Opus.encoder_set_complexity(encoder, complexity);

        useFec = cfg.getBoolean(Constants.PROP_OPUS_FEC, true);
        Opus.encoder_set_inband_fec(encoder, useFec ? 1 : 0);

        minPacketLoss
            = cfg.getInt(Constants.PROP_OPUS_MIN_EXPECTED_PACKET_LOSS, 1);
        Opus.encoder_set_packet_loss_perc(encoder, minPacketLoss);

        useDtx = cfg.getBoolean(Constants.PROP_OPUS_DTX, true);
        Opus.encoder_set_dtx(encoder, useDtx ? 1 : 0);

        if(logger.isDebugEnabled())
        {
            String bw;

            // Map the effective (as reported by the encoder) bandwidth back to
            // its short configuration name for logging.
            switch (Opus.encoder_get_bandwidth(encoder))
            {
            case Opus.BANDWIDTH_FULLBAND: bw = "fb"; break;
            case Opus.BANDWIDTH_SUPERWIDEBAND: bw = "swb"; break;
            case Opus.BANDWIDTH_WIDEBAND: bw = "wb"; break;
            case Opus.BANDWIDTH_MEDIUMBAND: bw = "mb"; break;
            default: bw = "nb"; break;
            }
            logger.debug(
                    "Encoder settings: audio bandwidth " + bw + ", bitrate "
                        + Opus.encoder_get_bitrate(encoder) + ", DTX "
                        + Opus.encoder_get_dtx(encoder) + ", FEC "
                        + Opus.encoder_get_inband_fec(encoder));
        }
    }

    /**
     * Processes (i.e. encodes) a specific input <tt>Buffer</tt>.
     * <p>
     * Input shorter than one frame ({@link #frameSizeInBytes}) is stashed in
     * {@link #prevIn} and combined with subsequent input; exactly one frame is
     * encoded per call, and <tt>INPUT_BUFFER_NOT_CONSUMED</tt> is returned when
     * input remains.
     * </p>
     *
     * @param inBuffer the <tt>Buffer</tt> from which the media to be encoded is
     * to be read
     * @param outBuffer the <tt>Buffer</tt> into which the encoded media is to
     * be written
     * @return <tt>BUFFER_PROCESSED_OK</tt> if the specified <tt>inBuffer</tt>
     * has been processed successfully
     * @see AbstractCodec2#doProcess(Buffer, Buffer)
     */
    @Override
    protected int doProcess(Buffer inBuffer, Buffer outBuffer)
    {
        Format inFormat = inBuffer.getFormat();

        if ((inFormat != null)
                && (inFormat != this.inputFormat)
                && !inFormat.equals(this.inputFormat)
                && (null == setInputFormat(inFormat)))
        {
            return BUFFER_PROCESSED_FAILED;
        }

        byte[] in = (byte[]) inBuffer.getData();
        int inLength = inBuffer.getLength();
        int inOffset = inBuffer.getOffset();

        if ((prevIn != null) && (prevInLength > 0))
        {
            // There is leftover audio from the previous call; top it up to a
            // full frame from the current input if possible.
            if (prevInLength < frameSizeInBytes)
            {
                // Grow prevIn to hold a whole frame, preserving its contents.
                if (prevIn.length < frameSizeInBytes)
                {
                    byte[] newPrevIn = new byte[frameSizeInBytes];

                    System.arraycopy(prevIn, 0, newPrevIn, 0, prevIn.length);
                    prevIn = newPrevIn;
                }

                int bytesToCopyFromInToPrevIn
                    = Math.min(frameSizeInBytes - prevInLength, inLength);

                if (bytesToCopyFromInToPrevIn > 0)
                {
                    System.arraycopy(
                            in,
                            inOffset,
                            prevIn,
                            prevInLength,
                            bytesToCopyFromInToPrevIn);
                    prevInLength += bytesToCopyFromInToPrevIn;
                    inLength -= bytesToCopyFromInToPrevIn;
                    inBuffer.setLength(inLength);
                    inBuffer.setOffset(inOffset + bytesToCopyFromInToPrevIn);
                }
            }

            if (prevInLength == frameSizeInBytes)
            {
                // A full frame has been assembled in prevIn; encode from it.
                in = prevIn;
                inOffset = 0;
                prevInLength = 0;
            }
            else
            {
                // Still not a full frame; nothing to encode this time.
                outBuffer.setLength(0);
                discardOutputBuffer(outBuffer);
                if (inLength < 1)
                    return BUFFER_PROCESSED_OK;
                else
                    return BUFFER_PROCESSED_OK | INPUT_BUFFER_NOT_CONSUMED;
            }
        }
        else if (inLength < 1)
        {
            // Empty input and no leftover: nothing to do.
            outBuffer.setLength(0);
            discardOutputBuffer(outBuffer);
            return BUFFER_PROCESSED_OK;
        }
        else if (inLength < frameSizeInBytes)
        {
            // Less than a frame: stash it for the next call.
            if ((prevIn == null) || (prevIn.length < inLength))
                prevIn = new byte[frameSizeInBytes];
            System.arraycopy(in, inOffset, prevIn, 0, inLength);
            prevInLength = inLength;
            outBuffer.setLength(0);
            discardOutputBuffer(outBuffer);
            return BUFFER_PROCESSED_OK;
        }
        else
        {
            // Consume exactly one frame directly from the input buffer.
            inLength -= frameSizeInBytes;
            inBuffer.setLength(inLength);
            inBuffer.setOffset(inOffset + frameSizeInBytes);
        }

        // At long last, do the actual encoding.
        byte[] out = validateByteArraySize(outBuffer, Opus.MAX_PACKET, false);
        int outLength
            = Opus.encode(
                    encoder,
                    in, inOffset,
                    frameSizeInSamplesPerChannel,
                    out, 0,
                    out.length);

        if (outLength < 0) // error from opus_encode
            return BUFFER_PROCESSED_FAILED;

        if (outLength > 0)
        {
            // Duration in nanoseconds.
            outBuffer.setDuration(((long) frameSizeInMillis) * 1000 * 1000);
            outBuffer.setFormat(getOutputFormat());
            outBuffer.setLength(outLength);
            outBuffer.setOffset(0);
            outBuffer.setHeaderExtension(inBuffer.getHeaderExtension());
        }

        if (inLength < 1)
            return BUFFER_PROCESSED_OK;
        else
            return BUFFER_PROCESSED_OK | INPUT_BUFFER_NOT_CONSUMED;
    }

    /**
     * Implements {@link Control#getControlComponent()}. <tt>JNIEncoder</tt>
     * does not provide user interface of its own.
     *
     * @return <tt>null</tt> to signify that <tt>JNIEncoder</tt> does not
     * provide user interface of its own
     */
    @Override
    public Component getControlComponent()
    {
        return null;
    }

    /**
     * Gets the <tt>Format</tt> of the media output by this <tt>Codec</tt>,
     * wrapping it in an anonymous subclass whose <tt>computeDuration</tt>
     * always reports one frame's duration (in nanoseconds).
     *
     * @return the <tt>Format</tt> of the media output by this <tt>Codec</tt>
     * @see net.sf.fmj.media.AbstractCodec#getOutputFormat()
     */
    @Override
    @SuppressWarnings("serial")
    public Format getOutputFormat()
    {
        Format f = super.getOutputFormat();

        if ((f != null) && (f.getClass() == AudioFormat.class))
        {
            AudioFormat af = (AudioFormat) f;

            f
                = setOutputFormat(
                        new AudioFormat(
                                af.getEncoding(),
                                af.getSampleRate(),
                                af.getSampleSizeInBits(),
                                af.getChannels(),
                                af.getEndian(),
                                af.getSigned(),
                                af.getFrameSizeInBits(),
                                af.getFrameRate(),
                                af.getDataType())
                        {
                            @Override
                            public long computeDuration(long length)
                            {
                                return
                                    ((long) frameSizeInMillis) * 1000 * 1000;
                            }
                        });
        }
        return f;
    }

    /**
     * Updates the encoder's expected packet loss percentage to the bigger of
     * <tt>percentage</tt> and <tt>this.minPacketLoss</tt>.
     *
     * @param percentage the expected packet loss percentage to set
     */
    @Override
    public void setExpectedPacketLoss(int percentage)
    {
        if (opened)
        {
            Opus.encoder_set_packet_loss_perc(
                    encoder,
                    (percentage > minPacketLoss) ? percentage : minPacketLoss);
            if (logger.isTraceEnabled())
            {
                logger.trace(
                        "Updating expected packet loss: " + percentage
                            + " (minimum " + minPacketLoss + ")");
            }
        }
    }

    /**
     * Sets the format parameters (fmtp) negotiated via SDP: caps the bitrate
     * to <tt>maxaveragebitrate</tt> and applies the remote's DTX/FEC wishes.
     *
     * NOTE(review): unlike {@link #setExpectedPacketLoss(int)}, this method
     * does not check {@link AbstractCodec#opened} (i.e. <tt>encoder != 0</tt>)
     * before calling the native setters — confirm that callers only invoke it
     * after the codec has been opened.
     *
     * @param fmtps the format parameters to set
     */
    @Override
    public void setFormatParameters(Map<String, String> fmtps)
    {
        if (logger.isDebugEnabled())
            logger.debug("Setting format parameters: " + fmtps);

        /*
         * TODO Use the default value for maxaveragebitrate as defined at
         * http://tools.ietf.org/html/draft-spittka-payload-rtp-opus-02#section-6.1
         */
        int maxaveragebitrate = 40000;

        try
        {
            String s = fmtps.get("maxaveragebitrate");

            if ((s != null) && (s.length() != 0))
                maxaveragebitrate = Integer.parseInt(s);
        }
        catch (Exception e)
        {
            // Ignore and fall back to the default value.
        }
        Opus.encoder_set_bitrate(
                encoder,
                (maxaveragebitrate < bitrate) ? maxaveragebitrate : bitrate);

        // DTX is off unless specified.
        boolean useDtx = this.useDtx && "1".equals(fmtps.get("usedtx"));

        Opus.encoder_set_dtx(encoder, useDtx ? 1 : 0);

        // FEC is on unless specified.
        String s;
        boolean useFec
            = this.useFec
                && (((s = fmtps.get("useinbandfec")) == null) || s.equals("1"));

        Opus.encoder_set_inband_fec(encoder, useFec ? 1 : 0);
    }

    /**
     * {@inheritDoc}
     *
     * Automatically tracks and calculates the size in bytes of an audio frame
     * (to be) output by this instance.
     */
    @Override
    public Format setInputFormat(Format format)
    {
        Format oldValue = getInputFormat();
        Format setInputFormat = super.setInputFormat(format);
        Format newValue = getInputFormat();

        if (oldValue != newValue)
        {
            AudioFormat af = (AudioFormat) newValue;
            int sampleRate = (int) af.getSampleRate();

            frameSizeInSamplesPerChannel
                = (sampleRate * frameSizeInMillis) / 1000;
            frameSizeInBytes
                = 2 /* sizeof(opus_int16) */
                    * channels
                    * frameSizeInSamplesPerChannel;
        }
        return setInputFormat;
    }
}
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ package org.apache.ofbiz.base.util.template; import java.io.File; import java.io.IOException; import java.io.Writer; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TimeZone; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import freemarker.cache.MultiTemplateLoader; import freemarker.cache.StringTemplateLoader; import freemarker.cache.TemplateLoader; import org.apache.ofbiz.base.location.FlexibleLocation; import org.apache.ofbiz.base.util.Debug; import org.apache.ofbiz.base.util.StringUtil; import org.apache.ofbiz.base.util.UtilGenerics; import org.apache.ofbiz.base.util.UtilMisc; import org.apache.ofbiz.base.util.UtilProperties; import org.apache.ofbiz.base.util.UtilValidate; import org.apache.ofbiz.base.util.cache.UtilCache; import freemarker.cache.URLTemplateLoader; import 
freemarker.core.Environment;
import freemarker.ext.beans.BeanModel;
import freemarker.ext.beans.BeansWrapper;
import freemarker.ext.beans.BeansWrapperBuilder;
import freemarker.template.Configuration;
import freemarker.template.SimpleHash;
import freemarker.template.SimpleScalar;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
import freemarker.template.TemplateHashModel;
import freemarker.template.TemplateModel;
import freemarker.template.TemplateModelException;
import freemarker.template.Version;

/**
 * FreeMarkerWorker - Freemarker Template Engine Utilities.
 */
public final class FreeMarkerWorker {

    // Module name used in Debug log calls throughout this class.
    public static final String module = FreeMarkerWorker.class.getName();

    // The FreeMarker "incompatible improvements" version this class is pinned to.
    public static final Version version = Configuration.VERSION_2_3_24;

    // Utility class: not instantiable.
    private FreeMarkerWorker () {}

    // use soft references for this so that things from Content records don't kill all of our memory, or maybe not for performance reasons... hmmm, leave to config file...
private static final UtilCache<String, Template> cachedTemplates = UtilCache.createUtilCache("template.ftl.general", 0, 0, false); private static final BeansWrapper defaultOfbizWrapper = new BeansWrapperBuilder(version).build(); private static final Configuration defaultOfbizConfig = makeConfiguration(defaultOfbizWrapper); public static BeansWrapper getDefaultOfbizWrapper() { return defaultOfbizWrapper; } public static Configuration newConfiguration() { return new Configuration(version); } public static Configuration makeConfiguration(BeansWrapper wrapper) { Configuration newConfig = newConfiguration(); newConfig.setObjectWrapper(wrapper); TemplateHashModel staticModels = wrapper.getStaticModels(); newConfig.setSharedVariable("Static", staticModels); try { newConfig.setSharedVariable("EntityQuery", staticModels.get("org.apache.ofbiz.entity.util.EntityQuery")); } catch (TemplateModelException e) { Debug.logError(e, module); } newConfig.setLocalizedLookup(false); newConfig.setSharedVariable("StringUtil", new BeanModel(StringUtil.INSTANCE, wrapper)); TemplateLoader[] templateLoaders = {new FlexibleTemplateLoader(), new StringTemplateLoader()}; MultiTemplateLoader multiTemplateLoader = new MultiTemplateLoader(templateLoaders); newConfig.setTemplateLoader(multiTemplateLoader); Map freemarkerImports = UtilProperties.getProperties("freemarkerImports"); if (freemarkerImports != null) { newConfig.setAutoImports(freemarkerImports); } newConfig.setLogTemplateExceptions(false); newConfig.setTemplateExceptionHandler(new FreeMarkerWorker.OFBizTemplateExceptionHandler()); try { newConfig.setSetting("datetime_format", "yyyy-MM-dd HH:mm:ss.SSS"); newConfig.setSetting("number_format", "0.##########"); } catch (TemplateException e) { Debug.logError("Unable to set date/time and number formats in FreeMarker: " + e, module); } // Transforms properties file set up as key=transform name, property=transform class name ClassLoader loader = Thread.currentThread().getContextClassLoader(); 
Enumeration<URL> resources; try { resources = loader.getResources("freemarkerTransforms.properties"); } catch (IOException e) { Debug.logError(e, "Could not load list of freemarkerTransforms.properties", module); throw UtilMisc.initCause(new InternalError(e.getMessage()), e); } while (resources.hasMoreElements()) { URL propertyURL = resources.nextElement(); Debug.logInfo("loading properties: " + propertyURL, module); Properties props = UtilProperties.getProperties(propertyURL); if (UtilValidate.isEmpty(props)) { Debug.logError("Unable to locate properties file " + propertyURL, module); } else { loadTransforms(loader, props, newConfig); } } return newConfig; } private static void loadTransforms(ClassLoader loader, Properties props, Configuration config) { for (Iterator<Object> i = props.keySet().iterator(); i.hasNext();) { String key = (String) i.next(); String className = props.getProperty(key); if (Debug.verboseOn()) { Debug.logVerbose("Adding FTL Transform " + key + " with class " + className, module); } try { config.setSharedVariable(key, loader.loadClass(className).newInstance()); } catch (Exception e) { Debug.logError(e, "Could not pre-initialize dynamically loaded class: " + className + ": " + e, module); } } } /** * Renders a template from a Reader. 
* @param templateLocation A unique ID for this template - used for caching * @param context The context Map * @param outWriter The Writer to render to */ public static void renderTemplate(String templateLocation, Map<String, Object> context, Appendable outWriter) throws TemplateException, IOException { Template template = getTemplate(templateLocation); renderTemplate(template, context, outWriter); } public static void renderTemplateFromString(String templateName, String templateString, Map<String, Object> context, Appendable outWriter, long lastModificationTime, boolean useCache) throws TemplateException, IOException { Template template = null; if (useCache) { template = cachedTemplates.get(templateName); } if (template == null) { StringTemplateLoader stringTemplateLoader = (StringTemplateLoader)((MultiTemplateLoader)defaultOfbizConfig.getTemplateLoader()).getTemplateLoader(1); Object templateSource = stringTemplateLoader.findTemplateSource(templateName); if (templateSource == null || stringTemplateLoader.getLastModified(templateSource) < lastModificationTime) { stringTemplateLoader.putTemplate(templateName, templateString, lastModificationTime); } } template = getTemplate(templateName); renderTemplate(template, context, outWriter); } public static void clearTemplateFromCache(String templateLocation) { cachedTemplates.remove(templateLocation); try { defaultOfbizConfig.removeTemplateFromCache(templateLocation); } catch(Exception e) { Debug.logInfo("Template not found in Fremarker cache with name: " + templateLocation, module); } } /** * Renders a Template instance. 
* @param template A Template instance * @param context The context Map * @param outWriter The Writer to render to */ public static Environment renderTemplate(Template template, Map<String, Object> context, Appendable outWriter) throws TemplateException, IOException { // make sure there is no "null" string in there as FreeMarker will try to use it context.remove("null"); // Since the template cache keeps a single instance of a Template that is shared among users, // and since that Template instance is immutable, we need to create an Environment instance and // use it to process the template with the user's settings. // // FIXME: the casting from Appendable to Writer is a temporary fix that could cause a // run time error if in the future we will pass a different class to the method // (such as a StringBuffer). Environment env = template.createProcessingEnvironment(context, (Writer) outWriter); applyUserSettings(env, context); env.process(); return env; } /** * Apply user settings to an Environment instance. * @param env An Environment instance * @param context The context Map containing the user settings */ private static void applyUserSettings(Environment env, Map<String, Object> context) throws TemplateException { Locale locale = (Locale) context.get("locale"); if (locale == null) { locale = Locale.getDefault(); } env.setLocale(locale); TimeZone timeZone = (TimeZone) context.get("timeZone"); if (timeZone == null) { timeZone = TimeZone.getDefault(); } env.setTimeZone(timeZone); } /** * Returns a <code>Configuration</code> instance initialized to OFBiz defaults. Client code should * call this method instead of creating its own <code>Configuration</code> instance. The instance * returned by this method includes the <code>component://</code> resolver and the OFBiz custom * transformations. * * @return A <code>Configuration</code> instance. 
*/ public static Configuration getDefaultOfbizConfig() { return defaultOfbizConfig; } /** * Gets a Template instance from the template cache. If the Template instance isn't * found in the cache, then one will be created. * @param templateLocation Location of the template - file path or URL */ public static Template getTemplate(String templateLocation) throws TemplateException, IOException { return getTemplate(templateLocation, cachedTemplates, defaultOfbizConfig); } public static Template getTemplate(String templateLocation, UtilCache<String, Template> cache, Configuration config) throws TemplateException, IOException { Template template = cache.get(templateLocation); if (template == null) { template = config.getTemplate(templateLocation); template = cache.putIfAbsentAndGet(templateLocation, template); } return template; } public static String getArg(Map<String, ? extends Object> args, String key, Environment env) { Map<String, ? extends Object> templateContext = FreeMarkerWorker.getWrappedObject("context", env); return getArg(args, key, templateContext); } public static String getArg(Map<String, ? extends Object> args, String key, Map<String, ? extends Object> templateContext) { Object o = args.get(key); String returnVal = (String) unwrap(o); if (returnVal == null) { try { if (templateContext != null) { returnVal = (String) templateContext.get(key); } } catch (ClassCastException e2) { Debug.logInfo(e2.getMessage(), module); } } return returnVal; } /** * Gets BeanModel from FreeMarker context and returns the object that it wraps. * @param varName the name of the variable in the FreeMarker context. 
* @param env the FreeMarker Environment */ public static <T> T getWrappedObject(String varName, Environment env) { Object obj = null; try { obj = env.getVariable(varName); if (obj != null) { if (obj == TemplateModel.NOTHING) { obj = null; } else if (obj instanceof BeanModel) { BeanModel bean = (BeanModel) obj; obj = bean.getWrappedObject(); } else if (obj instanceof SimpleScalar) { obj = obj.toString(); } } } catch (TemplateModelException e) { Debug.logInfo(e.getMessage(), module); } return UtilGenerics.<T>cast(obj); } public static Object get(SimpleHash args, String key) { Object o = null; try { o = args.get(key); } catch (TemplateModelException e) { Debug.logVerbose(e.getMessage(), module); return null; } Object returnObj = unwrap(o); if (returnObj == null) { Object ctxObj = null; try { ctxObj = args.get("context"); } catch (TemplateModelException e) { Debug.logInfo(e.getMessage(), module); return returnObj; } Map<String, ?> ctx = null; if (ctxObj instanceof BeanModel) { ctx = UtilGenerics.cast(((BeanModel) ctxObj).getWrappedObject()); returnObj = ctx.get(key); } } return returnObj; } @SuppressWarnings("unchecked") public static <T> T unwrap(Object o) { Object returnObj = null; if (o == TemplateModel.NOTHING) { returnObj = null; } else if (o instanceof SimpleScalar) { returnObj = o.toString(); } else if (o instanceof BeanModel) { returnObj = ((BeanModel) o).getWrappedObject(); } return (T) returnObj; } public static Map<String, Object> createEnvironmentMap(Environment env) { Map<String, Object> templateRoot = new HashMap<String, Object>(); Set<String> varNames = null; try { varNames = UtilGenerics.checkSet(env.getKnownVariableNames()); } catch (TemplateModelException e1) { Debug.logError(e1, "Error getting FreeMarker variable names, will not put pass current context on to sub-content", module); } if (varNames != null) { for (String varName: varNames) { templateRoot.put(varName, FreeMarkerWorker.getWrappedObject(varName, env)); } } return templateRoot; } public 
static void saveContextValues(Map<String, Object> context, String [] saveKeyNames, Map<String, Object> saveMap) { for (String key: saveKeyNames) { Object o = context.get(key); if (o instanceof Map<?, ?>) { o = UtilMisc.makeMapWritable(UtilGenerics.checkMap(o)); } else if (o instanceof List<?>) { o = UtilMisc.makeListWritable(UtilGenerics.checkList(o)); } saveMap.put(key, o); } } public static Map<String, Object> saveValues(Map<String, Object> context, String [] saveKeyNames) { Map<String, Object> saveMap = new HashMap<String, Object>(); for (String key: saveKeyNames) { Object o = context.get(key); if (o instanceof Map<?, ?>) { o = UtilMisc.makeMapWritable(UtilGenerics.checkMap(o)); } else if (o instanceof List<?>) { o = UtilMisc.makeListWritable(UtilGenerics.checkList(o)); } saveMap.put(key, o); } return saveMap; } public static void reloadValues(Map<String, Object> context, Map<String, Object> saveValues, Environment env) { for (Map.Entry<String, Object> entry: saveValues.entrySet()) { String key = entry.getKey(); Object o = entry.getValue(); if (o instanceof Map<?, ?>) { context.put(key, UtilMisc.makeMapWritable(UtilGenerics.checkMap(o))); } else if (o instanceof List<?>) { List<Object> list = new ArrayList<Object>(); list.addAll(UtilGenerics.checkList(o)); context.put(key, list); } else { context.put(key, o); } env.setVariable(key, autoWrap(o, env)); } } public static void removeValues(Map<String, ?> context, String... 
removeKeyNames) { for (String key: removeKeyNames) { context.remove(key); } } public static void overrideWithArgs(Map<String, Object> ctx, Map<String, Object> args) { for (Map.Entry<String, Object> entry: args.entrySet()) { String key = entry.getKey(); Object obj = entry.getValue(); if (obj != null) { if (obj == TemplateModel.NOTHING) { ctx.put(key, null); } else { Object unwrappedObj = unwrap(obj); if (unwrappedObj == null) { unwrappedObj = obj; } ctx.put(key, unwrappedObj.toString()); } } else { ctx.put(key, null); } } } public static void getSiteParameters(HttpServletRequest request, Map<String, Object> ctx) { if (request == null) { return; } if (ctx == null) { throw new IllegalArgumentException("Error in getSiteParameters, context/ctx cannot be null"); } ServletContext servletContext = request.getSession().getServletContext(); String rootDir = (String)ctx.get("rootDir"); String webSiteId = (String)ctx.get("webSiteId"); String https = (String)ctx.get("https"); if (UtilValidate.isEmpty(rootDir)) { rootDir = servletContext.getRealPath("/"); ctx.put("rootDir", rootDir); } if (UtilValidate.isEmpty(webSiteId)) { webSiteId = (String) servletContext.getAttribute("webSiteId"); ctx.put("webSiteId", webSiteId); } if (UtilValidate.isEmpty(https)) { https = (String) servletContext.getAttribute("https"); ctx.put("https", https); } } public static TemplateModel autoWrap(Object obj, Environment env) { TemplateModel templateModelObj = null; try { templateModelObj = getDefaultOfbizWrapper().wrap(obj); } catch (TemplateModelException e) { throw new RuntimeException(e.getMessage()); } return templateModelObj; } /* * Custom TemplateLoader for Freemarker to locate templates by resource identifier * following the format: * component://componentname/path/to/some/file.ftl */ static class FlexibleTemplateLoader extends URLTemplateLoader { @Override protected URL getURL(String name) { if (name != null && name.startsWith("delegator:")) return null; // this is a template stored in the 
database URL locationUrl = null; try { locationUrl = FlexibleLocation.resolveLocation(name); } catch (Exception e) { Debug.logWarning("Unable to locate the template: " + name, module); } return locationUrl != null && new File(locationUrl.getFile()).exists()? locationUrl: null; } } /** * OFBiz specific TemplateExceptionHandler. */ static class OFBizTemplateExceptionHandler implements TemplateExceptionHandler { public void handleTemplateException(TemplateException te, Environment env, Writer out) throws TemplateException { try { out.write(te.getMessage()); Debug.logError(te, module); } catch (IOException e) { Debug.logError(e, module); } } } }
/* * Copyright 2009-2015 DigitalGlobe, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and limitations under the License. */ package org.mrgeo.vector.mrsvector; import org.apache.commons.lang3.ArrayUtils; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.codehaus.jackson.JsonGenerationException; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.util.DefaultPrettyPrinter; import org.mrgeo.core.MrGeoProperties; import org.mrgeo.pyramid.MrsPyramidMetadata; import org.mrgeo.hdfs.utils.HadoopFileUtils; import org.mrgeo.utils.LongRectangle; import org.mrgeo.utils.TMSUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.*; import java.util.Properties; //import org.mrgeo.utils.Stats.Stats; public class MrsVectorPyramidMetadata extends MrsPyramidMetadata { public final static String METADATA = "metadata"; private static final long serialVersionUID = 1L; private static final Logger log = LoggerFactory.getLogger(MrsVectorPyramidMetadata.class); /** * VectorMetadata stores metadata for each zoom level of * vector data. 
*/ public static class VectorMetadata extends MrsPyramidMetadata.TileMetadata implements Serializable { private static final long serialVersionUID = 1L; // basic constructor public VectorMetadata() { } } // end VectorMetadata private VectorMetadata[] vectorData = null; // data specific to a single // private ImageStats[] stats; /** * Loading a metadata file from the local file system. The objects of * the file are stored in a json format. This enables the ObjectMapper * to parse out the values correctly. * * @param file metadata file on the local file system to load * @return a valid MrsVectorPyramidMetadata object * @throws JsonGenerationException * @throws JsonMappingException * @throws IOException */ public static MrsVectorPyramidMetadata load(final File file) throws JsonGenerationException, JsonMappingException, IOException { final ObjectMapper mapper = new ObjectMapper(); final MrsVectorPyramidMetadata metadata = mapper.readValue(file, MrsVectorPyramidMetadata.class); // make sure the name of the pyramid is set correctly for where the // metadata file was pulled metadata.setPyramid("file://" + file.getParentFile().getAbsolutePath()); return metadata; } // end load - File /** * Loading metadata from an InputStream. The objects of * the file are stored in a json format. This enables the ObjectMapper * to parse out the values correctly. * * @param stream - the stream attached to the metadata input * @return a valid MrsVectorPyramidMetadata object * @throws JsonGenerationException * @throws JsonMappingException * @throws IOException */ public static MrsVectorPyramidMetadata load(final InputStream stream) throws JsonGenerationException, JsonMappingException, IOException { final ObjectMapper mapper = new ObjectMapper(); final MrsVectorPyramidMetadata metadata = mapper.readValue(stream, MrsVectorPyramidMetadata.class); return metadata; } // end load - InputStream /** * Loading metadata from HDFS. The objects of * the file are stored in a json format. 
This enables the ObjectMapper * to parse out the values correctly. * * @param path - the location of the metadata file * @return a valid MrsVectorPyramidMetadata object * @throws JsonGenerationException * @throws JsonMappingException * @throws IOException */ public static MrsVectorPyramidMetadata load(final Path path) throws JsonGenerationException, JsonMappingException, IOException { // attach to hdfs and create an input stream for the file FileSystem fs = HadoopFileUtils.getFileSystem(path); log.debug("Physically loading image metadata from " + path.toString()); final InputStream is = HadoopFileUtils.open(path); // fs.open(path); try { // load the metadata from the input stream final MrsVectorPyramidMetadata metadata = load(is); // set the fully qualified path for the metadata file Path fullPath = path.makeQualified(fs); metadata.setPyramid(fullPath.getParent().toString()); return metadata; } finally { is.close(); } } // end load - Path /** * Loading metadata from a string. This is more work then the above * methods. This class will look at the incoming string and determine * which data source will be used. * * Valid input is: * Accumulo: * accumulo:tableName/zoomlevel * accumulo:tableName * HDFS: * hdfs://location/in/hdfs * location/in/hdfs * Local file system: * file://location/in/local/file/system * file:/location/in/local/file/system * location/in/local/file/system * * Note: Without the file: or hdfs: at the beginning, the code below * will look for a valid path in HDFS then look in the local file system. * Also, notice that the "datasource" property within the properties * object is ignored in this setup. 
* * @param name - input string to distinguish location of requested metadata * @return a valid MrsVectorPyramidMetadata object * @throws JsonGenerationException * @throws JsonMappingException * @throws IOException */ public static MrsVectorPyramidMetadata load(final String name) throws JsonGenerationException, JsonMappingException, IOException { // pull the properties from the environment final Properties p = MrGeoProperties.getInstance(); log.debug("looking for metadata object for vector pyramid: " + name); // take care of accumulo right away if(name.startsWith("accumulo:")){ // TODO: Implement Accumulo throw new IOException("Accumulo storage for vector data is not yet implemented"); // return AccumuloBackend.load(name, p); } // take care of designated hdfs else if(name.startsWith("hdfs://")) { return HdfsBackend.load(name, p); } // take care of designated local file system else if(name.startsWith("file:")) { String tmp = name; if(tmp.startsWith("file://")) { tmp = tmp.replace("file://", ""); } else { tmp = tmp.replace("file:", ""); } // add the metadata portion if(!tmp.endsWith("/metadata")) { tmp += "/metadata"; } // create a file handle final File file = new File(tmp); if (file.exists()) { return load(file); } } // name is not in HDFS and need to check in local file system String tmp = name; // make sure it is metadata that is the file to open if(!tmp.endsWith("/metadata")){ tmp += "/metadata"; } // if we are here then hdfs and possibly the local file system need to be searched Path path = new Path(tmp); if(HadoopFileUtils.exists(path)) { // jump out of this method return load(path); } // name is not in HDFS and need to check in local file system File f = new File(tmp); if(f.exists()){ // jump out of this method return load(f); } // throw file not found // Note: this is expected behavior in some of the throw new FileNotFoundException("Cannot open metadata file " + name); } // end load - string @Override public String getName(int zoomlevel) { if (vectorData != 
null && zoomlevel < vectorData.length && vectorData[zoomlevel].name != null) { return vectorData[zoomlevel].name; } return null; } // TODO: This method is HDFS specific and must be removed if we ever move MrsVector // access into the data access layer. public String getZoomName(int zoomlevel) { if (vectorData != null && zoomlevel < vectorData.length && vectorData[zoomlevel].name != null) { return pyramid + "/" + vectorData[zoomlevel].name; } return null; } public void setVectorMetadata(final VectorMetadata[] metadata) { vectorData = metadata; } public VectorMetadata[] getVectorMetadata() { return vectorData; } @Override public LongRectangle getTileBounds(int zoomlevel) { if (vectorData != null) { if (zoomlevel < vectorData.length) { return vectorData[zoomlevel].tileBounds; } // If we have _some_ tilebounds, calculate the bounds for the higher level return getOrCreateTileBounds(zoomlevel); } return null; } @Override public LongRectangle getOrCreateTileBounds(int zoomlevel) { if (vectorData != null && zoomlevel < vectorData.length) { return vectorData[zoomlevel].tileBounds; } LongRectangle tilebounds = getTileBounds(zoomlevel); if (tilebounds == null) { TMSUtils.Bounds b = new TMSUtils.Bounds(bounds.getMinX(), bounds.getMinY(), bounds.getMaxX(), bounds.getMaxY()); TMSUtils.TileBounds tb = TMSUtils.boundsToTile(b, zoomlevel, tilesize); tilebounds = new LongRectangle(tb.w, tb.s, tb.e, tb.n); } return tilebounds; } @Override public void setName(int zoomlevel, String name) { if (vectorData == null || zoomlevel > maxZoomLevel) { setMaxZoomLevel(zoomlevel); } vectorData[zoomlevel].name = name; } @Override public void setTileBounds(int zoomlevel, LongRectangle tileBounds) { if (vectorData == null || zoomlevel > maxZoomLevel) { setMaxZoomLevel(zoomlevel); } vectorData[zoomlevel].tileBounds = tileBounds; } public void save() throws JsonGenerationException, JsonMappingException, IOException { save(true); } public void save(final boolean overwrite) throws 
JsonGenerationException, JsonMappingException, IOException { if(pyramid.startsWith("accumulo:")){ throw new IOException("Accumulo storage of vector metadata is not yet implemented"); // Properties p = MrGeoProperties.getInstance(); // AccumuloBackend.save(pyramid, this, p); // return; } final Path path = new Path(getPyramid() + "/" + METADATA); final FileSystem fs = HadoopFileUtils.getFileSystem(path); if (fs.exists(path)) { if (overwrite) { fs.delete(path, false); } else { throw new IOException("File already exists: " + path.toString()); } } log.debug("Saving metadata to " + path.toString()); final FSDataOutputStream os = HadoopFileUtils.getFileSystem(path).create(path); save(os); os.close(); } public void save(final File file) throws JsonGenerationException, JsonMappingException, IOException { final ObjectMapper mapper = new ObjectMapper(); try { mapper.writer(new DefaultPrettyPrinter()).writeValue(file, this); } catch (NoSuchMethodError e) { // if we don't have the pretty printer, just write the json mapper.writeValue(file, this); } } public void save(final OutputStream stream) throws JsonGenerationException, JsonMappingException, IOException { final ObjectMapper mapper = new ObjectMapper(); try { mapper.writer(new DefaultPrettyPrinter()).writeValue(stream, this); } catch (NoSuchMethodError e) { // if we don't have the pretty printer, just write the json mapper.writeValue(stream, this); } } public void save(final Path path) throws JsonGenerationException, JsonMappingException, IOException { log.debug("Saving metadata to " + path.toString()); final FSDataOutputStream os = HadoopFileUtils.getFileSystem(path).create(path); save(os); os.close(); } public void setMaxZoomLevel(final int zoomlevel) { if (vectorData == null) { for (int i = 0; i <= zoomlevel; i++) { vectorData = (VectorMetadata[]) ArrayUtils.add(vectorData, new VectorMetadata()); } } else if (zoomlevel < maxZoomLevel) { vectorData = (VectorMetadata[]) ArrayUtils.subarray(vectorData, 0, zoomlevel + 1); } 
else if (zoomlevel > maxZoomLevel) { for (int i = maxZoomLevel + 1; i <= zoomlevel; i++) { vectorData = (VectorMetadata[]) ArrayUtils.add(vectorData, new VectorMetadata()); } } this.maxZoomLevel = zoomlevel; } // public void setStats(final ImageStats[] stats) // { // this.stats = stats; // } // // public ImageStats[] getStats() // { // return stats; // } /** * Internal class to handle hdfs reading and writing of metadata object */ static class HdfsBackend { /** * Pull metadata object out of HDFS * * @param name - location of metadata * @param p - properties object * @return a valid MrsVectorPyramidMetadata object * @throws IOException - if there is a problem or the file cannot be found. */ static MrsVectorPyramidMetadata load(String name, Properties p) throws IOException { MrsVectorPyramidMetadata metadata = null; // get the path for the metadata object final Path metapath = findMetadata(new Path(name)); if(metapath == null){ throw new IOException("Cannot find " + name); } // load the file from HDFS metadata = MrsVectorPyramidMetadata.load(metapath); return metadata; } // end load /** * The will locate the metadata for the data pyramid * * @param base is the base directory of the pyramid in HDFS. * @return Path to the metadata file in HDFS. * @throws IOException */ static Path findMetadata(final Path base) throws IOException { final FileSystem fs = HadoopFileUtils.getFileSystem(base); Path meta = new Path(base, "metadata"); // metadata file exists at this level if (fs.exists(meta)) { return meta; } // try one level up (for a single map file case) // meta = new Path(base, "../metadata"); // if (fs.exists(meta)) // { // return meta; // } // try two levels up (for the multiple map file case) // meta = new Path(base, "../../metadata"); // if (fs.exists(meta)) // { // return meta; // } return null; } // end findMetadata static void save(String name, MrsVectorPyramidMetadata metadata, Properties p) { } // end save } // end HdfsBackend }
package com.fly.firefly.ui.activity.BookingFlight; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RadioButton; import android.widget.RadioGroup; import android.widget.TextView; import com.androidquery.AQuery; import com.androidquery.callback.AjaxCallback; import com.androidquery.callback.AjaxStatus; import com.fly.firefly.FireFlyApplication; import com.fly.firefly.MainFragmentActivity; import com.fly.firefly.R; import com.fly.firefly.api.obj.ContactInfoReceive; import com.fly.firefly.api.obj.PaymentInfoReceive; import com.fly.firefly.api.obj.PaymentReceive; import com.fly.firefly.base.BaseFragment; import com.fly.firefly.ui.activity.FragmentContainerActivity; import com.fly.firefly.ui.module.PaymentFlightModule; import com.fly.firefly.ui.object.BaseObj; import com.fly.firefly.ui.object.Payment; import com.fly.firefly.ui.object.Signature; import com.fly.firefly.ui.presenter.BookingPresenter; import com.fly.firefly.utils.DropDownItem; import com.fly.firefly.utils.SharedPrefManager; import com.fly.firefly.utils.Utils; import com.google.gson.Gson; import com.mobsandgeeks.saripaar.ValidationError; import com.mobsandgeeks.saripaar.Validator; import com.mobsandgeeks.saripaar.annotation.NotEmpty; import com.mobsandgeeks.saripaar.annotation.Order; import org.json.JSONArray; import org.json.JSONObject; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import javax.inject.Inject; import butterknife.ButterKnife; import butterknife.InjectView; public class PaymentFlightFragment extends BaseFragment implements 
BookingPresenter.PaymentFlightView,Validator.ValidationListener { @Inject BookingPresenter presenter; @InjectView(R.id.paymentChannelList) LinearLayout paymentChannelList; @Order(1) @NotEmpty @InjectView(R.id.txtCardType) TextView txtCardType; @Order(2) @NotEmpty @InjectView(R.id.txtCardNumber) EditText txtCardNumber; @Order(3) @NotEmpty @InjectView(R.id.txtPaymentMonth) TextView txtPaymentMonth; @Order(4) @NotEmpty @InjectView(R.id.txtPaymentYear) TextView txtPaymentYear; @Order(5) @NotEmpty @InjectView(R.id.txtCardHolderName) EditText txtCardHolderName; @Order(6) @NotEmpty @InjectView(R.id.txtCardCVV) EditText txtCardCVV; @Order(7) @NotEmpty @InjectView(R.id.txtIssuingBank) EditText txtIssuingBank; @InjectView(R.id.btnPay) Button btnPay; @InjectView(R.id.creditCardFormLayout) LinearLayout creditCardFormLayout; private int fragmentContainerId; private SharedPrefManager pref; private String signature; private View view; private String selectedCheckBoxTag = "1"; private final List<String> channelType = new ArrayList<String>(); private ArrayList<DropDownItem> cardType = new ArrayList<DropDownItem>(); private ArrayList<DropDownItem> monthList = new ArrayList<DropDownItem>(); private ArrayList<DropDownItem> yearList = new ArrayList<DropDownItem>(); private Validator mValidator; public static PaymentFlightFragment newInstance() { PaymentFlightFragment fragment = new PaymentFlightFragment(); Bundle args = new Bundle(); fragment.setArguments(args); return fragment; // new SearchFragment(); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); FireFlyApplication.get(getActivity()).createScopedGraph(new PaymentFlightModule(this)).inject(this); mValidator = new Validator(this); mValidator.setValidationListener(this); mValidator.setValidationMode(Validator.Mode.BURST); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container,Bundle savedInstanceState) { view = inflater.inflate(R.layout.payment_flight, 
container, false); ButterKnife.inject(this, view); pref = new SharedPrefManager(MainFragmentActivity.getContext()); HashMap<String, String> initSignature = pref.getSignatureFromLocalStorage(); signature = initSignature.get(SharedPrefManager.SIGNATURE); HashMap<String, String> init = pref.getPaymentDummy(); String paymentDummy = init.get(SharedPrefManager.PAYMENT_DUMMY); //Signature baseObj = new Signature(); //baseObj.setSignature(signature); //getPaymentInfo(baseObj); Gson gson = new Gson(); PaymentInfoReceive obj = gson.fromJson(paymentDummy, PaymentInfoReceive.class); generatePaymentVendorList(obj); //Card Selection txtCardType.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { popupSelection(cardType, getActivity(), txtCardType, true); } }); //setMonthList yearList = getListOfYear(getActivity()); //Card Selection txtPaymentYear.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { popupSelection(yearList, getActivity(), txtPaymentYear, true); } }); //setMonthList monthList = getListOfMonth(getActivity()); //Card Selection txtPaymentMonth.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { popupSelection(monthList, getActivity(), txtPaymentMonth, true); } }); btnPay.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { mValidator.validate(); Utils.hideKeyboard(getActivity(), v); } }); return view; } @Override public void onValidationSucceeded() { initiateLoading(getActivity()); //paymentRequest(); Payment paymentObj = new Payment(); paymentObj.setSignature(signature); paymentObj.setCardHolderName(txtCardHolderName.getText().toString()); paymentObj.setCardNumber(txtCardNumber.getText().toString()); paymentObj.setChannelCode(txtCardType.getTag().toString()); paymentObj.setChannelType(selectedCheckBoxTag); paymentObj.setCvv(txtCardCVV.getText().toString()); 
// ---- tail of onValidationSucceeded(): the method header and earlier statements are above this chunk ----
        // Copy the remaining card fields from the form into the request object and submit it.
        // "Success" here only means client-side validation passed, not that payment succeeded.
        paymentObj.setExpirationDateMonth(txtPaymentMonth.getText().toString());
        paymentObj.setExpirationDateYear(txtPaymentYear.getText().toString());
        paymentObj.setIssuingBank(txtIssuingBank.getText().toString());
        presenter.paymentRequest(paymentObj);
        Log.e("Success", "True");
    }

    /**
     * Validation-failure callback: marks each failed field with the first line of its
     * collated error message and shows a generic alert.
     *
     * NOTE(review): the crouton alert and log fire once per failed field, not once
     * per submit — presumably intended to fire once; verify.
     *
     * @param errors one entry per form field that failed validation
     */
    @Override
    public void onValidationFailed(List<ValidationError> errors) {
        for (ValidationError error : errors) {
            View view = error.getView();
            /* Split Error Message. Display first sequence only */
            String message = error.getCollatedErrorMessage(getActivity());
            String splitErrorMsg[] = message.split("\\r?\\n");
            // Display error messages
            if (view instanceof EditText) {
                ((EditText) view).setError(splitErrorMsg[0]);
            } else if (view instanceof TextView){
                ((TextView) view).setError(splitErrorMsg[0]);
            }
            croutonAlert(getActivity(), "Please fill empty field");
            Log.e("Validation Failed",splitErrorMsg[0]);
        }
    }

    /**
     * Payment-request response. A "Redirect" status opens the secure payment page
     * in PaymentWebViewActivity (URL built from the response's link + sanitized
     * pass token); any other status is surfaced to the user as an alert.
     */
    @Override
    public void onPaymentReceive(PaymentReceive obj) {
        dismissLoading();
        if(obj.getObj().getStatus().equals("Redirect")){
            //Open Secure Site At Browser
            // Strip every '/' from the pass token before embedding it in the URL path.
            String sanitizeUrl = obj.getObj().getPass().replaceAll("[/]", "");
            String url = obj.getObj().getLink()+"/android/"+sanitizeUrl;
            Intent intent = new Intent(getActivity(), PaymentWebViewActivity.class);
            intent.putExtra("PAYMENT_URL", url);
            getActivity().startActivity(intent);
            //Intent i = new Intent(Intent.ACTION_VIEW);
            //i.setData(Uri.parse(url));
            //startActivity(i);
        }else{
            croutonAlert(getActivity(), obj.getObj().getMessage());
        }
    }

    /**
     * Payment-info response: on "success" builds the payment-vendor picker UI,
     * otherwise alerts with the server message. The raw response is also cached
     * as JSON in shared preferences (setPaymentDummy).
     */
    @Override
    public void onPaymentInfoReceive(PaymentInfoReceive obj) {
        dismissLoading();
        Log.e("Status", obj.getObj().getStatus());
        Log.e("Status", obj.getObj().getPayment_channel().get(0).getChannel_code());
        Log.e("Size", Integer.toString(obj.getObj().getPayment_channel().size()));
        if (obj.getObj().getStatus().equals("success")){
            generatePaymentVendorList(obj);
        }else{
            croutonAlert(getActivity(), obj.getObj().getMessage());
        }
        Gson gson = new Gson();
        String payment = gson.toJson(obj);
        pref.setPaymentDummy(payment);
    }

    /**
     * Builds the payment-channel picker from the payment-info response: one radio
     * button per distinct channel type plus a row of channel logos fetched
     * asynchronously, then wires the radio buttons to behave as a mutually
     * exclusive group (channel type "1" = credit card, which also toggles the
     * card form's visibility).
     */
    public
    void generatePaymentVendorList(final PaymentInfoReceive obj){
        // Pass 1: collect the distinct channel types; channel type "1" entries also
        // feed the credit-card type dropdown.
        for(int a = 0 ; a < obj.getObj().getPayment_channel().size() ; a++){
            String paymentType = obj.getObj().getPayment_channel().get(a).getChannel_type();
            Log.e("PaymentType",paymentType);
            if (channelType.contains(paymentType)){ /*SKIP*/ }else {
                channelType.add(paymentType);
            }
            if(obj.getObj().getPayment_channel().get(a).getChannel_type().equals("1")){
                DropDownItem itemTitle = new DropDownItem();
                itemTitle.setText(obj.getObj().getPayment_channel().get(a).getChannel_name());
                itemTitle.setCode(obj.getObj().getPayment_channel().get(a).getChannel_code());
                itemTitle.setTag("Title");
                cardType.add(itemTitle);
            }
            //List<String> imageURLCreditCard = new ArrayList<String>();
            /* if(obj.getPayment_channel().get(a).getChannel_type().equals("1")){ imageURL.add(obj.getPayment_channel().get(a).getChannel_logo()); } else if(obj.getPayment_channel().get(a).getChannel_type().equals("2")){ imageURL.add(obj.getPayment_channel().get(a).getChannel_logo()); } }*/
        }
        // Pass 2: one vertical row per distinct channel type, holding the radio
        // button and a horizontal strip of that type's channel logos.
        for(int totalPaymentChannel = 0 ; totalPaymentChannel < channelType.size() ; totalPaymentChannel++){
            LinearLayout seatRow = new LinearLayout(getActivity());
            seatRow.setOrientation(LinearLayout.VERTICAL);
            seatRow.setGravity(LinearLayout.TEXT_ALIGNMENT_GRAVITY);
            final LinearLayout imageRow = new LinearLayout(getActivity());
            imageRow.setOrientation(LinearLayout.HORIZONTAL);
            imageRow.setGravity(LinearLayout.TEXT_ALIGNMENT_GRAVITY);
            for(int totalImage = 0 ; totalImage < obj.getObj().getPayment_channel().size() ; totalImage++){
                if(channelType.get(totalPaymentChannel).toString().equals(obj.getObj().getPayment_channel().get(totalImage).getChannel_type())){
                    Log.e(channelType.get(totalPaymentChannel),obj.getObj().getPayment_channel().get(totalImage).getChannel_type());
                    //Need to move this later
                    // Async logo download; the ImageView is appended to imageRow only
                    // when the callback fires, so logos may arrive after layout.
                    final AjaxCallback<Bitmap> cb = new AjaxCallback<Bitmap>() {
                        @Override
                        public void callback(String url, Bitmap bm, AjaxStatus status) {
                            // do whatever you want with bm (the image)
                            ImageView
                            image = new ImageView(getActivity());
                            image.setImageBitmap(bm);
                            imageRow.addView(image);
                        }
                    };
                    final AQuery aq = new AQuery(getActivity());
                    aq.ajax(obj.getObj().getPayment_channel().get(totalImage).getChannel_logo(), Bitmap.class, 0, cb);
                }
            }
            final RadioButton selectPaymentChannel = new RadioButton(getActivity());
            selectPaymentChannel.setId(totalPaymentChannel + 1);
            // The channel-type string doubles as the view tag used for lookup below.
            selectPaymentChannel.setTag(channelType.get(totalPaymentChannel).toString());
            if(channelType.get(totalPaymentChannel).toString().equals("1")){
                selectPaymentChannel.setText("Credit Card");
            }
            seatRow.addView(selectPaymentChannel);
            seatRow.addView(imageRow);
            paymentChannelList.addView(seatRow);
        }
        //set first checkbox checked
        final RadioButton checkBox = (RadioButton) view.findViewWithTag(channelType.get(0).toString());
        checkBox.setChecked(true);
        // Manual radio-group behavior: checking one button unchecks the others and
        // shows the credit-card form only for channel type "1".
        for(int y = 0 ; y < channelType.size() ; y++){
            final RadioButton checkToRemove = (RadioButton) view.findViewWithTag(channelType.get(y).toString());
            checkToRemove.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
                @Override
                public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                    if (isChecked) {
                        selectedCheckBoxTag = checkToRemove.getTag().toString();
                        if(selectedCheckBoxTag.equals("1")){
                            creditCardFormLayout.setVisibility(View.VISIBLE);
                        }else{
                            creditCardFormLayout.setVisibility(View.GONE);
                        }
                        for (int b = 0; b < channelType.size(); b++) {
                            // NOTE(review): '!=' compares String references, not values;
                            // this presumably should be !selectedCheckBoxTag.equals(...).
                            // It happens to work only if both sides are the same instance.
                            if (selectedCheckBoxTag != channelType.get(b).toString()) {
                                RadioButton checkToRemove = (RadioButton) view.findViewWithTag(channelType.get(b).toString());
                                checkToRemove.setChecked(false);
                            }
                        }
                    }
                }
            }
            );
        }
    }

    /** Shows the loading indicator and asks the presenter for payment info. */
    public void getPaymentInfo(Signature baseObj){
        initiateLoading(getActivity());
        presenter.paymentInfo(baseObj);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Container id is owned by the hosting activity.
        fragmentContainerId = ((FragmentContainerActivity) getActivity()).getFragmentContainerId();
    }

    // Forward lifecycle events to the presenter so it can (un)subscribe.
    @Override
    public void onResume() {
        super.onResume();
        presenter.onResume();
    }

    @Override
    public void onPause() {
        super.onPause();
        presenter.onPause();
    }
}
package com.thinkaurelius.titan.hadoop.scan; import com.google.common.base.Preconditions; import com.thinkaurelius.titan.diskstorage.Entry; import com.thinkaurelius.titan.diskstorage.EntryList; import com.thinkaurelius.titan.diskstorage.StaticBuffer; import com.thinkaurelius.titan.diskstorage.configuration.ConfigNamespace; import com.thinkaurelius.titan.diskstorage.configuration.Configuration; import com.thinkaurelius.titan.diskstorage.configuration.ModifiableConfiguration; import com.thinkaurelius.titan.diskstorage.keycolumnvalue.SliceQuery; import com.thinkaurelius.titan.diskstorage.keycolumnvalue.scan.ScanJob; import com.thinkaurelius.titan.diskstorage.util.BufferUtil; import com.thinkaurelius.titan.diskstorage.util.EntryArrayList; import com.thinkaurelius.titan.hadoop.config.ModifiableHadoopConfiguration; import com.thinkaurelius.titan.hadoop.config.TitanHadoopConfiguration; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.Mapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.lang.reflect.Field; import java.util.*; import java.util.function.Predicate; import static com.thinkaurelius.titan.hadoop.compat.HadoopCompatLoader.DEFAULT_COMPAT; /** * Run a {@link com.thinkaurelius.titan.diskstorage.keycolumnvalue.scan.ScanJob} * via a Hadoop {@link org.apache.hadoop.mapreduce.Mapper} over the edgestore. 
*/ public class HadoopScanMapper extends Mapper<StaticBuffer, Iterable<Entry>, NullWritable, NullWritable> { private static final Logger log = LoggerFactory.getLogger(HadoopScanMapper.class); protected ScanJob job; protected HadoopContextScanMetrics metrics; protected com.thinkaurelius.titan.diskstorage.configuration.Configuration jobConf; private Predicate<StaticBuffer> keyFilter; private SliceQuery initialQuery; private List<SliceQuery> subsequentQueries; @Override protected void setup(Context context) throws IOException, InterruptedException { super.setup(context); org.apache.hadoop.conf.Configuration hadoopConf = DEFAULT_COMPAT.getContextConfiguration(context); ModifiableHadoopConfiguration scanConf = ModifiableHadoopConfiguration.of(TitanHadoopConfiguration.MAPRED_NS, hadoopConf); job = getJob(scanConf); metrics = new HadoopContextScanMetrics(context); Configuration graphConf = getTitanConfiguration(context); finishSetup(scanConf, graphConf); } protected void finishSetup(ModifiableHadoopConfiguration scanConf, Configuration graphConf) { jobConf = getJobConfiguration(scanConf); Preconditions.checkNotNull(metrics); // Allowed to be null for jobs that specify no configuration and no configuration root //Preconditions.checkNotNull(jobConf); Preconditions.checkNotNull(job); job.workerIterationStart(jobConf, graphConf, metrics); keyFilter = job.getKeyFilter(); List<SliceQuery> sliceQueries = job.getQueries(); Preconditions.checkArgument(null != sliceQueries, "Job cannot specify null query list"); Preconditions.checkArgument(0 < sliceQueries.size(), "Job must specify at least one query"); // Assign head of getQueries() to "initialQuery" initialQuery = sliceQueries.get(0); // Assign tail of getQueries() to "subsequentQueries" subsequentQueries = new ArrayList<>(sliceQueries.subList(1,sliceQueries.size())); Preconditions.checkState(sliceQueries.size() == subsequentQueries.size() + 1); Preconditions.checkNotNull(initialQuery); if (0 < subsequentQueries.size()) { //It is 
assumed that the first query is the grounding query if multiple queries exist StaticBuffer start = initialQuery.getSliceStart(); Preconditions.checkArgument(start.equals(BufferUtil.zeroBuffer(1)), "Expected start of first query to be all 0s: %s", start); StaticBuffer end = initialQuery.getSliceEnd(); Preconditions.checkArgument(end.equals(BufferUtil.oneBuffer(end.length())), "Expected end of first query to be all 1s: %s", end); } } @Override protected void map(StaticBuffer key, Iterable<Entry> values, Context context) throws IOException, InterruptedException { EntryArrayList al = EntryArrayList.of(values); // KeyFilter check if (!keyFilter.test(key)) { log.debug("Skipping key {} based on KeyFilter", key); return; } // InitialQuery check (at least one match is required or else the key is ignored) EntryList initialQueryMatches = findEntriesMatchingQuery(initialQuery, al); if (0 == initialQueryMatches.size()) { log.debug("Skipping key {} based on InitialQuery ({}) match failure", key, initialQuery); return; } // Both conditions (KeyFilter && InitialQuery) for invoking process are satisfied // Create an entries parameter to be passed into the process method Map<SliceQuery, EntryList> matches = new HashMap<>(); matches.put(initialQuery, initialQueryMatches); // Find matches (if any are present) for noninitial queries for (SliceQuery sq : subsequentQueries) { matches.put(sq, findEntriesMatchingQuery(sq, al)); } // Process job.process(key, matches, metrics); } @Override protected void cleanup(Context context) throws IOException, InterruptedException { super.cleanup(context); job.workerIterationEnd(metrics); } private EntryList findEntriesMatchingQuery(SliceQuery query, EntryList sortedEntries) { int lowestStartMatch = sortedEntries.size(); // Inclusive int highestEndMatch = -1; // Inclusive final StaticBuffer queryStart = query.getSliceStart(); final StaticBuffer queryEnd = query.getSliceEnd(); // Find the lowest matchStart s.t. 
query.getSliceStart <= sortedEntries.get(matchStart) int low = 0; int high = sortedEntries.size() - 1; while (low <= high) { int mid = (low + high) >>> 1; Entry midVal = sortedEntries.get(mid); int cmpStart = queryStart.compareTo(midVal.getColumn()); if (0 < cmpStart) { // query lower bound exceeds entry (no match) if (lowestStartMatch == mid + 1) { // lowestStartMatch located break; } // Move to higher list index low = mid + 1; } else /* (0 >= cmpStart) */ { // entry equals or exceeds query lower bound (match, but not necessarily lowest match) if (mid < lowestStartMatch) { lowestStartMatch = mid; } // Move to a lower list index high = mid - 1; } } // If lowestStartMatch is beyond the end of our list parameter, there cannot possibly be any matches, // so we can bypass the highestEndMatch search and just return an empty result. if (sortedEntries.size() == lowestStartMatch) { return EntryList.EMPTY_LIST; } // Find the highest matchEnd s.t. sortedEntries.get(matchEnd) < query.getSliceEnd low = 0; high = sortedEntries.size() - 1; while (low <= high) { int mid = (low + high) >>> 1; Entry midVal = sortedEntries.get(mid); int cmpEnd = queryEnd.compareTo(midVal.getColumn()); if (0 < cmpEnd) { // query upper bound exceeds entry (match, not necessarily highest) if (mid > highestEndMatch) { highestEndMatch = mid; } // Move to higher list index low = mid + 1; } else /* (0 >= cmpEnd) */ { // entry equals or exceeds query upper bound (no match) if (highestEndMatch == mid - 1) { // highestEndMatch located break; } // Move to a lower list index high = mid - 1; } } if (0 <= highestEndMatch - lowestStartMatch) { // Return sublist between indices (inclusive at both indices) int endIndex = highestEndMatch + 1; // This will be passed into subList, which interprets it exclusively if (query.hasLimit()) { endIndex = Math.min(endIndex, query.getLimit() + lowestStartMatch); } // TODO avoid unnecessary copy here return EntryArrayList.of(sortedEntries.subList(lowestStartMatch, endIndex /* 
exclusive */)); } else { return EntryList.EMPTY_LIST; } } private ScanJob getJob(Configuration scanConf) { String jobClass = scanConf.get(TitanHadoopConfiguration.SCAN_JOB_CLASS); try { return (ScanJob)Class.forName(jobClass).newInstance(); } catch (InstantiationException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } } static ModifiableConfiguration getTitanConfiguration(Context context) { org.apache.hadoop.conf.Configuration hadoopConf = DEFAULT_COMPAT.getContextConfiguration(context); return ModifiableHadoopConfiguration.of(TitanHadoopConfiguration.MAPRED_NS, hadoopConf).getTitanGraphConf(); } static Configuration getJobConfiguration(ModifiableHadoopConfiguration scanConf) { if (!scanConf.has(TitanHadoopConfiguration.SCAN_JOB_CONFIG_ROOT)) { log.debug("No job configuration root provided"); return null; } ConfigNamespace jobRoot = getJobRoot(scanConf.get(TitanHadoopConfiguration.SCAN_JOB_CONFIG_ROOT)); return ModifiableHadoopConfiguration.prefixView(jobRoot, TitanHadoopConfiguration.SCAN_JOB_CONFIG_KEYS, scanConf); } static ConfigNamespace getJobRoot(String confRootName) { String tokens[] = confRootName.split("#"); String className = tokens[0]; String fieldName = tokens[1]; try { Field f = Class.forName(className).getField(fieldName); return (ConfigNamespace)f.get(null); } catch (NoSuchFieldException e) { throw new RuntimeException(e); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } }
package de.uni_koeln.info.data;

import java.util.List;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Jackson-mapped data-transfer object for a single card record.
 *
 * Snake_case JSON names are mapped to camelCase fields via {@link JsonProperty}.
 * All numeric-looking values (max_points, exam_time, version_nr, timestamps)
 * are kept as raw strings exactly as delivered by the backend.
 */
public class CardleObject {

    private String id;
    private String author;
    @JsonProperty("author_name")
    private String authorName;
    @JsonProperty("author_name_for_admin")
    private String authorNameForAdmin;
    private String status;
    @JsonProperty("max_points")
    private String maxPoints;
    @JsonProperty("exam_time")
    private String examTime;
    @JsonProperty("version_nr")
    private String versionNumber;
    @JsonProperty("basic_card_type")
    private String basicCardType;
    @JsonProperty("card_data_type")
    private String cardDataType;
    private Question question;
    @JsonProperty("card_data")
    private CardData cardData;
    private List<String> tags;
    @JsonProperty("tag_ids")
    private List<String> tagIds;
    private List<String> flags;
    @JsonProperty("created_at")
    private String createdAt;
    @JsonProperty("updated_at")
    private String updatedAt;

    // ---- plain bean accessors; no validation or defensive copying is performed ----

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getAuthor() {
        return author;
    }

    public void setAuthor(String author) {
        this.author = author;
    }

    public String getAuthorName() {
        return authorName;
    }

    public void setAuthorName(String authorName) {
        this.authorName = authorName;
    }

    public String getAuthorNameForAdmin() {
        return authorNameForAdmin;
    }

    public void setAuthorNameForAdmin(String authorNameForAdmin) {
        this.authorNameForAdmin = authorNameForAdmin;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getMaxPoints() {
        return maxPoints;
    }

    public void setMaxPoints(String maxPoints) {
        this.maxPoints = maxPoints;
    }

    public String getExamTime() {
        return examTime;
    }

    public void setExamTime(String examTime) {
        this.examTime = examTime;
    }

    public String getVersionNumber() {
        return versionNumber;
    }

    public void setVersionNumber(String versionNumber) {
        this.versionNumber = versionNumber;
    }

    public String getCardDataType() {
        return cardDataType;
    }

    public void setCardDataType(String cardDataType) {
        this.cardDataType = cardDataType;
    }

    public Question getQuestion() {
        return question;
    }

    public void setQuestion(Question question) {
        this.question = question;
    }

    public CardData getCardData() {
        return cardData;
    }

    public void setCardData(CardData cardData) {
        this.cardData = cardData;
    }

    public String getBasicCardType() {
        return basicCardType;
    }

    public void setBasicCardType(String basicCardType) {
        this.basicCardType = basicCardType;
    }

    public List<String> getTags() {
        return tags;
    }

    public void setTags(List<String> tags) {
        this.tags = tags;
    }

    public List<String> getTagIds() {
        return tagIds;
    }

    public void setTagIds(List<String> tagIds) {
        this.tagIds = tagIds;
    }

    public List<String> getFlags() {
        return flags;
    }

    public void setFlags(List<String> flags) {
        this.flags = flags;
    }

    public String getCreatedAt() {
        return createdAt;
    }

    public void setCreatedAt(String createdAt) {
        this.createdAt = createdAt;
    }

    public String getUpdatedAt() {
        return updatedAt;
    }

    public void setUpdatedAt(String updatedAt) {
        this.updatedAt = updatedAt;
    }

    /** Debug representation listing every field; not intended for parsing. */
    @Override
    public String toString() {
        return "CardleObject [id=" + id + ", author=" + author + ", authorName=" + authorName
                + ", authorNameForAdmin=" + authorNameForAdmin + ", status=" + status + ", maxPoints=" + maxPoints
                + ", examTime=" + examTime + ", versionNumber=" + versionNumber + ", basicCardType=" + basicCardType
                + ", cardDataType=" + cardDataType + ", question=" + question + ", cardData=" + cardData + ", tags="
                + tags + ", tagIds=" + tagIds + ", flags=" + flags + ", createdAt=" + createdAt + ", updatedAt="
                + updatedAt + "]";
    }

}
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package util; import java.util.*; //import util.*; import hydra.*; import hydra.blackboard.*; //import com.gemstone.gemfire.cache.*; /** Blackboard defining counters for events. This blackboard has counters * with names used by the increment methods in {@link util#AbstractListener}. * Any subclass of AbstractListener that wants to use AbstractListener's * counter increment methods can use this blackboard for that purpose. * Note that the names of the blackboard counters defined here must be * the same names as defined in AbstractListener. 
*/ public class EventCountersBB extends Blackboard { // Blackboard variables static String BB_NAME = "EventCounters_Blackboard"; static String BB_TYPE = "RMI"; // singleton instance of blackboard private static EventCountersBB bbInstance; // Event counters // afterCreate events public static int numAfterCreateEvents_isDist; public static int numAfterCreateEvents_isNotDist; public static int numAfterCreateEvents_isExp; public static int numAfterCreateEvents_isNotExp; public static int numAfterCreateEvents_isRemote; public static int numAfterCreateEvents_isNotRemote; public static int numAfterCreateEvents_isLoad; public static int numAfterCreateEvents_isNotLoad; public static int numAfterCreateEvents_isLocalLoad; public static int numAfterCreateEvents_isNotLocalLoad; public static int numAfterCreateEvents_isNetLoad; public static int numAfterCreateEvents_isNotNetLoad; public static int numAfterCreateEvents_isNetSearch; public static int numAfterCreateEvents_isNotNetSearch; // afterDestroy events public static int numAfterDestroyEvents_isDist; public static int numAfterDestroyEvents_isNotDist; public static int numAfterDestroyEvents_isExp; public static int numAfterDestroyEvents_isNotExp; public static int numAfterDestroyEvents_isRemote; public static int numAfterDestroyEvents_isNotRemote; public static int numAfterDestroyEvents_isLoad; public static int numAfterDestroyEvents_isNotLoad; public static int numAfterDestroyEvents_isLocalLoad; public static int numAfterDestroyEvents_isNotLocalLoad; public static int numAfterDestroyEvents_isNetLoad; public static int numAfterDestroyEvents_isNotNetLoad; public static int numAfterDestroyEvents_isNetSearch; public static int numAfterDestroyEvents_isNotNetSearch; // afterInvalidate events public static int numAfterInvalidateEvents_isDist; public static int numAfterInvalidateEvents_isNotDist; public static int numAfterInvalidateEvents_isExp; public static int numAfterInvalidateEvents_isNotExp; public static int 
numAfterInvalidateEvents_isRemote; public static int numAfterInvalidateEvents_isNotRemote; public static int numAfterInvalidateEvents_isLoad; public static int numAfterInvalidateEvents_isNotLoad; public static int numAfterInvalidateEvents_isLocalLoad; public static int numAfterInvalidateEvents_isNotLocalLoad; public static int numAfterInvalidateEvents_isNetLoad; public static int numAfterInvalidateEvents_isNotNetLoad; public static int numAfterInvalidateEvents_isNetSearch; public static int numAfterInvalidateEvents_isNotNetSearch; // afterUpdate events public static int numAfterUpdateEvents_isDist; public static int numAfterUpdateEvents_isNotDist; public static int numAfterUpdateEvents_isExp; public static int numAfterUpdateEvents_isNotExp; public static int numAfterUpdateEvents_isRemote; public static int numAfterUpdateEvents_isNotRemote; public static int numAfterUpdateEvents_isLoad; public static int numAfterUpdateEvents_isNotLoad; public static int numAfterUpdateEvents_isLocalLoad; public static int numAfterUpdateEvents_isNotLocalLoad; public static int numAfterUpdateEvents_isNetLoad; public static int numAfterUpdateEvents_isNotNetLoad; public static int numAfterUpdateEvents_isNetSearch; public static int numAfterUpdateEvents_isNotNetSearch; // afterRegionDestroy events public static int numAfterRegionDestroyEvents_isDist; public static int numAfterRegionDestroyEvents_isNotDist; public static int numAfterRegionDestroyEvents_isExp; public static int numAfterRegionDestroyEvents_isNotExp; public static int numAfterRegionDestroyEvents_isRemote; public static int numAfterRegionDestroyEvents_isNotRemote; // afterRegionInvalidate events public static int numAfterRegionInvalidateEvents_isDist; public static int numAfterRegionInvalidateEvents_isNotDist; public static int numAfterRegionInvalidateEvents_isExp; public static int numAfterRegionInvalidateEvents_isNotExp; public static int numAfterRegionInvalidateEvents_isRemote; public static int 
numAfterRegionInvalidateEvents_isNotRemote; // afterRegionCreate events public static int numAfterRegionCreateEvents_isDist; public static int numAfterRegionCreateEvents_isNotDist; public static int numAfterRegionCreateEvents_isExp; public static int numAfterRegionCreateEvents_isNotExp; public static int numAfterRegionCreateEvents_isRemote; public static int numAfterRegionCreateEvents_isNotRemote; // close events public static int numClose; //afterClear events public static int numAfterClearEvents_isDist; public static int numAfterClearEvents_isNotDist; public static int numAfterClearEvents_isExp; public static int numAfterClearEvents_isNotExp; public static int numAfterClearEvents_isRemote; public static int numAfterClearEvents_isNotRemote; /** * Get the EventCountersBB */ public static EventCountersBB getBB() { if (bbInstance == null) { synchronized ( EventCountersBB.class ) { if (bbInstance == null) bbInstance = new EventCountersBB(BB_NAME, BB_TYPE); } } return bbInstance; } /** * Zero-arg constructor for remote method invocations. */ public EventCountersBB() { } /** * Creates a sample blackboard using the specified name and transport type. */ public EventCountersBB(String name, String type) { super(name, type, EventCountersBB.class); } /** Increment the appropriate entry event counters. * * @param eventName - An event name as used in this blackboard's counters, such as "AfterCreate" * @param isDistributed - Entry event boolean. * @param isExpiration - Entry event boolean. * @param isRemote - Entry event boolean. * @param isLoad - Entry event boolean. * @param isLocalLoad - Entry event boolean. * @param isNetLoad - Entry event boolean. * @param isNetSearch - Entry event boolean. 
* */ public static void incrementEntryEventCntrs(String eventName, boolean isDistributed, boolean isExpiration, boolean isRemote, boolean isLoad, boolean isLocalLoad, boolean isNetLoad, boolean isNetSearch) { Blackboard bb = EventCountersBB.getBB(); SharedCounters sc = bb.getSharedCounters(); String counterName; counterName = "num" + eventName + "Events_" + (isDistributed ? "isDist" : "isNotDist"); sc.increment(bb.getSharedCounter(counterName)); counterName = "num" + eventName + "Events_" + (isExpiration ? "isExp" : "isNotExp"); sc.increment(bb.getSharedCounter(counterName)); counterName = "num" + eventName + "Events_" + (isRemote ? "isRemote" : "isNotRemote"); sc.increment(bb.getSharedCounter(counterName)); counterName = "num" + eventName + "Events_" + (isLoad ? "isLoad" : "isNotLoad"); sc.increment(bb.getSharedCounter(counterName)); counterName = "num" + eventName + "Events_" + (isLocalLoad ? "isLocalLoad" : "isNotLocalLoad"); sc.increment(bb.getSharedCounter(counterName)); counterName = "num" + eventName + "Events_" + (isNetLoad ? "isNetLoad" : "isNotNetLoad"); sc.increment(bb.getSharedCounter(counterName)); counterName = "num" + eventName + "Events_" + (isNetSearch ? "isNetSearch" : "isNotNetSearch"); sc.increment(bb.getSharedCounter(counterName)); } /** Increment the appropriate region event counters. * * @param eventName - An event name as used in this blackboard's counters, such as "RegionDestroy" * @param isDistributed - Entry event boolean. * @param isExpiration - Entry event boolean. * @param isRemote - Entry event boolean. * */ public static void incrementRegionEventCntrs(String eventName, boolean isDistributed, boolean isExpiration, boolean isRemote) { Blackboard bb = EventCountersBB.getBB(); SharedCounters sc = bb.getSharedCounters(); String counterName; counterName = "num" + eventName + "Events_" + (isDistributed ? "isDist" : "isNotDist"); sc.increment(bb.getSharedCounter(counterName)); counterName = "num" + eventName + "Events_" + (isExpiration ? 
"isExp" : "isNotExp"); sc.increment(bb.getSharedCounter(counterName)); counterName = "num" + eventName + "Events_" + (isRemote ? "isRemote" : "isNotRemote"); sc.increment(bb.getSharedCounter(counterName)); } /** * Check the value of all event counters. * * @param expectedValues An ArrayList of instances of ExpCounterValue. * * @throws TestException if any counter does not have the expected value. */ public void checkEventCounters(ArrayList expectedValues) { Log.getLogWriter().info("Checking " + expectedValues.size() + " event counters in " + this.getClass().getName()); Blackboard BB = getBB(); SharedCounters counters = BB.getSharedCounters(); String[] counterNames = BB.getCounterNames(); for (int i = 0; i < expectedValues.size(); i++) { ExpCounterValue expValue = (ExpCounterValue)expectedValues.get(i); if ((expValue.getCounterName1() != null) && (expValue.getCounterName2() != null)) { // want sum of counters try { TestHelper.waitForCounterSum(BB, expValue.getCounterName1(), expValue.getCounterName2(), expValue.getExpectedValue(), true, event.EventTest.MILLIS_TO_WAIT); } catch (TestException e) { BB.printSharedCounters(); throw e; } } else { // only one counterName has a value boolean exact = expValue.getExact(); try { TestHelper.waitForCounter(BB, expValue.getCounterName1(), BB.getSharedCounter(expValue.getCounterName1()), expValue.getExpectedValue(), exact, event.EventTest.MILLIS_TO_WAIT); } catch (TestException e) { BB.printSharedCounters(); throw e; } } } Log.getLogWriter().info(BB.getClass().getName() + ", all counters are OK"); } /** * Check the value of all event counters. * * @param expectedValues An array of the expected counter values, in the * same order as they are defined in this class. If any value is * < 0, then don't check it. * * @throws TestException if any counter does not have the expected value. 
*/ public void checkEventCounters(long[] expectedValues) { Log.getLogWriter().info("Checking event counters in " + this.getClass().getName()); Blackboard BB = getBB(); String[] counterNames = BB.getCounterNames(); // make sure expectedValues is same length as number of counters in this BB if (counterNames.length != expectedValues.length) { StringBuffer aStr = new StringBuffer(); for (int i = 0; i < counterNames.length; i++) aStr.append(" counterNames[" + i + "] is " + counterNames[i] + "\n"); for (int i = 0; i < expectedValues.length; i++) aStr.append(" expectedValues[" + i + "] is " + expectedValues[i] + "\n"); Log.getLogWriter().info(aStr.toString()); throw new TestException("Expected length of expectedValues " + expectedValues.length + " to be = length of counterNames " + counterNames.length); } SharedCounters counters = BB.getSharedCounters(); for (int i = 0; i < expectedValues.length; i++) { if (expectedValues[i] >= 0) try { TestHelper.waitForCounter(BB, counterNames[i], BB.getSharedCounter(counterNames[i]), expectedValues[i], true, event.EventTest.MILLIS_TO_WAIT); } catch (TestException e) { BB.printSharedCounters(); throw e; } } Log.getLogWriter().info(BB.getClass().getName() + ", all counters are OK"); } /** * Check the value of update/create combined. 
*/
public static void checkCreateUpdate(long expectedIsDist, long expectedIsNotDist,
                                     long expectedIsExp, long expectedIsNotExp,
                                     long expectedIsRemote, long expectedIsNotRemote) {
   Blackboard BB = EventCountersBB.getBB();
   SharedCounters counters = EventCountersBB.getBB().getSharedCounters();

   // Each category is checked as the SUM of its afterCreate and afterUpdate counters,
   // since a test operation may surface as either event depending on prior state.
   long isDist = counters.read(numAfterCreateEvents_isDist)
               + counters.read(numAfterUpdateEvents_isDist);
   long isNotDist = counters.read(numAfterCreateEvents_isNotDist)
                  + counters.read(numAfterUpdateEvents_isNotDist);
   long isExp = counters.read(numAfterCreateEvents_isExp)
              + counters.read(numAfterUpdateEvents_isExp);
   long isNotExp = counters.read(numAfterCreateEvents_isNotExp)
                 + counters.read(numAfterUpdateEvents_isNotExp);
   long isRemote = counters.read(numAfterCreateEvents_isRemote)
                 + counters.read(numAfterUpdateEvents_isRemote);
   long isNotRemote = counters.read(numAfterCreateEvents_isNotRemote)
                    + counters.read(numAfterUpdateEvents_isNotRemote);

   checkCounterSum(BB, "isDist", expectedIsDist, isDist);
   checkCounterSum(BB, "isNotDist", expectedIsNotDist, isNotDist);
   checkCounterSum(BB, "isExp", expectedIsExp, isExp);
   checkCounterSum(BB, "isNotExp", expectedIsNotExp, isNotExp);
   checkCounterSum(BB, "isRemote", expectedIsRemote, isRemote);
   checkCounterSum(BB, "isNotRemote", expectedIsNotRemote, isNotRemote);

   Log.getLogWriter().info("EventCountersBB, create/update counters are OK");
}

/**
 * Verifies a single create/update counter sum. On mismatch the blackboard is
 * printed (to aid debugging) and a TestException is thrown.
 *
 * @param bb          the blackboard to dump on failure
 * @param counterName the logical counter name used in the error message
 * @param expected    the expected sum
 * @param actual      the observed sum
 */
private static void checkCounterSum(Blackboard bb, String counterName, long expected, long actual) {
   if (actual != expected) {
      bb.print();
      throw new TestException("Expected create/update sum of " + counterName
            + " to be " + expected + ", but it is " + actual);
   }
}

/** Zero all counters in this blackboard. */
public void zeroAllCounters() {
   SharedCounters sc = getSharedCounters();
   // afterCreate event counters
   sc.zero(numAfterCreateEvents_isDist);
   sc.zero(numAfterCreateEvents_isNotDist);
   sc.zero(numAfterCreateEvents_isExp);
   sc.zero(numAfterCreateEvents_isNotExp);
   sc.zero(numAfterCreateEvents_isRemote);
   sc.zero(numAfterCreateEvents_isNotRemote);
   sc.zero(numAfterCreateEvents_isLoad);
   sc.zero(numAfterCreateEvents_isNotLoad);
   sc.zero(numAfterCreateEvents_isLocalLoad);
   sc.zero(numAfterCreateEvents_isNotLocalLoad);
   sc.zero(numAfterCreateEvents_isNetLoad);
   sc.zero(numAfterCreateEvents_isNotNetLoad);
   sc.zero(numAfterCreateEvents_isNetSearch);
   sc.zero(numAfterCreateEvents_isNotNetSearch);
   // afterDestroy event counters
   sc.zero(numAfterDestroyEvents_isDist);
   sc.zero(numAfterDestroyEvents_isNotDist);
   sc.zero(numAfterDestroyEvents_isExp);
   sc.zero(numAfterDestroyEvents_isNotExp);
   sc.zero(numAfterDestroyEvents_isRemote);
   sc.zero(numAfterDestroyEvents_isNotRemote);
   sc.zero(numAfterDestroyEvents_isLoad);
   sc.zero(numAfterDestroyEvents_isNotLoad);
   sc.zero(numAfterDestroyEvents_isLocalLoad);
   sc.zero(numAfterDestroyEvents_isNotLocalLoad);
   sc.zero(numAfterDestroyEvents_isNetLoad);
   sc.zero(numAfterDestroyEvents_isNotNetLoad);
   sc.zero(numAfterDestroyEvents_isNetSearch);
   sc.zero(numAfterDestroyEvents_isNotNetSearch);
   // afterInvalidate event counters
   sc.zero(numAfterInvalidateEvents_isDist);
   sc.zero(numAfterInvalidateEvents_isNotDist);
   sc.zero(numAfterInvalidateEvents_isExp);
   sc.zero(numAfterInvalidateEvents_isNotExp);
   sc.zero(numAfterInvalidateEvents_isRemote);
   sc.zero(numAfterInvalidateEvents_isNotRemote);
   sc.zero(numAfterInvalidateEvents_isLoad);
   sc.zero(numAfterInvalidateEvents_isNotLoad);
   sc.zero(numAfterInvalidateEvents_isLocalLoad);
   sc.zero(numAfterInvalidateEvents_isNotLocalLoad);
   sc.zero(numAfterInvalidateEvents_isNetLoad);
   sc.zero(numAfterInvalidateEvents_isNotNetLoad);
   sc.zero(numAfterInvalidateEvents_isNetSearch);
   sc.zero(numAfterInvalidateEvents_isNotNetSearch);
   // afterUpdate event counters
   sc.zero(numAfterUpdateEvents_isDist);
   sc.zero(numAfterUpdateEvents_isNotDist);
   sc.zero(numAfterUpdateEvents_isExp);
   sc.zero(numAfterUpdateEvents_isNotExp);
   sc.zero(numAfterUpdateEvents_isRemote);
   sc.zero(numAfterUpdateEvents_isNotRemote);
   sc.zero(numAfterUpdateEvents_isLoad);
   sc.zero(numAfterUpdateEvents_isNotLoad);
   sc.zero(numAfterUpdateEvents_isLocalLoad);
   sc.zero(numAfterUpdateEvents_isNotLocalLoad);
   sc.zero(numAfterUpdateEvents_isNetLoad);
   sc.zero(numAfterUpdateEvents_isNotNetLoad);
   sc.zero(numAfterUpdateEvents_isNetSearch);
   sc.zero(numAfterUpdateEvents_isNotNetSearch);
   // afterRegionDestroy event counters (region-level events have no load/netSearch variants)
   sc.zero(numAfterRegionDestroyEvents_isDist);
   sc.zero(numAfterRegionDestroyEvents_isNotDist);
   sc.zero(numAfterRegionDestroyEvents_isExp);
   sc.zero(numAfterRegionDestroyEvents_isNotExp);
   sc.zero(numAfterRegionDestroyEvents_isRemote);
   sc.zero(numAfterRegionDestroyEvents_isNotRemote);
   // afterRegionInvalidate event counters
   sc.zero(numAfterRegionInvalidateEvents_isDist);
   sc.zero(numAfterRegionInvalidateEvents_isNotDist);
   sc.zero(numAfterRegionInvalidateEvents_isExp);
   sc.zero(numAfterRegionInvalidateEvents_isNotExp);
   sc.zero(numAfterRegionInvalidateEvents_isRemote);
   sc.zero(numAfterRegionInvalidateEvents_isNotRemote);
   // afterRegionCreate event counters
   sc.zero(numAfterRegionCreateEvents_isDist);
   sc.zero(numAfterRegionCreateEvents_isNotDist);
   sc.zero(numAfterRegionCreateEvents_isExp);
   sc.zero(numAfterRegionCreateEvents_isNotExp);
   sc.zero(numAfterRegionCreateEvents_isRemote);
   sc.zero(numAfterRegionCreateEvents_isNotRemote);
   // close counter
   sc.zero(numClose);
   // afterClear event counters
   sc.zero(numAfterClearEvents_isDist);
   sc.zero(numAfterClearEvents_isNotDist);
   sc.zero(numAfterClearEvents_isExp);
   sc.zero(numAfterClearEvents_isNotExp);
   sc.zero(numAfterClearEvents_isRemote);
   sc.zero(numAfterClearEvents_isNotRemote);
}
}
/* * Copyright 2010-2013 Heads Up Development Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.headsupdev.irc.impl; import org.headsupdev.irc.IRCServiceManager; import org.schwering.irc.lib.IRCConnection; import org.schwering.irc.lib.IRCUtil; import java.io.IOException; /** * the default connection provider which handles common irc functionality. * * @author Andrew Williams * @version $Id: DefaultIRCConnection.java 55 2009-12-08 15:46:03Z handyande $ * @since 1.0 */ public class DefaultIRCConnection implements org.headsupdev.irc.IRCConnection { private IRCConnection conn; private IRCServiceManager manager; public DefaultIRCConnection( IRCConnection conn, IRCServiceManager manager ) { this.conn = conn; this.manager = manager; } public IRCServiceManager getManager() { return manager; } public void disconnect() throws IOException { disconnect( "Application shutting down" ); } public void disconnect( String reason ) throws IOException { checkConnected(); conn.doQuit( reason ); conn = null; } public void sendMessage( String to, String message ) { if ( to == null || message == null ) { throw new IllegalArgumentException(); } checkConnected(); int linesSent = 0; String[] lines = message.split( "\n" ); for ( int i = 0; i < lines.length; i++ ) { linesSent++; if ( linesSent > 5 ) { try { Thread.sleep( 100 * ( ( linesSent <= 10 ) ? 
1 : 5 ) ); } catch ( InterruptedException e ) { // risk posting anyway } } conn.doPrivmsg( to, lines[i] ); } } public void sendAction( String to, String action ) { checkConnected(); conn.doPrivmsg( to, IRCUtil.actionIndicator + "ACTION " + action + IRCUtil.actionIndicator ); } public void sendNotice( String to, String notice ) { checkConnected(); conn.doNotice( to, notice ); } public void join( String channel ) { checkConnected(); conn.doJoin( channel ); } public void part( String channel ) { checkConnected(); conn.doPart( channel ); } public void part( String channel, String reason ) { checkConnected(); conn.doPart( channel, reason ); } public void op( String channel, String nick ) { checkConnected(); conn.doMode( channel, "+o " + nick ); } public void deOp( String channel, String nick ) { checkConnected(); conn.doMode( channel, "-o " + nick ); } public void voice( String channel, String nick ) { checkConnected(); conn.doMode( channel, "+v " + nick ); } public void deVoice( String channel, String nick ) { checkConnected(); conn.doMode( channel, "-v " + nick ); } public void kick( String channel, String nick ) { checkConnected(); conn.doKick( channel, nick ); } public void kick( String channel, String nick, String reason ) { checkConnected(); conn.doKick( channel, nick, reason ); } public void ban( String channel, String mask ) { checkConnected(); conn.doMode( channel, "+b " + mask ); } public void unBan( String channel, String mask ) { checkConnected(); conn.doMode( channel, "-b " + mask ); } public String getHost() { checkConnected(); return conn.getHost(); } public String getNick() { checkConnected(); return conn.getNick(); } public void setNick( String nick ) { checkConnected(); conn.doNick( nick ); } public String getUsername() { checkConnected(); return conn.getUsername(); } public String getRealname() { checkConnected(); return conn.getRealname(); } public String getTopic( String channel ) { checkConnected(); return ( new ReplyMonitor( conn, 
IRCUtil.RPL_TOPIC, channel ) ).getReply(); } public void setTopic( String channel, String topic ) { checkConnected(); conn.doTopic( channel, topic ); } public String getTime( String nick ) { return ( new ReplyMonitor( conn, IRCUtil.RPL_TIME, nick ) ).getReply(); } public void checkConnected() { if ( conn == null ) { throw new IllegalStateException( "Must be connected to a server" ); } } }
/* * Copyright 2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.swiperefreshlayout.widget; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import android.content.Context; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; import androidx.annotation.NonNull; import androidx.core.view.NestedScrollingParent2; import androidx.core.view.ViewCompat; import androidx.test.core.app.ApplicationProvider; import androidx.test.ext.junit.runners.AndroidJUnit4; import androidx.test.filters.LargeTest; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; /** * Small integration tests that verifies correctness of {@link SwipeRefreshLayout}'s * NestedScrollingParent2 implementation. 
*/ @RunWith(AndroidJUnit4.class) @LargeTest public class SwipeRefreshLayoutNestedScrollingParent2Test { private SwipeRefreshLayout mSwipeRefreshLayout; private NestedScrollingSpyView mParent; private View mChild; @Before public void instantiateMembers() { mSwipeRefreshLayout = new SwipeRefreshLayout(ApplicationProvider.getApplicationContext()); mParent = spy(new NestedScrollingSpyView(ApplicationProvider.getApplicationContext())); mChild = new View(ApplicationProvider.getApplicationContext()); } @Test public void onStartNestedScroll_scrollAxisIncludesVerticalAndTypeTouch_returnsTrue() { int vertical = ViewCompat.SCROLL_AXIS_VERTICAL; int both = ViewCompat.SCROLL_AXIS_VERTICAL | ViewCompat.SCROLL_AXIS_HORIZONTAL; onStartNestedScroll(vertical, ViewCompat.TYPE_TOUCH, true); onStartNestedScroll(both, ViewCompat.TYPE_TOUCH, true); } @Test public void onStartNestedScroll_typeIsNotTouch_returnsFalse() { int vertical = ViewCompat.SCROLL_AXIS_VERTICAL; int both = ViewCompat.SCROLL_AXIS_VERTICAL | ViewCompat.SCROLL_AXIS_HORIZONTAL; onStartNestedScroll(vertical, ViewCompat.TYPE_NON_TOUCH, false); onStartNestedScroll(both, ViewCompat.TYPE_NON_TOUCH, false); } @Test public void onStartNestedScroll_scrollAxisExcludesVertical_returnsFalse() { int horizontal = ViewCompat.SCROLL_AXIS_HORIZONTAL; int neither = ViewCompat.SCROLL_AXIS_NONE; onStartNestedScroll(horizontal, ViewCompat.TYPE_TOUCH, false); onStartNestedScroll(neither, ViewCompat.TYPE_TOUCH, false); } @Test public void onNestedScrollAccepted_callsParentsOnStartNestedScrollWithCorrectParams() { setupNestedScrollViewWithParentAndChild(); mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onStartNestedScroll( mSwipeRefreshLayout, mSwipeRefreshLayout, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onStartNestedScroll( any(View.class), any(View.class), anyInt(), anyInt()); } @Test public void 
onNestedScrollAccepted_callsParentsOnNestedScrollAcceptedWithCorrectParams() { setupNestedScrollViewWithParentAndChild(); doReturn(true) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted( mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onNestedScrollAccepted( mSwipeRefreshLayout, mSwipeRefreshLayout, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onNestedScrollAccepted( any(View.class), any(View.class), anyInt(), anyInt()); } @Test public void onNestedScrollAccepted_bothOrientations_pOnNestedScrollAcceptedCalledWithVert() { setupNestedScrollViewWithParentAndChild(); doReturn(true) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted( mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL | ViewCompat.SCROLL_AXIS_HORIZONTAL, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onNestedScrollAccepted( any(View.class), any(View.class), eq(ViewCompat.SCROLL_AXIS_VERTICAL), anyInt()); verify(mParent, times(1)).onNestedScrollAccepted( any(View.class), any(View.class), anyInt(), anyInt()); } @Test public void onNestedScrollAccepted_parentRejects_parentOnNestedScrollAcceptedNotCalled() { setupNestedScrollViewWithParentAndChild(); doReturn(false) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted( mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); verify(mParent, never()).onNestedScrollAccepted( any(View.class), any(View.class), anyInt(), anyInt()); } @Test public void onStopNestedScroll_parentOnStopNestedScrollCalledWithCorrectParams() { setupNestedScrollViewWithParentAndChild(); doReturn(true) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); 
mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); mSwipeRefreshLayout.onStopNestedScroll(mChild, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onStopNestedScroll(mSwipeRefreshLayout, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onStopNestedScroll(any(View.class), anyInt()); } @Test public void onStopNestedScroll_parentRejects_parentOnStopNestedScrollNotCalled() { setupNestedScrollViewWithParentAndChild(); doReturn(false) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL); mSwipeRefreshLayout.onStopNestedScroll(mChild, ViewCompat.TYPE_TOUCH); verify(mParent, never()).onStopNestedScroll(any(View.class), anyInt()); } @Test public void onNestedScroll_parentOnNestedScrollCalledWithCorrectParams() { setupNestedScrollViewWithParentAndChild(); doReturn(true) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); mSwipeRefreshLayout.onNestedScroll(mChild, 1, 2, 3, 4, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onNestedScroll( mSwipeRefreshLayout, 1, 2, 3, 4, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onNestedScroll( any(View.class), anyInt(), anyInt(), anyInt(), anyInt(), anyInt()); } @Test public void onNestedScroll_parentRejects_parentOnNestedScrollNotCalled() { setupNestedScrollViewWithParentAndChild(); doReturn(false) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); mSwipeRefreshLayout.onNestedScroll(mChild, 1, 2, 3, 4, ViewCompat.TYPE_TOUCH); verify(mParent, never()).onNestedScroll(any(View.class), anyInt(), anyInt(), anyInt(), anyInt(), anyInt()); 
} @Test public void onNestedPreScroll_parentOnNestedPreScrollCalledWithCorrectParams() { setupNestedScrollViewWithParentAndChild(); doReturn(true) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); mSwipeRefreshLayout.onNestedPreScroll(mChild, 1, 2, new int[]{0, 0}, ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onNestedPreScroll(eq(mSwipeRefreshLayout), eq(1), eq(2), eq(new int[]{0, 0}), eq(ViewCompat.TYPE_TOUCH)); verify(mParent, times(1)).onNestedPreScroll(any(View.class), anyInt(), anyInt(), any(int[].class), anyInt()); } @Test public void onNestedPreScroll_parentRejects_parentOnNestedPreScrollNotCalled() { setupNestedScrollViewWithParentAndChild(); doReturn(false) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); mSwipeRefreshLayout.onNestedPreScroll(mChild, 1, 2, new int[2], ViewCompat.TYPE_TOUCH); verify(mParent, never()).onNestedPreScroll(any(View.class), anyInt(), anyInt(), any(int[].class), anyInt()); } // onNestedPreScroll, srl dragged down, parent on NestedPreScroll called with remainder @Test public void onNestedPreScroll_mSwipeRefreshPulledPartWay_parentReceivesRemainder() { setupNestedScrollViewWithParentAndChild(); doReturn(true) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); // Make sure the distance to trigger is greater than our pull so we don't accidentally // fire refresh. 
mSwipeRefreshLayout.setDistanceToTriggerSync(50); // Pull refresh down part way first so when we scroll up, we are testing that we consumed // the same portion that we scrolled down and passed remainder to parent. mSwipeRefreshLayout.onNestedScroll(mChild, 0, 0, 0, -20, ViewCompat.TYPE_TOUCH); mSwipeRefreshLayout.onNestedPreScroll(mChild, 0, 50, new int[2], ViewCompat.TYPE_TOUCH); verify(mParent, times(1)).onNestedPreScroll(eq(mSwipeRefreshLayout), eq(0), eq(30), eq(new int[]{0, 0}), eq(ViewCompat.TYPE_TOUCH)); verify(mParent, times(1)).onNestedPreScroll(any(View.class), anyInt(), anyInt(), any(int[].class), anyInt()); } @Test public void onNestedPreScroll_mSwipeRefreshPulledPartWay_mutatesConsumedCorrectAmount() { // Arrange setupNestedScrollViewWithParentAndChild(); doReturn(true) .when(mParent) .onStartNestedScroll(any(View.class), any(View.class), anyInt(), anyInt()); // Mutate consumed in call to onNestedPreScroll. doAnswer(new Answer() { public Object answer(InvocationOnMock invocation) { int[] consumed = (int[]) invocation.getArguments()[3]; consumed[1] += 5; return null; }}).when(mParent) .onNestedPreScroll(any(View.class), anyInt(), anyInt(), any(int[].class), anyInt()); mSwipeRefreshLayout.onNestedScrollAccepted(mChild, mChild, ViewCompat.SCROLL_AXIS_VERTICAL, ViewCompat.TYPE_TOUCH); // Make sure the distance to trigger is greater than our pull so we don't accidentally // fire refresh. mSwipeRefreshLayout.setDistanceToTriggerSync(50); // Pull refresh down part way first so when we scroll up, we are testing that we consumed // the same portion that we scrolled down and passed remainder to parent. 
mSwipeRefreshLayout.onNestedScroll(mChild, 0, 0, 0, -20, ViewCompat.TYPE_TOUCH); int[] consumed = new int[2]; // Act mSwipeRefreshLayout.onNestedPreScroll(mChild, 0, 50, consumed, ViewCompat.TYPE_TOUCH); // Assert assertThat(consumed, is(new int[]{0, 25})); } private void onStartNestedScroll(int iScrollAxis, int type, boolean oRetValue) { boolean retVal = mSwipeRefreshLayout.onStartNestedScroll(mChild, mChild, iScrollAxis, type); assertThat(retVal, is(oRetValue)); } private void setupNestedScrollViewWithParentAndChild() { mSwipeRefreshLayout.setLayoutParams(new ViewGroup.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, 100)); mSwipeRefreshLayout.setMinimumHeight(100); mChild.setLayoutParams(new ViewGroup.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, 100)); mChild.setMinimumHeight(100); mSwipeRefreshLayout.addView(mChild); mParent.addView(mSwipeRefreshLayout); int widthMeasureSpec = View.MeasureSpec.makeMeasureSpec(100, View.MeasureSpec.EXACTLY); int heightMeasureSpec = View.MeasureSpec.makeMeasureSpec(100, View.MeasureSpec.EXACTLY); mParent.measure(widthMeasureSpec, heightMeasureSpec); mParent.layout(0, 0, 100, 100); } public class NestedScrollingSpyView extends FrameLayout implements NestedScrollingParent2 { public NestedScrollingSpyView(Context context) { super(context); } @Override public boolean onStartNestedScroll(@NonNull View child, @NonNull View target, int axes, int type) { return false; } @Override public void onNestedScrollAccepted(@NonNull View child, @NonNull View target, int axes, int type) { } @Override public void onStopNestedScroll(@NonNull View target, int type) { } @Override public void onNestedScroll(@NonNull View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed, int type) { } @Override public void onNestedPreScroll(@NonNull View target, int dx, int dy, @NonNull int[] consumed, int type) { } @Override public boolean onStartNestedScroll(View child, View target, int axes) { return false; } @Override public void 
onNestedScrollAccepted(View child, View target, int axes) { } @Override public void onStopNestedScroll(View target) { } @Override public void onNestedScroll(View target, int dxConsumed, int dyConsumed, int dxUnconsumed, int dyUnconsumed) { } @Override public void onNestedPreScroll(View target, int dx, int dy, int[] consumed) { } @Override public boolean onNestedFling(View target, float velocityX, float velocityY, boolean consumed) { return false; } @Override public boolean onNestedPreFling(View target, float velocityX, float velocityY) { return false; } @Override public int getNestedScrollAxes() { return 0; } } }
/*********************************************************************************************************************** * Copyright (c) 2003, International Barcode Consortium * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of * conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials * provided with the distribution. * Neither the name of the International Barcode Consortium nor the names of any contributors may be used to endorse * or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY * AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
***********************************************************************************************************************/
package net.sourceforge.barbecue.linear.ean;

import java.text.CharacterIterator;
import java.text.StringCharacterIterator;
import java.util.StringTokenizer;

import net.sourceforge.barbecue.BarcodeException;
import net.sourceforge.barbecue.linear.code128.Accumulator;
import net.sourceforge.barbecue.linear.code128.CharBuffer;
import net.sourceforge.barbecue.linear.code128.Code128Barcode;
import net.sourceforge.barbecue.linear.code128.ModuleFactory;

/**
 * An implementation of the UCC 128 and EAN 128 code formats. These are almost identical
 * to the vanilla Code 128 format, but they are encoded in character set C and include the
 * FNC1 character at the start. In addition, an Application Identifier must be provided
 * that identifies the application domain of the barcode. Please see the convenience methods
 * on BarcodeFactory that provide application domain specific instances of this barcode type.
 *
 * @author <a href="mailto:opensource@ianbourke.com">Ian Bourke</a>
 */
public class UCCEAN128Barcode extends Code128Barcode {

    /**
     * SSCC-18 application identifier.
     */
    public static final String SSCC_18_AI = "00";
    /**
     * SCC-14 shipping code application identifier.
     */
    public static final String SCC_14_AI = "01";
    /**
     * Global Trade Item Number application identifier.
     */
    public static final String GTIN_AI = SCC_14_AI;
    /**
     * EAN 128 application identifier for all EAN 128 formats.
     */
    public static final String EAN128_AI = "01";
    /**
     * Shipment Identification Number application identifier.
     */
    public static final String SHIPMENT_ID_AI = "402";
    /**
     * US Postal service application identifier for all USPS formats.
     */
    public static final String USPS_AI = "420";

    /** The application identifier shown (in parentheses) in the printed label. */
    private final String applicationIdentifier;
    /** Whether a mod 10 check digit is appended to the label data. */
    private final boolean includeCheckDigit;
    /** The human readable text shown underneath the bars. */
    private String labelData;
    /** True when labelData was fully built by the (AI)data parsing constructor. */
    private boolean labelDataEncoded = false;

    /**
     * Creates a new UCC/EAN 128 barcode with the given application identifier and
     * data to encode. The AI will be prepended to the data (which also has a mod 10
     * check digit appended) before encoding, and will appear in parentheses in the
     * printed label underneath the barcode. A mod 10 check digit will be generated.
     * @param applicationIdentifier The application identifier for this barcode
     * @param data The data to encode
     * @throws BarcodeException If the data to be encoded is invalid
     */
    public UCCEAN128Barcode(String applicationIdentifier, String data) throws BarcodeException {
        this(applicationIdentifier, data, true);
    }

    /**
     * Creates a new UCC/EAN 128 barcode with the given application identifier and
     * data to encode. The AI will be prepended to the data (which also has a mod 10
     * check digit appended) before encoding, and will appear in parentheses in the
     * printed label underneath the barcode.
     * @param applicationIdentifier The application identifier for this barcode
     * @param data The data to encode
     * @param includeCheckDigit specifies whether a mod 10 check digit should be generated or not
     * @throws BarcodeException If the data to be encoded is invalid
     * @throws IllegalArgumentException If the application identifier is null or empty
     */
    public UCCEAN128Barcode(String applicationIdentifier, String data, boolean includeCheckDigit)
            throws BarcodeException {
        // validateAI runs inside the super(...) argument so an invalid AI is rejected
        // before it is ever concatenated into the encoded data.
        super(FNC_1 + validateAI(applicationIdentifier) + data
                + getMod10CheckDigit(data, includeCheckDigit), C);
        this.applicationIdentifier = applicationIdentifier;
        this.includeCheckDigit = includeCheckDigit;
        this.labelData = data;
    }

    /**
     * Creates a new UCC/EAN 128 barcode based on the provided data, with an
     * optional Modulo 10 check digit.
     * @param data The data to encode
     * @param includeCheckDigit if true then a modulo 10 check digit based on
     * data is appended
     * @throws BarcodeException If the data to be encoded is invalid
     */
    public UCCEAN128Barcode(String data, boolean includeCheckDigit) throws BarcodeException {
        super(FNC_1 + data + getMod10CheckDigit(data, includeCheckDigit), C);
        this.applicationIdentifier = "";
        this.includeCheckDigit = includeCheckDigit;
        this.labelData = data;
    }

    /**
     * Creates a new UCC/EAN 128 barcode from pre-formatted element strings.
     * @param encodedData The data to encode. The application identifiers should
     * be enclosed in parentheses, i.e., (01) 123435 (14) 1234235 etc.
     *
     * Concatenating Element Strings of variable length, which includes all Application
     * Identifiers that do not start with two characters contained in Figure 5.3.8.2.1-1,
     * involves the use of a Separator Character. The Separator Character used is the
     * Function 1 Character (FNC1). It is placed immediately after the last symbol
     * character of a variable length data string and is followed by the Application
     * Identifier of the next Element String. If the Element String is the last to be encoded,
     * it is followed by the Symbol Check and Stop Characters and not the FNC1
     * Separator Character.
     * @throws BarcodeException If the data to be encoded is invalid
     * @throws IllegalArgumentException If encodedData is not an alternating
     * sequence of parenthesised AIs and data, e.g. "(01)1234"
     */
    public UCCEAN128Barcode(String encodedData) throws BarcodeException {
        super(FNC_1, O);
        this.applicationIdentifier = EAN128_AI;
        this.includeCheckDigit = false;
        StringTokenizer st = new StringTokenizer(encodedData, "()", true);
        StringBuffer sb = new StringBuffer();
        StringBuffer labelBuffer = new StringBuffer();
        boolean lastAIwasVariableLength = false;
        while (st.hasMoreTokens()) {
            String tok = st.nextToken();
            // Each element string must be "(" AI ")" data; previously malformed input
            // (a missing "(" or a truncated group) caused a NullPointerException or
            // NoSuchElementException here instead of a diagnosable error.
            if (!tok.equals("(")) {
                throw new IllegalArgumentException(
                        "Malformed data, expected '(' before application identifier: " + encodedData);
            }
            if (!st.hasMoreTokens()) {
                throw new IllegalArgumentException(
                        "Malformed data, missing application identifier: " + encodedData);
            }
            String ai = st.nextToken();
            if (!st.hasMoreTokens() || !st.nextToken().equals(")")) {
                throw new IllegalArgumentException(
                        "Malformed data, unterminated application identifier: " + encodedData);
            }
            if (!st.hasMoreTokens()) {
                throw new IllegalArgumentException(
                        "Malformed data, missing data after application identifier " + ai);
            }
            String barcodeData = st.nextToken();
            if (lastAIwasVariableLength) {
                // Variable-length element strings are terminated by an FNC1 separator
                // before the next AI (see class javadoc above).
                sb.append(FNC_1);
            }
            lastAIwasVariableLength = (getAILength(ai) == 0);
            sb.append(ai);
            sb.append(barcodeData);
            labelBuffer.append("(" + ai + ")");
            labelBuffer.append(barcodeData);
            if (ai.equals(EAN128_AI)) {
                String checkDigit = getGTINCheckDigit(barcodeData);
                sb.append(checkDigit);
                // The check digit should be in the human readable label
                labelBuffer.append(checkDigit);
            }
        }
        setData(sb.toString());
        this.labelData = labelBuffer.toString();
        this.labelDataEncoded = true;
    }

    /**
     * Validates an application identifier, returning it unchanged when valid.
     * @param applicationIdentifier the AI to check
     * @return the AI, for inline use in constructor super(...) calls
     * @throws IllegalArgumentException if the AI is null or empty
     */
    private static String validateAI(String applicationIdentifier) {
        if (applicationIdentifier == null || applicationIdentifier.length() == 0) {
            throw new IllegalArgumentException("Application Identifier must be provided");
        }
        return applicationIdentifier;
    }

    /** Get the length of a pre-defined length EAN application identifier.
     * @return the number of characters (including the application identifier) or zero if the AI
     * does not have a fixed length.
     * Note: See the UCC/EAN-128 Symbology Specifications for details.
     * No separator character is required when these application identifiers
     * are used.
     */
    private int getAILength(String ai) // return 0 if variable
    {
        if (ai.equals("00")) return 20;
        if (ai.equals("01")) return 16;
        if (ai.equals("02")) return 16;
        if (ai.equals("03")) return 16;
        if (ai.equals("04")) return 18;
        if (ai.equals("11")) return 8;
        if (ai.equals("12")) return 8;
        if (ai.equals("13")) return 8;
        if (ai.equals("14")) return 8;
        if (ai.equals("15")) return 8;
        if (ai.equals("16")) return 8;
        if (ai.equals("17")) return 8;
        if (ai.equals("18")) return 8;
        if (ai.equals("19")) return 8;
        if (ai.equals("20")) return 4;
        if (ai.equals("31")) return 10;
        if (ai.equals("32")) return 10;
        if (ai.equals("33")) return 10;
        if (ai.equals("34")) return 10;
        if (ai.equals("35")) return 10;
        if (ai.equals("36")) return 10;
        if (ai.equals("41")) return 16;
        return 0;
    }

    /**
     * Returns the text to be displayed underneath the barcode.
     * @return The text that the barcode represents
     */
    public String getLabel() {
        if (null != labelData && labelDataEncoded) {
            return labelData;
        }
        if (null != getPureLabel()) {
            return getPureLabel();
        }
        return '(' + applicationIdentifier + ") " + labelData
                + getMod10CheckDigit(labelData, includeCheckDigit);
    }

    /**
     * Generates a mod 10 check digit for the barcode data (ignoring the app id).
     * @param data The data to generate the check digit for
     * @param calculate Whether the check digit should actually be calculated or not
     * @return The check digit (or "" if not calculated)
     */
    static String getMod10CheckDigit(String data, boolean calculate) {
        if (!calculate) {
            return "";
        }
        // Weighted sum over character-set-C code values, weights 1, 2, 3, ...
        Accumulator sum = new Accumulator(START_INDICES[C]);
        Accumulator index = new Accumulator(1);
        CharBuffer buf = new CharBuffer(BUF_SIZES[C]);
        StringCharacterIterator iter = new StringCharacterIterator(data);
        for (char c = iter.first(); c != CharacterIterator.DONE; c = iter.next()) {
            buf.addChar(c);
            if (buf.isFull()) {
                int code = ModuleFactory.getIndex(buf.toString(), C);
                sum.add(code * index.getValue());
                index.increment();
                buf.clear();
            }
        }
        return String.valueOf(sum.getValue() % 10);
    }

    /**
     * Generates a UCC/EAN standard check digit for the EAN
     * barcode element (ignoring the app id).
     * @param element The data to generate the check digit for
     * @return The check digit (or "" if not calculated)
     */
    static String getGTINCheckDigit(String element) {
        /* Note: The other check digit calculation appears wrong. This one is taken
           from the EAN SPEC Section 3.A.1 Appendix 1: Check Digit Calculations.
           IT ONLY APPLIES TO GTIN and possibly others!
           NOTE: Price and Weight elements MUST use a different method. */
        int len = element.length();
        int multiplier = 1;
        int sum = 0;
        // Alternate weights 3, 1, 3, ... starting from the rightmost digit.
        for (int i = (len - 1); i >= 0; i--) {
            if (multiplier == 1)
                multiplier = 3;
            else
                multiplier = 1;
            sum += multiplier * Integer.parseInt(element.substring(i, i + 1));
        }
        // Subtract sum from the nearest equal or higher multiple of ten to get the check digit.
        int ret = ((sum / 10) + 1) * 10 - sum;
        return String.valueOf(ret % 10);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.rest.action.admin.indices.upgrade;

import com.google.common.base.Predicate;
import org.apache.http.impl.client.HttpClients;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.segments.IndexSegments;
import org.elasticsearch.action.admin.indices.segments.IndexShardSegments;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.elasticsearch.action.admin.indices.segments.ShardSegments;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.cluster.routing.allocation.decider.ConcurrentRebalanceAllocationDecider;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.engine.Segment;
import org.elasticsearch.node.internal.InternalNode;
import org.elasticsearch.test.ElasticsearchBackwardsCompatIntegrationTest;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
import org.elasticsearch.test.rest.client.http.HttpResponse;
import org.elasticsearch.test.rest.json.JsonPath;
import org.junit.BeforeClass;

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

/**
 * Backwards-compatibility integration test for the REST {@code _upgrade} API.
 * It creates indices on the old-version ("backwards") nodes, upgrades all
 * nodes to the current version, runs {@code _upgrade} (first on a single
 * index, then on the rest), and verifies — via both the upgrade-status JSON
 * and the segments API — that every segment ends up on the current Lucene
 * version.
 */
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.TEST) // test scope since we set cluster wide settings
public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest {

    /**
     * Skips the test when the compatibility version shares the current Lucene
     * major/minor version: with identical Lucene versions there would be
     * nothing for {@code _upgrade} to rewrite.
     */
    @BeforeClass
    public static void checkUpgradeVersion() {
        final boolean luceneVersionMatches = (globalCompatibilityVersion().luceneVersion.major == Version.CURRENT.luceneVersion.major
            && globalCompatibilityVersion().luceneVersion.minor == Version.CURRENT.luceneVersion.minor);
        assumeFalse("lucene versions must be different to run upgrade test", luceneVersionMatches);
    }

    // Two external (old-version) nodes so shards can be confined to them before the upgrade.
    @Override
    protected int minExternalNodes() {
        return 2;
    }

    /**
     * End-to-end scenario: index into old-version nodes, upgrade the cluster,
     * then run {@code _upgrade} and assert every index reports zero bytes left
     * to upgrade.
     */
    public void testUpgrade() throws Exception {
        // allow the cluster to rebalance quickly - 2 concurrent rebalance are default we can do higher
        ImmutableSettings.Builder builder = ImmutableSettings.builder();
        builder.put(ConcurrentRebalanceAllocationDecider.CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE, 100);
        client().admin().cluster().prepareUpdateSettings().setPersistentSettings(builder).get();
        int numIndexes = randomIntBetween(2, 4);
        String[] indexNames = new String[numIndexes];
        for (int i = 0; i < numIndexes; ++i) {
            final String indexName = "test" + i;
            indexNames[i] = indexName;

            Settings settings = ImmutableSettings.builder()
                .put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern())
                // don't allow any merges so that we can check segments are upgraded
                // by the upgrader, and not just regular merging
                .put("index.merge.policy.segments_per_tier", 1000000f)
                .put(indexSettings())
                .build();
            assertAcked(prepareCreate(indexName).setSettings(settings));
            ensureGreen(indexName);
            assertAllShardsOnNodes(indexName, backwardsCluster().backwardsNodePattern());

            int numDocs = scaledRandomIntBetween(100, 1000);
            List<IndexRequestBuilder> docs = new ArrayList<>();
            for (int j = 0; j < numDocs; ++j) {
                String id = Integer.toString(j);
                docs.add(client().prepareIndex(indexName, "type1", id).setSource("text", "sometext"));
            }
            indexRandom(true, docs);
            ensureGreen(indexName);
            if (globalCompatibilityVersion().before(Version.V_1_4_0_Beta1)) {
                // before 1.4 and the wait_if_ongoing flag, flushes could fail randomly, so we
                // need to continue to try flushing until all shards succeed
                assertTrue(awaitBusy(new Predicate<Object>() {
                    @Override
                    public boolean apply(Object o) {
                        return flush(indexName).getFailedShards() == 0;
                    }
                }));
            } else {
                assertEquals(0, flush(indexName).getFailedShards());
            }
            // index more docs that won't be flushed
            numDocs = scaledRandomIntBetween(100, 1000);
            docs = new ArrayList<>();
            for (int j = 0; j < numDocs; ++j) {
                String id = Integer.toString(j);
                docs.add(client().prepareIndex(indexName, "type2", id).setSource("text", "someothertext"));
            }
            indexRandom(true, docs);
            ensureGreen(indexName);
        }
        logger.debug("--> Upgrading nodes");
        backwardsCluster().allowOnAllNodes(indexNames);
        ensureGreen();
        // disable allocation entirely until all nodes are upgraded
        builder = ImmutableSettings.builder();
        builder.put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.NONE);
        client().admin().cluster().prepareUpdateSettings().setTransientSettings(builder).get();
        backwardsCluster().upgradeAllNodes();
        builder = ImmutableSettings.builder();
        // disable rebalanceing entirely for the time being otherwise we might get relocations / rebalance from nodes with old segments
        builder.put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE, EnableAllocationDecider.Rebalance.NONE);
        builder.put(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE, EnableAllocationDecider.Allocation.ALL);
        client().admin().cluster().prepareUpdateSettings().setTransientSettings(builder).get();
        ensureGreen();
        logger.info("--> Nodes upgrade complete");
        logSegmentsState();

        final HttpRequestBuilder httpClient = httpClient();

        // Before any _upgrade call: every index must still report bytes to upgrade.
        assertNotUpgraded(httpClient, null);
        final String indexToUpgrade = "test" + randomInt(numIndexes - 1);

        logger.info("--> Running upgrade on index " + indexToUpgrade);
        runUpgrade(httpClient, indexToUpgrade);
        // The single-index upgrade ran without wait_for_completion, so poll the
        // status endpoint until that index reports nothing left to upgrade.
        awaitBusy(new Predicate<Object>() {
            @Override
            public boolean apply(Object o) {
                try {
                    return isUpgraded(httpClient, indexToUpgrade);
                } catch (Exception e) {
                    throw ExceptionsHelper.convertToRuntime(e);
                }
            }
        });
        logger.info("--> Single index upgrade complete");

        logger.info("--> Running upgrade on the rest of the indexes");
        runUpgrade(httpClient, null, "wait_for_completion", "true");
        logSegmentsState();
        logger.info("--> Full upgrade complete");
        assertUpgraded(httpClient, null);
    }

    /**
     * Builds the REST path for the upgrade endpoint: {@code /_upgrade} for the
     * whole cluster, or {@code /<index>/_upgrade} when an index is given.
     */
    static String upgradePath(String index) {
        String path = "/_upgrade";
        if (index != null) {
            path = "/" + index + path;
        }
        return path;
    }

    /**
     * Asserts every index covered by the status response still has a non-zero
     * number of bytes left to upgrade (i.e. the upgrade has not run yet).
     *
     * @param index index to check, or {@code null} for all indices
     */
    public static void assertNotUpgraded(HttpRequestBuilder httpClient, String index) throws Exception {
        for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) {
            assertTrue("index " + status.indexName + " should not be zero sized", status.totalBytes != 0);
            // TODO: it would be better for this to be strictly greater, but sometimes an extra flush
            // mysteriously happens after the second round of docs are indexed
            assertTrue("index " + status.indexName + " should have recovered some segments from transaction log",
                       status.totalBytes >= status.toUpgradeBytes);
            assertTrue("index " + status.indexName + " should need upgrading", status.toUpgradeBytes != 0);
        }
    }

    /**
     * Asserts every index reports zero bytes left to upgrade, and
     * cross-checks via the segments API that each segment's Lucene version
     * matches the current major/minor version.
     *
     * @param index index to check, or {@code null} for all indices
     */
    public static void assertUpgraded(HttpRequestBuilder httpClient, String index) throws Exception {
        for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) {
            assertTrue("index " + status.indexName + " should not be zero sized", status.totalBytes != 0);
            assertEquals("index " + status.indexName + " should be upgraded",
                         0, status.toUpgradeBytes);
        }

        // double check using the segments api that all segments are actually upgraded
        IndicesSegmentResponse segsRsp;
        if (index == null) {
            segsRsp = client().admin().indices().prepareSegments().execute().actionGet();
        } else {
            segsRsp = client().admin().indices().prepareSegments(index).execute().actionGet();
        }
        for (IndexSegments indexSegments : segsRsp.getIndices().values()) {
            for (IndexShardSegments shard : indexSegments) {
                for (ShardSegments segs : shard.getShards()) {
                    for (Segment seg : segs.getSegments()) {
                        assertEquals("Index " + indexSegments.getIndex() + " has unupgraded segment " + seg.toString(),
                                     Version.CURRENT.luceneVersion.major, seg.version.major);
                        assertEquals("Index " + indexSegments.getIndex() + " has unupgraded segment " + seg.toString(),
                                     Version.CURRENT.luceneVersion.minor, seg.version.minor);
                    }
                }
            }
        }
    }

    /**
     * Returns true when the upgrade-status endpoint reports zero bytes left to
     * upgrade across all returned indices (used as an awaitBusy predicate).
     */
    static boolean isUpgraded(HttpRequestBuilder httpClient, String index) throws Exception {
        ESLogger logger = Loggers.getLogger(UpgradeTest.class);
        int toUpgrade = 0;
        for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) {
            logger.info("Index: " + status.indexName + ", total: " + status.totalBytes + ", toUpgrade: " + status.toUpgradeBytes);
            toUpgrade += status.toUpgradeBytes;
        }
        return toUpgrade == 0;
    }

    /** Parsed per-index upgrade status: total size and bytes still needing upgrade. */
    static class UpgradeStatus {
        public final String indexName;
        public final int totalBytes;
        public final int toUpgradeBytes;

        public UpgradeStatus(String indexName, int totalBytes, int toUpgradeBytes) {
            this.indexName = indexName;
            this.totalBytes = totalBytes;
            this.toUpgradeBytes = toUpgradeBytes;
        }
    }

    /**
     * POSTs to the upgrade endpoint and asserts a 200 response.
     *
     * @param index  index to upgrade, or {@code null} for all indices
     * @param params flat key/value pairs appended as request parameters
     */
    public static void runUpgrade(HttpRequestBuilder httpClient, String index, String... params) throws Exception {
        assert params.length % 2 == 0;
        HttpRequestBuilder builder = httpClient.method("POST").path(upgradePath(index));
        for (int i = 0; i < params.length; i += 2) {
            builder.addParam(params[i], params[i + 1]);
        }
        HttpResponse rsp = builder.execute();
        assertNotNull(rsp);
        assertEquals(200, rsp.getStatusCode());
    }

    /**
     * GETs the upgrade-status JSON and converts each per-index entry
     * ({@code size_in_bytes} / {@code size_to_upgrade_in_bytes}) into an
     * {@link UpgradeStatus}, asserting the expected keys and types are present.
     */
    static List<UpgradeStatus> getUpgradeStatus(HttpRequestBuilder httpClient, String path) throws Exception {
        HttpResponse rsp = httpClient.method("GET").path(path).execute();
        Map<String,Object> data = validateAndParse(rsp);
        List<UpgradeStatus> ret = new ArrayList<>();
        for (String index : data.keySet()) {
            Map<String, Object> status = (Map<String,Object>)data.get(index);
            assertTrue("missing key size_in_bytes for index " + index, status.containsKey("size_in_bytes"));
            Object totalBytes = status.get("size_in_bytes");
            assertTrue("size_in_bytes for index " + index + " is not an integer", totalBytes instanceof Integer);
            assertTrue("missing key size_to_upgrade_in_bytes for index " + index, status.containsKey("size_to_upgrade_in_bytes"));
            Object toUpgradeBytes = status.get("size_to_upgrade_in_bytes");
            assertTrue("size_to_upgrade_in_bytes for index " + index + " is not an integer", toUpgradeBytes instanceof Integer);
            ret.add(new UpgradeStatus(index, ((Integer)totalBytes).intValue(), ((Integer)toUpgradeBytes).intValue()));
        }
        return ret;
    }

    /** Asserts a non-null 200 response with a body and parses it as a JSON object. */
    static Map<String, Object> validateAndParse(HttpResponse rsp) throws Exception {
        assertNotNull(rsp);
        assertEquals(200, rsp.getStatusCode());
        assertTrue(rsp.hasBody());
        return (Map<String,Object>)new JsonPath(rsp.getBody()).evaluate("");
    }

    /** Builds an HTTP client pointed at a randomly chosen node's HTTP address. */
    HttpRequestBuilder httpClient() {
        InetSocketAddress[] addresses = cluster().httpAddresses();
        InetSocketAddress address = addresses[randomInt(addresses.length - 1)];
        return new HttpRequestBuilder(HttpClients.createDefault()).host(address.getHostName()).port(address.getPort());
    }

    // HTTP must be enabled on internal nodes so the test can hit the REST _upgrade endpoint.
    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return ImmutableSettings.builder().put(super.nodeSettings(nodeOrdinal))
            .put(InternalNode.HTTP_ENABLED, true).build();
    }
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package git4idea.history; import com.intellij.execution.process.ProcessOutputTypes; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vcs.FileStatus; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.changes.Change; import com.intellij.openapi.vcs.changes.ChangeListManager; import com.intellij.openapi.vcs.diff.ItemLatestState; import com.intellij.openapi.vcs.history.VcsFileRevision; import com.intellij.openapi.vcs.history.VcsRevisionDescription; import com.intellij.openapi.vcs.history.VcsRevisionDescriptionImpl; import com.intellij.openapi.vcs.history.VcsRevisionNumber; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.AsynchConsumer; import com.intellij.util.Consumer; import com.intellij.util.Function; import com.intellij.util.concurrency.Semaphore; import com.intellij.util.containers.ContainerUtil; import git4idea.*; import git4idea.branch.GitBranchUtil; import git4idea.commands.*; import git4idea.config.GitConfigUtil; import git4idea.history.browser.GitHeavyCommit; import git4idea.history.browser.SHAHash; import 
git4idea.history.browser.SymbolicRefs; import git4idea.history.browser.SymbolicRefsI; import git4idea.history.wholeTree.AbstractHash; import git4idea.history.wholeTree.CommitHashPlusParents; import git4idea.history.wholeTree.GitCommitsSequentialIndex; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.*; import java.nio.charset.Charset; import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static git4idea.history.GitLogParser.GitLogOption.*; /** * A collection of methods for retrieving history information from native Git. */ public class GitHistoryUtils { private final static Logger LOG = Logger.getInstance("#git4idea.history.GitHistoryUtils"); private GitHistoryUtils() { } /** * Get current revision for the file under git in the current or specified branch. * * @param project a project * @param filePath file path to the file which revision is to be retrieved. * @param branch name of branch or null if current branch wanted. * @return revision number or null if the file is unversioned or new. * @throws VcsException if there is a problem with running git. 
*/ @Nullable public static VcsRevisionNumber getCurrentRevision(final Project project, FilePath filePath, @Nullable String branch) throws VcsException { return getCurrentRevision(project, filePath, branch, false); } public static long getHeadTs(final Project project, FilePath filePath) throws VcsException { GitSimpleHandler h = new GitSimpleHandler(project, GitUtil.getGitRoot(filePath), GitCommand.LOG); GitLogParser parser = new GitLogParser(project, SHORT_HASH, COMMIT_TIME); h.setSilent(true); h.addParameters("-n1", parser.getPretty()); h.addParameters("HEAD"); h.endOptions(); String result = h.run(); if (result.length() == 0) { return -1; } final GitLogRecord record = parser.parseOneRecord(result); if (record == null) { return -1; } record.setUsedHandler(h); return record.getDate().getTime(); } @Nullable public static VcsRevisionNumber getCurrentRevision(@NotNull Project project, @NotNull FilePath filePath, @Nullable String branch, final boolean shortHash) throws VcsException { filePath = getLastCommitName(project, filePath); GitSimpleHandler h = new GitSimpleHandler(project, GitUtil.getGitRoot(filePath), GitCommand.LOG); GitLogParser parser = shortHash ? new GitLogParser(project, SHORT_HASH, COMMIT_TIME) : new GitLogParser(project, HASH, COMMIT_TIME); h.setSilent(true); h.addParameters("-n1", parser.getPretty()); h.addParameters(!StringUtil.isEmpty(branch) ? branch : "--all"); h.endOptions(); h.addRelativePaths(filePath); String result = h.run(); if (result.length() == 0) { return null; } final GitLogRecord record = parser.parseOneRecord(result); if (record == null) { return null; } record.setUsedHandler(h); return shortHash ? 
new GitRevisionNumber(record.getShortHash(), record.getDate()) : new GitRevisionNumber(record.getHash(), record.getDate()); } @Nullable public static VcsRevisionDescription getCurrentRevisionDescription(final Project project, FilePath filePath, @Nullable String branch) throws VcsException { filePath = getLastCommitName(project, filePath); GitSimpleHandler h = new GitSimpleHandler(project, GitUtil.getGitRoot(filePath), GitCommand.LOG); GitLogParser parser = new GitLogParser(project, HASH, COMMIT_TIME, AUTHOR_NAME, COMMITTER_NAME, SUBJECT, BODY, RAW_BODY); h.setSilent(true); h.addParameters("-n1", parser.getPretty()); if (branch != null && !branch.isEmpty()) { h.addParameters(branch); } else { h.addParameters("--all"); } h.endOptions(); h.addRelativePaths(filePath); String result = h.run(); if (result.length() == 0) { return null; } final GitLogRecord record = parser.parseOneRecord(result); if (record == null) { return null; } record.setUsedHandler(h); final String author = Comparing.equal(record.getAuthorName(), record.getCommitterName()) ? record.getAuthorName() : record.getAuthorName() + " (" + record.getCommitterName() + ")"; return new VcsRevisionDescriptionImpl(new GitRevisionNumber(record.getHash(), record.getDate()), record.getDate(), author, record.getFullMessage()); } /** * Get current revision for the file under git * * @param project a project * @param filePath a file path * @return a revision number or null if the file is unversioned or new * @throws VcsException if there is problem with running git */ @Nullable public static ItemLatestState getLastRevision(final Project project, FilePath filePath) throws VcsException { VirtualFile root = GitUtil.getGitRoot(filePath); GitBranch c = GitBranchUtil.getCurrentBranch(project, root); GitBranch t = c == null ? 
null : GitBranchUtil.tracked(project, root, c.getName()); if (t == null) { return new ItemLatestState(getCurrentRevision(project, filePath, null), true, false); } filePath = getLastCommitName(project, filePath); GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.LOG); GitLogParser parser = new GitLogParser(project, GitLogParser.NameStatus.STATUS, HASH, COMMIT_TIME, SHORT_PARENTS); h.setSilent(true); h.addParameters("-n1", parser.getPretty(), "--name-status", t.getFullName()); h.endOptions(); h.addRelativePaths(filePath); String result = h.run(); if (result.length() == 0) { return null; } GitLogRecord record = parser.parseOneRecord(result); if (record == null) { return null; } final List<Change> changes = record.parseChanges(project, root); boolean exists = changes.isEmpty() || !FileStatus.DELETED.equals(changes.get(0).getFileStatus()); record.setUsedHandler(h); return new ItemLatestState(new GitRevisionNumber(record.getHash(), record.getDate()), exists, false); } public static void dumpFullHistory(final Project project, VirtualFile root, final String outFilePath) throws VcsException { if (! 
GitUtil.isGitRoot(new File(root.getPath()))) throw new VcsException("Path " + root.getPath() + " is not git repository root"); final GitLineHandler h = new GitLineHandler(project, root, GitCommand.LOG); h.setSilent(true); h.addParameters("HEAD", "--branches", "--remotes", "--tags", "--pretty=format:%H%x20%ct%x0A", "--date-order", "--reverse", "--encoding=UTF-8", "--full-history", "--sparse"); h.endOptions(); final OutputStream[] stream = new OutputStream[1]; try { stream[0] = new BufferedOutputStream(new FileOutputStream(outFilePath, false)); final Semaphore semaphore = new Semaphore(); final VcsException[] ioExceptions = new VcsException[1]; h.addLineListener(new GitLineHandlerListener() { @Override public void onLineAvailable(String line, Key outputType) { if (line.length() == 0) return; try { GitCommitsSequentialIndex.parseRecord(line); stream[0].write((line + '\n').getBytes("UTF-8")); } catch (IOException e) { ioExceptions[0] = new VcsException(e); h.cancel(); semaphore.up(); } catch (ProcessCanceledException e) { h.cancel(); semaphore.up(); } catch (VcsException e) { ioExceptions[0] = e; h.cancel(); semaphore.up(); } } @Override public void processTerminated(int exitCode) { semaphore.up(); } @Override public void startFailed(Throwable exception) { semaphore.up(); } }); semaphore.down(); h.start(); semaphore.waitFor(); if (ioExceptions[0] != null) { throw ioExceptions[0]; } } catch (FileNotFoundException e) { throw new VcsException(e); } finally { try { if (stream[0] != null) { stream[0].close(); } } catch (IOException e) { throw new VcsException(e); } } File file = new File(outFilePath); if (! file.exists() || file.length() == 0) throw new VcsException("Short repository history not loaded"); } /* === Smart full log with renames === 'git log --follow' does detect renames, but it has a bug - merge commits aren't handled properly: they just dissapear from the history. 
See http://kerneltrap.org/mailarchive/git/2009/1/30/4861054 and the whole thread about that: --follow is buggy, but maybe it won't be fixed. To get the whole history through renames we do the following: 1. 'git log <file>' - and we get the history since the first rename, if there was one. 2. 'git show -M --follow --name-status <first_commit_id> -- <file>' where <first_commit_id> is the hash of the first commit in the history we got in #1. With this command we get the rename-detection-friendly information about the first commit of the given file history. (by specifying the <file> we filter out other changes in that commit; but in that case rename detection requires '--follow' to work, that's safe for one commit though) If the first commit was ADDING the file, then there were no renames with this file, we have the full history. But if the first commit was RENAMING the file, we are going to query for the history before rename. Now we have the previous name of the file: ~/sandbox/git # git show --oneline --name-status -M 4185b97 4185b97 renamed a to b R100 a b 3. 'git log <rename_commit_id> -- <previous_file_name>' - get the history of a before the given commit. We need to specify <rename_commit_id> here, because <previous_file_name> could have some new history, which has nothing common with our <file>. Then we repeat 2 and 3 until the first commit is ADDING the file, not RENAMING it. TODO: handle multiple repositories configuration: a file can be moved from one repo to another */ /** * Retrieves the history of the file, including renames. * @param project * @param path FilePath which history is queried. * @param root Git root - optional: if this is null, then git root will be detected automatically. * @param consumer This consumer is notified ({@link Consumer#consume(Object)} when new history records are retrieved. * @param exceptionConsumer This consumer is notified in case of error while executing git command. 
* @param parameters Optional parameters which will be added to the git log command just before the path. */ public static void history(final Project project, FilePath path, @Nullable VirtualFile root, final Consumer<GitFileRevision> consumer, final Consumer<VcsException> exceptionConsumer, String... parameters) { // adjust path using change manager final FilePath filePath = getLastCommitName(project, path); final VirtualFile finalRoot; try { finalRoot = (root == null ? GitUtil.getGitRoot(filePath) : root); } catch (VcsException e) { exceptionConsumer.consume(e); return; } final GitLogParser logParser = new GitLogParser(project, GitLogParser.NameStatus.STATUS, HASH, COMMIT_TIME, AUTHOR_NAME, AUTHOR_EMAIL, COMMITTER_NAME, COMMITTER_EMAIL, PARENTS, SUBJECT, BODY, RAW_BODY, AUTHOR_TIME); final AtomicReference<String> firstCommit = new AtomicReference<String>("HEAD"); final AtomicReference<String> firstCommitParent = new AtomicReference<String>("HEAD"); final AtomicReference<FilePath> currentPath = new AtomicReference<FilePath>(filePath); final AtomicReference<GitLineHandler> logHandler = new AtomicReference<GitLineHandler>(); final AtomicBoolean skipFurtherOutput = new AtomicBoolean(); final Consumer<GitLogRecord> resultAdapter = new Consumer<GitLogRecord>() { public void consume(GitLogRecord record) { if (skipFurtherOutput.get()) { return; } if (record == null) { exceptionConsumer.consume(new VcsException("revision details are null.")); return; } record.setUsedHandler(logHandler.get()); final GitRevisionNumber revision = new GitRevisionNumber(record.getHash(), record.getDate()); firstCommit.set(record.getHash()); final String[] parentHashes = record.getParentsHashes(); if (parentHashes == null || parentHashes.length < 1) { firstCommitParent.set(null); } else { firstCommitParent.set(parentHashes[0]); } final String message = record.getFullMessage(); FilePath revisionPath; try { final List<FilePath> paths = record.getFilePaths(finalRoot); if (paths.size() > 0) { 
revisionPath = paths.get(0); } else { // no paths are shown for merge commits, so we're using the saved path we're inspecting now revisionPath = currentPath.get(); } final Pair<String, String> authorPair = Pair.create(record.getAuthorName(), record.getAuthorEmail()); final Pair<String, String> committerPair = record.getCommitterName() == null ? null : Pair.create(record.getCommitterName(), record.getCommitterEmail()); Collection<String> parents = parentHashes == null ? Collections.<String>emptyList() : Arrays.asList(parentHashes); consumer.consume(new GitFileRevision(project, revisionPath, revision, Pair.create(authorPair, committerPair), message, null, new Date(record.getAuthorTimeStamp() * 1000), parents)); List<GitLogStatusInfo> statusInfos = record.getStatusInfos(); if (statusInfos.isEmpty()) { // can safely be empty, for example, for simple merge commits that don't change anything. return; } if (statusInfos.get(0).getType() == GitChangeType.ADDED && !filePath.isDirectory()) { skipFurtherOutput.set(true); } } catch (VcsException e) { exceptionConsumer.consume(e); } } }; final AtomicBoolean criticalFailure = new AtomicBoolean(); while (currentPath.get() != null && firstCommitParent.get() != null) { logHandler.set(getLogHandler(project, finalRoot, logParser, currentPath.get(), firstCommitParent.get(), parameters)); final MyTokenAccumulator accumulator = new MyTokenAccumulator(logParser); final Semaphore semaphore = new Semaphore(); logHandler.get().addLineListener(new GitLineHandlerAdapter() { @Override public void onLineAvailable(String line, Key outputType) { final GitLogRecord record = accumulator.acceptLine(line); if (record != null) { resultAdapter.consume(record); } } @Override public void startFailed(Throwable exception) { //noinspection ThrowableInstanceNeverThrown try { exceptionConsumer.consume(new VcsException(exception)); } finally { criticalFailure.set(true); semaphore.up(); } } @Override public void processTerminated(int exitCode) { try { 
super.processTerminated(exitCode); final GitLogRecord record = accumulator.processLast(); if (record != null) { resultAdapter.consume(record); } } catch (Throwable t) { LOG.error(t); exceptionConsumer.consume(new VcsException("Internal error " + t.getMessage(), t)); criticalFailure.set(true); } finally { semaphore.up(); } } }); semaphore.down(); logHandler.get().start(); semaphore.waitFor(); if (criticalFailure.get()) { return; } try { FilePath firstCommitRenamePath; firstCommitRenamePath = getFirstCommitRenamePath(project, finalRoot, firstCommit.get(), currentPath.get()); currentPath.set(firstCommitRenamePath); skipFurtherOutput.set(false); } catch (VcsException e) { LOG.warn("Tried to get first commit rename path", e); exceptionConsumer.consume(e); return; } } } private static GitLineHandler getLogHandler(Project project, VirtualFile root, GitLogParser parser, FilePath path, String lastCommit, String... parameters) { final GitLineHandler h = new GitLineHandler(project, root, GitCommand.LOG); h.setStdoutSuppressed(true); h.addParameters("--name-status", parser.getPretty(), "--encoding=UTF-8", lastCommit); if (parameters != null && parameters.length > 0) { h.addParameters(parameters); } h.endOptions(); h.addRelativePaths(path); return h; } /** * Gets info of the given commit and checks if it was a RENAME. * If yes, returns the older file path, which file was renamed from. * If it's not a rename, returns null. */ @Nullable private static FilePath getFirstCommitRenamePath(Project project, VirtualFile root, String commit, FilePath filePath) throws VcsException { // 'git show -M --name-status <commit hash>' returns the information about commit and detects renames. // NB: we can't specify the filepath, because then rename detection will work only with the '--follow' option, which we don't wanna use. 
final GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.SHOW); final GitLogParser parser = new GitLogParser(project, GitLogParser.NameStatus.STATUS, HASH, COMMIT_TIME, SHORT_PARENTS); h.setStdoutSuppressed(true); h.addParameters("-M", "--name-status", parser.getPretty(), "--encoding=UTF-8", commit); h.endOptions(); final String output = h.run(); final List<GitLogRecord> records = parser.parse(output); if (records.isEmpty()) return null; // we have information about all changed files of the commit. Extracting information about the file we need. final List<Change> changes = records.get(0).parseChanges(project, root); for (Change change : changes) { if ((change.isMoved() || change.isRenamed()) && filePath.equals(change.getAfterRevision().getFile())) { return change.getBeforeRevision().getFile(); } } return null; } private static class MyTokenAccumulator { private final StringBuilder myBuffer = new StringBuilder(); private boolean myNotStarted = true; private GitLogParser myParser; public MyTokenAccumulator(GitLogParser parser) { myParser = parser; } @Nullable public GitLogRecord acceptLine(String s) { final boolean lineEnd = s.startsWith(GitLogParser.RECORD_START); if (lineEnd && (!myNotStarted)) { final String line = myBuffer.toString(); myBuffer.setLength(0); myBuffer.append(s.substring(GitLogParser.RECORD_START.length())); return processResult(line); } else { myBuffer.append(lineEnd ? 
s.substring(GitLogParser.RECORD_START.length()) : s); myBuffer.append("\n"); } myNotStarted = false; return null; } public GitLogRecord processLast() { return processResult(myBuffer.toString()); } private GitLogRecord processResult(final String line) { return myParser.parseOneRecord(line); } } /** * Get history for the file * * @param project the context project * @param path the file path * @return the list of the revisions * @throws VcsException if there is problem with running git */ public static List<VcsFileRevision> history(final Project project, final FilePath path) throws VcsException { final VirtualFile root = GitUtil.getGitRoot(path); return history(project, path, root); } /** * Get history for the file * * @param project the context project * @param path the file path * @return the list of the revisions * @throws VcsException if there is problem with running git */ public static List<VcsFileRevision> history(final Project project, FilePath path, final VirtualFile root, final String... parameters) throws VcsException { final List<VcsFileRevision> rc = new ArrayList<VcsFileRevision>(); final List<VcsException> exceptions = new ArrayList<VcsException>(); history(project, path, root, new Consumer<GitFileRevision>() { @Override public void consume(GitFileRevision gitFileRevision) { rc.add(gitFileRevision); } }, new Consumer<VcsException>() { @Override public void consume(VcsException e) { exceptions.add(e); } }, parameters); if (!exceptions.isEmpty()) { throw exceptions.get(0); } return rc; } public static List<Pair<SHAHash, Date>> onlyHashesHistory(Project project, FilePath path, final String... parameters) throws VcsException { final VirtualFile root = GitUtil.getGitRoot(path); return onlyHashesHistory(project, path, root, parameters); } public static List<Pair<SHAHash, Date>> onlyHashesHistory(Project project, FilePath path, final VirtualFile root, final String... 
parameters) throws VcsException { // adjust path using change manager path = getLastCommitName(project, path); GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.LOG); GitLogParser parser = new GitLogParser(project, HASH, COMMIT_TIME); h.setStdoutSuppressed(true); h.addParameters(parameters); h.addParameters(parser.getPretty(), "--encoding=UTF-8"); h.endOptions(); h.addRelativePaths(path); String output = h.run(); final List<Pair<SHAHash, Date>> rc = new ArrayList<Pair<SHAHash, Date>>(); for (GitLogRecord record : parser.parse(output)) { record.setUsedHandler(h); rc.add(new Pair<SHAHash, Date>(new SHAHash(record.getHash()), record.getDate())); } return rc; } /** * <p>Get & parse git log detailed output with commits, their parents and their changes.</p> * * <p>Warning: this is method is efficient by speed, but don't query too much, because the whole log output is retrieved at once, * and it can occupy too much memory. The estimate is ~600Kb for 1000 commits.</p> */ @NotNull public static List<GitCommit> history(@NotNull final Project project, @NotNull final VirtualFile root, String... 
parameters) throws VcsException { GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.LOG); GitLogParser parser = new GitLogParser(project, GitLogParser.NameStatus.STATUS, HASH, COMMIT_TIME, AUTHOR_NAME, AUTHOR_TIME, AUTHOR_EMAIL, COMMITTER_NAME, COMMITTER_EMAIL, PARENTS, SUBJECT, BODY, RAW_BODY); h.setStdoutSuppressed(true); h.addParameters(parameters); h.addParameters("--name-status", parser.getPretty(), "--encoding=UTF-8"); h.addParameters("--full-history", "--sparse"); h.endOptions(); String output = h.run(); List<GitLogRecord> records = parser.parse(output); return ContainerUtil.mapNotNull(records, new Function<GitLogRecord, GitCommit>() { @Override public GitCommit fun(GitLogRecord record) { try { List<Hash> parents = ContainerUtil.map(record.getParentsHashes(), new Function<String, Hash>() { @Override public Hash fun(String hash) { return Hash.create(hash); } }); return new GitCommit(Hash.create(record.getHash()), record.getAuthorName(), record.getAuthorEmail(), record.getAuthorTimeStamp(), record.getCommitterName(), record.getCommitterEmail(), record.getLongTimeStamp(), record.getSubject(), record.getFullMessage(), parents, record.parseChanges(project, root)); } catch (VcsException e) { LOG.error(e); return null; } } }); } /** * <p>Returns the history queried by {@code git log}} command with a possibility to asynchronously process each log record * returned by Git.</p> * <p>This method is not efficient. * Consider using {@link #history(Project, VirtualFile, String...)} if you can afford storing the whole Git output in memory while * parsing.</p> */ public static void historyWithLinks(final Project project, FilePath path, @Nullable final SymbolicRefsI refs, @NotNull final AsynchConsumer<GitHeavyCommit> gitCommitConsumer, @Nullable final Getter<Boolean> isCanceled, @Nullable Collection<VirtualFile> paths, boolean fullHistory, final String... 
parameters) throws VcsException {
  // adjust path using change manager
  path = getLastCommitName(project, path);
  final VirtualFile root = GitUtil.getGitRoot(path);
  final GitLineHandler h = new GitLineHandler(project, root, GitCommand.LOG);
  final GitLogParser parser = new GitLogParser(project, GitLogParser.NameStatus.STATUS, SHORT_HASH, HASH, COMMIT_TIME, AUTHOR_NAME,
                                               AUTHOR_TIME, AUTHOR_EMAIL, COMMITTER_NAME, COMMITTER_EMAIL, SHORT_PARENTS, REF_NAMES,
                                               SUBJECT, BODY, RAW_BODY);
  h.setStdoutSuppressed(true);
  h.addParameters(parameters);
  h.addParameters("--name-status", parser.getPretty(), "--encoding=UTF-8");
  if (fullHistory) {
    h.addParameters("--full-history");
  }
  if (paths != null && ! paths.isEmpty()) {
    h.endOptions();
    h.addRelativeFiles(paths);
  }
  else {
    // --sparse only makes sense together with --full-history when following a single path
    if (fullHistory) {
      h.addParameters("--sparse");
    }
    h.endOptions();
    h.addRelativePaths(path);
  }

  final VcsException[] exc = new VcsException[1];
  final Semaphore semaphore = new Semaphore();
  // accumulates the lines of one log record until the next record starts
  final StringBuilder sb = new StringBuilder();
  final Ref<Boolean> skipFirst = new Ref<Boolean>(true);
  h.addLineListener(new GitLineHandlerAdapter() {
    @Override
    public void onLineAvailable(final String line, final Key outputType) {
      try {
        if (ProcessOutputTypes.STDOUT.equals(outputType)) {
          if (isCanceled != null && isCanceled.get()) {
            h.cancel();
            return;
          }
          //if (line.charAt(line.length() - 1) != '\u0003') {
          // NOTE(review): '\u0001' apparently marks the start of a new record in the parser's pretty format —
          // confirm against GitLogParser. Lines belonging to the current record are buffered;
          // a record-start line triggers parsing of the buffered record via takeLine().
          if ((! line.startsWith("\u0001")) || skipFirst.get()) {
            if (sb.length() > 0) {
              sb.append("\n");
            }
            sb.append(line);
            skipFirst.set(false);
            return;
          }
          takeLine(project, line, sb, parser, refs, root, exc, h, gitCommitConsumer);
        }
      }
      catch (ProcessCanceledException e) {
        h.cancel();
        semaphore.up();
      }
    }

    @Override
    public void processTerminated(int exitCode) {
      semaphore.up();
    }

    @Override
    public void startFailed(Throwable exception) {
      semaphore.up();
    }
  });
  semaphore.down();
  h.start();
  semaphore.waitFor();
  // flush the last buffered record, then tell the consumer the stream is over
  takeLine(project, "", sb, parser, refs, root, exc, h, gitCommitConsumer);
  gitCommitConsumer.finished();
  if (exc[0] != null) {
    throw exc[0];
  }
}

/**
 * Parses the record accumulated in {@code sb}, feeds the resulting commit to the consumer,
 * and restarts the buffer with {@code line}. On a parse/conversion failure the error is stored
 * in {@code exc[0]} and the handler is cancelled.
 */
private static void takeLine(final Project project, String line, StringBuilder sb, GitLogParser parser, SymbolicRefsI refs,
                             VirtualFile root, VcsException[] exc, GitLineHandler h,
                             AsynchConsumer<GitHeavyCommit> gitCommitConsumer) {
  final String text = sb.toString();
  sb.setLength(0);
  sb.append(line);
  if (text.length() == 0) return;
  GitLogRecord record = parser.parseOneRecord(text);

  final GitHeavyCommit gitCommit;
  try {
    gitCommit = createCommit(project, refs, root, record);
  }
  catch (VcsException e) {
    exc[0] = e;
    h.cancel();
    return;
  }
  gitCommitConsumer.consume(gitCommit);
}

/**
 * Converts a parsed log record into a {@link GitHeavyCommit}, classifying its refs into
 * locals/remotes/tags and marking the current branch if it points at this commit.
 */
@NotNull
private static GitHeavyCommit createCommit(@NotNull Project project, @Nullable SymbolicRefsI refs, @NotNull VirtualFile root,
                                           @NotNull GitLogRecord record) throws VcsException {
  final Collection<String> currentRefs = record.getRefs();
  List<String> locals = new ArrayList<String>();
  List<String> remotes = new ArrayList<String>();
  List<String> tags = new ArrayList<String>();
  final String s = parseRefs(refs, currentRefs, locals, remotes, tags);
  GitHeavyCommit gitCommit = new GitHeavyCommit(root, AbstractHash.create(record.getShortHash()), new SHAHash(record.getHash()),
                                                record.getAuthorName(), record.getCommitterName(), record.getDate(),
                                                record.getSubject(), record.getFullMessage(),
                                                new HashSet<String>(Arrays.asList(record.getParentsShortHashes())),
                                                record.getFilePaths(root), record.getAuthorEmail(), record.getCommitterEmail(),
                                                tags, locals, remotes, record.parseChanges(project, root),
                                                record.getAuthorTimeStamp() * 1000);
  gitCommit.setCurrentBranch(s);
  return gitCommit;
}

/**
 * Sorts {@code currentRefs} into the given locals/remotes/tags lists (anything neither local
 * nor remote is treated as a tag).
 *
 * @return the name of the current branch if it is among {@code currentRefs}, otherwise null
 */
@Nullable
private static String parseRefs(@Nullable SymbolicRefsI refs, Collection<String> currentRefs, List<String> locals,
                                List<String> remotes, List<String> tags) {
  if (refs == null) {
    return null;
  }
  for (String ref : currentRefs) {
    final SymbolicRefs.Kind kind = refs.getKind(ref);
    if (SymbolicRefs.Kind.LOCAL.equals(kind)) {
      locals.add(ref);
    }
    else if (SymbolicRefs.Kind.REMOTE.equals(kind)) {
      remotes.add(ref);
    }
    else {
      tags.add(ref);
    }
  }
  if (refs.getCurrent() != null && currentRefs.contains(refs.getCurrent().getName())) {
    return refs.getCurrent().getName();
  }
  return null;
}

/**
 * Returns the topmost stash entry as a pair of (stash commit, index commit), or null if the stash is empty.
 * The index commit is found heuristically by matching parent short hashes against the stash subject line.
 */
@Nullable
public static Pair<AbstractHash, AbstractHash> getStashTop(@NotNull Project project, @NotNull VirtualFile root) throws VcsException {
  GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.STASH.readLockingCommand());
  GitLogParser parser = new GitLogParser(project, SHORT_HASH, SHORT_PARENTS);
  h.setSilent(true);
  h.addParameters("list");
  h.addParameters("-n1");
  h.addParameters(parser.getPretty());

  String out;
  h.setCharset(Charset.forName(GitConfigUtil.getLogEncoding(project, root)));
  out = h.run();
  final List<GitLogRecord> gitLogRecords = parser.parse(out);
  for (GitLogRecord gitLogRecord : gitLogRecords) {
    ProgressManager.checkCanceled();
    // query the single stash commit again to obtain its subject and parents
    GitSimpleHandler h1 = new GitSimpleHandler(project, root, GitCommand.LOG);
    GitLogParser parser1 = new GitLogParser(project, SHORT_HASH, SHORT_PARENTS, SUBJECT);
    h1.setSilent(true);
    h1.addParameters("-n1");
    h1.addParameters(parser1.getPretty());
    //h1.endOptions();
    h1.addParameters(gitLogRecord.getShortHash());

    String out1;
    out1 = h1.run();
    final List<GitLogRecord> gitLogRecords1 = parser1.parse(out1);
    LOG.assertTrue(gitLogRecords1.size() == 1,
                   String.format("gitLogRecords size is incorrect. size: %s, records: %s, output: %s",
                                 gitLogRecords1.size(), gitLogRecords1, out1));
    final GitLogRecord logRecord = gitLogRecords1.get(0);
    final String[] parentsShortHashes = logRecord.getParentsShortHashes();
    String indexCommit = null;
    // heuristics: for a two-parent stash commit, the parent NOT mentioned in the subject is the index commit
    if (parentsShortHashes.length == 2) {
      if (logRecord.getSubject().contains(parentsShortHashes[0])) {
        indexCommit = parentsShortHashes[1];
      }
      if (logRecord.getSubject().contains(parentsShortHashes[1])) {
        indexCommit = parentsShortHashes[0];
      }
    }
    return new Pair<AbstractHash, AbstractHash>(AbstractHash.create(gitLogRecord.getShortHash()),
                                                indexCommit == null ? null : AbstractHash.create(indexCommit));
  }
  return null;
}

/**
 * Loads the whole stash stack as a list of (shortened reflog selector, commit) pairs.
 */
@Nullable
public static List<Pair<String, GitHeavyCommit>> loadStashStackAsCommits(@NotNull Project project, @NotNull VirtualFile root,
                                                                         SymbolicRefsI refs,
                                                                         final String... parameters) throws VcsException {
  GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.STASH.readLockingCommand());
  GitLogParser parser = new GitLogParser(project, GitLogParser.NameStatus.STATUS, SHORT_HASH, HASH, COMMIT_TIME, AUTHOR_NAME,
                                         AUTHOR_TIME, AUTHOR_EMAIL, COMMITTER_NAME, COMMITTER_EMAIL, SHORT_PARENTS, REF_NAMES,
                                         SHORT_REF_LOG_SELECTOR, SUBJECT, BODY, RAW_BODY);
  h.setSilent(true);
  h.addParameters("list");
  h.addParameters(parameters);
  h.addParameters(parser.getPretty());

  String out;
  h.setCharset(Charset.forName(GitConfigUtil.getLogEncoding(project, root)));
  out = h.run();
  final List<GitLogRecord> gitLogRecords = parser.parse(out);
  final List<Pair<String, GitHeavyCommit>> result = new ArrayList<Pair<String, GitHeavyCommit>>();
  for (GitLogRecord gitLogRecord : gitLogRecords) {
    ProgressManager.checkCanceled();
    final GitHeavyCommit gitCommit = createCommit(project, refs, root, gitLogRecord);
    result.add(new Pair<String, GitHeavyCommit>(gitLogRecord.getShortenedRefLog(), gitCommit));
  }
  return result;
}

/**
 * Loads full details of the given commits via {@code git show}.
 */
@NotNull
public static List<GitHeavyCommit> commitsDetails(@NotNull Project project, @NotNull FilePath path, @Nullable
SymbolicRefsI refs, @NotNull final Collection<String> commitsIds) throws VcsException {
  // adjust path using change manager
  path = getLastCommitName(project, path);
  VirtualFile root = GitUtil.getGitRoot(path);
  GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.SHOW);
  GitLogParser parser = new GitLogParser(project, GitLogParser.NameStatus.STATUS, SHORT_HASH, HASH, COMMIT_TIME, AUTHOR_NAME,
                                         AUTHOR_TIME, AUTHOR_EMAIL, COMMITTER_NAME, COMMITTER_EMAIL, SHORT_PARENTS, REF_NAMES,
                                         SUBJECT, BODY, RAW_BODY);
  h.setStdoutSuppressed(true);
  // -M enables rename detection in the name-status output
  h.addParameters("--name-status", "-M", parser.getPretty(), "--encoding=UTF-8");
  h.addParameters(new ArrayList<String>(commitsIds));

  String output = h.run();
  final List<GitHeavyCommit> rc = new ArrayList<GitHeavyCommit>();
  for (GitLogRecord record : parser.parse(output)) {
    final GitHeavyCommit gitCommit = createCommit(project, refs, root, record);
    rc.add(gitCommit);
  }
  return rc;
}

/**
 * Returns the author time of the given commit in milliseconds
 * (git reports timestamps in seconds, hence the * 1000).
 */
public static long getAuthorTime(Project project, FilePath path, final String commitsId) throws VcsException {
  // adjust path using change manager
  path = getLastCommitName(project, path);
  final VirtualFile root = GitUtil.getGitRoot(path);
  GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.SHOW);
  GitLogParser parser = new GitLogParser(project, GitLogParser.NameStatus.STATUS, AUTHOR_TIME);
  h.setSilent(true);
  h.addParameters("--name-status", parser.getPretty(), "--encoding=UTF-8");
  h.addParameters(commitsId);
  String output = h.run();

  GitLogRecord logRecord = parser.parseOneRecord(output);
  return logRecord.getAuthorTimeStamp() * 1000;
}

/**
 * Streams (short hash, parents, time, author) tuples for each commit in the history of the given path,
 * delivering them to the consumer as {@code git log} produces them.
 */
public static void hashesWithParents(Project project, FilePath path, final AsynchConsumer<CommitHashPlusParents> consumer,
                                     final Getter<Boolean> isCanceled, Collection<VirtualFile> paths,
                                     final String... parameters) throws VcsException {
  // adjust path using change manager
  path = getLastCommitName(project, path);
  final VirtualFile root = GitUtil.getGitRoot(path);
  final GitLineHandler h = new GitLineHandler(project, root, GitCommand.LOG);
  final GitLogParser parser = new GitLogParser(project, GitLogParser.NameStatus.NAME,
                                               SHORT_HASH, COMMIT_TIME, SHORT_PARENTS, AUTHOR_NAME);
  h.setStdoutSuppressed(true);
  h.addParameters(parameters);
  h.addParameters(parser.getPretty(), "--encoding=UTF-8", "--full-history");

  if (paths != null && ! paths.isEmpty()) {
    h.endOptions();
    h.addRelativeFiles(paths);
  }
  else {
    h.addParameters("--sparse");
    h.endOptions();
    h.addRelativePaths(path);
  }

  final Semaphore semaphore = new Semaphore();
  h.addLineListener(new GitLineHandlerListener() {
    @Override
    public void onLineAvailable(final String line, final Key outputType) {
      try {
        if (ProcessOutputTypes.STDOUT.equals(outputType)) {
          if (isCanceled != null && isCanceled.get()) {
            h.cancel();
            return;
          }
          // each record fits on one line here since no body/status fields were requested
          GitLogRecord record = parser.parseOneRecord(line);
          consumer.consume(new CommitHashPlusParents(record.getShortHash(), record.getParentsShortHashes(),
                                                     record.getLongTimeStamp() * 1000, record.getAuthorName()));
        }
      }
      catch (ProcessCanceledException e) {
        h.cancel();
        semaphore.up();
      }
    }

    @Override
    public void processTerminated(int exitCode) {
      semaphore.up();
    }

    @Override
    public void startFailed(Throwable exception) {
      semaphore.up();
    }
  });
  semaphore.down();
  h.start();
  semaphore.waitFor();
  consumer.finished();
}

/**
 * Get name of the file in the last commit. If file was renamed, returns the previous name.
 *
 * @param project the context project
 * @param path    the path to check
 * @return the name of file in the last commit or argument
 */
public static FilePath getLastCommitName(@NotNull Project project, FilePath path) {
  if (project.isDefault()) return path;
  final ChangeListManager changeManager = ChangeListManager.getInstance(project);
  final Change change = changeManager.getChange(path);
  if (change != null && change.getType() == Change.Type.MOVED) {
    // GitContentRevision r = (GitContentRevision)change.getBeforeRevision();
    assert change.getBeforeRevision() != null : "Move change always have beforeRevision";
    path = change.getBeforeRevision().getFile();
  }
  return path;
}

/**
 * Returns the merge base of two revisions via {@code git merge-base}, or null if there is none.
 */
@Nullable
public static GitRevisionNumber getMergeBase(final Project project, final VirtualFile root, @NotNull final String first,
                                             @NotNull final String second) throws VcsException {
  GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.MERGE_BASE);
  h.setSilent(true);
  h.addParameters(first, second);
  String output = h.run().trim();
  if (output.length() == 0) {
    return null;
  }
  else {
    return GitRevisionNumber.resolve(project, root, output);
  }
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.xslt; import java.io.IOException; import java.util.HashMap; import java.util.Map; import javax.xml.transform.ErrorListener; import javax.xml.transform.Source; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.URIResolver; import org.apache.camel.Component; import org.apache.camel.Exchange; import org.apache.camel.api.management.ManagedAttribute; import org.apache.camel.api.management.ManagedOperation; import org.apache.camel.api.management.ManagedResource; import org.apache.camel.builder.xml.ResultHandlerFactory; import org.apache.camel.builder.xml.XsltBuilder; import org.apache.camel.converter.jaxp.XmlConverter; import org.apache.camel.impl.ProcessorEndpoint; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.ServiceHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @ManagedResource(description = "Managed XsltEndpoint") @UriEndpoint(scheme = "xslt", title = "XSLT", syntax = "xslt:resourceUri", producerOnly 
= true, label = "core,transformation")
public class XsltEndpoint extends ProcessorEndpoint {
    public static final String SAXON_TRANSFORMER_FACTORY_CLASS_NAME = "net.sf.saxon.TransformerFactoryImpl";

    private static final Logger LOG = LoggerFactory.getLogger(XsltEndpoint.class);

    // set via the JMX clearCachedStylesheet() operation; forces a stylesheet reload on the next exchange
    private volatile boolean cacheCleared;
    private volatile XsltBuilder xslt;
    private Map<String, Object> parameters;

    @UriPath @Metadata(required = "true")
    private String resourceUri;
    @UriParam(defaultValue = "true")
    private boolean contentCache = true;
    @UriParam(label = "advanced")
    private XmlConverter converter;
    @UriParam(label = "advanced")
    private String transformerFactoryClass;
    @UriParam(label = "advanced")
    private TransformerFactory transformerFactory;
    @UriParam
    private boolean saxon;
    @UriParam(label = "advanced")
    private ResultHandlerFactory resultHandlerFactory;
    @UriParam(defaultValue = "true")
    private boolean failOnNullBody = true;
    @UriParam(defaultValue = "string")
    private XsltOutput output = XsltOutput.string;
    @UriParam(defaultValue = "0")
    private int transformerCacheSize;
    @UriParam(label = "advanced")
    private ErrorListener errorListener;
    @UriParam(label = "advanced")
    private URIResolver uriResolver;
    @UriParam(defaultValue = "true")
    private boolean allowStAX = true;
    @UriParam
    private boolean deleteOutputFile;

    @Deprecated
    public XsltEndpoint(String endpointUri, Component component, XsltBuilder xslt, String resourceUri,
                        boolean cacheStylesheet) throws Exception {
        super(endpointUri, component, xslt);
        this.xslt = xslt;
        this.resourceUri = resourceUri;
        this.contentCache = cacheStylesheet;
    }

    public XsltEndpoint(String endpointUri, Component component) {
        super(endpointUri, component);
    }

    @ManagedOperation(description = "Clears the cached XSLT stylesheet, forcing to re-load the stylesheet on next request")
    public void clearCachedStylesheet() {
        this.cacheCleared = true;
    }

    @ManagedAttribute(description = "Whether the XSLT stylesheet is cached")
    public boolean isCacheStylesheet() {
        return contentCache;
    }

    /**
     * Creates (or looks up) a sibling endpoint whose URI is this endpoint's URI with the
     * resource part replaced by {@code newResourceUri}.
     */
    public XsltEndpoint findOrCreateEndpoint(String uri, String newResourceUri) {
        String newUri = uri.replace(resourceUri, newResourceUri);
        LOG.trace("Getting endpoint with URI: {}", newUri);
        return getCamelContext().getEndpoint(newUri, XsltEndpoint.class);
    }

    @Override
    protected void onExchange(Exchange exchange) throws Exception {
        // reload the stylesheet per message when caching is off, or once after a JMX cache clear
        if (!contentCache || cacheCleared) {
            loadResource(resourceUri);
        }
        super.onExchange(exchange);
    }

    public boolean isCacheCleared() {
        return cacheCleared;
    }

    public void setCacheCleared(boolean cacheCleared) {
        this.cacheCleared = cacheCleared;
    }

    public XsltBuilder getXslt() {
        return xslt;
    }

    public void setXslt(XsltBuilder xslt) {
        this.xslt = xslt;
    }

    @ManagedAttribute(description = "The name of the template to load from classpath or file system")
    public String getResourceUri() {
        return resourceUri;
    }

    /**
     * The name of the template to load from classpath or file system
     */
    public void setResourceUri(String resourceUri) {
        this.resourceUri = resourceUri;
    }

    public XmlConverter getConverter() {
        return converter;
    }

    /**
     * To use a custom implementation of {@link org.apache.camel.converter.jaxp.XmlConverter}
     */
    public void setConverter(XmlConverter converter) {
        this.converter = converter;
    }

    public String getTransformerFactoryClass() {
        return transformerFactoryClass;
    }

    /**
     * To use a custom XSLT transformer factory, specified as a FQN class name
     */
    public void setTransformerFactoryClass(String transformerFactoryClass) {
        this.transformerFactoryClass = transformerFactoryClass;
    }

    public TransformerFactory getTransformerFactory() {
        return transformerFactory;
    }

    /**
     * To use a custom XSLT transformer factory
     */
    public void setTransformerFactory(TransformerFactory transformerFactory) {
        this.transformerFactory = transformerFactory;
    }

    @ManagedAttribute(description = "Whether to use Saxon as the transformerFactoryClass")
    public boolean isSaxon() {
        return saxon;
    }

    /**
     * Whether to use Saxon as the transformerFactoryClass.
     * If enabled then the class net.sf.saxon.TransformerFactoryImpl is used. You would need to add Saxon to the classpath.
     */
    public void setSaxon(boolean saxon) {
        this.saxon = saxon;
    }

    public ResultHandlerFactory getResultHandlerFactory() {
        return resultHandlerFactory;
    }

    /**
     * Allows you to use a custom org.apache.camel.builder.xml.ResultHandlerFactory which is capable of
     * using custom org.apache.camel.builder.xml.ResultHandler types.
     */
    public void setResultHandlerFactory(ResultHandlerFactory resultHandlerFactory) {
        this.resultHandlerFactory = resultHandlerFactory;
    }

    @ManagedAttribute(description = "Whether or not to throw an exception if the input body is null")
    public boolean isFailOnNullBody() {
        return failOnNullBody;
    }

    /**
     * Whether or not to throw an exception if the input body is null.
     */
    public void setFailOnNullBody(boolean failOnNullBody) {
        this.failOnNullBody = failOnNullBody;
    }

    @ManagedAttribute(description = "What kind of option to use.")
    public XsltOutput getOutput() {
        return output;
    }

    /**
     * Option to specify which output type to use.
     * Possible values are: string, bytes, DOM, file. The first three options are all in memory based, where as file is streamed directly to a java.io.File.
     * For file you must specify the filename in the IN header with the key Exchange.XSLT_FILE_NAME which is also CamelXsltFileName.
     * Also any paths leading to the filename must be created beforehand, otherwise an exception is thrown at runtime.
     */
    public void setOutput(XsltOutput output) {
        this.output = output;
    }

    public int getTransformerCacheSize() {
        return transformerCacheSize;
    }

    /**
     * The number of javax.xml.transform.Transformer object that are cached for reuse to avoid calls to Template.newTransformer().
     */
    public void setTransformerCacheSize(int transformerCacheSize) {
        this.transformerCacheSize = transformerCacheSize;
    }

    public ErrorListener getErrorListener() {
        return errorListener;
    }

    /**
     * Allows to configure to use a custom javax.xml.transform.ErrorListener. Beware when doing this then the default error
     * listener which captures any errors or fatal errors and store information on the Exchange as properties is not in use.
     * So only use this option for special use-cases.
     */
    public void setErrorListener(ErrorListener errorListener) {
        this.errorListener = errorListener;
    }

    @ManagedAttribute(description = "Cache for the resource content (the stylesheet file) when it is loaded.")
    public boolean isContentCache() {
        return contentCache;
    }

    /**
     * Cache for the resource content (the stylesheet file) when it is loaded.
     * If set to false Camel will reload the stylesheet file on each message processing. This is good for development.
     * A cached stylesheet can be forced to reload at runtime via JMX using the clearCachedStylesheet operation.
     */
    public void setContentCache(boolean contentCache) {
        this.contentCache = contentCache;
    }

    public URIResolver getUriResolver() {
        return uriResolver;
    }

    /**
     * To use a custom javax.xml.transform.URIResolver
     */
    public void setUriResolver(URIResolver uriResolver) {
        this.uriResolver = uriResolver;
    }

    @ManagedAttribute(description = "Whether to allow using StAX as the javax.xml.transform.Source")
    public boolean isAllowStAX() {
        return allowStAX;
    }

    /**
     * Whether to allow using StAX as the javax.xml.transform.Source.
     */
    public void setAllowStAX(boolean allowStAX) {
        this.allowStAX = allowStAX;
    }

    public boolean isDeleteOutputFile() {
        return deleteOutputFile;
    }

    /**
     * If you have output=file then this option dictates whether or not the output file should be deleted when the Exchange
     * is done processing. For example suppose the output file is a temporary file, then it can be a good idea to delete it after use.
     */
    public void setDeleteOutputFile(boolean deleteOutputFile) {
        this.deleteOutputFile = deleteOutputFile;
    }

    public Map<String, Object> getParameters() {
        return parameters;
    }

    /**
     * Additional parameters to configure on the javax.xml.transform.Transformer.
     */
    public void setParameters(Map<String, Object> parameters) {
        this.parameters = parameters;
    }

    /**
     * Loads the resource.
     *
     * @param resourceUri  the resource to load
     * @throws TransformerException is thrown if error loading resource
     * @throws IOException is thrown if error loading resource
     */
    protected void loadResource(String resourceUri) throws TransformerException, IOException {
        // NOTE(review): the log/error wording says "schema resource" but this loads the XSLT stylesheet —
        // consider rewording the messages in a behavior-reviewed change
        LOG.trace("{} loading schema resource: {}", this, resourceUri);
        Source source = xslt.getUriResolver().resolve(resourceUri, null);
        if (source == null) {
            throw new IOException("Cannot load schema resource " + resourceUri);
        } else {
            xslt.setTransformerSource(source);
        }
        // now loaded so clear flag
        cacheCleared = false;
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();

        LOG.debug("{} using schema resource: {}", this, resourceUri);
        this.xslt = getCamelContext().getInjector().newInstance(XsltBuilder.class);
        if (converter != null) {
            xslt.setConverter(converter);
        }

        // the saxon shortcut only applies when no explicit factory class was configured
        if (transformerFactoryClass == null && saxon) {
            transformerFactoryClass = SAXON_TRANSFORMER_FACTORY_CLASS_NAME;
        }

        TransformerFactory factory = transformerFactory;
        if (factory == null && transformerFactoryClass != null) {
            // provide the class loader of this component to work in OSGi environments
            Class<?> factoryClass = getCamelContext().getClassResolver().resolveMandatoryClass(transformerFactoryClass, XsltComponent.class.getClassLoader());
            LOG.debug("Using TransformerFactoryClass {}", factoryClass);
            factory = (TransformerFactory) getCamelContext().getInjector().newInstance(factoryClass);
        }

        if (factory != null) {
            LOG.debug("Using TransformerFactory {}", factory);
            xslt.getConverter().setTransformerFactory(factory);
        }
        if (resultHandlerFactory != null) {
            xslt.setResultHandlerFactory(resultHandlerFactory);
        }
        if (errorListener != null) {
            xslt.errorListener(errorListener);
        }
        xslt.setFailOnNullBody(failOnNullBody);
        xslt.transformerCacheSize(transformerCacheSize);
        xslt.setUriResolver(uriResolver);
        xslt.setAllowStAX(allowStAX);
        xslt.setDeleteOutputFile(deleteOutputFile);

        configureOutput(xslt, output.name());

        // any additional transformer parameters then make a copy to avoid side-effects
        if (parameters != null) {
            Map<String, Object> copy = new HashMap<String, Object>(parameters);
            xslt.setParameters(copy);
        }

        // must load resource first which sets a template and do a stylesheet compilation to catch errors early
        loadResource(resourceUri);

        // and then inject camel context and start service
        xslt.setCamelContext(getCamelContext());

        // the processor is the xslt builder
        setProcessor(xslt);

        ServiceHelper.startService(xslt);
    }

    /**
     * Configures the builder's output mode from the given option name (string, bytes, DOM, file).
     */
    protected void configureOutput(XsltBuilder xslt, String output) throws Exception {
        if (ObjectHelper.isEmpty(output)) {
            return;
        }

        if ("string".equalsIgnoreCase(output)) {
            xslt.outputString();
        } else if ("bytes".equalsIgnoreCase(output)) {
            xslt.outputBytes();
        } else if ("DOM".equalsIgnoreCase(output)) {
            xslt.outputDOM();
        } else if ("file".equalsIgnoreCase(output)) {
            xslt.outputFile();
        } else {
            throw new IllegalArgumentException("Unknown output type: " + output);
        }
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();
        ServiceHelper.stopService(xslt);
    }
}
package de.hanneseilers.core;

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

/**
 * A single budget book entry: a named article with a price, a purchase date and a
 * {@link Category}, optionally backed by a database row identified by {@code aid}.
 */
public class Article {

    private DBController db = MyBudget.database;

    // database id; -1 means the article has not been persisted yet
    private int aid = -1;
    private String article = "";
    private Date date = new Date();
    private double price = 0.0;
    private Category category = new Category();

    // shared formatting configuration; refreshed from Loader.config by updateFormatterString()
    public static String formatterString = null;
    public static int articleNameLength = 40;
    public static int categoryNameLength = 6;
    public static int numbersPreDecimalPlaces = 6;
    public static int numbersPostDecimalPlaces = 2;
    public static String currencySymbol = "EUR";
    public static int timestampDay = Loader.config.getInt( ConfigurationValues.ARTICLE_TIMESTAMP_DAYS.getKey() );

    /**
     * Constructor (sets the current date)
     */
    public Article(){
        setDate( new Date(System.currentTimeMillis()) );
    }

    /**
     * Constructor (sets current date)
     * @param article article name
     * @param price   price of the article
     */
    public Article(String article, double price){
        this();
        setArticle(article);
        setPrice(price);
    }

    /**
     * Constructor
     * @param article article name
     * @param price   price of the article
     * @param date    purchase date
     */
    public Article(String article, double price, Date date){
        this(article, price);
        setDate(date);
    }

    /**
     * Constructor (sets current date)
     * @param article  article name
     * @param price    price of the article
     * @param category category of the article
     */
    public Article(String article, double price, Category category){
        this(article, price);
        setCategory(category);
    }

    /**
     * Constructor
     * @param article  article name
     * @param price    price of the article
     * @param date     purchase date
     * @param category category of the article
     */
    public Article(String article, double price, Date date, Category category){
        this(article, price, date);
        setCategory(category);
    }

    /**
     * Constructor
     * @param article  article name
     * @param price    price of the article
     * @param date     purchase date
     * @param category category of the article
     * @param aid      database id of the article
     */
    public Article(String article, double price, Date date, Category category, int aid){
        setAid(aid);
        setArticle(article);
        setPrice(price);
        setDate(date);
        setCategory(category);
    }

    /**
     * @return the aid
     */
    public int getAid() {
        return aid;
    }

    /**
     * @param aid the aid to set
     */
    public void setAid(int aid) {
        this.aid = aid;
    }

    /**
     * @return the article
     */
    public String getArticle() {
        return article;
    }

    /**
     * Sets the article name. The name is trimmed, an empty name is replaced by
     * a placeholder, and over-long names are truncated to {@link #articleNameLength}.
     * @param article the article name to set
     */
    public void setArticle(String article){
        article = article.trim();

        // check for empty article name
        if( article.length() == 0 ){
            article = ">NO NAME<";
        }

        // check article names length
        if( article.length() > articleNameLength ){
            article = article.substring(0, articleNameLength);
        }

        this.article = article;
    }

    /**
     * @return the date
     */
    public Date getDate() {
        return date;
    }

    /**
     * Sets the date, normalizing the time-of-day to 01:00:00.000 so that articles of the
     * same day compare equal.
     * NOTE(review): hour 1 rather than 0 — presumably to avoid DST edge cases; confirm.
     * @param date the date to set
     */
    public void setDate(Date date) {
        // set correct time
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);
        calendar.set(Calendar.HOUR_OF_DAY, 1);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        this.date = calendar.getTime();
    }

    /**
     * @return the price
     */
    public double getPrice() {
        return price;
    }

    /**
     * Sets the price, rounded to two decimal places.
     * @param price the price to set
     */
    public void setPrice(double price) {
        this.price = Math.round( (price)*100 )/100.0;
    }

    /**
     * @return the category
     */
    public Category getCategory() {
        return category;
    }

    /**
     * @param category the category to set
     */
    public void setCategory(Category category) {
        this.category = category;
    }

    /**
     * Updates this article in the database.
     * @return True if successful
     */
    public boolean update(){
        if( db.isDbReady() ){
            return db.updateArticle(this);
        }
        return false;
    }

    /**
     * Deletes this article from database.
     * @return True if successful
     */
    public boolean delete(){
        // guard against an unready database, consistent with update()
        if( db.isDbReady() ){
            return db.deleteArticle(this);
        }
        return false;
    }

    /**
     * @return String representation of article, or an empty string if no category is set
     */
    public String toString(){
        Category category = getCategory();
        String ret = "";

        // Check formatter string
        if( formatterString == null ){
            updateFormatterString();
        }

        if( category != null ){
            String categoryName = category.getName();
            ret = String.format( formatterString,
                    (new SimpleDateFormat("dd.MM.yyyy")).format(date),
                    getArticle(),
                    (categoryName.length() <= categoryNameLength)
                            ? categoryName
                            : categoryName.substring(0, categoryNameLength-1)+".",
                    getPrice() );
        }

        return ret;
    }

    /**
     * Updates formatter string for toString method of article
     * using the current configuration values.
     */
    public static void updateFormatterString(){
        articleNameLength = Loader.config.getInt( ConfigurationValues.ARTICLE_NAME_LENGTH.getKey() );
        categoryNameLength = Loader.config.getInt( ConfigurationValues.CATEGROY_NAME_LENGTH.getKey() );
        currencySymbol = Loader.config.getString( ConfigurationValues.ARTICLE_CURRENCY_SYMBOL.getKey() );
        numbersPreDecimalPlaces = Loader.config.getInt( ConfigurationValues.NUMBERS_PRE_DECIMAL_PLACES.getKey() );
        numbersPostDecimalPlaces = Loader.config.getInt( ConfigurationValues.NUMBERS_POST_DECIMAL_PLACES.getKey() );

        // e.g. "%s %-40s [%-6s] %9.2f EUR"
        formatterString = "%s %-" + articleNameLength + "s [%-" + categoryNameLength + "s] %"
                + (numbersPreDecimalPlaces+numbersPostDecimalPlaces+1) + "." + numbersPostDecimalPlaces + "f " + currencySymbol;
    }

}
/* * The MIT License * * Copyright (c) 2010, Brad Larson * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package hudson.plugins.repo; import hudson.FilePath; import hudson.Launcher; import hudson.model.AbstractBuild; import hudson.plugins.repo.ChangeLogEntry.ModifiedFile; import hudson.scm.ChangeLogParser; import hudson.util.AtomicFileWriter; import hudson.util.IOException2; import hudson.util.XStream2; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import org.xml.sax.SAXException; import com.thoughtworks.xstream.io.StreamException; /** * Utility functions to generate and parse a file listing the differences * between builds. 
 * Differences are saved as a list of ChangeLogEntry.
 */
public class ChangeLog extends ChangeLogParser {

	private static Logger debug =
			Logger.getLogger("hudson.plugins.repo.ChangeLog");

	// TODO: Really need to add some unit tests for this class. That might
	// require creating git commits, which will be tricky. See the git plugin
	// for some possibilities.

	/**
	 * Parses a previously saved changelog file (XStream XML, UTF-8) back into
	 * a RepoChangeLogSet for the given build.
	 */
	@SuppressWarnings("unchecked")
	@Override
	public RepoChangeLogSet parse(
			@SuppressWarnings("rawtypes") final AbstractBuild build,
			final File changelogFile) throws IOException, SAXException {
		final List<ChangeLogEntry> r;
		final XStream2 xs = new XStream2();
		final Reader reader =
				new BufferedReader(new InputStreamReader(new FileInputStream(
						changelogFile), "UTF-8"));
		try {
			final Object obj = xs.fromXML(reader);
			// unchecked: the file is written by saveChangeLog() below as a
			// List<ChangeLogEntry>, so the cast matches our own output format
			r = (List<ChangeLogEntry>) obj;
		} finally {
			reader.close();
		}
		return new RepoChangeLogSet(build, r);
	}

	/**
	 * Generate a change log between two specified revision states and return it
	 * as a list of change log entries.
	 *
	 * @param currentState
	 *            The current state of the repository
	 * @param previousState
	 *            The previous state of the repository
	 * @param launcher
	 *            The launcher used to run command-line programs
	 * @param workspace
	 *            The FilePath of the workspace to use when computing
	 *            differences. This path might be on a slave machine.
	 * @throws IOException
	 *             is thrown if we have problems writing to the changelogFile
	 * @throws InterruptedException
	 *             is thrown if we are interrupted while waiting on the git
	 *             commands to run in a forked process.
	 */
	public static List<ChangeLogEntry> generateChangeLog(
			final RevisionState currentState,
			final RevisionState previousState, final Launcher launcher,
			final FilePath workspace) throws IOException,
			InterruptedException {
		final List<ProjectState> changes =
				currentState.whatChanged(previousState);
		debug.log(Level.FINEST, "generateChangeLog: changes " + changes);
		if (changes == null || changes.size() == 0) {
			// No changes or the first job
			return null;
		}
		final List<String> commands = new ArrayList<String>(5);
		final List<ChangeLogEntry> logs = new ArrayList<ChangeLogEntry>();

		for (final ProjectState change : changes) {
			debug.log(Level.FINEST, "change: " + change);
			if (change.getRevision() == null) {
				// This project was just added to the manifest.
				logs.add(new ChangeLogEntry(change.getPath(), change
						.getServerPath(), null, null, null, null, null, null,
						null, "This project was added to the manifest.",
						null));
				continue;
			}
			String newRevision = currentState.getRevision(change.getPath());
			if (newRevision == null) {
				// This project was just removed from the manifest.
				logs.add(new ChangeLogEntry(change.getPath(), change
						.getServerPath(), null, null, null, null, null, null,
						null, "This project was removed from the manifest.",
						null));
				continue;
			}

			final FilePath gitdir = new FilePath(workspace, change.getPath());
			commands.clear();
			commands.add("git");
			commands.add("log");
			commands.add("--raw");
			commands.add("--first-parent");
			// zzREPOzz / yyREPOyy are sentinel markers used below to split the
			// combined git output into per-commit chunks and to find the end of
			// the commit message within each chunk.
			commands.add("--format=\"zzREPOzz%H%n%an<%ae>%aD"
					+ "%n%cn<%ce>%cD%n%s%n%n%byyREPOyy\"");
			// TODO: make this work with the -M flag to show copied and renamed
			// files.
			// TODO: even better, use jgit to do the diff. It would be faster,
			// more robust, etc. git was used to get this done faster, but jgit
			// is definitely preferable. Most of the code can probably be copied
			// from Gerrit. It might be tricky with master/slave setup.
			commands.add(change.getRevision() + ".." + newRevision);

			final OutputStream gitOutput = new ByteArrayOutputStream();
			launcher.launch().stdout(gitOutput).pwd(gitdir).cmds(commands)
					.join();

			// Each chunk after a zzREPOzz marker is one commit.
			final String[] changelogs = gitOutput.toString().split("zzREPOzz");
			for (final String changelog : changelogs) {
				if (changelog.length() < 10) {
					// This isn't a helpful message. Skip it.
					continue;
				}
				// The format string above lays the fields out line by line:
				// hash / author<email>date / committer<email>date / message.
				// Walk through them with index arithmetic on the delimiters.
				int endLine = changelog.indexOf('\n');
				final String revision = changelog.substring(0, endLine);
				int firstEmailPos = changelog.indexOf('<', endLine);
				final String authorName =
						changelog.substring(endLine + 1, firstEmailPos);
				int endEmail = changelog.indexOf('>', firstEmailPos);
				final String authorEmail =
						changelog.substring(firstEmailPos + 1, endEmail);
				endLine = changelog.indexOf('\n', endEmail);
				final String authorDate =
						changelog.substring(endEmail + 1, endLine);
				firstEmailPos = changelog.indexOf('<', endLine);
				final String committerName =
						changelog.substring(endLine + 1, firstEmailPos);
				endEmail = changelog.indexOf('>', firstEmailPos);
				final String committerEmail =
						changelog.substring(firstEmailPos + 1, endEmail);
				endLine = changelog.indexOf('\n', endEmail);
				final String committerDate =
						changelog.substring(endEmail + 1, endLine);
				final int endComment = changelog.indexOf("yyREPOyy", endLine);
				final String commitText =
						changelog.substring(endLine + 1, endComment);
				// Everything after the message marker is the --raw file list.
				final String[] fileLines =
						changelog.substring(endComment).split("\n");
				final List<ModifiedFile> modifiedFiles =
						new ArrayList<ModifiedFile>();
				for (final String fileLine : fileLines) {
					if (!fileLine.startsWith(":")) {
						continue;
					}
					// Fixed column offsets into git's --raw line format:
					// the status letter and then the file path.
					// NOTE(review): these offsets assume the default abbreviated
					// raw layout and would break with -M or different hash
					// widths — confirm against the git version in use.
					final char action = fileLine.substring(37, 38).charAt(0);
					final String path = fileLine.substring(39);
					modifiedFiles.add(new ModifiedFile(path, action));
				}

				ChangeLogEntry nc = new ChangeLogEntry(change.getPath(), change
						.getServerPath(), revision, authorName, authorEmail,
						authorDate, committerName, committerEmail,
						committerDate, commitText, modifiedFiles);
				logs.add(nc);
				debug.log(Level.FINEST, nc.toString());
			}
		}
		return logs;
	}

	/**
	 * Generate a change log file containing the differences between one build
	 * and the next and save the result as XML in a specified file. The function
	 * uses git on the command line to determine the differences between
	 * commits.
	 *
	 * @param currentState
	 *            The current state of the repository
	 * @param previousState
	 *            The previous state of the repository
	 * @param changelogFile
	 *            The file in which we will store the set of differences between
	 *            the two states
	 * @param launcher
	 *            The launcher used to run command-line programs
	 * @param workspace
	 *            The FilePath of the workspace to use when computing
	 *            differences. This path might be on a slave machine.
	 * @throws IOException
	 *             is thrown if we have problems writing to the changelogFile
	 * @throws InterruptedException
	 *             is thrown if we are interrupted while waiting on the git
	 *             commands to run in a forked process.
	 */
	public static void saveChangeLog(final RevisionState currentState,
			final RevisionState previousState, final File changelogFile,
			final Launcher launcher, final FilePath workspace)
			throws IOException, InterruptedException {
		List<ChangeLogEntry> logs = generateChangeLog(currentState,
				previousState, launcher, workspace);
		if (logs == null) {
			debug.info("No logs found");
			return;
		}
		final XStream2 xs = new XStream2();
		// AtomicFileWriter: the changelog file only appears once fully written.
		final AtomicFileWriter w = new AtomicFileWriter(changelogFile);
		try {
			w.write("<?xml version='1.0' encoding='UTF-8'?>\n");
			xs.toXML(logs, w);
			w.commit();
		} catch (final StreamException e) {
			throw new IOException2(e);
		} finally {
			w.close();
		}
	}
}
/*
 * Orion Viewer - pdf, djvu, xps and cbz file viewer for android devices
 *
 * Copyright (C) 2011-2013 Michael Bogdanov & Co
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package universe.constellation.orion.viewer;

import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.preference.PreferenceManager;
import android.support.v4.app.ListFragment;
import android.support.v7.app.ActionBar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import universe.constellation.orion.viewer.android.TabListener;
import universe.constellation.orion.viewer.prefs.GlobalOptions;

import java.io.File;
import java.io.FilenameFilter;

/**
 * File-manager activity with two tabs: a file-system browser and a
 * recently-opened-books list. Selecting a book launches OrionViewerActivity.
 *
 * User: mike
 * Date: 24.12.11
 * Time: 16:41
 */
public class OrionFileManagerActivity extends OrionBaseActivity {

    // NOTE(review): appears unused in this class — possibly referenced by
    // subclasses or kept for preference-key compatibility; confirm before removal.
    private static final String LAST_FOLDER = "LAST_FOLDER";

    /** Tab fragment showing the file-system browser list. */
    public static class FilesListFragment extends ListFragment {
        public void onActivityCreated(Bundle savedInstanceState) {
            super.onActivityCreated(savedInstanceState);
            ((OrionFileManagerActivity)getActivity()).createFileView(this);
        }
    }

    /** Tab fragment showing the recently opened books list. */
    public static class RecentListFragment extends ListFragment {
        public void onActivityCreated(Bundle savedInstanceState) {
            super.onActivityCreated(savedInstanceState);
            ((OrionFileManagerActivity)getActivity()).createRecentView(this);
        }
    }

    private SharedPreferences prefs;

    private GlobalOptions globalOptions;

    // true between onCreate and the first onResume; used so that the
    // "open recent book" logic runs exactly once per fresh creation
    private boolean justCreated;

    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Common.d("Creating file manager");

        setContentView(device.getFileManagerLayoutId());

        prefs = PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
        globalOptions = getOrionContext().getOptions();

        initFileManager();

        justCreated = true;
    }

    // If enabled in options (and not suppressed via the DONT_OPEN_RECENT
    // extra), immediately reopen the most recently read book.
    protected void onNewIntent(Intent intent) {
        Common.d("OrionFileManager: On new intent " + intent);

        boolean dontStartRecent = intent.getBooleanExtra(DONT_OPEN_RECENT, false);

        if (!dontStartRecent && globalOptions.isOpenRecentBook()) {
            if (!globalOptions.getRecentFiles().isEmpty()) {
                GlobalOptions.RecentEntry entry = globalOptions.getRecentFiles().get(0);
                File book = new File(entry.getPath());
                if (book.exists()) {
                    Common.d("Opening recent book");
                    openFile(book);
                }
            }
        }
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        // state was restored, so this is not a fresh creation
        justCreated = false;
    }

    protected void onResume() {
        super.onResume();
        if (justCreated) {
            justCreated = false;
            // deliver the launching intent once, after the UI is up
            onNewIntent(getIntent());
        }
        updatePathTextView(getStartFolder());
    }

    // Wires up the "recent books" tab; hidden entirely when recents are disabled.
    private void createRecentView(ListFragment list) {
        ListView recent = list.getListView();
        if (showRecentsAndSavePath()) {
            recent.setOnItemClickListener(new AdapterView.OnItemClickListener() {
                public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                    GlobalOptions.RecentEntry entry = (GlobalOptions.RecentEntry) parent.getItemAtPosition(position);
                    File file = new File(entry.getPath());
                    if (file.exists()) {
                        openFile(file);
                    }
                }
            });

            list.setListAdapter(new FileChooser(this, globalOptions.getRecentFiles()));
        } else {
            recent.setVisibility(View.GONE);
        }
    }

    // Wires up the file browser tab: directories navigate, files open
    // (remembering the parent directory when recents/path saving is enabled).
    private void createFileView(ListFragment list) {
        ListView view = list.getListView();

        view.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                File file = (File) parent.getItemAtPosition(position);
                if (file.isDirectory()) {
                    File newFolder = ((FileChooser) parent.getAdapter()).changeFolder(file);
                    updatePathTextView(newFolder.getAbsolutePath());
                } else {
                    if (showRecentsAndSavePath()) {
                        SharedPreferences.Editor editor = prefs.edit();
                        editor.putString(Common.LAST_OPENED_DIRECTORY, file.getParentFile().getAbsolutePath());
                        editor.commit();
                    }
                    openFile(file);
                }
            }
        });

        list.setListAdapter(new FileChooser(this, getStartFolder(), getFileNameFilter()));
    }

    // Shows the current folder path in the action bar title.
    private void updatePathTextView(String newPath) {
        getSupportActionBar().setTitle(newPath);
    }

    // Launches the viewer activity for the given book file.
    protected void openFile(File file) {
        Common.d("Opening new book " + file.getPath());

        Intent in = new Intent(Intent.ACTION_VIEW);
        in.setClass(getApplicationContext(), OrionViewerActivity.class);
        in.setData(Uri.fromFile(file));
        in.addCategory(Intent.CATEGORY_DEFAULT);
        startActivity(in);
    }

    // Builds the tab bar: always a "files" tab, plus a "recent" tab when enabled.
    private void initFileManager() {
        ActionBar actionBar = getSupportActionBar();
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);

        ActionBar.Tab tab = actionBar.newTab()
                .setIcon(R.drawable.folder)
                .setTabListener(new TabListener<FilesListFragment>(
                        this, "files", FilesListFragment.class));

        actionBar.addTab(tab);

        if (showRecentsAndSavePath()) {
            tab = actionBar.newTab()
                    .setIcon(R.drawable.book)
                    .setTabListener(new TabListener<RecentListFragment>(
                            this, "recent", RecentListFragment.class) {
                    });
            actionBar.addTab(tab);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        boolean result = super.onCreateOptionsMenu(menu);
        if (result) {
            getMenuInflater().inflate(R.menu.file_manager_menu, menu);
        }
        return result;
    }

    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.exit_menu_item:
                finish();
                return true;
        }
        return false;
    }

    //customizable part

    /** Whether to show the recent-books tab and remember the last opened path. */
    public boolean showRecentsAndSavePath() {
        return true;
    }

    /** Filter applied to the file browser list. */
    public FilenameFilter getFileNameFilter() {
        return FileChooser.DEFAULT_FILTER;
    }

    /**
     * Picks the initial browse folder: last opened directory, then the device
     * default directory on external storage, then a fixed sdcard path, and
     * finally the system root as a last resort.
     */
    public String getStartFolder() {
        String lastOpenedDir = globalOptions.getLastOpenedDirectory();

        if (lastOpenedDir != null && new File(lastOpenedDir).exists()) {
            return lastOpenedDir;
        }

        String path = Environment.getExternalStorageDirectory().getPath() + "/" + device.getDefaultDirectory();
        if (new File(path).exists()) {
            return path;
        }

        String path1 = "/system/media/sdcard/" + device.getDefaultDirectory();
        if (new File(path1).exists()) {
            return path1;
        }

        return Environment.getRootDirectory().getAbsolutePath();
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.admin.indices.validate.query; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastShardOperationFailedException; import org.elasticsearch.action.support.broadcast.TransportBroadcastAction; import org.elasticsearch.cache.recycler.PageCacheRecycler; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import 
org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.internal.DefaultSearchContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchLocalRequest;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicReferenceArray;

/**
 * Transport action backing the "validate query" API: parses (and optionally
 * explains/rewrites) a query on one shard per index without executing it,
 * then aggregates the per-shard validity results.
 */
public class TransportValidateQueryAction extends TransportBroadcastAction<ValidateQueryRequest, ValidateQueryResponse, ShardValidateQueryRequest, ShardValidateQueryResponse> {

    private final IndicesService indicesService;

    private final ScriptService scriptService;

    private final PageCacheRecycler pageCacheRecycler;

    private final BigArrays bigArrays;

    @Inject
    public TransportValidateQueryAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                        TransportService transportService, IndicesService indicesService, ScriptService scriptService,
                                        PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, ActionFilters actionFilters,
                                        IndexNameExpressionResolver indexNameExpressionResolver) {
        super(settings, ValidateQueryAction.NAME, threadPool, clusterService, transportService, actionFilters,
                indexNameExpressionResolver, ValidateQueryRequest::new, ShardValidateQueryRequest::new, ThreadPool.Names.SEARCH);
        this.indicesService = indicesService;
        this.scriptService = scriptService;
        this.pageCacheRecycler = pageCacheRecycler;
        this.bigArrays = bigArrays;
    }

    @Override
    protected void doExecute(ValidateQueryRequest request, ActionListener<ValidateQueryResponse> listener) {
        // pin "now" once so time-dependent queries parse consistently across shards
        request.nowInMillis = System.currentTimeMillis();
        super.doExecute(request, listener);
    }

    @Override
    protected ShardValidateQueryRequest newShardRequest(int numShards, ShardRouting shard, ValidateQueryRequest request) {
        String[] filteringAliases = indexNameExpressionResolver.filteringAliases(clusterService.state(), shard.index(), request.indices());
        return new ShardValidateQueryRequest(shard.shardId(), filteringAliases, request);
    }

    @Override
    protected ShardValidateQueryResponse newShardResponse() {
        return new ShardValidateQueryResponse();
    }

    @Override
    protected GroupShardsIterator shards(ClusterState clusterState, ValidateQueryRequest request, String[] concreteIndices) {
        // Hard-code routing to limit request to a single shard, but still, randomize it...
        Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, Integer.toString(ThreadLocalRandom.current().nextInt(1000)), request.indices());
        return clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, "_local");
    }

    @Override
    protected ClusterBlockException checkGlobalBlock(ClusterState state, ValidateQueryRequest request) {
        return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
    }

    @Override
    protected ClusterBlockException checkRequestBlock(ClusterState state, ValidateQueryRequest countRequest, String[] concreteIndices) {
        return state.blocks().indicesBlockedException(ClusterBlockLevel.READ, concreteIndices);
    }

    /**
     * Folds the per-shard responses into a single ValidateQueryResponse:
     * counts successes/failures, ANDs the validity flags, and collects
     * explanations when requested.
     */
    @Override
    protected ValidateQueryResponse newResponse(ValidateQueryRequest request, AtomicReferenceArray shardsResponses, ClusterState clusterState) {
        int successfulShards = 0;
        int failedShards = 0;
        boolean valid = true;
        List<ShardOperationFailedException> shardFailures = null;
        List<QueryExplanation> queryExplanations = null;
        for (int i = 0; i < shardsResponses.length(); i++) {
            Object shardResponse = shardsResponses.get(i);
            if (shardResponse == null) {
                // simply ignore non active shards
            } else if (shardResponse instanceof BroadcastShardOperationFailedException) {
                failedShards++;
                if (shardFailures == null) {
                    shardFailures = new ArrayList<>();
                }
                shardFailures.add(new DefaultShardOperationFailedException((BroadcastShardOperationFailedException) shardResponse));
            } else {
                ShardValidateQueryResponse validateQueryResponse = (ShardValidateQueryResponse) shardResponse;
                // a single invalid shard makes the whole request invalid
                valid = valid && validateQueryResponse.isValid();
                if (request.explain() || request.rewrite()) {
                    if (queryExplanations == null) {
                        queryExplanations = new ArrayList<>();
                    }
                    queryExplanations.add(new QueryExplanation(
                            validateQueryResponse.getIndex(),
                            validateQueryResponse.isValid(),
                            validateQueryResponse.getExplanation(),
                            validateQueryResponse.getError()
                    ));
                }
                successfulShards++;
            }
        }
        return new ValidateQueryResponse(valid, queryExplanations, shardsResponses.length(), successfulShards, failedShards, shardFailures);
    }

    /**
     * Runs the actual validation on one shard: parses the query source inside
     * a throwaway search context and reports validity plus an optional
     * explanation (the parsed or rewritten query's toString).
     */
    @Override
    protected ShardValidateQueryResponse shardOperation(ShardValidateQueryRequest request) {
        IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex());
        IndexQueryParserService queryParserService = indexService.queryParserService();
        IndexShard indexShard = indexService.shardSafe(request.shardId().id());

        boolean valid;
        String explanation = null;
        String error = null;
        Engine.Searcher searcher = indexShard.acquireSearcher("validate_query");

        DefaultSearchContext searchContext = new DefaultSearchContext(0,
                new ShardSearchLocalRequest(request.types(), request.nowInMillis(), request.filteringAliases()), null,
                searcher, indexService, indexShard,
                scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(),
                parseFieldMatcher, SearchService.NO_TIMEOUT
        );
        // parsing code reads the context from a thread-local; must be paired
        // with removeCurrent() in the finally block below
        SearchContext.setCurrent(searchContext);
        try {
            if (request.source() != null && request.source().length() > 0) {
                searchContext.parsedQuery(queryParserService.parseQuery(request.source()));
            }
            searchContext.preProcess();

            valid = true;
            if (request.explain()) {
                explanation = searchContext.parsedQuery().query().toString();
            }
            if (request.rewrite()) {
                explanation = getRewrittenQuery(searcher.searcher(), searchContext.query());
            }
        } catch (QueryParsingException e) {
            // malformed query: report invalid rather than failing the shard
            valid = false;
            error = e.getDetailedMessage();
        } catch (AssertionError|IOException e) {
            valid = false;
            error = e.getMessage();
        } finally {
            // closing the context also releases the acquired searcher
            searchContext.close();
            SearchContext.removeCurrent();
        }

        return new ShardValidateQueryResponse(request.shardId(), valid, explanation, error);
    }

    // Rewrites the query; if it rewrites to match-nothing, fall back to the
    // original query's representation (more informative for the caller).
    private String getRewrittenQuery(IndexSearcher searcher, Query query) throws IOException {
        Query queryRewrite = searcher.rewrite(query);
        if (queryRewrite instanceof MatchNoDocsQuery) {
            return query.toString();
        } else {
            return queryRewrite.toString();
        }
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.snowdevicemanagement.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Information about the task assigned to one or many devices. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/snow-device-management-2021-08-04/TaskSummary" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class TaskSummary implements Serializable, Cloneable, StructuredPojo { /** * <p> * The state of the task assigned to one or many devices. * </p> */ private String state; /** * <p> * Optional metadata that you assign to a resource. You can use tags to categorize a resource in different ways, * such as by purpose, owner, or environment. * </p> */ private java.util.Map<String, String> tags; /** * <p> * The Amazon Resource Name (ARN) of the task. * </p> */ private String taskArn; /** * <p> * The task ID. * </p> */ private String taskId; /** * <p> * The state of the task assigned to one or many devices. * </p> * * @param state * The state of the task assigned to one or many devices. * @see TaskState */ public void setState(String state) { this.state = state; } /** * <p> * The state of the task assigned to one or many devices. * </p> * * @return The state of the task assigned to one or many devices. 
* @see TaskState */ public String getState() { return this.state; } /** * <p> * The state of the task assigned to one or many devices. * </p> * * @param state * The state of the task assigned to one or many devices. * @return Returns a reference to this object so that method calls can be chained together. * @see TaskState */ public TaskSummary withState(String state) { setState(state); return this; } /** * <p> * The state of the task assigned to one or many devices. * </p> * * @param state * The state of the task assigned to one or many devices. * @return Returns a reference to this object so that method calls can be chained together. * @see TaskState */ public TaskSummary withState(TaskState state) { this.state = state.toString(); return this; } /** * <p> * Optional metadata that you assign to a resource. You can use tags to categorize a resource in different ways, * such as by purpose, owner, or environment. * </p> * * @return Optional metadata that you assign to a resource. You can use tags to categorize a resource in different * ways, such as by purpose, owner, or environment. */ public java.util.Map<String, String> getTags() { return tags; } /** * <p> * Optional metadata that you assign to a resource. You can use tags to categorize a resource in different ways, * such as by purpose, owner, or environment. * </p> * * @param tags * Optional metadata that you assign to a resource. You can use tags to categorize a resource in different * ways, such as by purpose, owner, or environment. */ public void setTags(java.util.Map<String, String> tags) { this.tags = tags; } /** * <p> * Optional metadata that you assign to a resource. You can use tags to categorize a resource in different ways, * such as by purpose, owner, or environment. * </p> * * @param tags * Optional metadata that you assign to a resource. You can use tags to categorize a resource in different * ways, such as by purpose, owner, or environment. 
* @return Returns a reference to this object so that method calls can be chained together. */ public TaskSummary withTags(java.util.Map<String, String> tags) { setTags(tags); return this; } /** * Add a single Tags entry * * @see TaskSummary#withTags * @returns a reference to this object so that method calls can be chained together. */ public TaskSummary addTagsEntry(String key, String value) { if (null == this.tags) { this.tags = new java.util.HashMap<String, String>(); } if (this.tags.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.tags.put(key, value); return this; } /** * Removes all the entries added into Tags. * * @return Returns a reference to this object so that method calls can be chained together. */ public TaskSummary clearTagsEntries() { this.tags = null; return this; } /** * <p> * The Amazon Resource Name (ARN) of the task. * </p> * * @param taskArn * The Amazon Resource Name (ARN) of the task. */ public void setTaskArn(String taskArn) { this.taskArn = taskArn; } /** * <p> * The Amazon Resource Name (ARN) of the task. * </p> * * @return The Amazon Resource Name (ARN) of the task. */ public String getTaskArn() { return this.taskArn; } /** * <p> * The Amazon Resource Name (ARN) of the task. * </p> * * @param taskArn * The Amazon Resource Name (ARN) of the task. * @return Returns a reference to this object so that method calls can be chained together. */ public TaskSummary withTaskArn(String taskArn) { setTaskArn(taskArn); return this; } /** * <p> * The task ID. * </p> * * @param taskId * The task ID. */ public void setTaskId(String taskId) { this.taskId = taskId; } /** * <p> * The task ID. * </p> * * @return The task ID. */ public String getTaskId() { return this.taskId; } /** * <p> * The task ID. * </p> * * @param taskId * The task ID. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public TaskSummary withTaskId(String taskId) { setTaskId(taskId); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getState() != null) sb.append("State: ").append(getState()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()).append(","); if (getTaskArn() != null) sb.append("TaskArn: ").append(getTaskArn()).append(","); if (getTaskId() != null) sb.append("TaskId: ").append(getTaskId()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof TaskSummary == false) return false; TaskSummary other = (TaskSummary) obj; if (other.getState() == null ^ this.getState() == null) return false; if (other.getState() != null && other.getState().equals(this.getState()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; if (other.getTaskArn() == null ^ this.getTaskArn() == null) return false; if (other.getTaskArn() != null && other.getTaskArn().equals(this.getTaskArn()) == false) return false; if (other.getTaskId() == null ^ this.getTaskId() == null) return false; if (other.getTaskId() != null && other.getTaskId().equals(this.getTaskId()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode()); hashCode = prime * hashCode + ((getTaskArn() == null) ? 
0 : getTaskArn().hashCode()); hashCode = prime * hashCode + ((getTaskId() == null) ? 0 : getTaskId().hashCode()); return hashCode; } @Override public TaskSummary clone() { try { return (TaskSummary) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.snowdevicemanagement.model.transform.TaskSummaryMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/* * Copyright 2015 The Netty Project * * The Netty Project licenses this file to you under the Apache License, version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.netty.microbench.channel; import static io.netty.util.internal.ObjectUtil.checkNotNull; import io.netty.buffer.ByteBufAllocator; import io.netty.channel.Channel; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelHandler; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelHandlerInvoker; import io.netty.channel.ChannelPipeline; import io.netty.channel.ChannelProgressivePromise; import io.netty.channel.ChannelPromise; import io.netty.channel.EventLoop; import io.netty.channel.embedded.EmbeddedChannel; import io.netty.util.Attribute; import io.netty.util.AttributeKey; import io.netty.util.ReferenceCounted; import io.netty.util.concurrent.EventExecutor; import java.net.SocketAddress; public abstract class EmbeddedChannelWriteReleaseHandlerContext implements ChannelHandlerContext { private static final String HANDLER_NAME = "microbench-delegator-ctx"; private final EventLoop eventLoop; private final Channel channel; private final ByteBufAllocator alloc; private final ChannelHandler handler; private SocketAddress localAddress; public EmbeddedChannelWriteReleaseHandlerContext(ByteBufAllocator alloc, ChannelHandler handler) { this(alloc, handler, new EmbeddedChannel()); } public EmbeddedChannelWriteReleaseHandlerContext(ByteBufAllocator alloc, ChannelHandler handler, EmbeddedChannel channel) { this.alloc = 
checkNotNull(alloc, "alloc"); this.channel = checkNotNull(channel, "channel"); this.handler = checkNotNull(handler, "handler"); this.eventLoop = checkNotNull(channel.eventLoop(), "eventLoop"); } protected abstract void handleException(Throwable t); @Override public <T> Attribute<T> attr(AttributeKey<T> key) { return null; } @Override public <T> boolean hasAttr(AttributeKey<T> key) { return false; } @Override public Channel channel() { return channel; } @Override public EventExecutor executor() { return eventLoop; } @Override public ChannelHandlerInvoker invoker() { return eventLoop.asInvoker(); } @Override public String name() { return HANDLER_NAME; } @Override public ChannelHandler handler() { return handler; } @Override public boolean isRemoved() { return false; } @Override public ChannelHandlerContext fireChannelRegistered() { return this; } @Override public ChannelHandlerContext fireChannelUnregistered() { return this; } @Override public ChannelHandlerContext fireChannelActive() { return this; } @Override public ChannelHandlerContext fireChannelInactive() { return this; } @Override public ChannelHandlerContext fireExceptionCaught(Throwable cause) { try { handler().exceptionCaught(this, cause); } catch (Exception e) { handleException(e); } return null; } @Override public ChannelHandlerContext fireUserEventTriggered(Object event) { return this; } @Override public ChannelHandlerContext fireChannelRead(Object msg) { return this; } @Override public ChannelHandlerContext fireChannelReadComplete() { return this; } @Override public ChannelHandlerContext fireChannelWritabilityChanged() { return this; } @Override public ChannelFuture bind(SocketAddress localAddress) { return bind(localAddress, newPromise()); } @Override public ChannelFuture connect(SocketAddress remoteAddress) { return connect(remoteAddress, newPromise()); } @Override public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress) { return connect(remoteAddress, localAddress, 
newPromise()); } @Override public ChannelFuture disconnect() { return disconnect(newPromise()); } @Override public ChannelFuture close() { return close(newPromise()); } @Override public ChannelFuture deregister() { return deregister(newPromise()); } @Override public ChannelFuture bind(SocketAddress localAddress, ChannelPromise promise) { try { channel().bind(localAddress, promise); this.localAddress = localAddress; } catch (Exception e) { promise.setFailure(e); handleException(e); } return promise; } @Override public ChannelFuture connect(SocketAddress remoteAddress, ChannelPromise promise) { try { channel().connect(remoteAddress, localAddress, promise); } catch (Exception e) { promise.setFailure(e); handleException(e); } return promise; } @Override public ChannelFuture connect(SocketAddress remoteAddress, SocketAddress localAddress, ChannelPromise promise) { try { channel().connect(remoteAddress, localAddress, promise); } catch (Exception e) { promise.setFailure(e); handleException(e); } return promise; } @Override public ChannelFuture disconnect(ChannelPromise promise) { try { channel().disconnect(promise); } catch (Exception e) { promise.setFailure(e); handleException(e); } return promise; } @Override public ChannelFuture close(ChannelPromise promise) { try { channel().close(promise); } catch (Exception e) { promise.setFailure(e); handleException(e); } return promise; } @Override public ChannelFuture deregister(ChannelPromise promise) { try { channel().deregister(promise); } catch (Exception e) { promise.setFailure(e); handleException(e); } return promise; } @Override public ChannelHandlerContext read() { try { channel().read(); } catch (Exception e) { handleException(e); } return this; } @Override public ChannelFuture write(Object msg) { return write(msg, newPromise()); } @Override public ChannelFuture write(Object msg, ChannelPromise promise) { try { if (msg instanceof ReferenceCounted) { ((ReferenceCounted) msg).release(); promise.setSuccess(); } else { 
channel().write(msg, promise); } } catch (Exception e) { promise.setFailure(e); handleException(e); } return promise; } @Override public ChannelHandlerContext flush() { channel().flush(); return this; } @Override public ChannelFuture writeAndFlush(Object msg, ChannelPromise promise) { return channel().writeAndFlush(msg, promise); } @Override public ChannelFuture writeAndFlush(Object msg) { return writeAndFlush(msg, newPromise()); } @Override public ChannelPipeline pipeline() { return channel().pipeline(); } @Override public ByteBufAllocator alloc() { return alloc; } @Override public ChannelPromise newPromise() { return channel().newPromise(); } @Override public ChannelProgressivePromise newProgressivePromise() { return channel().newProgressivePromise(); } @Override public ChannelFuture newSucceededFuture() { return channel().newSucceededFuture(); } @Override public ChannelFuture newFailedFuture(Throwable cause) { return channel().newFailedFuture(cause); } @Override public ChannelPromise voidPromise() { return channel().voidPromise(); } }
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2013-2014 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.corecomponents;

import java.awt.Cursor;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.dnd.DnDConstants;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.beans.PropertyChangeEvent;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import javax.swing.Action;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.TableColumnModelEvent;
import javax.swing.event.TableColumnModelListener;
import org.netbeans.swing.outline.DefaultOutlineModel;
import org.openide.explorer.ExplorerManager;
import org.openide.explorer.view.OutlineView;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Node.Property;
import org.openide.nodes.Node.PropertySet;
import org.openide.nodes.NodeEvent;
import org.openide.nodes.NodeListener;
import org.openide.nodes.NodeMemberEvent;
import org.openide.nodes.NodeReorderEvent;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.NbPreferences;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataResultViewer;
import org.sleuthkit.autopsy.coreutils.Logger;

/**
 * DataResult sortable table viewer
 */
// @@@ Restore implementation of DataResultViewerTable as a DataResultViewer
// service provider when DataResultViewers can be made compatible with node
// multiple selection actions.
//@ServiceProvider(service = DataResultViewer.class)
public class DataResultViewerTable extends AbstractDataResultViewer {

    // Localized header label for the first (tree/name) column of the OutlineView.
    private String firstColumnLabel = NbBundle.getMessage(DataResultViewerTable.class, "DataResultViewerTable.firstColLbl");
    // Accumulated property columns, in current display order. LinkedHashSet so
    // insertion order is preserved and duplicates collapse; the column-moved
    // listener in initialize() rebuilds this set to track user reordering.
    private Set<Property<?>> propertiesAcc = new LinkedHashSet<>();
    private final DummyNodeListener dummyNodeListener = new DummyNodeListener();
    private static final String DUMMY_NODE_DISPLAY_NAME = NbBundle.getMessage(DataResultViewerTable.class, "DataResultViewerTable.dummyNodeDisplayName");
    // Root node currently shown in the table; used by storeState()/loadState()
    // to persist and restore per-case column ordering.
    private Node currentRoot;

    /**
     * Creates a DataResultViewerTable object that is compatible with node
     * multiple selection actions.
     */
    public DataResultViewerTable(ExplorerManager explorerManager) {
        super(explorerManager);
        initialize();
    }

    /**
     * Creates a DataResultViewerTable object that is NOT compatible with node
     * multiple selection actions.
     */
    public DataResultViewerTable() {
        initialize();
    }

    // One-time UI setup shared by both constructors: configures the OutlineView
    // (no drag & drop, multi-select, hidden root) and installs listeners that keep
    // propertiesAcc in sync with user column reordering and that run a node's
    // preferred action on double-click.
    private void initialize() {
        initComponents();

        OutlineView ov = ((OutlineView) this.tableScrollPanel);
        ov.setAllowedDragActions(DnDConstants.ACTION_NONE);

        ov.getOutline().setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);

        // don't show the root node
        ov.getOutline().setRootVisible(false);
        ov.getOutline().setDragEnabled(false);

        ov.getOutline().getColumnModel().addColumnModelListener(new TableColumnModelListener() {
            @Override
            public void columnAdded(TableColumnModelEvent e) {}
            @Override
            public void columnRemoved(TableColumnModelEvent e) {}
            @Override
            public void columnMarginChanged(ChangeEvent e) {}
            @Override
            public void columnSelectionChanged(ListSelectionEvent e) {}

            @Override
            public void columnMoved(TableColumnModelEvent e) {
                // change the order of the column in the array/hashset
                List<Node.Property<?>> props = new ArrayList<>(propertiesAcc);
                Node.Property<?> prop = props.remove(e.getFromIndex());
                props.add(e.getToIndex(), prop);

                // Rebuild the set in the new order (a Set cannot be reordered in place).
                propertiesAcc.clear();
                for (int j = 0; j < props.size(); ++j) {
                    propertiesAcc.add(props.get(j));
                }
            }
        });

        /**
         * Add mouse listener to perform action on double-click
         * A somewhat hacky way to perform action even if the column clicked
         * is not the first one.
         */
        ov.getOutline().addMouseListener(new MouseListener() {
            @Override
            public void mousePressed(MouseEvent e) {}
            @Override
            public void mouseReleased(MouseEvent e) {}
            @Override
            public void mouseEntered(MouseEvent e) {}
            @Override
            public void mouseExited(MouseEvent e) {}

            @Override
            public void mouseClicked(MouseEvent e) {
                if(e.getClickCount() == 2) {
                    Node[] nodes = DataResultViewerTable.this.em.getSelectedNodes();
                    for(Node node : nodes) {
                        Action action = node.getPreferredAction();
                        if(action != null)
                            action.actionPerformed(null);
                    }
                }
            }
        });
    }

    /**
     * Expand node
     *
     * @param n Node to expand
     */
    @Override
    public void expandNode(Node n) {
        super.expandNode(n);

        if (this.tableScrollPanel != null) {
            OutlineView ov = ((OutlineView) this.tableScrollPanel);
            ov.expandNode(n);
        }
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        tableScrollPanel = new OutlineView(this.firstColumnLabel);

        //new TreeTableView()
        tableScrollPanel.addComponentListener(new java.awt.event.ComponentAdapter() {
            public void componentResized(java.awt.event.ComponentEvent evt) {
                tableScrollPanelComponentResized(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(tableScrollPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 691, Short.MAX_VALUE)
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(tableScrollPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 366, Short.MAX_VALUE)
        );
    }// </editor-fold>//GEN-END:initComponents

    private void tableScrollPanelComponentResized(java.awt.event.ComponentEvent evt) {//GEN-FIRST:event_tableScrollPanelComponentResized
    }//GEN-LAST:event_tableScrollPanelComponentResized
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JScrollPane tableScrollPanel;
    // End of variables declaration//GEN-END:variables

    /**
     * Gets regular Bean property set properties from first child of Node.
     *
     * @param parent Node with at least one child to get properties from
     *
     * @return Properties,
     */
    private Node.Property<?>[] getChildPropertyHeaders(Node parent) {
        Node firstChild = parent.getChildren().getNodeAt(0);

        if (firstChild == null) {
            throw new IllegalArgumentException(
                    NbBundle.getMessage(this.getClass(), "DataResultViewerTable.illegalArgExc.noChildFromParent"));
        } else {
            for (PropertySet ps : firstChild.getPropertySets()) {
                // Only the standard "properties" sheet is used for column headers.
                if (ps.getName().equals(Sheet.PROPERTIES)) {
                    return ps.getProperties();
                }
            }

            throw new IllegalArgumentException(
                    NbBundle.getMessage(this.getClass(), "DataResultViewerTable.illegalArgExc.childWithoutPropertySet"));
        }
    }

    /**
     * Gets regular Bean property set properties from all first children and,
     * recursively, subchildren of Node. Note: won't work out the box for lazy
     * load - you need to set all children props for the parent by hand
     *
     * @param parent Node with at least one child to get properties from
     *
     * @return Properties,
     */
    @SuppressWarnings("rawtypes")
    private Node.Property[] getAllChildPropertyHeaders(Node parent) {
        Node firstChild = parent.getChildren().getNodeAt(0);

        Property[] properties = null;

        if (firstChild == null) {
            throw new IllegalArgumentException(
                    NbBundle.getMessage(this.getClass(), "DataResultViewerTable.illegalArgExc.noChildFromParent"));
        } else {
            Set<Property> allProperties = new LinkedHashSet<>();
            // Walks only the first-child chain (not all siblings), collecting
            // every property encountered along the way.
            while (firstChild != null) {
                for (PropertySet ps : firstChild.getPropertySets()) {
                    final Property[] props = ps.getProperties();
                    final int propsNum = props.length;
                    for (int i = 0; i < propsNum; ++i) {
                        allProperties.add(props[i]);
                    }
                }
                firstChild = firstChild.getChildren().getNodeAt(0);
            }

            properties = allProperties.toArray(new Property<?>[0]);
        }
        return properties;
    }

    /**
     * Gets regular Bean property set properties from all children and,
     * recursively, subchildren of Node. Note: won't work out the box for lazy
     * load - you need to set all children props for the parent by hand
     *
     * @param parent Node with at least one child to get properties from
     * @param rows max number of rows to retrieve properties for (can be used
     * for memory optimization)
     */
    private void getAllChildPropertyHeadersRec(Node parent, int rows) {
        Children children = parent.getChildren();
        int childCount = 0;
        for (Node child : children.getNodes()) {
            if (++childCount > rows) {
                return;
            }
            for (PropertySet ps : child.getPropertySets()) {
                final Property<?>[] props = ps.getProperties();
                final int propsNum = props.length;
                for (int j = 0; j < propsNum; ++j) {
                    // Accumulates into the shared field rather than returning a value.
                    propertiesAcc.add(props[j]);
                }
            }
            getAllChildPropertyHeadersRec(child, rows);
        }
    }

    @Override
    public boolean isSupported(Node selectedNode) {
        // This viewer can display any node.
        return true;
    }

    /**
     * Thread note: Make sure to run this in the EDT as it causes GUI
     * operations.
     *
     * @param selectedNode
     */
    @Override
    public void setNode(Node selectedNode) {
        // change the cursor to "waiting cursor" for this operation
        this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
        try {
            boolean hasChildren = false;

            if (selectedNode != null) {
                // @@@ This just did a DB round trip to get the count and the results were not saved...
                hasChildren = selectedNode.getChildren().getNodesCount() > 0;
            }

            // Detach the dummy-node listener from the previous root before swapping roots.
            Node oldNode = this.em.getRootContext();
            if (oldNode != null) {
                oldNode.removeNodeListener(dummyNodeListener);
            }

            // if there's no selection node, do nothing
            if (hasChildren) {
                Node root = selectedNode;
                dummyNodeListener.reset();
                root.addNodeListener(dummyNodeListener);
                setupTable(root);
            } else {
                final OutlineView ov = ((OutlineView) this.tableScrollPanel);
                Node emptyNode = new AbstractNode(Children.LEAF);
                em.setRootContext(emptyNode); // make empty node
                ov.getOutline().setAutoResizeMode(JTable.AUTO_RESIZE_ALL_COLUMNS);
                ov.setPropertyColumns(); // set the empty property header
            }
        } finally {
            this.setCursor(null);
        }
    }

    /**
     * Create Column Headers based on the Content represented by the Nodes in
     * the table.
     *
     * @param root The parent Node of the ContentNodes
     */
    private void setupTable(final Node root) {

        em.setRootContext(root);
        final OutlineView ov = ((OutlineView) this.tableScrollPanel);

        if (ov == null) {
            return;
        }

        // Persist column order of the outgoing root before replacing it.
        storeState();

        // set the new root as current
        currentRoot = root;
        List<Node.Property<?>> props = loadState();

        /*
         * OutlineView makes the first column be the result of
         * node.getDisplayName with the icon. This duplicates our first column,
         * which is the file name, etc. So, pop that property off the list, but
         * use its display name as the header for the column so that the header
         * can change depending on the type of data being displayed.
         *
         * NOTE: This assumes that the first property is always the one tha
         * duplicates getDisplayName(). This seems like a big assumption and
         * could be made more robust.
         */
        if (props.size() > 0) {
            Node.Property<?> prop = props.remove(0);
            ((DefaultOutlineModel) ov.getOutline().getOutlineModel()).setNodesColumnLabel(prop.getDisplayName());
        }

        // Get the columns setup with respect to names and sortability
        // OutlineView.setPropertyColumns takes alternating (name, displayName) pairs.
        String[] propStrings = new String[props.size() * 2];
        for (int i = 0; i < props.size(); i++) {
            props.get(i).setValue("ComparableColumnTTV", Boolean.TRUE); //NON-NLS
            //First property column is sorted initially
            if (i == 0) {
                props.get(i).setValue("TreeColumnTTV", Boolean.TRUE); // Identifies special property representing first (tree) column. NON-NLS
                props.get(i).setValue("SortingColumnTTV", Boolean.TRUE); // TreeTableView should be initially sorted by this property column. NON-NLS
            }
            propStrings[2 * i] = props.get(i).getName();
            propStrings[2 * i + 1] = props.get(i).getDisplayName();
        }

        ov.setPropertyColumns(propStrings);

        // show the horizontal scroll panel and show all the content & header
        int totalColumns = props.size();

        //int scrollWidth = ttv.getWidth();
        int margin = 4;
        int startColumn = 1;

        // If there is only one column (which was removed from props above)
        // Just let the table resize itself.
        ov.getOutline().setAutoResizeMode((props.size() > 0) ? JTable.AUTO_RESIZE_OFF : JTable.AUTO_RESIZE_ALL_COLUMNS);

        // get first 100 rows values for the table
        Object[][] content;
        content = getRowValues(root, 100);

        if (content != null) {
            // get the fontmetrics
            final Graphics graphics = ov.getGraphics();
            if (graphics != null) {
                final FontMetrics metrics = graphics.getFontMetrics();

                // for the "Name" column
                int nodeColWidth = Math.min(getMaxColumnWidth(0, metrics, margin, 40, firstColumnLabel, content), 250); // Note: 40 is the width of the icon + node lines. Change this value if those values change!
                ov.getOutline().getColumnModel().getColumn(0).setPreferredWidth(nodeColWidth);

                // get the max for each other column
                // Column 0 is the tree column; property columns start at index 1
                // and run through totalColumns inclusive.
                for (int colIndex = startColumn; colIndex <= totalColumns; colIndex++) {
                    int colWidth = Math.min(getMaxColumnWidth(colIndex, metrics, margin, 8, props, content), 350);
                    ov.getOutline().getColumnModel().getColumn(colIndex).setPreferredWidth(colWidth);
                }
            }

            // if there's no content just auto resize all columns
            if (content.length <= 0) {
                // turn on the auto resize
                ov.getOutline().setAutoResizeMode(JTable.AUTO_RESIZE_ALL_COLUMNS);
            }
        }
    }

    // Store the state of current root Node.
    // Persists the current column order to NbPreferences, keyed per
    // case/item-type/property (see getUniqueColName). Only TableFilterNode
    // roots carry an item type, so other roots are skipped.
    private void storeState() {
        if(currentRoot == null || propertiesAcc.isEmpty())
            return;

        TableFilterNode tfn;
        if(currentRoot instanceof TableFilterNode)
            tfn = (TableFilterNode) currentRoot;
        else
            return;

        List<Node.Property<?>> props = new ArrayList<>(propertiesAcc);
        for (int i = 0; i < props.size(); i++) {
            Property<?> prop = props.get(i);
            NbPreferences.forModule(this.getClass()).put(getUniqueColName(prop, tfn.getItemType()), String.valueOf(i));
        }
    }

    // Load the state of current root Node if exists.
    // Rebuilds propertiesAcc from the (up to 100-row) children of currentRoot,
    // then reorders it according to any column positions previously saved by
    // storeState(). Returns the ordered property list.
    private List<Node.Property<?>> loadState() {
        propertiesAcc.clear();
        this.getAllChildPropertyHeadersRec(currentRoot, 100);
        List<Node.Property<?>> props = new ArrayList<>(propertiesAcc);

        // If node is not table filter node, use default order for columns
        TableFilterNode tfn;
        if(currentRoot instanceof TableFilterNode) {
            tfn = (TableFilterNode) currentRoot;
        } else {
            Logger.getLogger(DataResultViewerTable.class.getName()).log(Level.INFO,
                    "Node {0} is not TableFilterNode, columns are going to be in default order", currentRoot.getName());
            return props;
        }

        List<Node.Property<?>> orderedProps = new ArrayList<>(propertiesAcc);
        for (Property<?> prop : props) {
            Integer value = Integer.valueOf(NbPreferences.forModule(this.getClass()).get(getUniqueColName(prop, tfn.getItemType()), "-1"));
            if (value >= 0) {
                /**
                 * The original contents of orderedProps do not matter when setting the new ordered values. The reason
                 * we copy propertiesAcc into it first is to give it the currect size so we can set() in any index.
                 */
                orderedProps.set(value, prop);
            }
        }
        propertiesAcc.clear();
        for (Property<?> prop : orderedProps) {
            propertiesAcc.add(prop);
        }
        return orderedProps;
    }

    // Get unique name for node and it's property.
    // Key format: <caseName>.<itemType>.<sanitizedPropName>.columnOrder
    private String getUniqueColName(Property<?> prop, String type) {
        return Case.getCurrentCase().getName() + "." + type + "."
                + prop.getName().replaceAll("[^a-zA-Z0-9_]", "") + ".columnOrder";
    }

    // Populate a two-dimensional array with rows of property values for up
    // to maxRows children of the node passed in.
    private static Object[][] getRowValues(Node node, int maxRows) {
        int numRows = Math.min(maxRows, node.getChildren().getNodesCount());
        Object[][] rowValues = new Object[numRows][];
        int rowCount = 0;
        for (Node child : node.getChildren().getNodes()) {
            if (rowCount >= maxRows) {
                break;
            }
            // BC: I got this once, I think it was because the table
            // refreshed while we were in this method
            // could be better synchronized.  Or it was from
            // the lazy nodes updating...  Didn't have time
            // to fully debug it.
            if (rowCount > numRows) {
                break;
            }
            PropertySet[] propertySets = child.getPropertySets();
            if (propertySets.length > 0) {
                Property<?>[] properties = propertySets[0].getProperties();
                rowValues[rowCount] = new Object[properties.length];

                for (int j = 0; j < properties.length; ++j) {
                    try {
                        rowValues[rowCount][j] = properties[j].getValue();
                    } catch (IllegalAccessException | InvocationTargetException ignore) {
                        rowValues[rowCount][j] = "n/a"; //NON-NLS
                    }
                }
            }
            // NOTE(review): if propertySets is empty, rowValues[rowCount] stays null
            // for this row — consumers must null-check each row (getMaxColumnWidth does).
            ++rowCount;
        }
        return rowValues;
    }

    @Override
    public String getTitle() {
        return NbBundle.getMessage(this.getClass(), "DataResultViewerTable.title");
    }

    @Override
    public DataResultViewer createInstance() {
        return new DataResultViewerTable();
    }

    /**
     * Gets the max width of the column from the given index, header, and table.
     *
     * @param index the index of the column on the table / header
     * @param metrics the font metrics that this component use
     * @param margin the left/right margin of the column
     * @param padding the left/right padding of the column
     * @param header the property headers of the table
     * @param table the object table
     *
     * @return max the maximum width of the column
     */
    @SuppressWarnings("rawtypes")
    private int getMaxColumnWidth(int index, FontMetrics metrics, int margin, int padding, List<Node.Property<?>> header,
            Object[][] table) {
        // set the tree (the node / names column) width
        // index is a table column index (tree column at 0), so the matching
        // property header lives at index - 1.
        String headerName = header.get(index - 1).getDisplayName();
        return getMaxColumnWidth(index, metrics, margin, padding, headerName, table);
    }

    /**
     * Gets the max width of the column from the given index, header, and table.
     *
     * @param index the index of the column on the table / header
     * @param metrics the font metrics that this component use
     * @param margin the left/right margin of the column
     * @param padding the left/right padding of the column
     * @param header the column header for the comparison
     * @param table the object table
     *
     * @return max the maximum width of the column
     */
    private synchronized int getMaxColumnWidth(int index, FontMetrics metrics, int margin, int padding, String header,
            Object[][] table) {
        // set the tree (the node / names column) width
        String headerName = header;
        int headerWidth = metrics.stringWidth(headerName); // length of the header

        int colWidth = 0;

        // Get maximum width of column data
        for (int i = 0; i < table.length; i++) {
            if (table[i] == null || index >= table[i].length) {
                continue;
            }
            // NOTE(review): table[i][index] may be null if Property.getValue()
            // returned null in getRowValues(), which would NPE here — TODO confirm
            // whether upstream guarantees non-null cell values.
            String test = table[i][index].toString();
            colWidth = Math.max(colWidth, metrics.stringWidth(test));
        }

        colWidth += padding; // add the padding on the most left gap
        headerWidth += 8; // add the padding to the header (change this value if the header padding value is changed)

        // Set the width
        int width = Math.max(headerWidth, colWidth);
        width += 2 * margin; // Add margin

        return width;
    }

    @Override
    public void clearComponent() {
        // Drop the table component entirely; this viewer instance is done.
        this.tableScrollPanel.removeAll();
        this.tableScrollPanel = null;

        super.clearComponent();
    }

    // Listens to the (possibly lazily-populated) root node and builds the table
    // the first time real (non-dummy placeholder) children arrive, making sure
    // setupTable runs on the EDT.
    private class DummyNodeListener implements NodeListener {

        // True until real children have been seen; volatile because childrenAdded
        // may be delivered off the EDT.
        private volatile boolean load = true;

        public void reset() {
            load = true;
        }

        @Override
        public void childrenAdded(final NodeMemberEvent nme) {
            Node[] delta = nme.getDelta();
            if (load && containsReal(delta)) {
                load = false;
                if (SwingUtilities.isEventDispatchThread()) {
                    setupTable(nme.getNode());
                } else {
                    SwingUtilities.invokeLater(new Runnable() {
                        @Override
                        public void run() {
                            setupTable(nme.getNode());
                        }
                    });
                }
            }
        }

        // A delta is "real" if any node is not the placeholder dummy node.
        private boolean containsReal(Node[] delta) {
            for (Node n : delta) {
                if (!n.getDisplayName().equals(DUMMY_NODE_DISPLAY_NAME)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public void childrenRemoved(NodeMemberEvent nme) {
        }

        @Override
        public void childrenReordered(NodeReorderEvent nre) {
        }

        @Override
        public void nodeDestroyed(NodeEvent ne) {
        }

        @Override
        public void propertyChange(PropertyChangeEvent evt) {
        }
    }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.intellij.lang.regexp.intention; import com.intellij.ide.util.PropertiesComponent; import com.intellij.lang.Language; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.CustomShortcutSet; import com.intellij.openapi.application.*; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.event.DocumentEvent; import com.intellij.openapi.editor.event.DocumentListener; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.fileTypes.LanguageFileType; import com.intellij.openapi.fileTypes.PlainTextFileType; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.Balloon; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiLanguageInjectionHost; import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil; import com.intellij.ui.EditorTextField; import com.intellij.ui.JBColor; import com.intellij.ui.components.JBLabel; import com.intellij.ui.scale.JBUIScale; import com.intellij.util.Alarm; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import org.intellij.lang.regexp.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.TestOnly; import javax.swing.*; import java.awt.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author Konstantin Bulenkov */ public class CheckRegExpForm { public static final 
Key<Boolean> CHECK_REG_EXP_EDITOR = Key.create("CHECK_REG_EXP_EDITOR");

// PropertiesComponent key under which the last-used sample text is persisted.
private static final String LAST_EDITED_REGEXP = "last.edited.regexp";

// Sample-field background: green-ish when the regexp matches, red-ish when it
// does not (light / dark theme variants).
private static final JBColor BACKGROUND_COLOR_MATCH = new JBColor(0xe7fadb, 0x445542);
private static final JBColor BACKGROUND_COLOR_NOMATCH = new JBColor(0xffb1a0, 0x6e2b28);

private final PsiFile myRegexpFile;
private EditorTextField mySampleText;  // multi-line text the regexp is tested against
private EditorTextField myRegExp;      // editor bound to the regexp file's document
private JPanel myRootPanel;
private JBLabel myMessage;             // NOTE(review): presumably bound by the UI designer form — not created here
private Project myProject;

public CheckRegExpForm(@NotNull PsiFile regexpFile) {
    myRegexpFile = regexpFile;
}

/**
 * Creates the custom UI components (invoked by the UI-designer runtime): the
 * regexp editor backed by the regexp file's document, the sample-text field,
 * and the root panel that wires document listeners, focus shortcuts, and the
 * background re-match updater.
 */
private void createUIComponents() {
    myProject = myRegexpFile.getProject();
    Document document = PsiDocumentManager.getInstance(myProject).getDocument(myRegexpFile);

    final Language language = myRegexpFile.getLanguage();
    final LanguageFileType fileType;
    if (language instanceof RegExpLanguage) {
        fileType = RegExpLanguage.INSTANCE.getAssociatedFileType();
    }
    else {
        // for correct syntax highlighting
        fileType = new RegExpFileType(language);
    }
    myRegExp = new EditorTextField(document, myProject, fileType, false, false) {
        @Override
        protected EditorEx createEditor() {
            final EditorEx editor = super.createEditor();
            // Mark the editor so regexp-aware features can recognize it.
            editor.putUserData(CHECK_REG_EXP_EDITOR, Boolean.TRUE);
            editor.setEmbeddedIntoDialogWrapper(true);
            return editor;
        }

        @Override
        protected void updateBorder(@NotNull EditorEx editor) {
            setupBorder(editor);
        }
    };

    // Restore the sample text from the previous session, if any.
    final String sampleText = PropertiesComponent.getInstance(myProject).getValue(LAST_EDITED_REGEXP, "Sample Text");
    mySampleText = new EditorTextField(sampleText, myProject, PlainTextFileType.INSTANCE) {
        @Override
        protected EditorEx createEditor() {
            final EditorEx editor = super.createEditor();
            editor.setEmbeddedIntoDialogWrapper(true);
            return editor;
        }

        @Override
        protected void updateBorder(@NotNull EditorEx editor) {
            setupBorder(editor);
        }
    };
    mySampleText.setOneLineMode(false);

    int preferredWidth = Math.max(JBUIScale.scale(250), myRegExp.getPreferredSize().width);
    myRegExp.setPreferredWidth(preferredWidth);
    mySampleText.setPreferredWidth(preferredWidth);

    myRootPanel = new JPanel(new BorderLayout()) {
        Disposable disposable;
        Alarm updater;  // debounces re-matching onto a pooled thread

        @Override
        public void addNotify() {
            super.addNotify();
            disposable = Disposer.newDisposable();

            IdeFocusManager.getGlobalInstance().requestFocus(mySampleText, true);

            // TAB / shift-TAB toggle focus between the two fields in both directions.
            registerFocusShortcut(myRegExp, "shift TAB", mySampleText);
            registerFocusShortcut(myRegExp, "TAB", mySampleText);
            registerFocusShortcut(mySampleText, "shift TAB", myRegExp);
            registerFocusShortcut(mySampleText, "TAB", myRegExp);

            updater = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, disposable);
            DocumentListener documentListener = new DocumentListener() {
                @Override
                public void documentChanged(@NotNull DocumentEvent e) {
                    update();
                }
            };
            myRegExp.addDocumentListener(documentListener);
            mySampleText.addDocumentListener(documentListener);

            // Show the initial match state immediately.
            update();
            mySampleText.selectAll();
        }

        private void registerFocusShortcut(JComponent source, String shortcut, EditorTextField target) {
            AnAction action = new AnAction() {
                @Override
                public void actionPerformed(@NotNull AnActionEvent e) {
                    IdeFocusManager.findInstance().requestFocus(target.getFocusTarget(), true);
                }
            };
            action.registerCustomShortcutSet(CustomShortcutSet.fromString(shortcut), source);
        }

        private void update() {
            // Cancel any pending re-match so only the latest edit is evaluated;
            // matching runs off the EDT, the balloon update hops back onto it.
            updater.cancelAllRequests();
            if (!updater.isDisposed()) {
                updater.addRequest(() -> {
                    final RegExpMatchResult result = isMatchingText(myRegexpFile, myRegExp.getText(), mySampleText.getText());
                    ApplicationManager.getApplication().invokeLater(() -> setBalloonState(result),
                                                                    ModalityState.any(), __ -> updater.isDisposed());
                }, 0);
            }
        }

        @Override
        public void removeNotify() {
            super.removeNotify();
            Disposer.dispose(disposable);
            // Persist the sample text for the next invocation.
            PropertiesComponent.getInstance(myProject).setValue(LAST_EDITED_REGEXP, mySampleText.getText());
        }
    };
    myRootPanel.setBorder(JBUI.Borders.empty(UIUtil.DEFAULT_VGAP, UIUtil.DEFAULT_HGAP));
}

// Reflects a match result in the UI: recolors the sample field and updates the
// status label. Must run on the EDT.
void setBalloonState(RegExpMatchResult result) {
    mySampleText.setBackground(result ==
RegExpMatchResult.MATCHES ? BACKGROUND_COLOR_MATCH : BACKGROUND_COLOR_NOMATCH);
    switch (result) {
        case MATCHES:
            myMessage.setText("Matches!");
            break;
        case NO_MATCH:
            myMessage.setText("No match");
            break;
        case TIMEOUT:
            myMessage.setText("Pattern is too complex");
            break;
        case BAD_REGEXP:
            myMessage.setText("Bad pattern");
            break;
        case INCOMPLETE:
            myMessage.setText("More input expected");
            break;
        default:
            throw new AssertionError();
    }
    myRootPanel.revalidate();
    // Resize the hosting balloon (if any) to fit the new message.
    Balloon balloon = JBPopupFactory.getInstance().getParentBalloonFor(myRootPanel);
    if (balloon != null && !balloon.isDisposed()) balloon.revalidate();
}

@NotNull
public JComponent getPreferredFocusedComponent() {
    return mySampleText;
}

@NotNull
public JPanel getRootPanel() {
    return myRootPanel;
}

@TestOnly
public static boolean isMatchingTextTest(@NotNull PsiFile regexpFile, @NotNull String sampleText) {
    return isMatchingText(regexpFile, regexpFile.getText(), sampleText) == RegExpMatchResult.MATCHES;
}

/**
 * Matches {@code sampleText} against {@code regexpText}. A language-specific
 * {@link RegExpMatcherProvider} for the host language is preferred; otherwise
 * falls back to {@link java.util.regex.Pattern} with flags contributed by
 * {@link RegExpModifierProvider}s. Matching is bounded (bombed sequence /
 * 1000 ms budget) and a timeout is reported as {@code TIMEOUT}.
 */
static RegExpMatchResult isMatchingText(@NotNull final PsiFile regexpFile, String regexpText, @NotNull String sampleText) {
    final Language regexpFileLanguage = regexpFile.getLanguage();
    final RegExpMatcherProvider matcherProvider = RegExpMatcherProvider.EP.forLanguage(regexpFileLanguage);
    if (matcherProvider != null) {
        // a language-specific regex matcher exists — delegate to it
        final RegExpMatchResult result = ReadAction.compute(() -> {
            final PsiLanguageInjectionHost host = InjectedLanguageUtil.findInjectionHost(regexpFile);
            if (host != null) {
                return matcherProvider.matches(regexpText, regexpFile, host, sampleText, 1000L);
            }
            return null;
        });
        if (result != null) {
            return result;
        }
    }
    // Collect java.util.regex flags implied by the host language (first provider
    // that reports any flags wins).
    final Integer patternFlags = ReadAction.compute(() -> {
        final PsiLanguageInjectionHost host = InjectedLanguageUtil.findInjectionHost(regexpFile);
        int flags = 0;
        if (host != null) {
            for (RegExpModifierProvider provider : RegExpModifierProvider.EP.allForLanguage(host.getLanguage())) {
                flags = provider.getFlags(host, regexpFile);
                if (flags > 0) break;
            }
        }
        return flags;
    });
    try {
        //noinspection MagicConstant
        final Matcher matcher = Pattern.compile(regexpText, patternFlags).matcher(StringUtil.newBombedCharSequence(sampleText, 1000));
        if (matcher.matches()) {
            return RegExpMatchResult.MATCHES;
        }
        else if (matcher.hitEnd()) {
            // the sample is a prefix of a possible match
            return RegExpMatchResult.INCOMPLETE;
        }
        else {
            return RegExpMatchResult.NO_MATCH;
        }
    }
    catch (ProcessCanceledException ignore) {
        // thrown by the bombed char sequence when the time budget is exceeded
        return RegExpMatchResult.TIMEOUT;
    }
    catch (Exception ignore) {}
    // Any other failure (e.g. PatternSyntaxException) means the pattern is invalid.
    return RegExpMatchResult.BAD_REGEXP;
}
}
/** * Copyright 2017 Hortonworks. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package com.hortonworks.streamline.streams.service; import com.codahale.metrics.annotation.Timed; import com.hortonworks.streamline.common.QueryParam; import com.hortonworks.streamline.common.exception.service.exception.request.EntityNotFoundException; import com.hortonworks.streamline.common.util.WSUtils; import com.hortonworks.streamline.streams.catalog.Topology; import com.hortonworks.streamline.streams.catalog.TopologyProcessor; import com.hortonworks.streamline.streams.catalog.service.StreamCatalogService; import com.hortonworks.streamline.streams.security.Roles; import com.hortonworks.streamline.streams.security.SecurityUtil; import com.hortonworks.streamline.streams.security.StreamlineAuthorizer; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.SecurityContext; import javax.ws.rs.core.UriInfo; import java.util.Collection; import java.util.List; import static com.hortonworks.streamline.common.util.WSUtils.buildTopologyIdAndVersionIdAwareQueryParams; import static com.hortonworks.streamline.streams.security.Permission.READ; import static com.hortonworks.streamline.streams.security.Permission.WRITE; import static 
javax.ws.rs.core.Response.Status.CREATED;
import static javax.ws.rs.core.Response.Status.OK;

/**
 * REST resource for the processor components within a StreamlineTopology.
 */
@Path("/v1/catalog")
@Produces(MediaType.APPLICATION_JSON)
public class TopologyProcessorCatalogResource {
    private final StreamlineAuthorizer authorizer;
    private final StreamCatalogService catalogService;

    public TopologyProcessorCatalogResource(StreamlineAuthorizer authorizer, StreamCatalogService catalogService) {
        this.authorizer = authorizer;
        this.catalogService = catalogService;
    }

    /**
     * <p>
     * Lists all the processors in the topology or the ones matching specific query params. For example to
     * list all the processors in the topology,
     * </p>
     * <b>GET /api/v1/catalog/topologies/:TOPOLOGY_ID/processors</b>
     * <p>
     * <pre>
     * {
     *   "responseCode": 1000,
     *   "responseMessage": "Success",
     *   "entities": [{
     *     "id": 1,
     *     "topologyId": 1,
     *     "name": "ParserProcessor",
     *     "config": {
     *       "properties": {
     *         "parallelism": 1
     *       }
     *     },
     *     "type": "PARSER",
     *     "outputStreams": [{stream1 data..}, {stream2 data..}]
     *   }]
     * }
     * </pre>
     */
    @GET
    @Path("/topologies/{topologyId}/processors")
    @Timed
    public Response listTopologyProcessors(@PathParam("topologyId") Long topologyId,
                                           @Context UriInfo uriInfo,
                                           @Context SecurityContext securityContext) throws Exception {
        // No explicit version: resolve and query the topology's current version.
        Long currentVersionId = catalogService.getCurrentVersionId(topologyId);
        return listTopologyProcessors(
                buildTopologyIdAndVersionIdAwareQueryParams(topologyId, currentVersionId, uriInfo),
                topologyId, securityContext);
    }

    @GET
    @Path("/topologies/{topologyId}/versions/{versionId}/processors")
    @Timed
    public Response listTopologyProcessorsForVersion(@PathParam("topologyId") Long topologyId,
                                                     @PathParam("versionId") Long versionId,
                                                     @Context UriInfo uriInfo,
                                                     @Context SecurityContext securityContext) throws Exception {
        return listTopologyProcessors(
                buildTopologyIdAndVersionIdAwareQueryParams(topologyId, versionId, uriInfo),
                topologyId, securityContext);
    }

    // Shared implementation for both list endpoints: enforces READ access on the
    // topology, then returns the matching processors or a 404-style error.
    private Response listTopologyProcessors(List<QueryParam> queryParams, Long topologyId,
                                            SecurityContext securityContext) throws Exception {
        SecurityUtil.checkRoleOrPermissions(authorizer, securityContext, Roles.ROLE_TOPOLOGY_USER,
                Topology.NAMESPACE, topologyId, READ);
        Collection<TopologyProcessor> sources = catalogService.listTopologyProcessors(queryParams);
        if (sources != null) {
            return WSUtils.respondEntities(sources, OK);
        }
        throw EntityNotFoundException.byFilter(queryParams.toString());
    }

    /**
     * <p>
     * Gets the 'CURRENT' version of specific topology processor by Id. For example,
     * </p>
     * <b>GET /api/v1/catalog/topologies/:TOPOLOGY_ID/processors/:PROCESSOR_ID</b>
     * <pre>
     * {
     *   "responseCode": 1000,
     *   "responseMessage": "Success",
     *   "entity": {
     *     "id": 1,
     *     "topologyId": 1,
     *     "name": "ParserProcessor",
     *     "config": {
     *       "properties": {
     *         "parallelism": 1
     *       }
     *     },
     *     "type": "PARSER",
     *     "outputStreams": [{stream1 data..}, {stream2 data..}]
     *   }
     * }
     * </pre>
     */
    @GET
    @Path("/topologies/{topologyId}/processors/{id}")
    @Timed
    public Response getTopologyProcessorById(@PathParam("topologyId") Long topologyId,
                                             @PathParam("id") Long processorId,
                                             @Context SecurityContext securityContext) {
        SecurityUtil.checkRoleOrPermissions(authorizer, securityContext, Roles.ROLE_TOPOLOGY_USER,
                Topology.NAMESPACE, topologyId, READ);
        TopologyProcessor source = catalogService.getTopologyProcessor(topologyId, processorId);
        if (source != null) {
            return WSUtils.respondEntity(source, OK);
        }
        throw EntityNotFoundException.byId(buildMessageForCompositeId(topologyId, processorId));
    }

    @GET
    @Path("/topologies/{topologyId}/versions/{versionId}/processors/{id}")
    @Timed
    public Response getTopologyProcessorByIdAndVersion(@PathParam("topologyId") Long topologyId,
                                                       @PathParam("id") Long processorId,
                                                       @PathParam("versionId") Long versionId,
                                                       @Context SecurityContext securityContext) {
        SecurityUtil.checkRoleOrPermissions(authorizer, securityContext, Roles.ROLE_TOPOLOGY_USER,
                Topology.NAMESPACE, topologyId, READ);
        TopologyProcessor
processor = catalogService.getTopologyProcessor(topologyId, processorId, versionId);
        if (processor != null) {
            return WSUtils.respondEntity(processor, OK);
        }
        throw EntityNotFoundException.byVersion(buildMessageForCompositeId(topologyId, processorId),
                versionId.toString());
    }

    /**
     * <p>
     * Creates a topology processor. For example,
     * </p>
     * <b>POST /api/v1/catalog/topologies/:TOPOLOGY_ID/processors</b>
     * <pre>
     * {
     *   "name": "ParserProcessor",
     *   "config": {
     *     "properties": {
     *       "parallelism": 1
     *     }
     *   },
     *   "type": "PARSER",
     *   "outputStreamIds": [1]
     *   OR
     *   "outputStreams" : [{stream1 data..}, {stream2 data..}]
     * }
     * </pre>
     * <i>Sample success response: </i>
     * <pre>
     * {
     *   "responseCode": 1000,
     *   "responseMessage": "Success",
     *   "entity": {
     *     "id": 1,
     *     "topologyId": 1,
     *     "name": "ParserProcessor",
     *     "config": {
     *       "properties": {
     *         "parallelism": 1
     *       }
     *     },
     *     "type": "PARSER",
     *     "outputStreams": [{stream1 data..}, {stream2 data..}]
     *   }
     * }
     * </pre>
     */
    @POST
    @Path("/topologies/{topologyId}/processors")
    @Timed
    public Response addTopologyProcessor(@PathParam("topologyId") Long topologyId,
                                         TopologyProcessor topologyProcessor,
                                         @Context SecurityContext securityContext) {
        // Mutations require admin role or WRITE permission on the topology.
        SecurityUtil.checkRoleOrPermissions(authorizer, securityContext, Roles.ROLE_TOPOLOGY_SUPER_ADMIN,
                Topology.NAMESPACE, topologyId, WRITE);
        TopologyProcessor createdProcessor = catalogService.addTopologyProcessor(topologyId, topologyProcessor);
        return WSUtils.respondEntity(createdProcessor, CREATED);
    }

    /**
     * <p>Updates a topology processor.</p>
     * <p>
     * <b>PUT /api/v1/catalog/topologies/:TOPOLOGY_ID/processors/:PROCESSOR_ID</b>
     * <pre>
     * {
     *   "name": "ParserProcessor",
     *   "config": {
     *     "properties": {
     *       "parallelism": 5
     *     }
     *   },
     *   "type": "PARSER",
     *   "outputStreamIds": [1]
     * }
     * </pre>
     * <i>Sample success response: </i>
     * <pre>
     * {
     *   "responseCode": 1000,
     *   "responseMessage": "Success",
     *   "entity": {
     *     "id": 1,
     *     "topologyId": 1,
     *     "name": "ParserProcessor",
     *     "config": {
     *       "properties": {
     *         "parallelism": 5
     *       }
     *     },
     *     "type": "PARSER",
     *     "outputStreamIds": [1]
     *   }
     * }
     * </pre>
     */
    @PUT
    @Path("/topologies/{topologyId}/processors/{id}")
    @Timed
    public Response addOrUpdateTopologyProcessor(@PathParam("topologyId") Long topologyId,
                                                 @PathParam("id") Long processorId,
                                                 TopologyProcessor topologyProcessor,
                                                 @Context SecurityContext securityContext) {
        SecurityUtil.checkRoleOrPermissions(authorizer, securityContext, Roles.ROLE_TOPOLOGY_SUPER_ADMIN,
                Topology.NAMESPACE, topologyId, WRITE);
        TopologyProcessor createdTopologyProcessor = catalogService.addOrUpdateTopologyProcessor(
                topologyId, processorId, topologyProcessor);
        // NOTE(review): returns 201 CREATED even on update — unusual for PUT
        // (200 OK is conventional); kept as-is since clients may rely on it.
        return WSUtils.respondEntity(createdTopologyProcessor, CREATED);
    }

    /**
     * <p>
     * Removes a topology processor.
     * </p>
     * <b>DELETE /api/v1/catalog/topologies/:TOPOLOGY_ID/processors/:PROCESSOR_ID</b>
     * <pre>
     * {
     *   "responseCode": 1000,
     *   "responseMessage": "Success",
     *   "entity": {
     *     "id": 1,
     *     "topologyId": 1,
     *     "name": "ParserProcessor",
     *     "config": {
     *       "properties": {
     *         "parallelism": 5
     *       }
     *     },
     *     "type": "PARSER",
     *     "outputStreamIds": [1]
     *   }
     * }
     * </pre>
     */
    @DELETE
    @Path("/topologies/{topologyId}/processors/{id}")
    @Timed
    public Response removeTopologyProcessor(@PathParam("topologyId") Long topologyId,
                                            @PathParam("id") Long processorId,
                                            @javax.ws.rs.QueryParam("removeEdges") boolean removeEdges,
                                            @Context SecurityContext securityContext) {
        SecurityUtil.checkRoleOrPermissions(authorizer, securityContext, Roles.ROLE_TOPOLOGY_SUPER_ADMIN,
                Topology.NAMESPACE, topologyId, WRITE);
        TopologyProcessor topologyProcessor = catalogService.removeTopologyProcessor(topologyId, processorId, removeEdges);
        if (topologyProcessor != null) {
            return WSUtils.respondEntity(topologyProcessor, OK);
        }
        throw EntityNotFoundException.byId(processorId.toString());
    }

    // Human-readable identifier used in error messages for the (topology, processor) pair.
    private String buildMessageForCompositeId(Long topologyId, Long processorId) {
        return String.format("topology id <%d>, processor id <%d>", topologyId, processorId);
    }
}
package com.rockwellcollins.atc.agree.analysis;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import jkind.lustre.BinaryExpr;
import jkind.lustre.BinaryOp;
import jkind.lustre.BoolExpr;
import jkind.lustre.Equation;
import jkind.lustre.Expr;
import jkind.lustre.IdExpr;
import jkind.lustre.IfThenElseExpr;
import jkind.lustre.IntExpr;
import jkind.lustre.NamedType;
import jkind.lustre.Node;
import jkind.lustre.NodeCallExpr;
import jkind.lustre.SubrangeIntType;
import jkind.lustre.Type;
import jkind.lustre.UnaryExpr;
import jkind.lustre.UnaryOp;
import jkind.lustre.VarDecl;

/**
 * Helpers that build jkind Lustre nodes for clock calendars, quasi-synchrony
 * DFAs, and bounded queues used by the AGREE analysis.
 */
public class AgreeCalendarUtils {

    // Name of the most recently built DFA node; set by getDFANode and consumed
    // (then cleared) by getCalendarNode. NOTE(review): static mutable state —
    // the two calls must be paired and are not thread-safe.
    static private String dfaName = null;

    /**
     * Builds a two-clock quasi-synchrony DFA node: local counter _r tracks the
     * tick difference of clocks _clk0/_clk1, output _out asserts the difference
     * never exceeds {@code synchrony}, and property _r_is_bounded checks that
     * _r stays within [-synchrony, synchrony].
     */
    static public Node getDFANode(String name, int synchrony){
        if(synchrony <= 0){
            throw new AgreeException("Attempt to use quasi-synchrony of value: "+synchrony);
        }
        dfaName = name;

        //VarDecl rVar = new VarDecl("_r",
        //        new SubrangeIntType(BigInteger.valueOf(-synchrony),
        //                BigInteger.valueOf(synchrony)));
        VarDecl rVar = new VarDecl("_r", NamedType.INT);
        IdExpr r = new IdExpr(rVar.id);
        VarDecl rIsBoundedVar = new VarDecl("_r_is_bounded", NamedType.BOOL);
        IdExpr rIsBounded = new IdExpr(rIsBoundedVar.id);
        VarDecl outVar = new VarDecl("_out", NamedType.BOOL);
        IdExpr out = new IdExpr(outVar.id);
        VarDecl clkVar0 = new VarDecl("_clk0", NamedType.BOOL);
        IdExpr p = new IdExpr(clkVar0.id);
        VarDecl clkVar1 = new VarDecl("_clk1", NamedType.BOOL);
        IdExpr q = new IdExpr(clkVar1.id);

        List<VarDecl> inputs = new ArrayList<>();
        inputs.add(clkVar0);
        inputs.add(clkVar1);
        List<VarDecl> outputs = new ArrayList<>();
        outputs.add(outVar);
        List<VarDecl> locals = new ArrayList<>();
        locals.add(rVar);
        locals.add(rIsBoundedVar);

        Expr intZeroExpr = new IntExpr(BigInteger.ZERO);
        Expr intOneExpr = new IntExpr(BigInteger.ONE);
        Expr intNegOneExpr = new IntExpr(BigInteger.valueOf(-1));
        Expr intSyncValExpr = new IntExpr(BigInteger.valueOf(synchrony));
        Expr intNegSyncValxpr = new IntExpr(BigInteger.valueOf(-synchrony));

        //(0 -> pre r)
        Expr rPreExpr = new BinaryExpr(new IntExpr(BigInteger.ZERO), BinaryOp.ARROW, new UnaryExpr(UnaryOp.PRE, r));
        //(0 -> pre r) < 0
        Expr rPreLTExpr = new BinaryExpr(rPreExpr, BinaryOp.LESS, intZeroExpr);
        //(0 -> pre r) > 0
        Expr rPreGTExpr = new BinaryExpr(rPreExpr, BinaryOp.GREATER, intZeroExpr);
        //(0 -> pre r) + 1
        Expr rPrePlus = new BinaryExpr(rPreExpr, BinaryOp.PLUS, intOneExpr);
        //(0 -> pre r) - 1
        Expr rPreMinus = new BinaryExpr(rPreExpr, BinaryOp.MINUS, intOneExpr);
        //if (0 -> pre r) < 0 then 1 else ((0 -> pre r) + 1)
        Expr ifExpr0 = new IfThenElseExpr(rPreLTExpr, intOneExpr, rPrePlus);
        //if (0 -> pre r) > 0 then -1 else ((0 -> pre r) - 1)
        Expr ifExpr1 = new IfThenElseExpr(rPreGTExpr, intNegOneExpr, rPreMinus);
        //if q then (if (0 -> pre r) > 0 then -1 else ((0 -> pre r) - 1)) else (0 -> pre r)
        Expr ifExprClk1 = new IfThenElseExpr(q, ifExpr1, rPreExpr);
        //if p then (if (0 -> pre r) < 0 then 1 else ((0 -> pre r) + 1))
        //else if q then (if (0 -> pre r) > 0 then -1 else ((0 -> pre r) - 1))
        //else (0 -> pre r);
        Expr ifExprClk0 = new IfThenElseExpr(p, ifExpr0, ifExprClk1);
        //if p and q then 0
        //else if p then (if (0 -> pre r) < 0 then 1 else ((0 -> pre r) + 1))
        //else if q then (if (0 -> pre r) > 0 then -1 else ((0 -> pre r) - 1))
        //else (0 -> pre r);
        Expr rExpr = new IfThenElseExpr(new BinaryExpr(p, BinaryOp.AND, q), intZeroExpr, ifExprClk0);

        //((0 -> pre r) >= synchrony and p)
        Expr condExpr0 = new BinaryExpr(new BinaryExpr(rPreExpr, BinaryOp.GREATEREQUAL, intSyncValExpr),
                BinaryOp.AND, p);
        //((0 -> pre r) <= -synchrony and q)
        Expr condExpr1 = new BinaryExpr(new BinaryExpr(rPreExpr, BinaryOp.LESSEQUAL, intNegSyncValxpr),
                BinaryOp.AND, q);
        //not (((0 -> pre r) >= synchrony and p) or ((0 -> pre r) <= -synchrony and q))
        Expr outExpr = new UnaryExpr(UnaryOp.NOT, new BinaryExpr(condExpr0, BinaryOp.OR, condExpr1));

        //r <= synchrony and r >= -synchrony
        Expr rIsBoundedExpr = new BinaryExpr(new BinaryExpr(r, BinaryOp.LESSEQUAL, intSyncValExpr),
                BinaryOp.AND, new BinaryExpr(r, BinaryOp.GREATEREQUAL, intNegSyncValxpr));

        List<Equation> equations = new ArrayList<>();
        equations.add(new Equation(r, rExpr));
        equations.add(new Equation(rIsBounded, rIsBoundedExpr));
        equations.add(new Equation(out, outExpr));
        List<String> properties = new ArrayList<>();
        properties.add(rIsBounded.id);

        return new Node(dfaName, inputs, outputs, locals, equations, properties);
    }

    /**
     * Builds a node asserting that clocks tick in the order given by an
     * explicit calendar. Calendar entries naming clocks that are not in
     * {@code clocks} are filtered out; clocks absent from the calendar are an
     * error.
     */
    public static Node getExplicitCalendarNode(String nodeName, List<IdExpr> calendar, List<Expr> clocks) {
        //filter the calendar if some clocks are not present
        List<IdExpr> filteredCalendar = new ArrayList<>();
        Map<String, List<Integer>> clockTickMap = new HashMap<>();

        for(IdExpr calId : calendar){
            for(Expr clockExpr : clocks){
                IdExpr clockId = (IdExpr)clockExpr;
                if(calId.id.equals(clockId.id)){
                    filteredCalendar.add(clockId);
                    break;
                }
            }
        }

        // Record, for each clock, the calendar positions at which it ticks.
        int i = 0;
        for(IdExpr clockId : filteredCalendar){
            List<Integer> ticks = clockTickMap.get(clockId.id);
            if(ticks == null){
                ticks = new ArrayList<>();
                clockTickMap.put(clockId.id, ticks);
            }
            ticks.add(i++);
        }

        for(Expr clockExpr : clocks){
            IdExpr clockId = (IdExpr)clockExpr;
            if(clockTickMap.get(clockId.id) == null){
                throw new AgreeException("Clock Id '"+clockId.id+"' is not present in calendar statement");
            }
        }

        //add all of the clocks to to the inputs of the node
        List<VarDecl> inputs = new ArrayList<>();
        for(Expr clockExpr : clocks){
            VarDecl input = new VarDecl(((IdExpr)clockExpr).id, NamedType.BOOL);
            inputs.add(input);
        }

        //the output is the variable asserting the calendar
        List<VarDecl> outputs = new ArrayList<>();
        IdExpr outputAssert = new IdExpr("__CALENDAR_ASSERTION");
        outputs.add(new VarDecl(outputAssert.id, NamedType.BOOL));

        //create a variable that counts through the calendar elements
        List<VarDecl> locals = new ArrayList<>();
        VarDecl clockCounterVar = new VarDecl("__CALANDER_COUNTER", NamedType.INT);
        locals.add(clockCounterVar);

        List<Equation> equations = new ArrayList<>();

        //create the expression for the counter variable
        IdExpr
clockCountId = new IdExpr(clockCounterVar.id);
        IntExpr calendarSize = new IntExpr(BigInteger.valueOf(filteredCalendar.size()-1));
        // Counter cycles 0..size-1 over the calendar: 0 -> (if pre c = last then 0 else pre c + 1)
        Expr preClockCount = new UnaryExpr(UnaryOp.PRE, clockCountId);
        Expr preLast = new BinaryExpr(preClockCount, BinaryOp.EQUAL, calendarSize);
        Expr prePlus = new BinaryExpr(preClockCount, BinaryOp.PLUS, new IntExpr(BigInteger.ONE));
        Expr ifClock = new IfThenElseExpr(preLast, new IntExpr(BigInteger.ZERO), prePlus);
        Expr clockArrow = new BinaryExpr(new IntExpr(BigInteger.ZERO), BinaryOp.ARROW, ifClock);
        Equation clockCountEq = new Equation(clockCountId, clockArrow);
        equations.add(clockCountEq);

        //create constraints for which calendar element is ticking
        // Each clock is true exactly when the counter sits on one of its positions.
        Expr calendarConstraint = new BoolExpr(true);
        for(Expr clockExpr : clocks){
            IdExpr clockId = (IdExpr)clockExpr;
            List<Integer> ticks = clockTickMap.get(clockId.id);
            Expr clockTicking = new BoolExpr(false);
            for(Integer tick : ticks){
                Expr clockIsTickValue = new BinaryExpr(clockCountId, BinaryOp.EQUAL,
                        new IntExpr(BigInteger.valueOf(tick.longValue())));
                clockTicking = new BinaryExpr(clockTicking, BinaryOp.OR, clockIsTickValue);
            }
            Expr ifExpr = new IfThenElseExpr(clockTicking, clockId, new UnaryExpr(UnaryOp.NOT, clockId));
            calendarConstraint = new BinaryExpr(calendarConstraint, BinaryOp.AND, ifExpr);
        }
        Equation outEq = new Equation(outputAssert, calendarConstraint);
        equations.add(outEq);

        return new Node(nodeName, inputs, outputs, locals, equations);
    }

    /**
     * Builds a node whose single output conjoins a pairwise quasi-synchrony DFA
     * call for every pair of the {@code numClks} clock inputs. Must be preceded
     * by a call to getDFANode (which sets the DFA node name).
     * NOTE(review): with numClks < 2 the pair loop never runs, leaving nodeExpr
     * null — verify callers always pass at least two clocks.
     */
    static public Node getCalendarNode(String name, int numClks){
        if(dfaName == null){
            throw new AgreeException("Each call to getCalendarNode must be preeceded by a call to getDFANode");
        }
        Node calendarNode;
        Expr nodeExpr = null;
        String clkVarPrefix = "_clk_";
        IdExpr outVar = new IdExpr("_out");

        //make the inputs and outputs
        List<VarDecl> inputs = new ArrayList<>();
        List<IdExpr> clks = new ArrayList<>();
        for(int i = 0; i < numClks; i++){
            inputs.add(new VarDecl(clkVarPrefix+i, NamedType.BOOL));
            clks.add(new IdExpr(clkVarPrefix+i));
        }
        List<VarDecl> outputs = new ArrayList<>();
        outputs.add(new VarDecl(outVar.id, NamedType.BOOL));

        // AND together one DFA instance per unordered clock pair.
        for(int i = 0; i < clks.size()-1; i++){
            Expr clk0 = clks.get(i);
            for(int j = i+1; j < clks.size(); j++){
                Expr clk1 = clks.get(j);
                Expr dfaExpr = getDFAExpr(clk0, clk1);
                if(nodeExpr == null){
                    nodeExpr = dfaExpr;
                }else{
                    nodeExpr = new BinaryExpr(nodeExpr, BinaryOp.AND, dfaExpr);
                }
            }
        }

        Equation nodeEq = new Equation(outVar, nodeExpr);
        calendarNode = new Node(name, inputs, outputs, new ArrayList<VarDecl>(), Collections.singletonList(nodeEq));
        dfaName = null; // force the next getCalendarNode to be paired with a fresh getDFANode
        return calendarNode;
    }

    /**
     * Builds, for each clock, an equation tracking "this clock has ticked at
     * least once", plus a final equation {@code name} that conjoins them all.
     */
    static public List<Equation> getAllClksHaveTicked(String name, String clkPref, List<Expr> clks){
        Expr result = new BoolExpr(true);
        List<Equation> eqs = new ArrayList<>();
        int i = 0;
        for(Expr clk : clks){
            Equation clkHasTicked = getClkHasTicked(new IdExpr(clkPref+i), clk);
            result = new BinaryExpr(result, BinaryOp.AND, clkHasTicked.lhs.get(0));
            eqs.add(clkHasTicked);
            i++;
        }
        eqs.add(new Equation(new IdExpr(name), result));
        return eqs;
    }

    // Latch equation: once clkExpr is true it stays true.
    static public Equation getClkHasTicked(IdExpr clkTickedId, Expr clkExpr){
        // clkTickedID = clkExpr -> clkExpr or pre clkTickedId
        Expr tickExpr = new BinaryExpr(clkExpr, BinaryOp.OR, new UnaryExpr(UnaryOp.PRE, clkTickedId));
        return new Equation(clkTickedId, new BinaryExpr(clkExpr, BinaryOp.ARROW, tickExpr));
    }

    // Call of the previously built two-clock DFA node on a clock pair.
    static private Expr getDFAExpr(Expr clk0, Expr clk1){
        return new NodeCallExpr(dfaName, clk0, clk1);
    }

    /**
     * Builds a circular-buffer queue node of the given element type and size:
     * inputs are the inserted value and insert/remove pulses; outputs are the
     * element count and the element at the read position.
     */
    public static Node queueCircleNode(String nodeName, Type type, int queueSize){
        List<VarDecl> inputs = new ArrayList<>();
        List<VarDecl> outputs = new ArrayList<>();
        List<VarDecl> locals = new ArrayList<>();
        List<IdExpr> els = new ArrayList<>();
        List<Equation> eqs = new ArrayList<>();
        String elBase = "el";

        IdExpr elemIn = new IdExpr("el_in");
        IdExpr elemOut = new IdExpr("el_out");
        IdExpr insert = new IdExpr("insert");
        IdExpr remove = new IdExpr("remove");
        IdExpr output = new IdExpr("output");
        IdExpr input = new IdExpr("input");
        IdExpr numEls = new IdExpr("num_els");

        inputs.add(new
VarDecl(input.id, type)); inputs.add(new VarDecl(elemIn.id, NamedType.BOOL)); inputs.add(new VarDecl(elemOut.id, NamedType.BOOL)); outputs.add(new VarDecl(numEls.id, NamedType.INT)); outputs.add(new VarDecl(output.id, type)); locals.add(new VarDecl(insert.id, NamedType.INT)); locals.add(new VarDecl(remove.id, NamedType.INT)); for(int i = 0 ; i < queueSize; i++){ IdExpr el = new IdExpr(elBase+i); els.add(el); locals.add(new VarDecl(el.id, type)); } //equations for insert Expr preInsert = new UnaryExpr(UnaryOp.PRE, insert); Expr preInsertMore = new BinaryExpr(preInsert, BinaryOp.PLUS, new IntExpr(BigInteger.ONE)); Expr insertIf0 = new IfThenElseExpr(elemIn, preInsertMore, preInsert); Expr insertIfCond = new BinaryExpr(preInsert, BinaryOp.EQUAL, new IntExpr(BigInteger.valueOf(queueSize-1))); insertIfCond = new BinaryExpr(elemIn, BinaryOp.AND, insertIfCond); Expr insertIf1 = new IfThenElseExpr(insertIfCond, new IntExpr(BigInteger.ZERO), insertIf0); Expr insertIf2 = new IfThenElseExpr(elemIn, new IntExpr(BigInteger.ONE), new IntExpr(BigInteger.ZERO)); Expr insertExpr = new BinaryExpr(insertIf2, BinaryOp.ARROW, insertIf1); Equation insertEq = new Equation(insert, insertExpr); eqs.add(insertEq); //equations for remove Expr preRemove = new UnaryExpr(UnaryOp.PRE, remove); Expr preRemoveMore = new BinaryExpr(preRemove, BinaryOp.PLUS, new IntExpr(BigInteger.ONE)); Expr removeIf0 = new IfThenElseExpr(elemOut, preRemoveMore, preRemove); Expr removeIfCond = new BinaryExpr(preRemove, BinaryOp.EQUAL, new IntExpr(BigInteger.valueOf(queueSize-1))); removeIfCond = new BinaryExpr(elemOut, BinaryOp.AND, removeIfCond); Expr removeExpr = new IfThenElseExpr(removeIfCond, new IntExpr(BigInteger.ZERO), removeIf0); removeExpr = new BinaryExpr(new IntExpr(BigInteger.ZERO), BinaryOp.ARROW, removeExpr); Equation removeEq = new Equation(remove, removeExpr); eqs.add(removeEq); Expr preElemIn = new UnaryExpr(UnaryOp.PRE, elemIn); Expr preElemOut = new UnaryExpr(UnaryOp.PRE, elemOut); Expr 
preNumEls = new UnaryExpr(UnaryOp.PRE, numEls); Expr preNumElsMore = new BinaryExpr(preNumEls, BinaryOp.PLUS, new IntExpr(BigInteger.ONE)); Expr preNumElsLessExpr = new BinaryExpr(preNumEls, BinaryOp.MINUS, new IntExpr(BigInteger.ONE)); Expr numElsIf0 = new IfThenElseExpr(preElemIn, preNumElsMore, preNumEls); Expr numElsExpr = new IfThenElseExpr(preElemOut, preNumElsLessExpr, numElsIf0); numElsExpr = new BinaryExpr(new IntExpr(BigInteger.ZERO), BinaryOp.ARROW, numElsExpr); Equation numElsEq = new Equation(numEls, numElsExpr); eqs.add(numElsEq); //equation for the output Expr outputExpr = els.get(queueSize-1); for(int i = 0; i < queueSize-1; i++){ Expr cond = new BinaryExpr(preRemove, BinaryOp.EQUAL, new IntExpr(BigInteger.valueOf(i))); outputExpr = new IfThenElseExpr(cond, els.get(i), outputExpr); } outputExpr = new BinaryExpr(els.get(0), BinaryOp.ARROW, outputExpr); Equation outputEq = new Equation(output, outputExpr); eqs.add(outputEq); //equations for each queue element for(int i = 0; i < queueSize; i++){ Expr preEl = new UnaryExpr(UnaryOp.PRE, els.get(i)); Expr cond = new UnaryExpr(UnaryOp.PRE, insert); cond = new BinaryExpr(cond, BinaryOp.EQUAL, new IntExpr(BigInteger.valueOf(i))); cond = new BinaryExpr(elemIn, BinaryOp.AND, cond); Expr elemIfExpr = new IfThenElseExpr(cond, input, preEl); Expr elExpr = new BinaryExpr(input, BinaryOp.ARROW, elemIfExpr); Equation elEq = new Equation(els.get(i), elExpr); eqs.add(elEq); } //queue properties: List<String> props = new ArrayList<>(); //don't remove more than are present: //Expr propExpr0 = new BinaryExpr(preRemove, BinaryOp.EQUAL, preInsert); //Expr propExpr1 = new BinaryExpr(remove, BinaryOp.EQUAL, preRemove); //Expr propImpl = new BinaryExpr(propExpr0, BinaryOp.IMPLIES, propExpr1); //Expr propArrow = new BinaryExpr(remove, BinaryOp.LESSEQUAL, insert); //propArrow = new BinaryExpr(propArrow, BinaryOp.ARROW, propImpl); Expr propExpr = new BinaryExpr(numEls, BinaryOp.GREATEREQUAL, new IntExpr(BigInteger.ZERO)); IdExpr 
// Tail of the preceding node-builder method (its header is above this chunk):
// registers the property id as a boolean local, equates it to propExpr,
// records it as a node property, and assembles the finished Node.
propId0 = new IdExpr("__REMOVE_LTE_INSERT_"+nodeName);
locals.add(new VarDecl(propId0.id, NamedType.BOOL));
Equation propEq0 = new Equation(propId0, propExpr);
eqs.add(propEq0);
props.add(propId0.id);
return new Node(nodeName, inputs, outputs, locals, eqs, props);
}

/**
 * Builds a Lustre node modelling a shifting FIFO queue of {@code queueSize}
 * elements (plus one extra "dummy" slot to absorb inserts past capacity).
 *
 * Node interface:
 *   inputs:  input (payload of type {@code type}), el_in / el_out (booleans
 *            signalling an enqueue / dequeue on this step)
 *   outputs: num_els (current element count), output (head element, el0)
 *   locals:  insert (next insert index) and queue slots el0..el<queueSize>
 *
 * Also emits a single safety property "__REMOVE_LTE_INSERT_<nodeName>"
 * asserting num_els >= 0.
 *
 * @param nodeName  name of the generated node
 * @param type      element type carried by the queue
 * @param queueSize number of real queue slots
 * @return the generated Lustre node
 */
public static Node queueNode(String nodeName, Type type, int queueSize){
    List<VarDecl> inputs = new ArrayList<>();
    List<VarDecl> outputs = new ArrayList<>();
    List<VarDecl> locals = new ArrayList<>();
    List<IdExpr> els = new ArrayList<>();
    List<Equation> eqs = new ArrayList<>();
    String elBase = "el";
    IdExpr elemIn = new IdExpr("el_in");
    IdExpr elemOut = new IdExpr("el_out");
    IdExpr insert = new IdExpr("insert");
    IdExpr output = new IdExpr("output");
    IdExpr input = new IdExpr("input");
    IdExpr numEls = new IdExpr("num_els");
    inputs.add(new VarDecl(input.id, type));
    inputs.add(new VarDecl(elemIn.id, NamedType.BOOL));
    inputs.add(new VarDecl(elemOut.id, NamedType.BOOL));
    outputs.add(new VarDecl(numEls.id, NamedType.INT));
    outputs.add(new VarDecl(output.id, type));
    locals.add(new VarDecl(insert.id, NamedType.INT));
    //add an extra "dummy element" for handling too many inserts
    for(int i = 0 ; i < queueSize + 1; i++){
        IdExpr el = new IdExpr(elBase+i);
        els.add(el);
        locals.add(new VarDecl(el.id, type));
    }
    //equation for insert:
    //insert = 0 -> if pre el_out then pre insert - 1
    //              else if pre el_in then pre insert + 1 else pre insert
    Expr preElemIn = new UnaryExpr(UnaryOp.PRE, elemIn);
    Expr preElemOut = new UnaryExpr(UnaryOp.PRE, elemOut);
    Expr preInsert = new UnaryExpr(UnaryOp.PRE, insert);
    Expr preInsertMore = new BinaryExpr(preInsert, BinaryOp.PLUS, new IntExpr(BigInteger.ONE));
    Expr preInsertLess = new BinaryExpr(preInsert, BinaryOp.MINUS, new IntExpr(BigInteger.ONE));
    Expr insertIf0 = new IfThenElseExpr(preElemIn, preInsertMore, preInsert);
    Expr insertIf1 = new IfThenElseExpr(preElemOut, preInsertLess, insertIf0);
    Expr insertExpr = new BinaryExpr(new IntExpr(BigInteger.ZERO), BinaryOp.ARROW, insertIf1);
    Equation insertEq = new Equation(insert, insertExpr);
    eqs.add(insertEq);
    //equation for numEls: same increment/decrement shape as 'insert' above
    Expr preNumEls = new UnaryExpr(UnaryOp.PRE, numEls);
    Expr preNumElsMore = new BinaryExpr(preNumEls, BinaryOp.PLUS, new IntExpr(BigInteger.ONE));
    Expr preNumElsLessExpr = new BinaryExpr(preNumEls, BinaryOp.MINUS, new IntExpr(BigInteger.ONE));
    Expr numElsIf0 = new IfThenElseExpr(preElemIn, preNumElsMore, preNumEls);
    Expr numElsExpr = new IfThenElseExpr(preElemOut, preNumElsLessExpr, numElsIf0);
    numElsExpr = new BinaryExpr(new IntExpr(BigInteger.ZERO), BinaryOp.ARROW, numElsExpr);
    Equation numElsEq = new Equation(numEls, numElsExpr);
    eqs.add(numElsEq);
    //equation for the output: the head of the queue is always slot el0
    Equation outputEq = new Equation(output, new IdExpr(elBase+0));
    eqs.add(outputEq);
    //equations for each queue element: on a dequeue every slot shifts down
    //(el_i takes el_{i+1}); on an enqueue the slot addressed by 'pre insert'
    //captures 'pre input'; otherwise a slot keeps its previous value.
    Expr preInput = new UnaryExpr(UnaryOp.PRE, input);
    for(int i = 0; i < queueSize; i++){
        Expr preEl = new UnaryExpr(UnaryOp.PRE, els.get(i));
        Expr cond = new UnaryExpr(UnaryOp.PRE, insert);
        cond = new BinaryExpr(cond, BinaryOp.EQUAL, new IntExpr(BigInteger.valueOf(i)));
        cond = new BinaryExpr(preElemIn, BinaryOp.AND, cond);
        Expr elemIf0 = new IfThenElseExpr(cond, preInput, preEl);
        Expr elemIf1 = new IfThenElseExpr(preElemOut, els.get(i+1), elemIf0);
        Expr elExpr = new BinaryExpr(input, BinaryOp.ARROW, elemIf1);
        Equation elEq = new Equation(els.get(i), elExpr);
        eqs.add(elEq);
    }
    //special case for the dummy element: it simply mirrors the input so that
    //shifting past the end of the real queue is always well-defined
    Equation elEq = new Equation(els.get(queueSize), input);
    eqs.add(elEq);
    //queue properties:
    List<String> props = new ArrayList<>();
    //don't remove more than are present:
    //Expr propExpr0 = new BinaryExpr(preRemove, BinaryOp.EQUAL, preInsert);
    //Expr propExpr1 = new BinaryExpr(remove, BinaryOp.EQUAL, preRemove);
    //Expr propImpl = new BinaryExpr(propExpr0, BinaryOp.IMPLIES, propExpr1);
    //Expr propArrow = new BinaryExpr(remove, BinaryOp.LESSEQUAL, insert);
    //propArrow = new BinaryExpr(propArrow, BinaryOp.ARROW, propImpl);
    //NOTE(review): despite its name the property below only asserts
    //num_els >= 0 -- confirm the "REMOVE_LTE_INSERT" naming is intentional.
    Expr propExpr = new BinaryExpr(numEls, BinaryOp.GREATEREQUAL, new IntExpr(BigInteger.ZERO));
    IdExpr propId0 = new IdExpr("__REMOVE_LTE_INSERT_"+nodeName);
    locals.add(new VarDecl(propId0.id, NamedType.BOOL));
    Equation propEq0 = new Equation(propId0, propExpr);
    eqs.add(propEq0);
    props.add(propId0.id);
    return new Node(nodeName, inputs, outputs, locals, eqs, props);
}

/**
 * Builds a Lustre node that multiplexes {@code numInputs} inputs onto one
 * output. Each input in<i> is paired with a boolean selector (named "out<i>");
 * the output takes the value of the highest-indexed input whose selector is
 * true, defaulting to in0 when none is set.
 *
 * @param nodeName  name of the generated node
 * @param type      type of the multiplexed values
 * @param numInputs number of (value, selector) input pairs
 * @return the generated Lustre node
 */
public static Node queueMultiplexNode(String nodeName, Type type, int numInputs){
    List<VarDecl> inputs = new ArrayList<>();
    List<VarDecl> outputs = new ArrayList<>();
    List<VarDecl> locals = new ArrayList<>();
    List<IdExpr> clks = new ArrayList<>();
    List<IdExpr> ins = new ArrayList<>();
    List<Equation> eqs = new ArrayList<>();
    for(int i = 0; i < numInputs; i++){
        IdExpr inId = new IdExpr("in"+i);
        IdExpr clkId = new IdExpr("out"+i);
        ins.add(inId);
        clks.add(clkId);
        inputs.add(new VarDecl(inId.id, type));
        inputs.add(new VarDecl(clkId.id, NamedType.BOOL));
    }
    IdExpr output = new IdExpr("output");
    outputs.add(new VarDecl(output.id, type));
    //the output expression
    Expr outExpr = ins.get(0); // just an arbitrary value
    for(int i = 0; i < numInputs; i++){
        outExpr = new IfThenElseExpr(clks.get(i), ins.get(i), outExpr);
    }
    Equation outEq = new Equation(output, outExpr);
    eqs.add(outEq);
    return new Node(nodeName, inputs, outputs, locals, eqs);
}

/**
 * Builds a boolean expression that is true exactly when precisely one of the
 * given clocks is true: an OR over all clocks of (clock AND NOT every other
 * clock). Note clocks are compared by reference identity, so duplicate Expr
 * objects in the list are treated as distinct clocks.
 *
 * @param clocks the clock expressions to combine
 * @return the "exactly one clock ticks" expression (false for an empty list)
 */
public static Expr getSingleTick(List<Expr> clocks) {
    Expr returnExpr = new BoolExpr(false);
    for(Expr clock0 : clocks){
        Expr tickExpr = clock0;
        for(Expr clock1: clocks){
            if(clock0 != clock1){
                Expr notClock1 = new UnaryExpr(UnaryOp.NOT, clock1);
                tickExpr = new BinaryExpr(tickExpr, BinaryOp.AND, notClock1);
            }
        }
        returnExpr = new BinaryExpr(tickExpr, BinaryOp.OR, returnExpr);
    }
    return returnExpr;
}
}
package com.evolveum.midpoint.provisioning.ucf.impl;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

import javax.xml.namespace.QName;

import org.apache.commons.lang.StringUtils;
import org.identityconnectors.framework.common.objects.AttributeInfo;
import org.identityconnectors.framework.common.objects.Name;
import org.identityconnectors.framework.common.objects.ObjectClass;
import org.identityconnectors.framework.common.objects.OperationalAttributeInfos;
import org.identityconnectors.framework.common.objects.Uid;

import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.schema.processor.ObjectClassComplexTypeDefinition;
import com.evolveum.midpoint.schema.processor.ResourceAttribute;
import com.evolveum.midpoint.schema.processor.ResourceAttributeContainer;
import com.evolveum.midpoint.schema.processor.ResourceAttributeContainerDefinition;
import com.evolveum.midpoint.schema.processor.ResourceAttributeDefinition;
import com.evolveum.midpoint.schema.processor.ResourceSchema;
import com.evolveum.midpoint.schema.util.ObjectTypeUtil;
import com.evolveum.midpoint.schema.util.ShadowUtil;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ConnectorType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;

/**
 * Bi-directional, stateless mapper between ICF (ConnId) attribute/objectclass
 * names and midPoint QNames.
 *
 * Special ICF attributes (__NAME__, __UID__ and the operational attributes)
 * are translated through two static lookup maps populated once in the static
 * initializer at the bottom of the class; all other attribute names map
 * directly into the resource schema namespace.
 */
public class IcfNameMapper {

	private static final String CUSTOM_OBJECTCLASS_PREFIX = "Custom";
	private static final String CUSTOM_OBJECTCLASS_SUFFIX = "ObjectClass";

	// ICF name -> QName and the inverse QName -> ICF name; filled by initialize()
	private static final Map<String,QName> specialAttributeMapIcf = new HashMap<String,QName>();
	private static final Map<QName,String> specialAttributeMapMp = new HashMap<QName,String>();

	private ResourceSchema resourceSchema = null;
	// Used when there is no schema (schemaless resource)
	private String resourceSchemaNamespace = null;

	/**
	 * @param resourceSchemaNamespace namespace used for attribute QNames when
	 *                                no resource schema has been set
	 */
	public IcfNameMapper(String resourceSchemaNamespace) {
		super();
		this.resourceSchemaNamespace = resourceSchemaNamespace;
	}

	public ResourceSchema getResourceSchema() {
		return resourceSchema;
	}

	/**
	 * Sets the resource schema; when non-null, its namespace replaces the one
	 * supplied to the constructor.
	 */
	public void setResourceSchema(ResourceSchema resourceSchema) {
		this.resourceSchema = resourceSchema;
		if (resourceSchema != null) {
			resourceSchemaNamespace = resourceSchema.getNamespace();
		}
	}

	// Populates the special-attribute lookup maps; invoked once from the
	// static initializer at the end of this class.
	private static void initialize() {
		addSpecialAttributeMapping(Name.NAME, ConnectorFactoryIcfImpl.ICFS_NAME);
		addSpecialAttributeMapping(Uid.NAME, ConnectorFactoryIcfImpl.ICFS_UID);

		addOperationalAttributeMapping(OperationalAttributeInfos.CURRENT_PASSWORD);
		addOperationalAttributeMapping(OperationalAttributeInfos.DISABLE_DATE);
		addOperationalAttributeMapping(OperationalAttributeInfos.ENABLE);
		addOperationalAttributeMapping(OperationalAttributeInfos.ENABLE_DATE);
		addOperationalAttributeMapping(OperationalAttributeInfos.LOCK_OUT);
		addOperationalAttributeMapping(OperationalAttributeInfos.PASSWORD);
		addOperationalAttributeMapping(OperationalAttributeInfos.PASSWORD_EXPIRATION_DATE);
		addOperationalAttributeMapping(OperationalAttributeInfos.PASSWORD_EXPIRED);

		addOperationalAttributeMapping(SecretIcfOperationalAttributes.DESCRIPTION);
		addOperationalAttributeMapping(SecretIcfOperationalAttributes.GROUPS);
		addOperationalAttributeMapping(SecretIcfOperationalAttributes.LAST_LOGIN_DATE);
	}

	// Records the mapping in both directions.
	private static void addSpecialAttributeMapping(String icfName, QName qname) {
		specialAttributeMapIcf.put(icfName, qname);
		specialAttributeMapMp.put(qname, icfName);
	}

	private static void addOperationalAttributeMapping(
			SecretIcfOperationalAttributes opAttr) {
		addOperationalAttributeMapping(opAttr.getName());
	}

	private static void addOperationalAttributeMapping(AttributeInfo attrInfo) {
		addOperationalAttributeMapping(attrInfo.getName());
	}

	// Derives the QName from the __UNDERSCORE__ form of the ICF name.
	private static void addOperationalAttributeMapping(String icfName) {
		QName qName = convertUnderscoreAttributeNameToQName(icfName);
		addSpecialAttributeMapping(icfName, qName);
	}

	public QName convertAttributeNameToQName(String icfAttrName, ResourceAttributeContainerDefinition attributesContainerDefinition) {
		return convertAttributeNameToQName(icfAttrName, attributesContainerDefinition.getComplexTypeDefinition());
	}

	/**
	 * Converts an ICF attribute name to a midPoint QName. Special ICF names
	 * are first resolved against the object class definition (via the
	 * framework attribute name); other names fall into the resource schema
	 * namespace.
	 */
	public QName convertAttributeNameToQName(String icfAttrName, ObjectClassComplexTypeDefinition ocDef) {
		if (specialAttributeMapIcf.containsKey(icfAttrName)) {
			for (ResourceAttributeDefinition attributeDefinition: ocDef.getAttributeDefinitions()) {
				if (icfAttrName.equals(attributeDefinition.getFrameworkAttributeName())) {
					return attributeDefinition.getName();
				}
			}
			// fallback, compatibility
			return specialAttributeMapIcf.get(icfAttrName);
		}
		QName attrXsdName = new QName(resourceSchemaNamespace, icfAttrName,
				ConnectorFactoryIcfImpl.NS_ICF_RESOURCE_INSTANCE_PREFIX);
		return attrXsdName;
	}

	public QName convertAttributeNameToQName(String icfAttrName, ResourceAttributeDefinition attrDef) {
		if (specialAttributeMapIcf.containsKey(icfAttrName)) {
			if (icfAttrName.equals(attrDef.getFrameworkAttributeName())) {
				return attrDef.getName();
			}
			// fallback, compatibility
			return specialAttributeMapIcf.get(icfAttrName);
		}
		return attrDef.getName();
	}

	/**
	 * Converts a resource attribute to its ICF name, resolving the attribute
	 * definition from the object class if the attribute carries none.
	 *
	 * @throws SchemaException if the attribute is not part of the object class
	 */
	public String convertAttributeNameToIcf(ResourceAttribute<?> attribute, ObjectClassComplexTypeDefinition ocDef)
			throws SchemaException {
		ResourceAttributeDefinition attrDef = attribute.getDefinition();
		if (attrDef == null) {
			attrDef = ocDef.findAttributeDefinition(attribute.getElementName());
			if (attrDef == null) {
				throw new SchemaException("No attribute "+attribute.getElementName()+" in object class "+ocDef.getTypeName());
			}
		}
		return convertAttributeNameToIcf(attrDef);
	}

	/**
	 * Converts an attribute QName to its ICF name.
	 *
	 * @param desc context description appended to the error message
	 * @throws SchemaException if the attribute is not part of the object class
	 */
	public <T> String convertAttributeNameToIcf(QName attributeName, ObjectClassComplexTypeDefinition ocDef, String desc)
			throws SchemaException {
		ResourceAttributeDefinition<T> attrDef = ocDef.findAttributeDefinition(attributeName);
		if (attrDef == null) {
			throw new SchemaException("No attribute "+attributeName+" in object class "+ocDef.getTypeName() + " " + desc);
		}
		return convertAttributeNameToIcf(attrDef);
	}

	/**
	 * Converts an attribute definition to its ICF name. Preference order:
	 * explicit framework attribute name, then the special-attribute map, then
	 * the local part of the QName (which must live in the resource schema
	 * namespace).
	 *
	 * @throws SchemaException if the QName is outside the resource schema namespace
	 */
	public String convertAttributeNameToIcf(ResourceAttributeDefinition<?> attrDef) throws SchemaException {
		if (attrDef.getFrameworkAttributeName() != null) {
			return attrDef.getFrameworkAttributeName();
		}
		QName attrQName = attrDef.getName();
		if (specialAttributeMapMp.containsKey(attrQName)) {
			return specialAttributeMapMp.get(attrQName);
		}
		if (!attrQName.getNamespaceURI().equals(resourceSchemaNamespace)) {
			throw new SchemaException("No mapping from QName " + attrQName + " to an ICF attribute in resource schema namespace: " + resourceSchemaNamespace);
		}
		return attrQName.getLocalPart();
	}

	// True for ICF special-attribute syntax such as __NAME__.
	// NOTE(review): currently unused within this class.
	private boolean isUnderscoreSyntax(String icfAttrName) {
		return icfAttrName.startsWith("__") && icfAttrName.endsWith("__");
	}

	// Converts e.g. __PASSWORD_EXPIRATION_DATE__ to "passwordExpirationDate"
	// in the ICF schema namespace (first word lower-cased, the rest capitalized).
	private static QName convertUnderscoreAttributeNameToQName(String icfAttrName) {
		// Strip leading and trailing underscores
		String inside = icfAttrName.substring(2, icfAttrName.length()-2);

		StringBuilder sb = new StringBuilder();
		int lastIndex = 0;
		while (true) {
			int nextIndex = inside.indexOf("_", lastIndex);
			if (nextIndex < 0) {
				String upcase = inside.substring(lastIndex, inside.length());
				sb.append(toCamelCase(upcase, lastIndex == 0));
				break;
			}
			String upcase = inside.substring(lastIndex, nextIndex);
			sb.append(toCamelCase(upcase, lastIndex == 0));
			lastIndex = nextIndex + 1;
		}

		return new QName(ConnectorFactoryIcfImpl.NS_ICF_SCHEMA, sb.toString());
	}

	// Lower-cases the segment; capitalizes the first letter unless it is the
	// leading segment of the camelCase word.
	private static String toCamelCase(String upcase, boolean lowCase) {
		if (lowCase) {
			return StringUtils.lowerCase(upcase);
		} else {
			return StringUtils.capitalize(StringUtils.lowerCase(upcase));
		}
	}

	/**
	 * Maps ICF native objectclass name to a midPoint QName objctclass name.
	 * <p/>
	 * The mapping is "stateless" - it does not keep any mapping database or any
	 * other state. There is a bi-directional mapping algorithm.
	 * <p/>
	 * TODO: mind the special characters in the ICF objectclass names.
	 */
	public QName objectClassToQname(ObjectClass icfObjectClass, String schemaNamespace, boolean legacySchema) {
		if (icfObjectClass == null) {
			return null;
		}
		if (icfObjectClass.is(ObjectClass.ALL_NAME)) {
			return null;
		}
		if (legacySchema) {
			if (icfObjectClass.is(ObjectClass.ACCOUNT_NAME)) {
				return new QName(schemaNamespace, ConnectorFactoryIcfImpl.ACCOUNT_OBJECT_CLASS_LOCAL_NAME,
						ConnectorFactoryIcfImpl.NS_ICF_SCHEMA_PREFIX);
			} else if (icfObjectClass.is(ObjectClass.GROUP_NAME)) {
				return new QName(schemaNamespace, ConnectorFactoryIcfImpl.GROUP_OBJECT_CLASS_LOCAL_NAME,
						ConnectorFactoryIcfImpl.NS_ICF_SCHEMA_PREFIX);
			} else {
				// legacy custom classes are wrapped as Custom<name>ObjectClass
				return new QName(schemaNamespace, CUSTOM_OBJECTCLASS_PREFIX + icfObjectClass.getObjectClassValue()
						+ CUSTOM_OBJECTCLASS_SUFFIX, ConnectorFactoryIcfImpl.NS_ICF_RESOURCE_INSTANCE_PREFIX);
			}
		} else {
			return new QName(schemaNamespace, icfObjectClass.getObjectClassValue());
		}
	}

	/**
	 * Resolves the ICF object class for a shadow, falling back to the type of
	 * its attributes container when the shadow carries no explicit objectClass.
	 * Returns null when neither is available.
	 */
	public ObjectClass objectClassToIcf(PrismObject<? extends ShadowType> shadow, String schemaNamespace,
			ConnectorType connectorType, boolean legacySchema) {

		ShadowType shadowType = shadow.asObjectable();
		QName qnameObjectClass = shadowType.getObjectClass();
		if (qnameObjectClass == null) {
			ResourceAttributeContainer attrContainer = ShadowUtil
					.getAttributesContainer(shadowType);
			if (attrContainer == null) {
				return null;
			}
			ResourceAttributeContainerDefinition objectClassDefinition = attrContainer.getDefinition();
			qnameObjectClass = objectClassDefinition.getTypeName();
		}

		return objectClassToIcf(qnameObjectClass, schemaNamespace, connectorType, legacySchema);
	}

	/**
	 * Maps a midPoint QName objctclass to the ICF native objectclass name.
	 * <p/>
	 * The mapping is "stateless" - it does not keep any mapping database or any
	 * other state. There is a bi-directional mapping algorithm.
	 * <p/>
	 * TODO: mind the special characters in the ICF objectclass names.
	 */
	public ObjectClass objectClassToIcf(ObjectClassComplexTypeDefinition objectClassDefinition, String schemaNamespace,
			ConnectorType connectorType, boolean legacySchema) {
		QName qnameObjectClass = objectClassDefinition.getTypeName();
		return objectClassToIcf(qnameObjectClass, schemaNamespace, connectorType, legacySchema);
	}

	/**
	 * Inverse of {@link #objectClassToQname(ObjectClass, String, boolean)}.
	 *
	 * @throws IllegalArgumentException if the QName is not in the expected
	 *         namespace or (in legacy mode) cannot be recognized
	 */
	public ObjectClass objectClassToIcf(QName qnameObjectClass, String schemaNamespace, ConnectorType connectorType,
			boolean legacySchema) {

		if (!schemaNamespace.equals(qnameObjectClass.getNamespaceURI())) {
			throw new IllegalArgumentException("ObjectClass QName " + qnameObjectClass
					+ " is not in the appropriate namespace for "
					+ connectorType + ", expected: " + schemaNamespace);
		}
		String lname = qnameObjectClass.getLocalPart();
		if (legacySchema) {
			if (ConnectorFactoryIcfImpl.ACCOUNT_OBJECT_CLASS_LOCAL_NAME.equals(lname)) {
				return ObjectClass.ACCOUNT;
			} else if (ConnectorFactoryIcfImpl.GROUP_OBJECT_CLASS_LOCAL_NAME.equals(lname)) {
				return ObjectClass.GROUP;
			} else if (lname.startsWith(CUSTOM_OBJECTCLASS_PREFIX) && lname.endsWith(CUSTOM_OBJECTCLASS_SUFFIX)) {
				// unwrap Custom<name>ObjectClass back to the native <name>
				String icfObjectClassName = lname.substring(CUSTOM_OBJECTCLASS_PREFIX.length(),
						lname.length() - CUSTOM_OBJECTCLASS_SUFFIX.length());
				return new ObjectClass(icfObjectClassName);
			} else {
				throw new IllegalArgumentException("Cannot recognize objectclass QName " + qnameObjectClass
						+ " for " + ObjectTypeUtil.toShortString(connectorType) + ", expected: " + schemaNamespace);
			}
		} else {
			return new ObjectClass(lname);
		}
	}

	static {
		initialize();
	}

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.geo.Polygon;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType;
import org.elasticsearch.index.mapper.MappedFieldType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

/**
 * Query builder for the {@code geo_polygon} query: matches documents whose
 * geo_point field value falls inside the polygon defined by the given shell
 * of points. The shell is always stored closed (first point repeated last).
 */
public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQueryBuilder> {
    public static final String NAME = "geo_polygon";

    /**
     * The default value for ignore_unmapped.
     */
    public static final boolean DEFAULT_IGNORE_UNMAPPED = false;

    private static final ParseField VALIDATION_METHOD = new ParseField("validation_method");
    private static final ParseField POINTS_FIELD = new ParseField("points");
    private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped");

    private final String fieldName;

    // closed polygon shell: shell.get(0).equals(shell.get(size-1)) always holds
    private final List<GeoPoint> shell;

    private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;

    private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;

    /**
     * Creates a polygon query over the given field.
     *
     * @param fieldName name of the geo_point field to query; must be non-empty
     * @param points    polygon shell; at least 3 distinct points (or 4 if the
     *                  caller already closed the ring); a defensive copy is
     *                  taken and the ring is closed if necessary
     * @throws IllegalArgumentException on empty field name or too few points
     */
    public GeoPolygonQueryBuilder(String fieldName, List<GeoPoint> points) {
        if (Strings.isEmpty(fieldName)) {
            throw new IllegalArgumentException("fieldName must not be null");
        }
        if (points == null || points.isEmpty()) {
            throw new IllegalArgumentException("polygon must not be null or empty");
        } else {
            GeoPoint start = points.get(0);
            if (start.equals(points.get(points.size() - 1))) {
                // already closed: need 3 distinct points + the closing duplicate
                if (points.size() < 4) {
                    throw new IllegalArgumentException("too few points defined for geo_polygon query");
                }
            } else {
                if (points.size() < 3) {
                    throw new IllegalArgumentException("too few points defined for geo_polygon query");
                }
            }
        }
        this.fieldName = fieldName;
        this.shell = new ArrayList<>(points);
        if (!shell.get(shell.size() - 1).equals(shell.get(0))) {
            shell.add(shell.get(0));
        }
    }

    /**
     * Read from a stream.
     */
    public GeoPolygonQueryBuilder(StreamInput in) throws IOException {
        super(in);
        fieldName = in.readString();
        int size = in.readVInt();
        shell = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            shell.add(in.readGeoPoint());
        }
        validationMethod = GeoValidationMethod.readFromStream(in);
        ignoreUnmapped = in.readBoolean();
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        out.writeString(fieldName);
        out.writeVInt(shell.size());
        for (GeoPoint point : shell) {
            out.writeGeoPoint(point);
        }
        validationMethod.writeTo(out);
        out.writeBoolean(ignoreUnmapped);
    }

    /** Returns the name of the queried geo_point field. */
    public String fieldName() {
        return fieldName;
    }

    /** Returns the (closed) polygon shell. Note: the internal list is exposed. */
    public List<GeoPoint> points() {
        return shell;
    }

    /** Sets the validation method to use for geo coordinates. */
    public GeoPolygonQueryBuilder setValidationMethod(GeoValidationMethod method) {
        this.validationMethod = method;
        return this;
    }

    /** Returns the validation method to use for geo coordinates. */
    public GeoValidationMethod getValidationMethod() {
        return this.validationMethod;
    }

    /**
     * Sets whether the query builder should ignore unmapped fields (and run a
     * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
     * the field is unmapped.
     */
    public GeoPolygonQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) {
        this.ignoreUnmapped = ignoreUnmapped;
        return this;
    }

    /**
     * Gets whether the query builder will ignore unmapped fields (and run a
     * {@link MatchNoDocsQuery} in place of this query) or throw an exception if
     * the field is unmapped.
     */
    public boolean ignoreUnmapped() {
        return ignoreUnmapped;
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        MappedFieldType fieldType = context.fieldMapper(fieldName);
        if (fieldType == null) {
            if (ignoreUnmapped) {
                return new MatchNoDocsQuery();
            } else {
                throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
            }
        }
        if (!(fieldType instanceof GeoPointFieldType)) {
            throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
        }

        // work on a copy so validation/coercion does not mutate this builder's shell
        List<GeoPoint> shell = new ArrayList<>(this.shell.size());
        for (GeoPoint geoPoint : this.shell) {
            shell.add(new GeoPoint(geoPoint));
        }
        final int shellSize = shell.size();

        // validation was not available prior to 2.x, so to support bwc
        // percolation queries we only ignore_malformed on 2.x created indexes
        if (!GeoValidationMethod.isIgnoreMalformed(validationMethod)) {
            for (GeoPoint point : shell) {
                if (!GeoUtils.isValidLatitude(point.lat())) {
                    throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(),
                            GeoPolygonQueryBuilder.NAME);
                }
                if (!GeoUtils.isValidLongitude(point.lon())) {
                    throw new QueryShardException(context, "illegal longitude value [{}] for [{}]", point.lon(),
                            GeoPolygonQueryBuilder.NAME);
                }
            }
        }

        if (GeoValidationMethod.isCoerce(validationMethod)) {
            for (GeoPoint point : shell) {
                GeoUtils.normalizePoint(point, true, true);
            }
        }

        double[] lats = new double[shellSize];
        double[] lons = new double[shellSize];
        GeoPoint p;
        for (int i=0; i<shellSize; ++i) {
            p = shell.get(i);
            lats[i] = p.lat();
            lons[i] = p.lon();
        }

        return LatLonPoint.newPolygonQuery(fieldType.name(), new Polygon(lats, lons));
    }

    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);

        builder.startObject(fieldName);
        builder.startArray(POINTS_FIELD.getPreferredName());
        for (GeoPoint point : shell) {
            // points are serialized [lon, lat] (GeoJSON order)
            builder.startArray().value(point.lon()).value(point.lat()).endArray();
        }
        builder.endArray();
        builder.endObject();

        builder.field(VALIDATION_METHOD.getPreferredName(), validationMethod);
        builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);

        printBoostAndQueryName(builder);

        builder.endObject();
    }

    /**
     * Parses a geo_polygon query from XContent into a builder instance.
     *
     * @throws ParsingException on unsupported fields or token types
     */
    public static GeoPolygonQueryBuilder fromXContent(XContentParser parser) throws IOException {
        String fieldName = null;

        List<GeoPoint> shell = null;

        Float boost = null;
        GeoValidationMethod validationMethod = null;
        String queryName = null;
        String currentFieldName = null;
        XContentParser.Token token;
        boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED;

        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                // the inner object is keyed by the geo_point field name
                fieldName = currentFieldName;

                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token == XContentParser.Token.START_ARRAY) {
                        if (POINTS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                            shell = new ArrayList<>();
                            while ((token = parser.nextToken()) != Token.END_ARRAY) {
                                shell.add(GeoUtils.parseGeoPoint(parser));
                            }
                        } else {
                            throw new ParsingException(parser.getTokenLocation(),
                                    "[geo_polygon] query does not support [" + currentFieldName + "]");
                        }
                    } else {
                        throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] query does not support token type ["
                                + token.name() + "] under [" + currentFieldName + "]");
                    }
                }
            } else if (token.isValue()) {
                if ("_name".equals(currentFieldName)) {
                    queryName = parser.text();
                } else if ("boost".equals(currentFieldName)) {
                    boost = parser.floatValue();
                } else if (IGNORE_UNMAPPED_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
                    ignoreUnmapped = parser.booleanValue();
                } else if (VALIDATION_METHOD.match(currentFieldName, parser.getDeprecationHandler())) {
                    validationMethod = GeoValidationMethod.fromString(parser.text());
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                            "[geo_polygon] query does not support [" + currentFieldName + "]");
                }
            } else {
                throw new ParsingException(parser.getTokenLocation(),
                        "[geo_polygon] unexpected token type [" + token.name() + "]");
            }
        }
        GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, shell);
        if (validationMethod != null) {
            // if GeoValidationMethod was explicitly set ignore deprecated coerce and ignoreMalformed settings
            builder.setValidationMethod(validationMethod);
        }

        if (queryName != null) {
            builder.queryName(queryName);
        }
        if (boost != null) {
            builder.boost(boost);
        }
        builder.ignoreUnmapped(ignoreUnmapped);
        return builder;
    }

    @Override
    protected boolean doEquals(GeoPolygonQueryBuilder other) {
        return Objects.equals(validationMethod, other.validationMethod)
                && Objects.equals(fieldName, other.fieldName)
                && Objects.equals(shell, other.shell)
                && Objects.equals(ignoreUnmapped, other.ignoreUnmapped);
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(validationMethod, fieldName, shell, ignoreUnmapped);
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }
}
package com.github.texxel.data;

import com.github.texxel.data.exceptions.MissingDataException;
import com.github.texxel.data.exceptions.WrongTypeException;

import java.util.*;

/**
 * <p>PData (primitive data) is the link between the complexity of {@link DataIn}/{@link DataOut} and conversion to a
 * text file. The Data class knows how to convert itself so it only contains primitive data. A {@link DataLoader} then
 * provides the link between strings and PData.</p>
 *
 * <p>If a getXXX() method is asked for something that does not exist, then it will throw a MissingDataException. If the
 * data does exist but is of the incorrect type, then a WrongTypeException will be thrown.</p>
 *
 * <p>Null values in PData are supported. Null values will be reported as Type.NULL.</p>
 */
public final class PData {

    /**
     * All the primitive types that might be in a PData
     */
    public enum Type {
        DOUBLE, LONG, STRING, BOOLEAN, DATA, NULL;
    }

    // backing store; values are Long, Double, String, Boolean, PData or null
    final Map<String, Object> data = new HashMap<>();
    // live unmodifiable view of the key set, handed out by keys()
    final Set<String> publicKeys = Collections.unmodifiableSet(data.keySet());

    /**
     * Always throws an exception. The method determines the cause of the error and throws a tasteful error message.
     *
     * @param key      the key whose lookup failed
     * @param expected the type the caller asked for
     * @throws WrongTypeException   if the key exists but holds another type
     * @throws MissingDataException if the key does not exist at all
     */
    private <T> T crash(String key, Type expected) {
        if (data.containsKey(key)) {
            throw new WrongTypeException("Expected '" + expected + "' from key '" + key
                    + "' but had '" + getType(key) + "'");
        } else {
            throw new MissingDataException("No data mapped to '" + key + "'");
        }
    }

    /**
     * Gets a long value from the data. Double values will NOT be converted to long values; asking for a long where a
     * double is stored throws a WrongTypeException.
     */
    public long getLong( String key ) {
        Object obj = data.get( key );
        if (obj instanceof Long)
            return (Long)obj;
        return crash(key, Type.LONG);
    }

    /**
     * Gets a double value from the data. Long values WILL be converted to double values.
     */
    public double getDouble( String key ) {
        Object obj = data.get( key );
        if (obj instanceof Double)
            return (Double)obj;
        else if (obj instanceof Long)
            return (long)obj; // unbox, then widen to double
        return crash(key, Type.DOUBLE);
    }

    /** Gets a String value from the data. */
    public String getString( String key ) {
        Object obj = data.get( key );
        if (obj instanceof String)
            return (String)obj;
        return crash(key, Type.STRING);
    }

    /** Gets a boolean value from the data. */
    public boolean getBoolean( String key ) {
        Object obj = data.get( key );
        if (obj instanceof Boolean)
            return (Boolean)obj;
        return crash(key, Type.BOOLEAN);
    }

    /** Gets a nested PData section from the data. */
    public PData getSection( String key ) {
        Object obj = data.get( key );
        if (obj instanceof PData)
            return (PData)obj;
        return crash(key, Type.DATA);
    }

    public PData set( String key, long value ) {
        data.put( key, value );
        return this;
    }

    public PData set( String key, double value ) {
        data.put( key, value );
        return this;
    }

    public PData set( String key, String value ) {
        data.put( key, value );
        return this;
    }

    public PData set( String key, boolean value ) {
        data.put( key, value );
        return this;
    }

    /** Explicitly maps the key to null (the key exists and has Type.NULL). */
    public PData setNull( String key ) {
        data.put(key, null);
        return this;
    }

    /** Creates, stores and returns a new nested section under the given key. */
    public PData createSection( String key ) {
        PData child = new PData();
        data.put( key, child );
        return child;
    }

    /** Removes the key (if present) and returns this for chaining. */
    public PData delete( String key ) {
        data.remove( key );
        return this;
    }

    /**
     * Gets an unmodifiable list of all the keys in this Data
     * @return the keys of this data
     */
    public Set<String> keys() {
        return publicKeys;
    }

    /** Returns true iff the key has been entered (possibly with a null value). */
    public boolean contains (String key) {
        return data.containsKey(key);
    }

    /**
     * Gets the type of data at the given key. If the data has not been entered, then null is returned. Note: if the
     * data has null entered into it, then Type.NULL will be returned for that key.
     * @param key the key to look into
     * @return the type at the key, or null if the key is absent
     */
    public Type getType (String key) {
        Object obj = data.get( key );
        if ( obj == null )
            // distinguish "explicitly null" from "never set"
            return data.containsKey(key) ? Type.NULL : null;
        if ( obj instanceof String )
            return Type.STRING;
        if ( obj instanceof Long )
            return Type.LONG;
        if ( obj instanceof Double )
            return Type.DOUBLE;
        if ( obj instanceof Boolean )
            return Type.BOOLEAN;
        if ( obj instanceof PData )
            return Type.DATA;
        // unreachable while the setters only admit the types above
        throw new RuntimeException( "This should never happen" );
    }

    /**
     * Creates a full copy of this data object. Nested sections are deep-copied; all other values are immutable and
     * shared.
     * @return a clone
     */
    public PData copy () {
        PData clone = new PData();
        for (Map.Entry<String, Object> entry : data.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            if (value instanceof PData) {
                clone.data.put(key, ((PData) value).copy());
            } else {
                clone.data.put(key, value);
            }
        }
        return clone;
    }

    /**
     * Creates a human readable dump of the data in this structure. Should only be used for debugging
     */
    @Override
    public String toString () {
        StringBuilder builder = new StringBuilder();
        toString(0, builder);
        return builder.toString();
    }

    // recursive pretty-printer; depth controls the indentation level
    private void toString( int depth, StringBuilder builder ) {
        builder.append("{\n");
        for (String key : keys()) {
            addSpace(depth+1, builder);
            builder.append(key + ": ");
            if (getType(key) == Type.DATA) {
                getSection(key).toString(depth+1, builder);
            } else {
                builder.append(data.get(key));
            }
            builder.append(",\n");
        }
        addSpace(depth, builder);
        builder.append("}");
    }

    // appends two spaces per depth level
    private void addSpace(int depth, StringBuilder builder) {
        for ( int i = 0; i < depth; i++) {
            builder.append("  ");
        }
    }

}
/********************************************************** * * TriCubicSpline.java * * Class for performing an interpolation on the tabulated * function y = f(x1,x2,x3) using a natural tricubic spline * Assumes second derivatives at end points = 0 (natural spine) * * WRITTEN BY: Dr Michael Thomas Flanagan * * DATE: May 2002 * UPDATE: 20 May 2003, 17 February 2006, 27 July 2007, 4 December 2007, 31 October 2009, 5 January 2011 * * DOCUMENTATION: * See Michael Thomas Flanagan's Java library on-line web page: * http://www.ee.ucl.ac.uk/~mflanaga/java/TriCubicSpline.html * http://www.ee.ucl.ac.uk/~mflanaga/java/ * * Copyright (c) 2003 - 2011 Michael Thomas Flanagan * * PERMISSION TO COPY: * Permission to use, copy and modify this software and its documentation for * NON-COMMERCIAL purposes is granted, without fee, provided that an acknowledgement * to the author, Michael Thomas Flanagan at http:\\www.ee.ucl.ac.uk/~mflanaga, appears in all copies. * * Dr Michael Thomas Flanagan makes no representations about the suitability * or fitness of the software for any or for a particular purpose. * Michael Thomas Flanagan shall not be liable for any damages suffered * as a result of using, modifying or distributing this software or its derivatives. * ***************************************************************************************/ package flanagan.interpolation; import flanagan.math.Fmath; public class TriCubicSpline{ private int nPoints = 0; // no. of x1 tabulated points private int mPoints = 0; // no. of x2 tabulated points private int lPoints = 0; // no. 
of x3 tabulated points private double[][][] y = null; // y=f(x1,x2) tabulated function private double[] x1 = null; // x1 in tabulated function f(x1,x2,x3) private double[] x2 = null; // x2 in tabulated function f(x1,x2,x3) private double[] x3 = null; // x3 in tabulated function f(x1,x2,x3) private double[] xMin = new double[3]; // minimum values of x1, x2 and x3 private double[] xMax = new double[3]; // maximum values of x1, x2 and x3 private BiCubicSpline[] bcsn = null; // nPoints array of BiCubicSpline instances private CubicSpline csm = null; // CubicSpline instance private double[][][] d2ydx2inner = null; // inner matrix of second derivatives private boolean derivCalculated = false; // = true when the called bicubic spline derivatives have been calculated private boolean averageIdenticalAbscissae = false; // if true: the the ordinate values for identical abscissae are averaged // If false: the abscissae values are separated by 0.001 of the total abscissae range; private static double potentialRoundingError = 5e-15; // potential rounding error used in checking wheter a value lies within the interpolation bounds (static value) private static boolean roundingCheck = true; // = true: points outside the interpolation bounds by less than the potential rounding error rounded to the bounds limit (static value) // Constructor public TriCubicSpline(double[] x1, double[] x2, double[] x3, double[][][] y){ this.nPoints=x1.length; this.mPoints=x2.length; this.lPoints=x3.length; if(this.nPoints!=y.length)throw new IllegalArgumentException("Arrays x1 and y-row are of different length " + this.nPoints + " " + y.length); if(this.mPoints!=y[0].length)throw new IllegalArgumentException("Arrays x2 and y-column are of different length "+ this.mPoints + " " + y[0].length); if(this.lPoints!=y[0][0].length)throw new IllegalArgumentException("Arrays x3 and y-column are of different length "+ this.mPoints + " " + y[0][0].length); if(this.nPoints<3 || this.mPoints<3 || 
this.lPoints<3)throw new IllegalArgumentException("The tabulated 3D array must have a minimum size of 3 X 3 X 3"); this.csm = new CubicSpline(this.nPoints); this.bcsn = BiCubicSpline.oneDarray(this.nPoints, this.mPoints, this.lPoints); this.x1 = new double[this.nPoints]; this.x2 = new double[this.mPoints]; this.x3 = new double[this.lPoints]; this.y = new double[this.nPoints][this.mPoints][this.lPoints]; this.d2ydx2inner = new double[this.nPoints][this.mPoints][this.lPoints]; for(int i=0; i<this.nPoints; i++){ this.x1[i]=x1[i]; } this.xMin[0] = Fmath.minimum(this.x1); this.xMax[0] = Fmath.maximum(this.x1); for(int j=0; j<this.mPoints; j++){ this.x2[j]=x2[j]; } this.xMin[1] = Fmath.minimum(this.x2); this.xMax[1] = Fmath.maximum(this.x2); for(int j=0; j<this.lPoints; j++){ this.x3[j]=x3[j]; } this.xMin[2] = Fmath.minimum(this.x3); this.xMax[2] = Fmath.maximum(this.x3); for(int i =0; i<this.nPoints; i++){ for(int j=0; j<this.mPoints; j++){ for(int k=0; k<this.lPoints; k++){ this.y[i][j][k]=y[i][j][k]; } } } double[][] yTempml = new double[this.mPoints][this.lPoints]; for(int i=0; i<this.nPoints; i++){ for(int j=0; j<this.mPoints; j++){ for(int k=0; k<this.lPoints; k++){ yTempml[j][k]=y[i][j][k]; } } this.bcsn[i].resetData(x2,x3,yTempml); this.d2ydx2inner[i] = this.bcsn[i].getDeriv(); } derivCalculated = true; } // Constructor with data arrays initialised to zero // Primarily for use by QuadriCubicSpline public TriCubicSpline(int nP, int mP, int lP){ this.nPoints=nP; this.mPoints=mP; this.lPoints=lP; if(this.nPoints<3 || this.mPoints<3 || this.lPoints<3)throw new IllegalArgumentException("The data matrix must have a minimum size of 3 X 3 X 3"); this.csm = new CubicSpline(this.nPoints); this.bcsn = BiCubicSpline.oneDarray(this.nPoints, this.mPoints, this.lPoints); this.x1 = new double[this.nPoints]; this.x2 = new double[this.mPoints]; this.x3 = new double[this.lPoints]; this.y = new double[this.nPoints][this.mPoints][this.lPoints]; this.d2ydx2inner = new 
double[this.nPoints][this.mPoints][this.lPoints]; } // METHODS // Reset rounding error check option // Default option: points outside the interpolation bounds by less than the potential rounding error rounded to the bounds limit // This method causes this check to be ignored and an exception to be thrown if any point lies outside the interpolation bounds public static void noRoundingErrorCheck(){ TriCubicSpline.roundingCheck = false; BiCubicSpline.noRoundingErrorCheck(); CubicSpline.noRoundingErrorCheck(); } // Reset potential rounding error value // Default option: points outside the interpolation bounds by less than the potential rounding error rounded to the bounds limit // The default value for the potential rounding error is 5e-15*times the 10^exponent of the value outside the bounds // This method allows the 5e-15 to be reset public static void potentialRoundingError(double potentialRoundingError){ TriCubicSpline.potentialRoundingError = potentialRoundingError; BiCubicSpline.potentialRoundingError(potentialRoundingError); CubicSpline.potentialRoundingError(potentialRoundingError); } // Reset the default handing of identical abscissae with different ordinates // from the default option of separating the two relevant abscissae by 0.001 of the range // to avraging the relevant ordinates public void averageIdenticalAbscissae(){ this.averageIdenticalAbscissae = true; for(int i=0; i<this.bcsn.length; i++)this.bcsn[i].averageIdenticalAbscissae(); this.csm.averageIdenticalAbscissae(); } // Returns a new TriCubicSpline setting internal array size to nP x mP x lP and all array values to zero with natural spline default // Primarily for use in this.oneDarray for QuadriCubicSpline public static TriCubicSpline zero(int nP, int mP, int lP){ if(nP<3 || mP<3 || lP<3)throw new IllegalArgumentException("A minimum of three x three x three data points is needed"); TriCubicSpline aa = new TriCubicSpline(nP, mP, lP); return aa; } // Create a one dimensional array of TriCubicSpline 
objects of length nP each of internal array size mP x lP xkP // Primarily for use in quadriCubicSpline public static TriCubicSpline[] oneDarray(int nP, int mP, int lP, int kP){ if(mP<3 || lP<3 || kP<3)throw new IllegalArgumentException("A minimum of three x three x three data points is needed"); TriCubicSpline[] a = new TriCubicSpline[nP]; for(int i=0; i<nP; i++){ a[i]=TriCubicSpline.zero(mP, lP, kP); } return a; } // Resets the x1, x2, x3, y data arrays // Primarily for use in QuadriCubicSpline public void resetData(double[] x1, double[] x2, double[] x3, double[][][] y){ if(x1.length!=y.length)throw new IllegalArgumentException("Arrays x1 and y row are of different length"); if(x2.length!=y[0].length)throw new IllegalArgumentException("Arrays x2 and y column are of different length"); if(x3.length!=y[0][0].length)throw new IllegalArgumentException("Arrays x3 and y column are of different length"); if(this.nPoints!=x1.length)throw new IllegalArgumentException("Original array length not matched by new array length"); if(this.mPoints!=x2.length)throw new IllegalArgumentException("Original array length not matched by new array length"); if(this.lPoints!=x3.length)throw new IllegalArgumentException("Original array length not matched by new array length"); for(int i=0; i<this.nPoints; i++){ this.x1[i]=x1[i]; } this.xMin[0] = Fmath.minimum(this.x1); this.xMax[0] = Fmath.maximum(this.x1); for(int i=0; i<this.mPoints; i++){ this.x2[i]=x2[i]; } this.xMin[1] = Fmath.minimum(this.x2); this.xMax[1] = Fmath.maximum(this.x2); for(int i=0; i<this.lPoints; i++){ this.x3[i]=x3[i]; } this.xMin[2] = Fmath.minimum(this.x3); this.xMax[2] = Fmath.maximum(this.x3); for(int i=0; i<this.nPoints; i++){ for(int j=0; j<this.mPoints; j++){ for(int k=0; k<this.lPoints; k++){ this.y[i][j][k]=y[i][j][k]; } } } this.csm = new CubicSpline(this.nPoints); this.bcsn = BiCubicSpline.oneDarray(this.nPoints, this.mPoints, this.lPoints); double[][] yTempml = new double[this.mPoints][this.lPoints]; for(int 
i=0; i<this.nPoints; i++){ for(int j=0; j<this.mPoints; j++){ for(int k=0; k<this.lPoints; k++){ yTempml[j][k]=y[i][j][k]; } } this.bcsn[i].resetData(x2,x3,yTempml); this.d2ydx2inner[i] = this.bcsn[i].getDeriv(); } derivCalculated = true; } // Get minimum limits public double[] getXmin(){ return this.xMin; } // Get maximum limits public double[] getXmax(){ return this.xMax; } // Get limits to x public double[] getLimits(){ double[] limits = {xMin[0], xMax[0], xMin[1], xMax[1], xMin[2], xMax[2]}; return limits; } // Display limits to x public void displayLimits(){ System.out.println(" "); for(int i=0; i<3; i++){ System.out.println("The limits to the x array " + i + " are " + xMin[i] + " and " + xMax[i]); } System.out.println(" "); } // Returns an interpolated value of y for values of x1, x2 and x3 // from a tabulated function y=f(x1,x2,x3) public double interpolate(double xx1, double xx2, double xx3){ double[] yTempm = new double[nPoints]; for (int i=0;i<nPoints;i++){ yTempm[i]=this.bcsn[i].interpolate(xx2, xx3); } this.csm.resetData(x1,yTempm); return this.csm.interpolate(xx1); } // Get inner matrix of derivatives // Primarily used by QuadriCubicSpline public double[][][] getDeriv(){ return this.d2ydx2inner; } // Set inner matrix of derivatives // Primarily used by QuadriCubicSpline public void setDeriv(double[][][] d2ydx2){ this.d2ydx2inner = d2ydx2; this.derivCalculated = true; } }
package com.sam_chordas.android.stockhawk.ui; import android.app.LoaderManager; import android.content.Context; import android.content.CursorLoader; import android.content.Intent; import android.content.Loader; import android.database.Cursor; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.Bundle; import android.support.v7.app.ActionBar; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.helper.ItemTouchHelper; import android.text.InputType; import android.view.Gravity; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.TextView; import android.widget.Toast; import com.afollestad.materialdialogs.MaterialDialog; import com.google.android.gms.gcm.GcmNetworkManager; import com.google.android.gms.gcm.PeriodicTask; import com.google.android.gms.gcm.Task; import com.melnykov.fab.FloatingActionButton; import com.sam_chordas.android.stockhawk.R; import com.sam_chordas.android.stockhawk.data.QuoteColumns; import com.sam_chordas.android.stockhawk.data.QuoteProvider; import com.sam_chordas.android.stockhawk.rest.QuoteCursorAdapter; import com.sam_chordas.android.stockhawk.rest.RecyclerViewItemClickListener; import com.sam_chordas.android.stockhawk.rest.Utils; import com.sam_chordas.android.stockhawk.service.StockIntentService; import com.sam_chordas.android.stockhawk.service.StockTaskService; import com.sam_chordas.android.stockhawk.touch_helper.SimpleItemTouchHelperCallback; public class MyStocksActivity extends AppCompatActivity implements LoaderManager.LoaderCallbacks<Cursor>{ /** * Fragment managing the behaviors, interactions and presentation of the navigation drawer. */ /** * Used to store the last screen title. For use in {@link #restoreActionBar()}. 
*/ private CharSequence mTitle; private Intent mServiceIntent; private ItemTouchHelper mItemTouchHelper; private static final int CURSOR_LOADER_ID = 0; private QuoteCursorAdapter mCursorAdapter; private Context mContext; private Cursor mCursor; boolean isConnected; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mContext = this; ConnectivityManager cm = (ConnectivityManager) mContext.getSystemService(Context.CONNECTIVITY_SERVICE); NetworkInfo activeNetwork = cm.getActiveNetworkInfo(); isConnected = activeNetwork != null && activeNetwork.isConnectedOrConnecting(); setContentView(R.layout.activity_my_stocks); // The intent service is for executing immediate pulls from the Yahoo API // GCMTaskService can only schedule tasks, they cannot execute immediately mServiceIntent = new Intent(this, StockIntentService.class); if (savedInstanceState == null){ // Run the initialize task service so that some stocks appear upon an empty database mServiceIntent.putExtra("tag", "init"); if (isConnected){ startService(mServiceIntent); } else{ noInternetAlert(); } } RecyclerView recyclerView = (RecyclerView) findViewById(R.id.recycler_view); recyclerView.setLayoutManager(new LinearLayoutManager(this)); getLoaderManager().initLoader(CURSOR_LOADER_ID, null, this); mCursorAdapter = new QuoteCursorAdapter(this, null); recyclerView.addOnItemTouchListener(new RecyclerViewItemClickListener(this, new RecyclerViewItemClickListener.OnItemClickListener() { @Override public void onItemClick(View v, int position) { Cursor c = mCursorAdapter.getCursor(); c.moveToPosition(position); String symbol = c.getString(c.getColumnIndex(QuoteColumns.SYMBOL)); Intent detailIntent = new Intent(getApplicationContext(), MyStockGraphActivity.class); detailIntent.putExtra(Intent.EXTRA_TEXT, symbol); startActivity(detailIntent); } })); recyclerView.setAdapter(mCursorAdapter); FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab); 
fab.attachToRecyclerView(recyclerView); fab.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { if (isConnected){ new MaterialDialog.Builder(mContext).title(R.string.symbol_search) .content(R.string.content_test) .inputType(InputType.TYPE_CLASS_TEXT) .positiveText(R.string.action_add_stock) .input(R.string.input_hint, R.string.input_prefill, new MaterialDialog.InputCallback() { @Override public void onInput(MaterialDialog dialog, CharSequence input) { // On FAB click, receive user input. Make sure the stock doesn't already exist // in the DB and proceed accordingly Cursor c = getContentResolver().query(QuoteProvider.Quotes.CONTENT_URI, new String[] { QuoteColumns.SYMBOL }, QuoteColumns.SYMBOL + "= ?", new String[] { input.toString() }, null); if (c.getCount() != 0) { Toast toast = Toast.makeText(MyStocksActivity.this, R.string.stock_already_saved, Toast.LENGTH_LONG); toast.setGravity(Gravity.CENTER, Gravity.CENTER, 0); toast.show(); return; } else { // Add the stock to DB mServiceIntent.putExtra("tag", "add"); mServiceIntent.putExtra("symbol", input.toString()); startService(mServiceIntent); } } }) .show(); } else { noInternetAlert(); } } }); ItemTouchHelper.Callback callback = new SimpleItemTouchHelperCallback(mCursorAdapter); mItemTouchHelper = new ItemTouchHelper(callback); mItemTouchHelper.attachToRecyclerView(recyclerView); mTitle = getTitle(); if (isConnected){ long period = 3600L; long flex = 10L; String periodicTag = "periodic"; // create a periodic task to pull stocks once every hour after the app has been opened. This // is so Widget data stays up to date. PeriodicTask periodicTask = new PeriodicTask.Builder() .setService(StockTaskService.class) .setPeriod(period) .setFlex(flex) .setTag(periodicTag) .setRequiredNetwork(Task.NETWORK_STATE_CONNECTED) .setRequiresCharging(false) .build(); // Schedule task with tag "periodic." This ensure that only the stocks present in the DB // are updated. 
GcmNetworkManager.getInstance(this).schedule(periodicTask); } } @Override public void onResume() { super.onResume(); getLoaderManager().restartLoader(CURSOR_LOADER_ID, null, this); } public void noInternetAlert(){ TextView tv = (TextView) findViewById(R.id.no_internet); tv.setText(R.string.no_internet_data_not_up_to_date); tv.setVisibility(View.VISIBLE); } public void restoreActionBar() { ActionBar actionBar = getSupportActionBar(); actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); actionBar.setDisplayShowTitleEnabled(true); actionBar.setTitle(mTitle); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.my_stocks, menu); restoreActionBar(); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_settings) { return true; } if (id == R.id.action_change_units){ // this is for changing stock changes from percent value to dollar value Utils.showPercent = !Utils.showPercent; this.getContentResolver().notifyChange(QuoteProvider.Quotes.CONTENT_URI, null); } return super.onOptionsItemSelected(item); } @Override public Loader<Cursor> onCreateLoader(int id, Bundle args){ // This narrows the return to only the stocks that are most current. return new CursorLoader(this, QuoteProvider.Quotes.CONTENT_URI, new String[]{ QuoteColumns._ID, QuoteColumns.SYMBOL, QuoteColumns.BIDPRICE, QuoteColumns.PERCENT_CHANGE, QuoteColumns.CHANGE, QuoteColumns.ISUP}, QuoteColumns.ISCURRENT + " = ?", new String[]{"1"}, null); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor data){ mCursorAdapter.swapCursor(data); mCursor = data; } @Override public void onLoaderReset(Loader<Cursor> loader){ mCursorAdapter.swapCursor(null); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.physical.impl.xsort.managed;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Iterator;
import java.util.concurrent.TimeUnit;

import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.exec.cache.VectorAccessibleSerializable;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.OperatorContext;
import org.apache.drill.exec.physical.impl.spill.SpillSet;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.SchemaUtil;
import org.apache.drill.exec.record.TransferPair;
import org.apache.drill.exec.record.TypedFieldId;
import org.apache.drill.exec.record.VectorAccessible;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.record.WritableBatch;
import org.apache.drill.exec.record.selection.SelectionVector2;
import org.apache.drill.exec.record.selection.SelectionVector4;

import com.google.common.base.Stopwatch;

/**
 * Represents a group of batches spilled to disk.
 * <p>
 * The batches are defined by a schema which can change over time. When the schema changes,
 * all existing and new batches are coerced into the new schema. Provides a
 * uniform way to iterate over records for one or more batches whether
 * the batches are in memory or on disk.
 * <p>
 * The <code>BatchGroup</code> operates in two modes as given by the two
 * subclasses:
 * <ul>
 * <li>Input mode (@link InputBatchGroup): Used to buffer in-memory batches
 * prior to spilling.</li>
 * <li>Spill mode (@link SpilledBatchGroup): Holds a "memento" to a set
 * of batches written to disk. Acts as both a reader and writer for
 * those batches.</li>
 */

public abstract class BatchGroup implements VectorAccessible, AutoCloseable {
  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BatchGroup.class);

  /**
   * The input batch group gathers batches buffered in memory before
   * spilling. The structure of the data is:
   * <ul>
   * <li>Contains a single batch received from the upstream (input)
   * operator.</li>
   * <li>Associated selection vector that provides a sorted
   * indirection to the values in the batch.</li>
   * </ul>
   */

  public static class InputBatch extends BatchGroup {

    // Sorted indirection over the records in the container; may be null.
    private final SelectionVector2 sv2;
    // Memory footprint of this batch as reported by the caller at construction.
    private final int dataSize;

    public InputBatch(VectorContainer container, SelectionVector2 sv2, OperatorContext context, int dataSize) {
      super(container, context);
      this.sv2 = sv2;
      this.dataSize = dataSize;
    }

    public SelectionVector2 getSv2() { return sv2; }

    public int getDataSize() { return dataSize; }

    @Override
    public int getRecordCount() {
      // When an sv2 is present, the logical record count is the sv2's count,
      // which may be smaller than the container's physical count.
      if (sv2 != null) {
        return sv2.getCount();
      } else {
        return super.getRecordCount();
      }
    }

    @Override
    public int getNextIndex() {
      int val = super.getNextIndex();
      if (val == -1) {
        return val;
      }
      // Map the sequential position through the sv2 to get the sorted index.
      // NOTE(review): assumes sv2 is non-null whenever records remain — confirm
      // with callers; a null sv2 here would NPE.
      return sv2.getIndex(val);
    }

    @Override
    public void close() throws IOException {
      // Release the container first, then the sv2, even if the first close throws.
      try {
        super.close();
      } finally {
        if (sv2 != null) {
          sv2.clear();
        }
      }
    }
  }

  /**
   * Holds a set of spilled batches, represented by a file on disk.
   * Handles reads from, and writes to the spill file. The data structure
   * is:
   * <ul>
   * <li>A pointer to a file that contains serialized batches.</li>
   * <li>When writing, each batch is appended to the output file.</li>
   * <li>When reading, iterates over each spilled batch, and for each
   * of those, each spilled record.</li>
   * </ul>
   * <p>
   * Starts out with no current batch. Defines the current batch to be the
   * (shell: schema without data) of the last batch spilled to disk.
   * <p>
   * When reading, has destructive read-once behavior: closing the
   * batch (after reading) deletes the underlying spill file.
   * <p>
   * This single class does three tasks: load data, hold data and
   * read data. This should be split into three separate classes. But,
   * the original (combined) structure is retained for expedience at
   * present.
   */

  public static class SpilledRun extends BatchGroup {
    // Open only while reading back; lazily created in getBatch().
    private InputStream inputStream;
    // Open only while writing; closed via closeOutputStream() once spilling ends.
    private OutputStream outputStream;
    // Spill-file path within the spill set.
    private String path;
    private SpillSet spillSet;
    private BufferAllocator allocator;
    // Number of batches written but not yet read back.
    private int spilledBatches = 0;

    public SpilledRun(SpillSet spillSet, String path, OperatorContext context) throws IOException {
      super(null, context);
      this.spillSet = spillSet;
      this.path = path;
      this.allocator = context.getAllocator();
      outputStream = spillSet.openForOutput(path);
    }

    public void addBatch(VectorContainer newContainer) throws IOException {
      int recordCount = newContainer.getRecordCount();
      @SuppressWarnings("resource")
      WritableBatch batch = WritableBatch.getBatchNoHVWrap(recordCount, newContainer, false);
      VectorAccessibleSerializable outputBatch = new VectorAccessibleSerializable(batch, allocator);
      Stopwatch watch = Stopwatch.createStarted();
      outputBatch.writeToStream(outputStream);
      // Data now lives on disk; release the in-memory buffers.
      newContainer.zeroVectors();
      logger.trace("Wrote {} records in {} us", recordCount, watch.elapsed(TimeUnit.MICROSECONDS));
      spilledBatches++;

      // Hold onto the husk of the last added container so that we have a
      // current container when starting to read rows back later.

      currentContainer = newContainer;
      currentContainer.setRecordCount(0);
    }

    @Override
    public int getNextIndex() {
      // When the current (in-memory) batch is exhausted, pull the next
      // spilled batch from disk before handing out indexes.
      if (pointer == getRecordCount()) {
        if (spilledBatches == 0) {
          return -1;
        }
        try {
          currentContainer.zeroVectors();
          getBatch();
        } catch (IOException e) {
          // Release any partially-loaded data.
          currentContainer.clear();
          throw UserException.dataReadError(e)
              .message("Failure while reading spilled data")
              .build(logger);
        }

        // The pointer indicates the NEXT index, not the one we
        // return here. At this point, we just started reading a
        // new batch and have returned index 0. So, the next index
        // is 1.

        pointer = 1;
        return 0;
      }
      return super.getNextIndex();
    }

    // Deserializes the next batch from the spill file into currentContainer,
    // coercing it to the current schema if one has been set.
    private VectorContainer getBatch() throws IOException {
      if (inputStream == null) {
        inputStream = spillSet.openForInput(path);
      }
      VectorAccessibleSerializable vas = new VectorAccessibleSerializable(allocator);
      Stopwatch watch = Stopwatch.createStarted();
      vas.readFromStream(inputStream);
      VectorContainer c = vas.get();
      if (schema != null) {
        c = SchemaUtil.coerceContainer(c, schema, context);
      }
      logger.trace("Read {} records in {} us", c.getRecordCount(), watch.elapsed(TimeUnit.MICROSECONDS));
      spilledBatches--;
      currentContainer.zeroVectors();
      // Move each deserialized vector's buffers into the long-lived
      // currentContainer via transfer pairs (ownership move, not a copy).
      Iterator<VectorWrapper<?>> wrapperIterator = c.iterator();
      for (@SuppressWarnings("rawtypes") VectorWrapper w : currentContainer) {
        TransferPair pair = wrapperIterator.next().getValueVector().makeTransferPair(w.getValueVector());
        pair.transfer();
      }
      currentContainer.setRecordCount(c.getRecordCount());
      c.zeroVectors();
      return c;
    }

    /**
     * Close resources owned by this batch group. Each can fail; report
     * only the first error. This is cluttered because this class tries
     * to do multiple tasks. TODO: Split into multiple classes.
     */

    @Override
    public void close() throws IOException {
      // First-error-wins: attempt every cleanup step, remember only the
      // first failure, and rethrow it at the end.
      IOException ex = null;
      try {
        super.close();
      } catch (IOException e) {
        ex = e;
      }
      try {
        closeOutputStream();
      } catch (IOException e) {
        ex = ex == null ? e : ex;
      }
      try {
        closeInputStream();
      } catch (IOException e) {
        ex = ex == null ? e : ex;
      }
      try {
        // Destructive read-once behavior: the spill file is deleted on close.
        spillSet.delete(path);
      } catch (IOException e) {
        ex = ex == null ? e : ex;
      }
      if (ex != null) {
        throw ex;
      }
    }

    private void closeInputStream() throws IOException {
      if (inputStream == null) {
        return;
      }
      long readLength = spillSet.getPosition(inputStream);
      spillSet.tallyReadBytes(readLength);
      inputStream.close();
      inputStream = null;
      logger.trace("Summary: Read {} bytes from {}", readLength, path);
    }

    // Returns the number of bytes written, or 0 if the stream was already closed.
    public long closeOutputStream() throws IOException {
      if (outputStream == null) {
        return 0;
      }
      long writeSize = spillSet.getPosition(outputStream);
      spillSet.tallyWriteBytes(writeSize);
      outputStream.close();
      outputStream = null;
      logger.trace("Summary: Wrote {} bytes to {}", writeSize, path);
      return writeSize;
    }
  }

  // The batch currently exposed through the VectorAccessible interface.
  protected VectorContainer currentContainer;
  // Next sequential record index to hand out; see getNextIndex().
  protected int pointer = 0;
  protected final OperatorContext context;
  // Target schema; when set, all batches are coerced to it.
  protected BatchSchema schema;

  public BatchGroup(VectorContainer container, OperatorContext context) {
    this.currentContainer = container;
    this.context = context;
  }

  /**
   * Updates the schema for this batch group. The current as well as any
   * deserialized batches will be coerced to this schema.
   * @param schema
   */
  public void setSchema(BatchSchema schema) {
    currentContainer = SchemaUtil.coerceContainer(currentContainer, schema, context);
    this.schema = schema;
  }

  // Returns the next sequential record index, or -1 when exhausted.
  public int getNextIndex() {
    if (pointer == getRecordCount()) {
      return -1;
    }
    int val = pointer++;
    assert val < currentContainer.getRecordCount();
    return val;
  }

  public VectorContainer getContainer() { return currentContainer; }

  @Override
  public void close() throws IOException {
    currentContainer.zeroVectors();
  }

  @Override
  public VectorWrapper<?> getValueAccessorById(Class<?> clazz, int... ids) {
    return currentContainer.getValueAccessorById(clazz, ids);
  }

  @Override
  public TypedFieldId getValueVectorId(SchemaPath path) {
    return currentContainer.getValueVectorId(path);
  }

  @Override
  public BatchSchema getSchema() {
    return currentContainer.getSchema();
  }

  @Override
  public int getRecordCount() {
    return currentContainer.getRecordCount();
  }

  public int getUnfilteredRecordCount() {
    return currentContainer.getRecordCount();
  }

  @Override
  public Iterator<VectorWrapper<?>> iterator() {
    return currentContainer.iterator();
  }

  @Override
  public SelectionVector2 getSelectionVector2() {
    throw new UnsupportedOperationException();
  }

  @Override
  public SelectionVector4 getSelectionVector4() {
    throw new UnsupportedOperationException();
  }
}
package com.example.bamboo.demoweek1.view.extended;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.opengl.Matrix;

import com.example.bamboo.demoweek1.SoundInterface;
import com.example.bamboo.demoweek1.view.object.Ground;
import com.example.bamboo.demoweek1.view.object.Obstacle;
import com.example.bamboo.demoweek1.view.object.ObstacleTriangle;
import com.example.bamboo.demoweek1.view.object.Square;

import java.util.ArrayList;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

//Class in charge of object creation
public class ExtendRenderer implements GLSurfaceView.Renderer {

    private SoundInterface mContext;
    private HealthControl mHealthControl;

    private DrawObject mSquare, mGround;

    // Debounce timestamps: mLastClickTime for collisions, mLastClickTime2 for addObstacle().
    private long mLastClickTime;
    private long mLastClickTime2;

    private boolean isTriangleObstacle = true;

    private final float[] mMVPMatrix = new float[16];
    private final float[] mProjectionMatrix = new float[16];
    private final float[] mViewMatrix = new float[16];

    private boolean isJumping = false;
    private boolean isObstacle = false;

    // Live obstacles; mutated only on the GL thread in onDrawFrame.
    private ArrayList<DrawObject> list = new ArrayList<>();

    private int mTextureDataHandle;

    private static Bitmap mRawData;

    /** Stores the texture bitmap once; subsequent calls are ignored. */
    public static void setRawData(Bitmap data) {
        if (ExtendRenderer.mRawData == null) {
            ExtendRenderer.mRawData = data;
        }
    }

    private ScheduledThreadPoolExecutor mExecutor;
    private Runnable mPeriodiclyGenerateObstacle;
    private static final int OBSTACLE_GENERATE_PERIOD = 5000;

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        GLES20.glClearColor(0.31f, 0.765f, 0.969f, 1.0f);

        mTextureDataHandle = loadTexture(mRawData);

        mSquare = new Square(mTextureDataHandle);
        mGround = new Ground();

        mPeriodiclyGenerateObstacle = new Runnable() {
            @Override
            public void run() {
                if (!isObstacle) isObstacle = true;
            }
        };

        //Periodically generate obstacle to increase difficulty
        if (mExecutor == null) {
            mExecutor = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(10);
            mExecutor.scheduleWithFixedDelay(mPeriodiclyGenerateObstacle, 5000, OBSTACLE_GENERATE_PERIOD, TimeUnit.MILLISECONDS);
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int width, int height) {
        GLES20.glViewport(0,0, width, height);
        float ratio = (float) width/height;
        Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1,1,3,7);
        // Surface geometry changed: drop all on-screen obstacles.
        list.clear();
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        Matrix.setLookAtM(mViewMatrix, 0,0,0,3, 0f,0f,0f,0f, 1f,0f);
        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);

        mSquare.draw(mMVPMatrix, isJumping, mContext);
        mGround.draw(mMVPMatrix, false, null);

        //This method is deprecated, only use in earlier version of the app, use to check which obstacle type the fragment
        //is requesting
        if (isObstacle) {
            if (isTriangleObstacle) {
                list.add(new ObstacleTriangle());
            } else {
                list.add(new Obstacle());
            }
            isObstacle = false;
        }

        for (int i = 0; i < list.size(); i++) {
            if (list.get(i).canRemove()) {
                //Remove when out of screen.
                // BUG FIX: after remove(i) the following element shifts into slot i;
                // decrement i so it is not skipped this frame.
                list.remove(i);
                i--;
            } else {
                list.get(i).draw(mMVPMatrix, true, null);
                if (collisionCheck(mSquare, list.get(i))) {
                    //Play collide sound
                    mContext.playCollide();

                    // Debounce: ignore collisions closer than 100 ms apart so one
                    // overlap does not drain health every frame.
                    long lastClickTime = mLastClickTime;
                    long now = System.currentTimeMillis();
                    mLastClickTime = now;
                    if (now - lastClickTime < 100) {
                        // Too fast: ignore
                    } else {
                        // Register the click
                        ExtendGLSurfaceView.vibrate(50);
                        mHealthControl.decrease();
                    }
                }
            }
        }
    }

    public void goUp() {
        isJumping = true;
    }

    public void goDown() {
        isJumping = false;
    }

    /** Requests a new obstacle, debounced to at most one per 500 ms. */
    public void addObstacle() {
        long lastClickTime = mLastClickTime2;
        long now = System.currentTimeMillis();
        mLastClickTime2 = now;
        if (now - lastClickTime < 500) {
            // Too fast: ignore
        } else {
            // Register
            isObstacle = true;
        }
    }

    /**
     * Axis-aligned overlap test between two drawables.
     * NOTE(review): the X test doubles the center distance but the Y test does
     * not — a standard AABB check would use "* 2" on both axes. Behavior kept
     * as-is (Y overlap is effectively twice as permissive); confirm intent.
     */
    public boolean collisionCheck(DrawObject o1, DrawObject o2) {
        return ( Math.abs( o1.getCenterX() - o2.getCenterX() ) * 2 < o1.getWidth() + o2.getWidth() ) &&
                ( Math.abs( o1.getCenterY() - o2.getCenterY() ) < o1.getHeight() + o2.getHeight() );
    }

    public void setContext (SoundInterface context) {
        if (context != null) {
            mContext = context;
        }
    }

    public void setHealthControl (HealthControl context) {
        if (context != null) {
            mHealthControl = context;
        }
    }

    /**
     * Uploads the bitmap as a GL texture with nearest filtering and repeat wrap.
     *
     * @param data source bitmap; may be null
     * @return the GL texture handle, or -1 if {@code data} is null
     * @throws RuntimeException if GL fails to allocate a texture name
     */
    public static int loadTexture (Bitmap data) {
        if (data != null) {
            int[] textureHandle = new int[1];

            GLES20.glGenTextures(1, textureHandle, 0);

            if (textureHandle[0] != 0) {
                // (Removed a dead BitmapFactory.Options local that was created
                // but never passed to any decode call.)
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);

                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);

                GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, data, 0);

                // bitmap.recycle();
                return textureHandle[0];
            } else {
                throw new RuntimeException("Error loading texture");
            }
        } else {
            return -1;
        }
    }

    public interface DrawObject {
        void draw (float[] matrix, boolean behaviour, SoundInterface context);
        boolean canRemove();
        float getWidth();
        float getHeight();
        float getCenterX();
        float getCenterY();
    }

    public interface HealthControl {
        void decrease();
    }
}
package umontreal.ssj.rng; import umontreal.ssj.util.PrintfFormat; /* * Class: F2wPoly * Description: * Environment: Java * Software: SSJ * Copyright (C) 2001 Pierre L'Ecuyer and Universite de Montreal * Organization: DIRO, Universite de Montreal * @author: * @since * * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ class F2wPoly { private int r; //dimension private F2w.F2wElem coeff[]; //coefficients non-nuls private int noCoeff[]; //position des coefficients non-nuls private F2w f2wBase; //l'espace des F2w private F2wPolyElem z_i[]; //resultats du calculs des z^i dans F2wPoly /* Constructeur : Les coefficients coeff sont ceux du polynome P(z). Ils doivent avoir ete cree par la base f2wBase. */ public F2wPoly(int r, F2w.F2wElem coeff[], int noCoeff[], F2w f2wBase) { this.r = r; this.coeff = new F2w.F2wElem[coeff.length]; this.noCoeff = new int[noCoeff.length]; for(int i = 0; i < coeff.length; i++) { this.coeff[i] = coeff[i]; this.noCoeff[i] = noCoeff[i]; } this.f2wBase = f2wBase; //initialisation des z (pre-calculs) z_i = new F2wPolyElem[2 * r]; z_i[0] = new F2wPolyElem(); for(int i = 0; i < r; i++) z_i[0].value[i].value = 0; z_i[0].value[0].value = 0x80000000; for(int i = 1; i < 2*r; i++) z_i[i] = z_i[i-1].multiplyZ(); } /* Differentes methodes permettant d'acceder aux constructeurs de F2wPolyElem (on ne peut y acceder directement puisque F2wPolyElem a besoin de connaitre a quel F2wPoly il appartient). 
*/ public F2wPolyElem createElem() { return new F2wPolyElem(); } public F2wPolyElem createElem(F2wPolyElem that) { return new F2wPolyElem(that); } public F2wPolyElem createElem(int[] value) { return new F2wPolyElem(value); } /* Retourne le F2wPolyElem qui est egal a au polynome "z" */ public F2wPolyElem createZ() { int[] val = new int[r]; for(int i = 0; i < r; i++) val[i] = 0; val[1] = 0x80000000; return createElem(val); } /* Represente l'espace (F2w) dont font partie les coefficients des polynomes. F2w est definie comme etant F2[zeta]\Q(zeta). Note : la classe a ete concue pour que w = 32. */ public static class F2w { private int w; //dimension private int modQ; //modulo private F2wElem[] zeta_i; public F2w(int modQ) { this(32, modQ); } private F2w(int w, int modQ) { this.w = w; this.modQ = modQ; // initialisation des zetas zeta_i = new F2wElem[w]; zeta_i[0] = new F2wElem(0x80000000); for(int i = 1; i < w; i++) zeta_i[i] = zeta_i[i-1].multiplyZeta(2); } /* Methodes permettant d'acceder aux constructeurs de F2wElem. */ public F2wElem createElem() { return new F2wElem(); } public F2wElem createElem(F2wElem that) { return new F2wElem(that); } public F2wElem createElem(int val) { return new F2wElem(val); } public int getDim() { return w; } public int getModulo() { return modQ; } /* Classe representant les elements de F2w. Le bit le plus significatif (celui en 0x80000000) represente zeta^0 et le moins significatif (celui en 0x00000001) represente zeta^31. La multiplication par zeta se fait donc par un rigth-shift, suivis d'un modulo si le resultat depasse zeta^32. 
*/ public class F2wElem { private int value; //constructeurs private F2wElem() { value = 0; } private F2wElem(F2wElem that) { if(this.getBase() != that.getBase()) throw new IllegalArgumentException ("The copied F2wElem must originate from the same F2w."); this.value = that.value; } private F2wElem(int val) { value = val; } public F2w getBase() { return F2w.this; } public int getValue() { return value; } // calcule this * zeta^k dans F2w public F2wElem multiplyZeta(int k) { int res = value; if(k == 0) return new F2wElem(res); else { for(int i = 0; i < k; i++) if((1 & res) != 0) res = (res >>> 1) ^ modQ; else res >>>= 1; return new F2wElem(res); } } // calcule this * that dans F2w public F2wElem multiply(F2wElem that) { if(this.getBase() != that.getBase()) throw new IllegalArgumentException ("Both F2wElem must originate from the same F2w."); int res = 0; int verif = 1; for(int i = w - 1; i >= 0; i--) { if((that.value & verif) != 0) res ^= this.multiplyZeta(i).value; verif <<= 1; } return new F2wElem(res); } // calcule (this)^2 dans F2w public F2wElem square() { int res = 0; for(int i = 0; i < w; i++) if((value & (0x80000000 >>> i)) != 0) res ^= zeta_i[i].value; return new F2wElem(res); } public String toString() { StringBuffer sb = new StringBuffer(); int temp = value; for(int i = 0; i < 32; i++) { sb.append((temp & 1) == 1 ? '1' : '0'); temp >>>= 1; } return sb.reverse().toString(); } } } /* Represente un polynome faisant partie de F2wPoly. Le premier element de value est le coefficient de z^0, tandis que le dernier est le coefficient de z^(r-1). 
*/ public class F2wPolyElem { private F2w.F2wElem[] value; private F2wPolyElem() { value = new F2w.F2wElem[r]; for(int i = 0; i < r; i++) value[i] = f2wBase.createElem(); } private F2wPolyElem(F2wPolyElem that) { if(this.getBase() != that.getBase()) throw new IllegalArgumentException ("The copied F2wPolyElem must come from the same F2wPoly."); value = new F2w.F2wElem[r]; for(int i = 0; i < r; i++) this.value[i] = f2wBase.createElem(that.value[i]); } private F2wPolyElem(int[] value) { if(r != value.length) throw new IllegalArgumentException ("Array length must be equal to r (" + r + ")"); this.value = new F2w.F2wElem[r]; for(int i = 0; i < r; i++) this.value[i] = f2wBase.createElem(value[i]); } public F2wPoly getBase() { return F2wPoly.this; } //multiplie par this par z^1 dans F2w[z] public F2wPolyElem multiplyZ() { F2wPolyElem res = new F2wPolyElem(); res.value[0].value = 0; for(int i = 1; i < r; i++) res.value[i].value = this.value[i-1].value; for(int i = 0; i < noCoeff.length; i++) res.value[noCoeff[i]].value ^= this.value[r-1].multiply(coeff[i]).value; return res; } //calcule this * that dans F2w[z] public F2wPolyElem multiply(F2wPolyElem that) { if(this.getBase() != that.getBase()) throw new IllegalArgumentException ("Both F2wPolyElem must originate from the same F2wPoly."); F2wPolyElem res = new F2wPolyElem(); for(int i = 0; i < r; i++) for(int j = 0; j < r; j++) { F2w.F2wElem temp = this.value[i].multiply(that.value[j]); for(int k = 0; k < r; k++) res.value[k].value ^= z_i[i+j].value[k].multiply(temp).value; } return res; } //calcule this^(2^d) public F2wPolyElem exponentiateBase2 (int d) { F2wPolyElem res = new F2wPolyElem(this); F2wPolyElem temp= new F2wPolyElem(); for (int i = 0; i < d; i++) { for(int j = 0; j < r; j++) temp.value[j].value = 0; for(int j = 0; j < r; j++) { F2w.F2wElem coeff = res.value[j].square(); for(int k = 0; k < r; k++) temp.value[k].value ^= z_i[2*j].value[k].multiply(coeff).value; } for(int j = 0; j < r; j++) res.value[j].value = 
temp.value[j].value; } return res; } public void copyFrom(F2wPolyElem that) { if(this.getBase() != that.getBase()) throw new IllegalArgumentException ("Both F2wPolyElem must originate from the same F2wPoly."); for(int i = 0; i < r; i++) this.value[i].value = that.value[i].value; } public void copyFrom(int[] val) { if(r != value.length) throw new IllegalArgumentException ("Array length must be equal to r (" + r + ")"); for(int i = 0; i < r; i++) this.value[i].value = val[i]; } public void copyTo(int[] val) { if(r != value.length) throw new IllegalArgumentException ("Array length must be equal to r (" + r + ")"); for(int i = 0; i < r; i++) val[i] = this.value[i].value; } public String toString() { StringBuffer sb = new StringBuffer("{"); for(int i = 0; i < r - 1; i++) sb.append(value[i].toString() + ", " + PrintfFormat.NEWLINE + " "); if(r > 0) sb.append(value[r-1].toString()); sb.append("}"); return sb.toString(); } } public static void main(String[] args) { F2w f1 = new F2w(32, 0x00010002); F2w f2 = new F2w(32, 0x10204080); F2w.F2wElem e1 = f1.createElem(0x12345678); F2w.F2wElem e2 = f2.createElem(0x19414111); F2w.F2wElem e3 = e1.multiply(e2); /* F2w f2w = new F2w(32, 0xFA4F9B3F); F2wPoly poly = new F2wPoly(25, new F2w.F2wElem[]{f2w.createElem(0xE6A68D20), f2w.createElem(0x287AB842)}, new int[]{7, 0}, f2w); F2wPolyElem gen = poly.createElem (new int[]{0x95F24DAB, 0x0B685215, 0xE76CCAE7, 0xAF3EC239, 0x715FAD23, 0x24A590AD, 0x69E4B5EF, 0xBF456141, 0x96BC1B7B, 0xA7BDF825, 0xC1DE75B7, 0x8858A9C9, 0x2DA87693, 0xB657F9DD, 0xFFDC8A8F, 0x8121DA71, 0x8B823ECB, 0x885D05F5, 0x4E20CD47, 0x5A9AD5D9, 0x512C0C03, 0xEA857CCD, 0x4CC1D30F, 0x8891A8A1, 0xA6B7AADB}); for(int i = 0; i < poly.z_i.length; i++) { System.out.println(i + " :"); System.out.println(poly.z_i[i]); System.out.println(); } */ } }
/*
 * Copyright 2019 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.invocationbuilders.javatypes;

import static com.google.template.soy.invocationbuilders.javatypes.CodeGenUtils.castFunction;

/**
 * Class for simple java types (e.g. boolean, String, Number, SafeHtml) that do not need specialized
 * / complex logic. There are static constants at the top of this class for each of the types we
 * support. Callers must use these constants and cannot instantiate new instances of this class.
 *
 * <p>NOTE: For doubles and longs, see {@link PrimitiveJavaNumberType.DOUBLE} and {@link
 * PrimitiveJavaNumberType.LONG}. These are not simple types because they need special logic for
 * coercing {@code Number} types to {@code Long} or {@code Double} at runtime.
 */
public class SimpleJavaType extends JavaType {

  /**
   * Constants for all of the simple types we support. Use {@link #asNullable} to get the
   * corresponding nullable type.
   */
  public static final SimpleJavaType BOOLEAN =
      new PrimitiveJavaType(
          /* boxedType= */ "java.lang.Boolean",
          /* primitiveType= */ "boolean",
          /* genericType=*/ "java.lang.Boolean",
          /* isNullable=*/ false,
          castFunction("asBool"),
          castFunction("asBool"),
          castFunction("asNullableBool"));

  public static final SimpleJavaType INT =
      new PrimitiveJavaType(
          /* boxedType= */ "java.lang.Long",
          /* primitiveType= */ "long",
          /* genericType=*/ "? extends java.lang.Number",
          /* isNullable=*/ false,
          castFunction("asInt"),
          castFunction("asBoxedInt"),
          castFunction("asNullableInt"));

  public static final SimpleJavaType FLOAT =
      new PrimitiveJavaType(
          /* boxedType= */ "java.lang.Double",
          /* primitiveType= */ "double",
          /* genericType=*/ "? extends java.lang.Number",
          /* isNullable=*/ false,
          castFunction("asFloat"),
          castFunction("asBoxedFloat"),
          castFunction("asNullableFloat"));

  public static final SimpleJavaType NUMBER =
      new SimpleJavaType(
          "java.lang.Number",
          "? extends java.lang.Number",
          /* isNullable=*/ false,
          castFunction("asNumber"),
          castFunction("asNullableNumber"));

  public static final SimpleJavaType HTML =
      new SimpleJavaType(
          "com.google.common.html.types.SafeHtml",
          castFunction("asHtml"),
          castFunction("asNullableHtml"));

  public static final SimpleJavaType JS =
      new SimpleJavaType(
          "com.google.common.html.types.SafeScript",
          castFunction("asJs"),
          castFunction("asNullableJs"));

  public static final SimpleJavaType URL =
      new SimpleJavaType(
          "com.google.common.html.types.SafeUrl",
          castFunction("asUri"),
          castFunction("asNullableUri"));

  public static final SimpleJavaType TRUSTED_RESOURCE_URL =
      new SimpleJavaType(
          "com.google.common.html.types.TrustedResourceUrl",
          /* asReference=*/ castFunction("asTrustedResourceUri"),
          /* asNullableReference=*/ castFunction("asNullableTrustedResourceUri"));

  public static final SimpleJavaType STRING =
      new SimpleJavaType(
          "java.lang.String",
          /* asReference=*/ castFunction("asString"),
          /* asNullableReference=*/ castFunction("asNullableString"));

  private static final CodeGenUtils.Member AS_SOY_VALUE = castFunction("asSoyValue");

  // OBJECT uses the same cast ("asSoyValue") for both nullable and non-nullable references.
  public static final SimpleJavaType OBJECT =
      new SimpleJavaType(
          "java.lang.Object", "?", /* isNullable=*/ false, AS_SOY_VALUE, AS_SOY_VALUE);

  public static final SimpleJavaType ATTRIBUTES =
      new SimpleJavaType(
          "com.google.template.soy.data.SanitizedContent",
          /* asReference=*/ castFunction("asAttributes"),
          /* asNullableReference=*/ castFunction("asNullableAttributes"));

  public static final SimpleJavaType CSS =
      new SimpleJavaType(
          "com.google.template.soy.data.CssParam",
          /* asReference=*/ castFunction("asCss"),
          /* asNullableReference=*/ castFunction("asNullableCss"));

  public static final SimpleJavaType MESSAGE =
      new SimpleJavaType(
          "com.google.protobuf.Message",
          /* asReference=*/ castFunction("asProto"),
          /* asNullableReference=*/ castFunction("asNullableProto"));

  // Fully-qualified Java type name emitted for this type.
  private final String javaTypeString;
  // Type-argument string used when this type appears inside generics (may be a wildcard).
  private final String genericsTypeArgumentString;
  // Cast helper used for non-nullable references.
  final CodeGenUtils.Member asReference;
  // Cast helper used for nullable references.
  final CodeGenUtils.Member asNullableReference;

  /** Convenience constructor: generic type argument is the type itself, non-nullable. */
  private SimpleJavaType(
      String javaTypeString,
      CodeGenUtils.Member asReference,
      CodeGenUtils.Member asNullableReference) {
    this(javaTypeString, javaTypeString, /* isNullable=*/ false, asReference, asNullableReference);
  }

  private SimpleJavaType(
      String javaTypeString,
      String genericsTypeArgumentString,
      boolean isNullable,
      CodeGenUtils.Member asReference,
      CodeGenUtils.Member asNullableReference) {
    super(isNullable);
    this.javaTypeString = javaTypeString;
    this.genericsTypeArgumentString = genericsTypeArgumentString;
    this.asReference = asReference;
    this.asNullableReference = asNullableReference;
  }

  @Override
  public String toJavaTypeString() {
    return javaTypeString;
  }

  @Override
  String asGenericsTypeArgumentString() {
    return genericsTypeArgumentString;
  }

  @Override
  public SimpleJavaType asNullable() {
    return new SimpleJavaType(
        javaTypeString, genericsTypeArgumentString, true, asReference, asNullableReference);
  }

  @Override
  public String getAsInlineCastFunction(int depth) {
    return "AbstractBuilder::" + getMapperFunction();
  }

  /** Returns the cast helper matching this type's nullability. */
  CodeGenUtils.Member getMapperFunction() {
    return isNullable() ? asNullableReference : asReference;
  }

  @Override
  public String asInlineCast(String variable, int depth) {
    return getMapperFunction() + "(" + variable + ")";
  }

  /**
   * Subtype for types backed by a Java primitive (used for the BOOLEAN, INT, and FLOAT constants
   * above). Uses the primitive unless the type needs to be nullable, and then we switch to the
   * boxed type.
   */
  private static final class PrimitiveJavaType extends SimpleJavaType {
    final String primitiveType;
    final String boxedType;
    final String genericType;
    // Cast helper that yields the boxed form; the unboxed helper is stored as asReference.
    final CodeGenUtils.Member asFunctionBoxed;

    PrimitiveJavaType(
        String boxedType,
        String primitiveType,
        String genericType,
        boolean isNullable,
        CodeGenUtils.Member asFunctionUnboxed,
        CodeGenUtils.Member asFunctionBoxed,
        CodeGenUtils.Member asNullableFunction) {
      // Use the boxed type if the type needs to be nullable (or generic). Otherwise use the
      // primitive.
      super(
          isNullable ? boxedType : primitiveType,
          boxedType,
          isNullable,
          asFunctionUnboxed,
          asNullableFunction);
      this.genericType = genericType;
      this.asFunctionBoxed = asFunctionBoxed;
      this.primitiveType = primitiveType;
      this.boxedType = boxedType;
    }

    @Override
    public String asTypeLiteralString() {
      return boxedType;
    }

    @Override
    String asGenericsTypeArgumentString() {
      return genericType;
    }

    @Override
    public PrimitiveJavaType asNullable() {
      // asReference holds the unboxed cast helper set by the constructor above.
      return new PrimitiveJavaType(
          boxedType,
          primitiveType,
          genericType,
          true,
          asReference,
          asFunctionBoxed,
          asNullableReference);
    }

    @Override
    public String getAsInlineCastFunction(int depth) {
      return "AbstractBuilder::" + (isNullable() ? asNullableReference : asFunctionBoxed);
    }
  }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-792
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2013.11.15 at 07:01:26 PM IRST
//

package org.cloudface.tosca.model;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.w3c.dom.Element;


/**
 * <p>Java class for tPlan complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="tPlan">
 *   &lt;complexContent>
 *     &lt;extension base="{http://docs.oasis-open.org/tosca/ns/2011/12}tExtensibleElements">
 *       &lt;sequence>
 *         &lt;element name="Precondition" type="{http://docs.oasis-open.org/tosca/ns/2011/12}tCondition" minOccurs="0"/>
 *         &lt;element name="InputParameters" minOccurs="0">
 *           &lt;complexType>
 *             &lt;complexContent>
 *               &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *                 &lt;sequence>
 *                   &lt;element name="InputParameter" type="{http://docs.oasis-open.org/tosca/ns/2011/12}tParameter" maxOccurs="unbounded"/>
 *                 &lt;/sequence>
 *               &lt;/restriction>
 *             &lt;/complexContent>
 *           &lt;/complexType>
 *         &lt;/element>
 *         &lt;element name="OutputParameters" minOccurs="0">
 *           &lt;complexType>
 *             &lt;complexContent>
 *               &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *                 &lt;sequence>
 *                   &lt;element name="OutputParameter" type="{http://docs.oasis-open.org/tosca/ns/2011/12}tParameter" maxOccurs="unbounded"/>
 *                 &lt;/sequence>
 *               &lt;/restriction>
 *             &lt;/complexContent>
 *           &lt;/complexType>
 *         &lt;/element>
 *         &lt;choice>
 *           &lt;element name="PlanModel">
 *             &lt;complexType>
 *               &lt;complexContent>
 *                 &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *                   &lt;sequence>
 *                     &lt;any processContents='lax' namespace='##other'/>
 *                   &lt;/sequence>
 *                 &lt;/restriction>
 *               &lt;/complexContent>
 *             &lt;/complexType>
 *           &lt;/element>
 *           &lt;element name="PlanModelReference">
 *             &lt;complexType>
 *               &lt;complexContent>
 *                 &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *                   &lt;attribute name="reference" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
 *                 &lt;/restriction>
 *               &lt;/complexContent>
 *             &lt;/complexType>
 *           &lt;/element>
 *         &lt;/choice>
 *       &lt;/sequence>
 *       &lt;attribute name="id" use="required" type="{http://www.w3.org/2001/XMLSchema}ID" />
 *       &lt;attribute name="name" type="{http://www.w3.org/2001/XMLSchema}string" />
 *       &lt;attribute name="planType" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
 *       &lt;attribute name="planLanguage" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
 *       &lt;anyAttribute processContents='lax' namespace='##other'/>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "tPlan", propOrder = {
    "precondition",
    "inputParameters",
    "outputParameters",
    "planModel",
    "planModelReference"
})
public class TPlan
    extends TExtensibleElements
{

    @XmlElement(name = "Precondition")
    protected TCondition precondition;
    @XmlElement(name = "InputParameters")
    protected TPlan.InputParameters inputParameters;
    @XmlElement(name = "OutputParameters")
    protected TPlan.OutputParameters outputParameters;
    // Per the schema <choice> above, at most one of planModel / planModelReference is populated.
    @XmlElement(name = "PlanModel")
    protected TPlan.PlanModel planModel;
    @XmlElement(name = "PlanModelReference")
    protected TPlan.PlanModelReference planModelReference;
    @XmlAttribute(required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    @XmlAttribute
    protected String name;
    @XmlAttribute(required = true)
    @XmlSchemaType(name = "anyURI")
    protected String planType;
    @XmlAttribute(required = true)
    @XmlSchemaType(name = "anyURI")
    protected String planLanguage;

    /**
     * Gets the value of the precondition property.
     *
     * @return
     *     possible object is
     *     {@link TCondition }
     *
     */
    public TCondition getPrecondition() {
        return precondition;
    }

    /**
     * Sets the value of the precondition property.
     *
     * @param value
     *     allowed object is
     *     {@link TCondition }
     *
     */
    public void setPrecondition(TCondition value) {
        this.precondition = value;
    }

    /**
     * Gets the value of the inputParameters property.
     *
     * @return
     *     possible object is
     *     {@link TPlan.InputParameters }
     *
     */
    public TPlan.InputParameters getInputParameters() {
        return inputParameters;
    }

    /**
     * Sets the value of the inputParameters property.
     *
     * @param value
     *     allowed object is
     *     {@link TPlan.InputParameters }
     *
     */
    public void setInputParameters(TPlan.InputParameters value) {
        this.inputParameters = value;
    }

    /**
     * Gets the value of the outputParameters property.
     *
     * @return
     *     possible object is
     *     {@link TPlan.OutputParameters }
     *
     */
    public TPlan.OutputParameters getOutputParameters() {
        return outputParameters;
    }

    /**
     * Sets the value of the outputParameters property.
     *
     * @param value
     *     allowed object is
     *     {@link TPlan.OutputParameters }
     *
     */
    public void setOutputParameters(TPlan.OutputParameters value) {
        this.outputParameters = value;
    }

    /**
     * Gets the value of the planModel property.
     *
     * @return
     *     possible object is
     *     {@link TPlan.PlanModel }
     *
     */
    public TPlan.PlanModel getPlanModel() {
        return planModel;
    }

    /**
     * Sets the value of the planModel property.
     *
     * @param value
     *     allowed object is
     *     {@link TPlan.PlanModel }
     *
     */
    public void setPlanModel(TPlan.PlanModel value) {
        this.planModel = value;
    }

    /**
     * Gets the value of the planModelReference property.
     *
     * @return
     *     possible object is
     *     {@link TPlan.PlanModelReference }
     *
     */
    public TPlan.PlanModelReference getPlanModelReference() {
        return planModelReference;
    }

    /**
     * Sets the value of the planModelReference property.
     *
     * @param value
     *     allowed object is
     *     {@link TPlan.PlanModelReference }
     *
     */
    public void setPlanModelReference(TPlan.PlanModelReference value) {
        this.planModelReference = value;
    }

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setName(String value) {
        this.name = value;
    }

    /**
     * Gets the value of the planType property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPlanType() {
        return planType;
    }

    /**
     * Sets the value of the planType property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPlanType(String value) {
        this.planType = value;
    }

    /**
     * Gets the value of the planLanguage property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getPlanLanguage() {
        return planLanguage;
    }

    /**
     * Sets the value of the planLanguage property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setPlanLanguage(String value) {
        this.planLanguage = value;
    }


    /**
     * <p>Java class for anonymous complex type.
     *
     * <p>The following schema fragment specifies the expected content contained within this class.
     *
     * <pre>
     * &lt;complexType>
     *   &lt;complexContent>
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
     *       &lt;sequence>
     *         &lt;element name="InputParameter" type="{http://docs.oasis-open.org/tosca/ns/2011/12}tParameter" maxOccurs="unbounded"/>
     *       &lt;/sequence>
     *     &lt;/restriction>
     *   &lt;/complexContent>
     * &lt;/complexType>
     * </pre>
     *
     *
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "inputParameter"
    })
    public static class InputParameters {

        @XmlElement(name = "InputParameter", required = true)
        protected List<TParameter> inputParameter;

        /**
         * Gets the value of the inputParameter property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the inputParameter property.
         *
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getInputParameter().add(newItem);
         * </pre>
         *
         *
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link TParameter }
         *
         *
         */
        public List<TParameter> getInputParameter() {
            if (inputParameter == null) {
                inputParameter = new ArrayList<TParameter>();
            }
            return this.inputParameter;
        }

    }


    /**
     * <p>Java class for anonymous complex type.
     *
     * <p>The following schema fragment specifies the expected content contained within this class.
     *
     * <pre>
     * &lt;complexType>
     *   &lt;complexContent>
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
     *       &lt;sequence>
     *         &lt;element name="OutputParameter" type="{http://docs.oasis-open.org/tosca/ns/2011/12}tParameter" maxOccurs="unbounded"/>
     *       &lt;/sequence>
     *     &lt;/restriction>
     *   &lt;/complexContent>
     * &lt;/complexType>
     * </pre>
     *
     *
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "outputParameter"
    })
    public static class OutputParameters {

        @XmlElement(name = "OutputParameter", required = true)
        protected List<TParameter> outputParameter;

        /**
         * Gets the value of the outputParameter property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the outputParameter property.
         *
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getOutputParameter().add(newItem);
         * </pre>
         *
         *
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link TParameter }
         *
         *
         */
        public List<TParameter> getOutputParameter() {
            if (outputParameter == null) {
                outputParameter = new ArrayList<TParameter>();
            }
            return this.outputParameter;
        }

    }


    /**
     * <p>Java class for anonymous complex type.
     *
     * <p>The following schema fragment specifies the expected content contained within this class.
     *
     * <pre>
     * &lt;complexType>
     *   &lt;complexContent>
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
     *       &lt;sequence>
     *         &lt;any processContents='lax' namespace='##other'/>
     *       &lt;/sequence>
     *     &lt;/restriction>
     *   &lt;/complexContent>
     * &lt;/complexType>
     * </pre>
     *
     *
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "any"
    })
    public static class PlanModel {

        @XmlAnyElement(lax = true)
        protected Object any;

        /**
         * Gets the value of the any property.
         *
         * @return
         *     possible object is
         *     {@link Object }
         *     {@link Element }
         *
         */
        public Object getAny() {
            return any;
        }

        /**
         * Sets the value of the any property.
         *
         * @param value
         *     allowed object is
         *     {@link Object }
         *     {@link Element }
         *
         */
        public void setAny(Object value) {
            this.any = value;
        }

    }


    /**
     * <p>Java class for anonymous complex type.
     *
     * <p>The following schema fragment specifies the expected content contained within this class.
     *
     * <pre>
     * &lt;complexType>
     *   &lt;complexContent>
     *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
     *       &lt;attribute name="reference" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
     *     &lt;/restriction>
     *   &lt;/complexContent>
     * &lt;/complexType>
     * </pre>
     *
     *
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "")
    public static class PlanModelReference {

        @XmlAttribute(required = true)
        @XmlSchemaType(name = "anyURI")
        protected String reference;

        /**
         * Gets the value of the reference property.
         *
         * @return
         *     possible object is
         *     {@link String }
         *
         */
        public String getReference() {
            return reference;
        }

        /**
         * Sets the value of the reference property.
         *
         * @param value
         *     allowed object is
         *     {@link String }
         *
         */
        public void setReference(String value) {
            this.reference = value;
        }

    }

}
/* * ============================================================================= * * Copyright (c) 2011-2018, The THYMELEAF team (http://www.thymeleaf.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ============================================================================= */ package org.thymeleaf.util; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; /** * * @author Daniel Fern&aacute;ndez * * @since 1.0 * */ public final class AggregateUtils { /** * <p> * Returns the sum of all the numbers contained in the provided * iterable (e.g. a collection). * </p> * * @param target the iterable containing the number objects * @return the sum, as a BigDecimal */ public static BigDecimal sum(final Iterable<? extends Number> target) { Validate.notNull(target, "Cannot aggregate on null"); Validate.containsNoNulls(target, "Cannot aggregate on iterable containing nulls"); BigDecimal total = BigDecimal.ZERO; int size = 0; for (final Number element : target) { total = total.add(toBigDecimal(element)); size++; } if (size == 0) { return null; } return total; } /** * <p> * Returns the sum of all the numbers contained in the provided array. 
* </p> * * @param target the array of numbers * @return the sum, as a BigDecimal */ public static BigDecimal sum(final Object[] target) { Validate.notNull(target, "Cannot aggregate on null"); Validate.containsNoNulls(target, "Cannot aggregate on array containing nulls"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final Object element : target) { total = total.add(toBigDecimal((Number)element)); } return total; } /** * <p> * Returns the sum of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the sum, as a BigDecimal */ public static BigDecimal sum(final byte[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final byte element : target) { total = total.add(toBigDecimal(element)); } return total; } /** * <p> * Returns the sum of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the sum, as a BigDecimal */ public static BigDecimal sum(final short[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final short element : target) { total = total.add(toBigDecimal(element)); } return total; } /** * <p> * Returns the sum of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the sum, as a BigDecimal */ public static BigDecimal sum(final int[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final int element : target) { total = total.add(toBigDecimal(element)); } return total; } /** * <p> * Returns the sum of all the numbers contained in the provided array. 
* </p> * * @param target the array of numbers * @return the sum, as a BigDecimal */ public static BigDecimal sum(final long[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final long element : target) { total = total.add(toBigDecimal(element)); } return total; } /** * <p> * Returns the sum of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the sum, as a BigDecimal */ public static BigDecimal sum(final float[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final float element : target) { total = total.add(toBigDecimal(element)); } return total; } /** * <p> * Returns the sum of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the sum, as a BigDecimal */ public static BigDecimal sum(final double[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final double element : target) { total = total.add(toBigDecimal(element)); } return total; } /** * <p> * Returns the average of all the numbers contained in the provided * iterable (e.g. a collection). * </p> * * @param target the iterable containing the number objects * @return the average, as a BigDecimal */ public static BigDecimal avg(final Iterable<? 
extends Number> target) { Validate.notNull(target, "Cannot aggregate on null"); Validate.containsNoNulls(target, "Cannot aggregate on array containing nulls"); BigDecimal total = BigDecimal.ZERO; int size = 0; for (final Number element : target) { total = total.add(toBigDecimal(element)); size++; } if (size == 0) { return null; } final BigDecimal divisor = BigDecimal.valueOf(size); try { return total.divide(divisor); } catch (final ArithmeticException e) { // We will get an arithmetic exception if: 1. Divisor is zero, which is impossible; or 2. Division // returns a number with a non-terminating decimal expansion. In the latter case, we will set the // scale manually. return total.divide(divisor, Math.max(total.scale(), 10), RoundingMode.HALF_UP); } } /** * <p> * Returns the average of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the average, as a BigDecimal */ public static BigDecimal avg(final Object[] target) { Validate.notNull(target, "Cannot aggregate on null"); Validate.containsNoNulls(target, "Cannot aggregate on array containing nulls"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final Object element : target) { total = total.add(toBigDecimal((Number)element)); } final BigDecimal divisor = BigDecimal.valueOf(target.length); try { return total.divide(divisor); } catch (final ArithmeticException e) { // We will get an arithmetic exception if: 1. Divisor is zero, which is impossible; or 2. Division // returns a number with a non-terminating decimal expansion. In the latter case, we will set the // scale manually. return total.divide(divisor, Math.max(total.scale(), 10), RoundingMode.HALF_UP); } } /** * <p> * Returns the average of all the numbers contained in the provided array. 
* </p> * * @param target the array of numbers * @return the average, as a BigDecimal */ public static BigDecimal avg(final byte[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final byte element : target) { total = total.add(toBigDecimal(element)); } final BigDecimal divisor = BigDecimal.valueOf(target.length); try { return total.divide(divisor); } catch (final ArithmeticException e) { // We will get an arithmetic exception if: 1. Divisor is zero, which is impossible; or 2. Division // returns a number with a non-terminating decimal expansion. In the latter case, we will set the // scale manually. return total.divide(divisor, Math.max(total.scale(), 10), RoundingMode.HALF_UP); } } /** * <p> * Returns the average of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the average, as a BigDecimal */ public static BigDecimal avg(final short[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final short element : target) { total = total.add(toBigDecimal(element)); } final BigDecimal divisor = BigDecimal.valueOf(target.length); try { return total.divide(divisor); } catch (final ArithmeticException e) { // We will get an arithmetic exception if: 1. Divisor is zero, which is impossible; or 2. Division // returns a number with a non-terminating decimal expansion. In the latter case, we will set the // scale manually. return total.divide(divisor, Math.max(total.scale(), 10), RoundingMode.HALF_UP); } } /** * <p> * Returns the average of all the numbers contained in the provided array. 
* </p> * * @param target the array of numbers * @return the average, as a BigDecimal */ public static BigDecimal avg(final int[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final int element : target) { total = total.add(toBigDecimal(element)); } final BigDecimal divisor = BigDecimal.valueOf(target.length); try { return total.divide(divisor); } catch (final ArithmeticException e) { // We will get an arithmetic exception if: 1. Divisor is zero, which is impossible; or 2. Division // returns a number with a non-terminating decimal expansion. In the latter case, we will set the // scale manually. return total.divide(divisor, Math.max(total.scale(), 10), RoundingMode.HALF_UP); } } /** * <p> * Returns the average of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the average, as a BigDecimal */ public static BigDecimal avg(final long[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final long element : target) { total = total.add(toBigDecimal(element)); } final BigDecimal divisor = BigDecimal.valueOf(target.length); try { return total.divide(divisor); } catch (final ArithmeticException e) { // We will get an arithmetic exception if: 1. Divisor is zero, which is impossible; or 2. Division // returns a number with a non-terminating decimal expansion. In the latter case, we will set the // scale manually. return total.divide(divisor, Math.max(total.scale(), 10), RoundingMode.HALF_UP); } } /** * <p> * Returns the average of all the numbers contained in the provided array. 
* </p> * * @param target the array of numbers * @return the average, as a BigDecimal */ public static BigDecimal avg(final float[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final float element : target) { total = total.add(toBigDecimal(element)); } final BigDecimal divisor = BigDecimal.valueOf(target.length); try { return total.divide(divisor); } catch (final ArithmeticException e) { // We will get an arithmetic exception if: 1. Divisor is zero, which is impossible; or 2. Division // returns a number with a non-terminating decimal expansion. In the latter case, we will set the // scale manually. return total.divide(divisor, Math.max(total.scale(), 10), RoundingMode.HALF_UP); } } /** * <p> * Returns the average of all the numbers contained in the provided array. * </p> * * @param target the array of numbers * @return the average, as a BigDecimal */ public static BigDecimal avg(final double[] target) { Validate.notNull(target, "Cannot aggregate on null"); if (target.length == 0) { return null; } BigDecimal total = BigDecimal.ZERO; for (final double element : target) { total = total.add(toBigDecimal(element)); } final BigDecimal divisor = BigDecimal.valueOf(target.length); try { return total.divide(divisor); } catch (final ArithmeticException e) { // We will get an arithmetic exception if: 1. Divisor is zero, which is impossible; or 2. Division // returns a number with a non-terminating decimal expansion. In the latter case, we will set the // scale manually. 
return total.divide(divisor, Math.max(total.scale(), 10), RoundingMode.HALF_UP); } } private static BigDecimal toBigDecimal(final Number number) { Validate.notNull(number, "Cannot convert null to BigDecimal"); if (number instanceof BigDecimal) { return (BigDecimal) number; } if (number instanceof BigInteger) { return new BigDecimal((BigInteger)number); } if (number instanceof Byte || number instanceof Short || number instanceof Integer || number instanceof Long) { return BigDecimal.valueOf(number.longValue()); } return BigDecimal.valueOf(number.doubleValue()); } private static BigDecimal toBigDecimal(final byte number) { return BigDecimal.valueOf(number); } private static BigDecimal toBigDecimal(final short number) { return BigDecimal.valueOf(number); } private static BigDecimal toBigDecimal(final int number) { return BigDecimal.valueOf(number); } private static BigDecimal toBigDecimal(final long number) { return BigDecimal.valueOf(number); } private static BigDecimal toBigDecimal(final float number) { return BigDecimal.valueOf(number); } private static BigDecimal toBigDecimal(final double number) { return BigDecimal.valueOf(number); } private AggregateUtils() { super(); } }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.test.api.form; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import org.camunda.bpm.engine.form.FormField; import org.camunda.bpm.engine.form.FormFieldValidationConstraint; import org.camunda.bpm.engine.form.TaskFormData; import org.camunda.bpm.engine.impl.form.type.EnumFormType; import org.camunda.bpm.engine.impl.form.validator.FormFieldValidationException; import org.camunda.bpm.engine.impl.form.validator.FormFieldValidatorException; import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase; import org.camunda.bpm.engine.runtime.ProcessInstance; import org.camunda.bpm.engine.task.Task; import org.camunda.bpm.engine.test.Deployment; /** * <p>Testcase verifying support for form matadata provided using * custom extension elements in BPMN Xml</p> * * @author Daniel Meyer * */ public class FormDataTest extends PluggableProcessEngineTestCase { @Deployment public void testGetFormFieldBasicProperties() { runtimeService.startProcessInstanceByKey("FormDataTest.testGetFormFieldBasicProperties"); Task task = taskService.createTaskQuery().singleResult(); TaskFormData taskFormData = formService.getTaskFormData(task.getId()); // validate properties: List<FormField> formFields = taskFormData.getFormFields(); // validate field 1 FormField formField1 = formFields.get(0); assertNotNull(formField1); assertEquals(formField1.getId(), 
"formField1"); assertEquals(formField1.getLabel(), "Form Field 1"); assertEquals("string", formField1.getTypeName()); assertNotNull(formField1.getType()); // validate field 2 FormField formField2 = formFields.get(1); assertNotNull(formField2); assertEquals(formField2.getId(), "formField2"); assertEquals(formField2.getLabel(), "Form Field 2"); assertEquals("boolean", formField2.getTypeName()); assertNotNull(formField1.getType()); } @Deployment public void testGetFormFieldBuiltInTypes() { runtimeService.startProcessInstanceByKey("FormDataTest.testGetFormFieldBuiltInTypes"); Task task = taskService.createTaskQuery().singleResult(); TaskFormData taskFormData = formService.getTaskFormData(task.getId()); // validate properties: List<FormField> formFields = taskFormData.getFormFields(); // validate string field FormField stringField = formFields.get(0); assertNotNull(stringField); assertEquals("string", stringField.getTypeName()); assertNotNull(stringField.getType()); assertEquals("someString", stringField.getDefaultValue()); // validate long field FormField longField = formFields.get(1); assertNotNull(longField); assertEquals("long", longField.getTypeName()); assertNotNull(longField.getType()); assertEquals(Long.valueOf(1l), longField.getDefaultValue()); // validate boolean field FormField booleanField = formFields.get(2); assertNotNull(booleanField); assertEquals("boolean", booleanField.getTypeName()); assertNotNull(booleanField.getType()); assertEquals(Boolean.valueOf(true), booleanField.getDefaultValue()); // validate date field FormField dateField = formFields.get(3); assertNotNull(dateField); assertEquals("date", dateField.getTypeName()); assertNotNull(dateField.getType()); Date dateValue = (Date) dateField.getDefaultValue(); Calendar calendar = Calendar.getInstance(); calendar.setTime(dateValue); assertEquals(10, calendar.get(Calendar.DAY_OF_MONTH)); assertEquals(Calendar.JANUARY, calendar.get(Calendar.MONTH)); assertEquals(2013, calendar.get(Calendar.YEAR)); // 
validate enum field FormField enumField = formFields.get(4); assertNotNull(enumField); assertEquals("enum", enumField.getTypeName()); assertNotNull(enumField.getType()); EnumFormType enumFormType = (EnumFormType) enumField.getType(); Map<String, String> values = enumFormType.getValues(); assertEquals("A", values.get("a")); assertEquals("B", values.get("b")); assertEquals("C", values.get("c")); } @Deployment public void testGetFormFieldProperties() { runtimeService.startProcessInstanceByKey("FormDataTest.testGetFormFieldProperties"); Task task = taskService.createTaskQuery().singleResult(); TaskFormData taskFormData = formService.getTaskFormData(task.getId()); List<FormField> formFields = taskFormData.getFormFields(); FormField stringField = formFields.get(0); Map<String, String> properties = stringField.getProperties(); assertEquals("property1", properties.get("p1")); assertEquals("property2", properties.get("p2")); } @Deployment public void testGetFormFieldValidationConstraints() { runtimeService.startProcessInstanceByKey("FormDataTest.testGetFormFieldValidationConstraints"); Task task = taskService.createTaskQuery().singleResult(); TaskFormData taskFormData = formService.getTaskFormData(task.getId()); List<FormField> formFields = taskFormData.getFormFields(); FormField field1 = formFields.get(0); List<FormFieldValidationConstraint> validationConstraints = field1.getValidationConstraints(); FormFieldValidationConstraint constraint1 = validationConstraints.get(0); assertEquals("maxlength", constraint1.getName()); assertEquals("10", constraint1.getConfiguration()); FormFieldValidationConstraint constraint2 = validationConstraints.get(1); assertEquals("minlength", constraint2.getName()); assertEquals("5", constraint2.getConfiguration()); } @Deployment public void testFormFieldSubmit() { // valid submit ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("FormDataTest.testFormFieldSubmit"); Task task = 
taskService.createTaskQuery().singleResult(); Map<String, Object> formValues = new HashMap<String, Object>(); formValues.put("stringField", "12345"); formValues.put("longField", 9L); formValues.put("customField", "validValue"); formService.submitTaskForm(task.getId(), formValues); assertEquals(formValues, runtimeService.getVariables(processInstance.getId())); runtimeService.deleteProcessInstance(processInstance.getId(), "test complete"); runtimeService.startProcessInstanceByKey("FormDataTest.testFormFieldSubmit"); task = taskService.createTaskQuery().singleResult(); // invalid submit 1 formValues = new HashMap<String, Object>(); formValues.put("stringField", "1234"); formValues.put("longField", 9L); formValues.put("customField", "validValue"); try { formService.submitTaskForm(task.getId(), formValues); fail(); } catch (FormFieldValidatorException e) { assertEquals(e.getName(), "minlength"); } // invalid submit 2 formValues = new HashMap<String, Object>(); formValues.put("customFieldWithValidationDetails", "C"); try { formService.submitTaskForm(task.getId(), formValues); fail(); } catch (FormFieldValidatorException e) { assertEquals(e.getName(), "validator"); assertEquals(e.getId(), "customFieldWithValidationDetails"); assertTrue(e.getCause() instanceof FormFieldValidationException); FormFieldValidationException exception = (FormFieldValidationException) e.getCause(); assertEquals(exception.getDetail(), "EXPIRED"); } } @Deployment public void testMissingFormVariables() { // given process definition with defined form varaibles // when start process instance with no variables ProcessInstance processInstance = runtimeService.startProcessInstanceByKey("date-form-property-test"); Task task = taskService.createTaskQuery().processInstanceId(processInstance.getId()).singleResult(); // then taskFormData contains form variables with null as values TaskFormData taskFormData = formService.getTaskFormData(task.getId()); assertNotNull(taskFormData); assertEquals(5, 
taskFormData.getFormFields().size()); for (FormField field : taskFormData.getFormFields()) { assertNotNull(field); assertNull(field.getValue().getValue()); } } @Deployment(resources = "org/camunda/bpm/engine/test/api/form/FormDataTest.testDoubleQuotesAreEscapedInGeneratedTaskForms.bpmn20.xml") public void testDoubleQuotesAreEscapedInGeneratedTaskForms() { // given HashMap<String, Object> variables = new HashMap<String, Object>(); variables.put("foo", "This is a \"Test\" message!"); ProcessInstance pi = runtimeService.startProcessInstanceByKey("oneTaskProcess", variables); Task taskWithForm = taskService.createTaskQuery().singleResult(); // when Object renderedStartForm = formService.getRenderedTaskForm(taskWithForm.getId()); assertTrue(renderedStartForm instanceof String); // then String renderedForm = (String) renderedStartForm; String expectedFormValueWithEscapedQuotes = "This is a &quot;Test&quot; message!"; assertTrue(renderedForm.contains(expectedFormValueWithEscapedQuotes)); } }
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.ex.chips; import android.net.Uri; import android.provider.ContactsContract.CommonDataKinds.Email; import android.provider.ContactsContract.DisplayNameSources; import android.text.util.Rfc822Token; import android.text.util.Rfc822Tokenizer; /** * Represents one entry inside recipient auto-complete list. */ public class RecipientEntry { /* package */ static final int INVALID_CONTACT = -1; /** * A GENERATED_CONTACT is one that was created based entirely on * information passed in to the RecipientEntry from an external source * that is not a real contact. */ /* package */ static final int GENERATED_CONTACT = -2; /** Used when {@link #mDestinationType} is invalid and thus shouldn't be used for display. */ /* package */ static final int INVALID_DESTINATION_TYPE = -1; public static final int ENTRY_TYPE_PERSON = 0; public static final int ENTRY_TYPE_SIZE = 1; private final int mEntryType; /** * True when this entry is the first entry in a group, which should have a photo and display * name, while the second or later entries won't. */ private boolean mIsFirstLevel; private final String mDisplayName; /** Destination for this contact entry. Would be an email address or a phone number. 
*/ private final String mDestination; /** Type of the destination like {@link Email#TYPE_HOME} */ private final int mDestinationType; /** * Label of the destination which will be used when type was {@link Email#TYPE_CUSTOM}. * Can be null when {@link #mDestinationType} is {@link #INVALID_DESTINATION_TYPE}. */ private final String mDestinationLabel; /** ID for the person */ private final long mContactId; /** ID for the directory this contact came from, or <code>null</code> */ private final Long mDirectoryId; /** ID for the destination */ private final long mDataId; private final boolean mIsDivider; private final Uri mPhotoThumbnailUri; private boolean mIsValid; /** * This can be updated after this object being constructed, when the photo is fetched * from remote directories. */ private byte[] mPhotoBytes; /** See {@link ContactsContract.Contacts#LOOKUP_KEY} */ private final String mLookupKey; private RecipientEntry(int entryType, String displayName, String destination, int destinationType, String destinationLabel, long contactId, Long directoryId, long dataId, Uri photoThumbnailUri, boolean isFirstLevel, boolean isValid, String lookupKey) { mEntryType = entryType; mIsFirstLevel = isFirstLevel; mDisplayName = displayName; mDestination = destination; mDestinationType = destinationType; mDestinationLabel = destinationLabel; mContactId = contactId; mDirectoryId = directoryId; mDataId = dataId; mPhotoThumbnailUri = photoThumbnailUri; mPhotoBytes = null; mIsDivider = false; mIsValid = isValid; mLookupKey = lookupKey; } public boolean isValid() { return mIsValid; } /** * Determine if this was a RecipientEntry created from recipient info or * an entry from contacts. */ public static boolean isCreatedRecipient(long id) { return id == RecipientEntry.INVALID_CONTACT || id == RecipientEntry.GENERATED_CONTACT; } /** * Construct a RecipientEntry from just an address that has been entered. 
* This address has not been resolved to a contact and therefore does not * have a contact id or photo. */ public static RecipientEntry constructFakeEntry(final String address, final boolean isValid) { final Rfc822Token[] tokens = Rfc822Tokenizer.tokenize(address); final String tokenizedAddress = tokens.length > 0 ? tokens[0].getAddress() : address; return new RecipientEntry(ENTRY_TYPE_PERSON, tokenizedAddress, tokenizedAddress, INVALID_DESTINATION_TYPE, null, INVALID_CONTACT, null /* directoryId */, INVALID_CONTACT, null, true, isValid, null /* lookupKey */); } /** * Construct a RecipientEntry from just a phone number. */ public static RecipientEntry constructFakePhoneEntry(final String phoneNumber, final boolean isValid) { return new RecipientEntry(ENTRY_TYPE_PERSON, phoneNumber, phoneNumber, INVALID_DESTINATION_TYPE, null, INVALID_CONTACT, null /* directoryId */, INVALID_CONTACT, null, true, isValid, null /* lookupKey */); } /** * @return the display name for the entry. If the display name source is larger than * {@link DisplayNameSources#PHONE} we use the contact's display name, but if not, * i.e. the display name came from an email address or a phone number, we don't use it * to avoid confusion and just use the destination instead. */ private static String pickDisplayName(int displayNameSource, String displayName, String destination) { return (displayNameSource > DisplayNameSources.PHONE) ? displayName : destination; } /** * Construct a RecipientEntry from just an address that has been entered * with both an associated display name. This address has not been resolved * to a contact and therefore does not have a contact id or photo. 
*/ public static RecipientEntry constructGeneratedEntry(String display, String address, boolean isValid) { return new RecipientEntry(ENTRY_TYPE_PERSON, display, address, INVALID_DESTINATION_TYPE, null, GENERATED_CONTACT, null /* directoryId */, GENERATED_CONTACT, null, true, isValid, null /* lookupKey */); } public static RecipientEntry constructTopLevelEntry(String displayName, int displayNameSource, String destination, int destinationType, String destinationLabel, long contactId, Long directoryId, long dataId, Uri photoThumbnailUri, boolean isValid, String lookupKey) { return new RecipientEntry(ENTRY_TYPE_PERSON, pickDisplayName(displayNameSource, displayName, destination), destination, destinationType, destinationLabel, contactId, directoryId, dataId, photoThumbnailUri, true, isValid, lookupKey); } public static RecipientEntry constructTopLevelEntry(String displayName, int displayNameSource, String destination, int destinationType, String destinationLabel, long contactId, Long directoryId, long dataId, String thumbnailUriAsString, boolean isValid, String lookupKey) { return new RecipientEntry(ENTRY_TYPE_PERSON, pickDisplayName(displayNameSource, displayName, destination), destination, destinationType, destinationLabel, contactId, directoryId, dataId, (thumbnailUriAsString != null ? Uri.parse(thumbnailUriAsString) : null), true, isValid, lookupKey); } public static RecipientEntry constructSecondLevelEntry(String displayName, int displayNameSource, String destination, int destinationType, String destinationLabel, long contactId, Long directoryId, long dataId, String thumbnailUriAsString, boolean isValid, String lookupKey) { return new RecipientEntry(ENTRY_TYPE_PERSON, pickDisplayName(displayNameSource, displayName, destination), destination, destinationType, destinationLabel, contactId, directoryId, dataId, (thumbnailUriAsString != null ? 
Uri.parse(thumbnailUriAsString) : null), false, isValid, lookupKey); } public int getEntryType() { return mEntryType; } public String getDisplayName() { return mDisplayName; } public String getDestination() { return mDestination; } public int getDestinationType() { return mDestinationType; } public String getDestinationLabel() { return mDestinationLabel; } public long getContactId() { return mContactId; } public Long getDirectoryId() { return mDirectoryId; } public long getDataId() { return mDataId; } public boolean isFirstLevel() { return mIsFirstLevel; } public Uri getPhotoThumbnailUri() { return mPhotoThumbnailUri; } /** This can be called outside main Looper thread. */ public synchronized void setPhotoBytes(byte[] photoBytes) { mPhotoBytes = photoBytes; } /** This can be called outside main Looper thread. */ public synchronized byte[] getPhotoBytes() { return mPhotoBytes; } public boolean isSeparator() { return mIsDivider; } public boolean isSelectable() { return mEntryType == ENTRY_TYPE_PERSON; } public String getLookupKey() { return mLookupKey; } @Override public String toString() { return mDisplayName + " <" + mDestination + ">, isValid=" + mIsValid; } }
package net.didion.jwnl.data.relationship; import net.didion.jwnl.JWNLException; import net.didion.jwnl.data.*; import net.didion.jwnl.data.list.PointerTargetNode; import net.didion.jwnl.data.list.PointerTargetNodeList; import net.didion.jwnl.data.list.PointerTargetTree; import net.didion.jwnl.data.list.PointerTargetTreeNode; import net.didion.jwnl.data.list.PointerTargetTreeNodeList; import java.util.*; public class RelationshipFinder { private static final int DEFAULT_ASYMMETRIC_SEARCH_DEPTH = Integer.MAX_VALUE; private static final int DEFAULT_SYMMETRIC_SEARCH_DEPTH = 2; private static final RelationshipFinder INSTANCE = new RelationshipFinder(); public static RelationshipFinder getInstance() { return INSTANCE; } private RelationshipFinder() { } /** * Looks at whether the target word is one of the words in one of the synsets * of the source word. * @return int the sense of the source word that contains the target word */ public int getImmediateRelationship(IndexWord sourceWord, IndexWord targetWord) throws JWNLException { Synset[] senses = sourceWord.getSenses(); String lemma = targetWord.getLemma(); for (int i = 0; i < senses.length; i++) { if (senses[i].containsWord(lemma)) { return i + 1; } } return -1; } /** * Find all relationships of type <var>type</var> between <var>sourceSynset</var> and <var>targetSynset</var>. * This method creates a symmetric or asymmetric relationship based on whether <var>type</var> is symmetric. */ public RelationshipList findRelationships( Synset sourceSynset, Synset targetSynset, PointerType type) throws JWNLException { return (type.isSymmetric()) ? findSymmetricRelationships(sourceSynset, targetSynset, type) : findAsymmetricRelationships(sourceSynset, targetSynset, type); } /** * Find all relationships of type <var>type</var> between <var>sourceSynset</var> and <var>targetSynset</var> * to depth <var>depth</var>. This method creates a symmetric or asymmetric relationship based on * whether <var>type</var> is symmetric. 
*/ public RelationshipList findRelationships( Synset sourceSynset, Synset targetSynset, PointerType type, int depth) throws JWNLException { return (type.isSymmetric()) ? findSymmetricRelationships(sourceSynset, targetSynset, type, depth) : findAsymmetricRelationships(sourceSynset, targetSynset, type, depth); } /** * Finds the asymmetric relationship(s) between two words. A relationship is * asymmetric if its type is asymmetric (i.e. it's not its own inverse). */ private RelationshipList findAsymmetricRelationships( Synset sourceSynset, Synset targetSynset, PointerType type) throws JWNLException { return findAsymmetricRelationships(sourceSynset, targetSynset, type, DEFAULT_ASYMMETRIC_SEARCH_DEPTH); } /** * Finds the asymmetric relationship(s) between two words. A relationship is * asymmetric if its type is asymmetric (i.e. it's not its own inverse). */ private RelationshipList findAsymmetricRelationships( Synset sourceSynset, Synset targetSynset, PointerType type, int depth) throws JWNLException { // We run the reversal function on the trees to get linear (non-branching) // paths from the source word to its deepest ancestor (i.e. if there are // multiple relations from a single word anywhere in the path, the reversal // function will break them down into multiple, linear paths). 
PointerTargetNodeList[] sourceRelations = new PointerTargetTree( sourceSynset, PointerUtils.getInstance().makePointerTargetTreeList(sourceSynset, type, depth)).reverse(); PointerTargetNodeList[] targetRelations = new PointerTargetTree( targetSynset, PointerUtils.getInstance().makePointerTargetTreeList(targetSynset, type, depth)).reverse(); RelationshipList relationships = new RelationshipList(); // Do an exhaustive search for relationships for (int i = 0; i < sourceRelations.length; i++) { for (int j = 0; j < targetRelations.length; j++) { Relationship relationship = findAsymmetricRelationship( sourceRelations[i], targetRelations[j], type, sourceSynset, targetSynset); if (relationship != null) { relationships.add(relationship); } } } return relationships; } /** * Find a relationship between two asymmetric lists ordered from deepest * to shallowest ancestor. Each node has it's PointerType set to the kind of * relationship one need to follow to get from it to the next node in the list. * Take the dog/cat relationship. To get to carnivore, a hypernym relationship * must be used to get from dog to carnivore, but then a hyponym relationship * must be used to get from carnivore to cat. The list will look like this: * dog(hyper) -> canine(hyper) -> carnivore(hypo) -> feline(hypo) -> cat(hypo). * In this instance, cat's PointerType is meaningless, but is kept to facilitate * things like reversing the relationship (which just involves setting each node's * pointer type to the symmetric type of its current type. */ private Relationship findAsymmetricRelationship( PointerTargetNodeList sourceNodes, PointerTargetNodeList targetNodes, PointerType type, Synset sourceSynset, Synset targetSynset) { // If the deepest ancestors of the words are not the same, // then there is no relationship between the words. 
if (!sourceNodes.get(0).equals(targetNodes.get(0))) return null; PointerTargetNodeList relationship = new PointerTargetNodeList(); int targetStart = 0; int commonParentIndex = 0; for (int i = sourceNodes.size() - 1; i >= 0; i--) { PointerTargetNode testNode = (PointerTargetNode)sourceNodes.get(i); int idx = targetNodes.indexOf(testNode); if (idx >= 0) { targetStart = idx; break; } else { relationship.add(testNode.clone()); commonParentIndex++; } } for (int i = targetStart; i < targetNodes.size(); i++) { PointerTargetNode node = (PointerTargetNode)((PointerTargetNode)targetNodes.get(i)).clone(); node.setType(type.getSymmetricType()); relationship.add(node); } return new AsymmetricRelationship(type, relationship, commonParentIndex, sourceSynset, targetSynset); } /** * A symmetric relationship is one whose type is symmetric (i.e. is it's own * inverse. An example of a symmetric relationship is synonomy. */ private RelationshipList findSymmetricRelationships( Synset sourceSynset, Synset targetSynset, PointerType type) throws JWNLException { return findSymmetricRelationships(sourceSynset, targetSynset, type, DEFAULT_SYMMETRIC_SEARCH_DEPTH); } /** A symmetric relationship is one whose type is symmetric (i.e. is it's own inverse). 
*/ private RelationshipList findSymmetricRelationships( final Synset sourceSynset, final Synset targetSynset, PointerType type, int depth) throws JWNLException { PointerTargetTree tree = new PointerTargetTree( sourceSynset, PointerUtils.getInstance().makePointerTargetTreeList(sourceSynset, type, null, depth, false)); PointerTargetTreeNodeList.Operation opr = new PointerTargetTreeNodeList.Operation() { public Object execute(PointerTargetTreeNode testNode) { if (targetSynset.equals(testNode.getPointerTarget())) { return testNode; } return null; } }; List l = tree.getAllMatches(opr); RelationshipList list = new RelationshipList(); for (int i = 0; i < l.size(); i++) { PointerTargetNodeList nodes = findSymmetricRelationship((PointerTargetTreeNode)l.get(i), type); list.add(new SymmetricRelationship(type, nodes, sourceSynset, targetSynset)); } return list; } /** * Build a relationsip from <var>node</var> back to it's root ancestor and * then reverse the list. */ private PointerTargetNodeList findSymmetricRelationship(PointerTargetTreeNode node, PointerType type) { PointerTargetNodeList list = new PointerTargetNodeList(); buildSymmetricRelationshipList(list, node); list = list.reverse(); // set the root's pointer type ((PointerTargetNode)list.get(0)).setType(type); return list; } /** Build the relationship. 
     */
    private void buildSymmetricRelationshipList(PointerTargetNodeList list, PointerTargetTreeNode node) {
        // Record this node first, then recurse toward the root; the caller
        // reverses the resulting leaf-to-root list into root-to-leaf order.
        list.add(node.getPointerTarget(), node.getType());
        if (node.getParent() != null) {
            buildSymmetricRelationshipList(list, node.getParent());
        }
    }

    // NOTE(review): the block below is a commented-out experimental bidirectional
    // search kept for reference only; it is not compiled. If it is ever revived,
    // note the apparent defects: "relationship[sourceNode.depth + ...]" in the
    // second while-loop dereferences sourceNode after the first loop has driven it
    // to null; "parent.expanded = true" NPEs on the initial call where parent is
    // null (as does "prev.depth" in the Node constructor); and
    // "rl.add(Relationship(null, ...))" is missing the "new" keyword.
    /*
    public RelationshipList findRelationships(Synset source, Synset target) {
        Map sourceNodes = new HashMap();
        Map targetNodes = new HashMap();
        Map matches = new HashMap();

        Map sourceResults = expand(source, sourceNodes, null, false);
        Map targetResults = expand(target, targetNodes, null, true);
        findMatches(sourceResults, targetResults, matches);
        for (int i = 0; i < 10; i++) {
            sourceResults = expand(sourceResults, sourceNodes, false);
            targetResults = expand(targetResults, targetNodes, true);
            findMatches(sourceResults, targetResults, matches);
        }

        RelationshipList rl = new RelationshipList();
        for (Iterator itr = matches.entrySet().iterator(); itr.hasNext();) {
            Map.Entry entry = (Map.Entry) itr.next();
            Node sourceNode = (Node) entry.getKey();
            Node targetNode = (Node) entry.getValue();
            PointerTarget[] relationship = new PointerTarget[sourceNode.depth + targetNode.depth];
            while (sourceNode != null) {
                relationship[sourceNode.depth] = sourceNode.ptr.getSource();
                sourceNode = sourceNode.prev;
            }
            int targetDepth = targetNode.depth;
            while (targetNode != null) {
                relationship[sourceNode.depth + (targetDepth - targetNode.depth) + 1] = targetNode.ptr.getTarget();
                targetNode = targetNode.prev;
            }
            rl.add(Relationship(null, new PointerTargetNodeList(relationship), source, target));
        }
        return rl;
    }

    private Map expand(Synset s, Map nodes, Node parent, boolean reflexiveOnly) {
        Pointer[] ptrs = s.getPointers();
        Map results = new HashMap();
        for (int i = 0; i < ptrs.length; i++) {
            if (!reflexiveOnly || ptrs[i].getType().getSymmetricType() != null) {
                Long key = new Long(ptrs[i].getTargetOffset());
                if (!nodes.containsKey(key)) {
                    results.put(key, new Node(ptrs[i], parent));
                }
            }
        }
        parent.expanded = true;
        nodes.putAll(results);
        return results;
    }

    private Map expand(Map expandNodes, Map allNodes, boolean reflexiveOnly) {
        Map results = new HashMap();
        for (Iterator itr = expandNodes.values().iterator(); itr.hasNext();) {
            Node parent = (Node) itr.next();
            results.putAll(expand(parent.ptr.getTarget(), allNodes, parent, reflexiveOnly));
        }
        return results;
    }

    private void findMatches(Map sourceResults, Map targetResults, Map matches) {
        Set sourceSet = sourceResults.keySet();
        Set targetSet = targetResults.keySet();
        for (Iterator itr = sourceSet.iterator(); itr.hasNext();) {
            Long offset = (Long) itr.next();
            if (targetSet.contains(offset)) {
                Node source = (Node) sourceResults.get(offset);
                Node target = (Node) targetResults.get(offset);
                matches.put(source, target);
            }
        }
    }

    private static class Node {
        Pointer ptr;
        Node prev;
        int depth;
        boolean expanded = false;

        public Node(Pointer ptr, Node prev) {
            this.ptr = ptr;
            this.prev = prev;
            this.depth = prev.depth + 1;
        }
    }
    */
}
// Generated from C:/cyberthinkers-dev/typescript-to-scala-js-facade/src/main/java/com/cyberthinkers/tools/generate\GenScalaFacadesFromTypescript.g4 by ANTLR 4.7 package com.cyberthinkers.tools.generate; import org.antlr.v4.runtime.tree.ParseTreeListener; /** * This interface defines a complete listener for a parse tree produced by * {@link GenScalaFacadesFromTypescriptParser}. */ public interface GenScalaFacadesFromTypescriptListener extends ParseTreeListener { /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typescriptAmbientDeclarations}. * @param ctx the parse tree */ void enterTypescriptAmbientDeclarations(GenScalaFacadesFromTypescriptParser.TypescriptAmbientDeclarationsContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typescriptAmbientDeclarations}. * @param ctx the parse tree */ void exitTypescriptAmbientDeclarations(GenScalaFacadesFromTypescriptParser.TypescriptAmbientDeclarationsContext ctx); /** * Enter a parse tree produced by the {@code declareModuleOrNamespace} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. * @param ctx the parse tree */ void enterDeclareModuleOrNamespace(GenScalaFacadesFromTypescriptParser.DeclareModuleOrNamespaceContext ctx); /** * Exit a parse tree produced by the {@code declareModuleOrNamespace} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. * @param ctx the parse tree */ void exitDeclareModuleOrNamespace(GenScalaFacadesFromTypescriptParser.DeclareModuleOrNamespaceContext ctx); /** * Enter a parse tree produced by the {@code declareGlobalModuleOrNamespace} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. 
* @param ctx the parse tree */ void enterDeclareGlobalModuleOrNamespace(GenScalaFacadesFromTypescriptParser.DeclareGlobalModuleOrNamespaceContext ctx); /** * Exit a parse tree produced by the {@code declareGlobalModuleOrNamespace} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. * @param ctx the parse tree */ void exitDeclareGlobalModuleOrNamespace(GenScalaFacadesFromTypescriptParser.DeclareGlobalModuleOrNamespaceContext ctx); /** * Enter a parse tree produced by the {@code declareInterface} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. * @param ctx the parse tree */ void enterDeclareInterface(GenScalaFacadesFromTypescriptParser.DeclareInterfaceContext ctx); /** * Exit a parse tree produced by the {@code declareInterface} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. * @param ctx the parse tree */ void exitDeclareInterface(GenScalaFacadesFromTypescriptParser.DeclareInterfaceContext ctx); /** * Enter a parse tree produced by the {@code declareExport} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. * @param ctx the parse tree */ void enterDeclareExport(GenScalaFacadesFromTypescriptParser.DeclareExportContext ctx); /** * Exit a parse tree produced by the {@code declareExport} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. * @param ctx the parse tree */ void exitDeclareExport(GenScalaFacadesFromTypescriptParser.DeclareExportContext ctx); /** * Enter a parse tree produced by the {@code declareImport} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. 
* @param ctx the parse tree */ void enterDeclareImport(GenScalaFacadesFromTypescriptParser.DeclareImportContext ctx); /** * Exit a parse tree produced by the {@code declareImport} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#declarationScriptElement}. * @param ctx the parse tree */ void exitDeclareImport(GenScalaFacadesFromTypescriptParser.DeclareImportContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#exportDef}. * @param ctx the parse tree */ void enterExportDef(GenScalaFacadesFromTypescriptParser.ExportDefContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#exportDef}. * @param ctx the parse tree */ void exitExportDef(GenScalaFacadesFromTypescriptParser.ExportDefContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#importDef}. * @param ctx the parse tree */ void enterImportDef(GenScalaFacadesFromTypescriptParser.ImportDefContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#importDef}. * @param ctx the parse tree */ void exitImportDef(GenScalaFacadesFromTypescriptParser.ImportDefContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#importName}. * @param ctx the parse tree */ void enterImportName(GenScalaFacadesFromTypescriptParser.ImportNameContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#importName}. * @param ctx the parse tree */ void exitImportName(GenScalaFacadesFromTypescriptParser.ImportNameContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientModuleOrNamespace}. * @param ctx the parse tree */ void enterAmbientModuleOrNamespace(GenScalaFacadesFromTypescriptParser.AmbientModuleOrNamespaceContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientModuleOrNamespace}. 
* @param ctx the parse tree */ void exitAmbientModuleOrNamespace(GenScalaFacadesFromTypescriptParser.AmbientModuleOrNamespaceContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientModuleName}. * @param ctx the parse tree */ void enterAmbientModuleName(GenScalaFacadesFromTypescriptParser.AmbientModuleNameContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientModuleName}. * @param ctx the parse tree */ void exitAmbientModuleName(GenScalaFacadesFromTypescriptParser.AmbientModuleNameContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientItem}. * @param ctx the parse tree */ void enterAmbientItem(GenScalaFacadesFromTypescriptParser.AmbientItemContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientItem}. * @param ctx the parse tree */ void exitAmbientItem(GenScalaFacadesFromTypescriptParser.AmbientItemContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientBracesItem}. * @param ctx the parse tree */ void enterAmbientBracesItem(GenScalaFacadesFromTypescriptParser.AmbientBracesItemContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientBracesItem}. * @param ctx the parse tree */ void exitAmbientBracesItem(GenScalaFacadesFromTypescriptParser.AmbientBracesItemContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientStatement}. * @param ctx the parse tree */ void enterAmbientStatement(GenScalaFacadesFromTypescriptParser.AmbientStatementContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientStatement}. * @param ctx the parse tree */ void exitAmbientStatement(GenScalaFacadesFromTypescriptParser.AmbientStatementContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#variableDeclaration}. 
* @param ctx the parse tree */ void enterVariableDeclaration(GenScalaFacadesFromTypescriptParser.VariableDeclarationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#variableDeclaration}. * @param ctx the parse tree */ void exitVariableDeclaration(GenScalaFacadesFromTypescriptParser.VariableDeclarationContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#variableList}. * @param ctx the parse tree */ void enterVariableList(GenScalaFacadesFromTypescriptParser.VariableListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#variableList}. * @param ctx the parse tree */ void exitVariableList(GenScalaFacadesFromTypescriptParser.VariableListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#variableName}. * @param ctx the parse tree */ void enterVariableName(GenScalaFacadesFromTypescriptParser.VariableNameContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#variableName}. * @param ctx the parse tree */ void exitVariableName(GenScalaFacadesFromTypescriptParser.VariableNameContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#functionDeclaration}. * @param ctx the parse tree */ void enterFunctionDeclaration(GenScalaFacadesFromTypescriptParser.FunctionDeclarationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#functionDeclaration}. * @param ctx the parse tree */ void exitFunctionDeclaration(GenScalaFacadesFromTypescriptParser.FunctionDeclarationContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#classDeclaration}. * @param ctx the parse tree */ void enterClassDeclaration(GenScalaFacadesFromTypescriptParser.ClassDeclarationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#classDeclaration}. 
* @param ctx the parse tree */ void exitClassDeclaration(GenScalaFacadesFromTypescriptParser.ClassDeclarationContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#className}. * @param ctx the parse tree */ void enterClassName(GenScalaFacadesFromTypescriptParser.ClassNameContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#className}. * @param ctx the parse tree */ void exitClassName(GenScalaFacadesFromTypescriptParser.ClassNameContext ctx); /** * Enter a parse tree produced by the {@code ambientClassBodyElementConstructor} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#ambientClassBodyElement}. * @param ctx the parse tree */ void enterAmbientClassBodyElementConstructor(GenScalaFacadesFromTypescriptParser.AmbientClassBodyElementConstructorContext ctx); /** * Exit a parse tree produced by the {@code ambientClassBodyElementConstructor} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#ambientClassBodyElement}. * @param ctx the parse tree */ void exitAmbientClassBodyElementConstructor(GenScalaFacadesFromTypescriptParser.AmbientClassBodyElementConstructorContext ctx); /** * Enter a parse tree produced by the {@code ambientClassBodyElementProperty} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#ambientClassBodyElement}. * @param ctx the parse tree */ void enterAmbientClassBodyElementProperty(GenScalaFacadesFromTypescriptParser.AmbientClassBodyElementPropertyContext ctx); /** * Exit a parse tree produced by the {@code ambientClassBodyElementProperty} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#ambientClassBodyElement}. 
* @param ctx the parse tree */ void exitAmbientClassBodyElementProperty(GenScalaFacadesFromTypescriptParser.AmbientClassBodyElementPropertyContext ctx); /** * Enter a parse tree produced by the {@code ambientClassBodyElementIndex} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#ambientClassBodyElement}. * @param ctx the parse tree */ void enterAmbientClassBodyElementIndex(GenScalaFacadesFromTypescriptParser.AmbientClassBodyElementIndexContext ctx); /** * Exit a parse tree produced by the {@code ambientClassBodyElementIndex} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#ambientClassBodyElement}. * @param ctx the parse tree */ void exitAmbientClassBodyElementIndex(GenScalaFacadesFromTypescriptParser.AmbientClassBodyElementIndexContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientConstructorDeclaration}. * @param ctx the parse tree */ void enterAmbientConstructorDeclaration(GenScalaFacadesFromTypescriptParser.AmbientConstructorDeclarationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientConstructorDeclaration}. * @param ctx the parse tree */ void exitAmbientConstructorDeclaration(GenScalaFacadesFromTypescriptParser.AmbientConstructorDeclarationContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientPropertyMemberDeclaration}. * @param ctx the parse tree */ void enterAmbientPropertyMemberDeclaration(GenScalaFacadesFromTypescriptParser.AmbientPropertyMemberDeclarationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#ambientPropertyMemberDeclaration}. * @param ctx the parse tree */ void exitAmbientPropertyMemberDeclaration(GenScalaFacadesFromTypescriptParser.AmbientPropertyMemberDeclarationContext ctx); /** * Enter a parse tree produced by the {@code optStaticDef} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#optStatic}. 
* @param ctx the parse tree */ void enterOptStaticDef(GenScalaFacadesFromTypescriptParser.OptStaticDefContext ctx); /** * Exit a parse tree produced by the {@code optStaticDef} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#optStatic}. * @param ctx the parse tree */ void exitOptStaticDef(GenScalaFacadesFromTypescriptParser.OptStaticDefContext ctx); /** * Enter a parse tree produced by the {@code optStaticNotDef} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#optStatic}. * @param ctx the parse tree */ void enterOptStaticNotDef(GenScalaFacadesFromTypescriptParser.OptStaticNotDefContext ctx); /** * Exit a parse tree produced by the {@code optStaticNotDef} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#optStatic}. * @param ctx the parse tree */ void exitOptStaticNotDef(GenScalaFacadesFromTypescriptParser.OptStaticNotDefContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#exportIdentifier}. * @param ctx the parse tree */ void enterExportIdentifier(GenScalaFacadesFromTypescriptParser.ExportIdentifierContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#exportIdentifier}. * @param ctx the parse tree */ void exitExportIdentifier(GenScalaFacadesFromTypescriptParser.ExportIdentifierContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#numericLiteral}. * @param ctx the parse tree */ void enterNumericLiteral(GenScalaFacadesFromTypescriptParser.NumericLiteralContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#numericLiteral}. * @param ctx the parse tree */ void exitNumericLiteral(GenScalaFacadesFromTypescriptParser.NumericLiteralContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeDeclaration}. 
* @param ctx the parse tree */ void enterTypeDeclaration(GenScalaFacadesFromTypescriptParser.TypeDeclarationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeDeclaration}. * @param ctx the parse tree */ void exitTypeDeclaration(GenScalaFacadesFromTypescriptParser.TypeDeclarationContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeDef}. * @param ctx the parse tree */ void enterTypeDef(GenScalaFacadesFromTypescriptParser.TypeDefContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeDef}. * @param ctx the parse tree */ void exitTypeDef(GenScalaFacadesFromTypescriptParser.TypeDefContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeParameters}. * @param ctx the parse tree */ void enterTypeParameters(GenScalaFacadesFromTypescriptParser.TypeParametersContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeParameters}. * @param ctx the parse tree */ void exitTypeParameters(GenScalaFacadesFromTypescriptParser.TypeParametersContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeParameterList}. * @param ctx the parse tree */ void enterTypeParameterList(GenScalaFacadesFromTypescriptParser.TypeParameterListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeParameterList}. * @param ctx the parse tree */ void exitTypeParameterList(GenScalaFacadesFromTypescriptParser.TypeParameterListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeParameter}. * @param ctx the parse tree */ void enterTypeParameter(GenScalaFacadesFromTypescriptParser.TypeParameterContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeParameter}. 
* @param ctx the parse tree */ void exitTypeParameter(GenScalaFacadesFromTypescriptParser.TypeParameterContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#constraint}. * @param ctx the parse tree */ void enterConstraint(GenScalaFacadesFromTypescriptParser.ConstraintContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#constraint}. * @param ctx the parse tree */ void exitConstraint(GenScalaFacadesFromTypescriptParser.ConstraintContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeArguments}. * @param ctx the parse tree */ void enterTypeArguments(GenScalaFacadesFromTypescriptParser.TypeArgumentsContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeArguments}. * @param ctx the parse tree */ void exitTypeArguments(GenScalaFacadesFromTypescriptParser.TypeArgumentsContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeArgumentList}. * @param ctx the parse tree */ void enterTypeArgumentList(GenScalaFacadesFromTypescriptParser.TypeArgumentListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeArgumentList}. * @param ctx the parse tree */ void exitTypeArgumentList(GenScalaFacadesFromTypescriptParser.TypeArgumentListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#type}. * @param ctx the parse tree */ void enterType(GenScalaFacadesFromTypescriptParser.TypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#type}. * @param ctx the parse tree */ void exitType(GenScalaFacadesFromTypescriptParser.TypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#unnamedInterface}. 
* @param ctx the parse tree */ void enterUnnamedInterface(GenScalaFacadesFromTypescriptParser.UnnamedInterfaceContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#unnamedInterface}. * @param ctx the parse tree */ void exitUnnamedInterface(GenScalaFacadesFromTypescriptParser.UnnamedInterfaceContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#unionOrIntersectionOrPrimaryType}. * @param ctx the parse tree */ void enterUnionOrIntersectionOrPrimaryType(GenScalaFacadesFromTypescriptParser.UnionOrIntersectionOrPrimaryTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#unionOrIntersectionOrPrimaryType}. * @param ctx the parse tree */ void exitUnionOrIntersectionOrPrimaryType(GenScalaFacadesFromTypescriptParser.UnionOrIntersectionOrPrimaryTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#primaryOrArray}. * @param ctx the parse tree */ void enterPrimaryOrArray(GenScalaFacadesFromTypescriptParser.PrimaryOrArrayContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#primaryOrArray}. * @param ctx the parse tree */ void exitPrimaryOrArray(GenScalaFacadesFromTypescriptParser.PrimaryOrArrayContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#nestedType}. * @param ctx the parse tree */ void enterNestedType(GenScalaFacadesFromTypescriptParser.NestedTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#nestedType}. * @param ctx the parse tree */ void exitNestedType(GenScalaFacadesFromTypescriptParser.NestedTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#arrayDim}. 
* @param ctx the parse tree */ void enterArrayDim(GenScalaFacadesFromTypescriptParser.ArrayDimContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#arrayDim}. * @param ctx the parse tree */ void exitArrayDim(GenScalaFacadesFromTypescriptParser.ArrayDimContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#primaryType}. * @param ctx the parse tree */ void enterPrimaryType(GenScalaFacadesFromTypescriptParser.PrimaryTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#primaryType}. * @param ctx the parse tree */ void exitPrimaryType(GenScalaFacadesFromTypescriptParser.PrimaryTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeGuard}. * @param ctx the parse tree */ void enterTypeGuard(GenScalaFacadesFromTypescriptParser.TypeGuardContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeGuard}. * @param ctx the parse tree */ void exitTypeGuard(GenScalaFacadesFromTypescriptParser.TypeGuardContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#parenthesizedType}. * @param ctx the parse tree */ void enterParenthesizedType(GenScalaFacadesFromTypescriptParser.ParenthesizedTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#parenthesizedType}. * @param ctx the parse tree */ void exitParenthesizedType(GenScalaFacadesFromTypescriptParser.ParenthesizedTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeReference}. * @param ctx the parse tree */ void enterTypeReference(GenScalaFacadesFromTypescriptParser.TypeReferenceContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeReference}. 
* @param ctx the parse tree */ void exitTypeReference(GenScalaFacadesFromTypescriptParser.TypeReferenceContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#objectType}. * @param ctx the parse tree */ void enterObjectType(GenScalaFacadesFromTypescriptParser.ObjectTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#objectType}. * @param ctx the parse tree */ void exitObjectType(GenScalaFacadesFromTypescriptParser.ObjectTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeBody}. * @param ctx the parse tree */ void enterTypeBody(GenScalaFacadesFromTypescriptParser.TypeBodyContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeBody}. * @param ctx the parse tree */ void exitTypeBody(GenScalaFacadesFromTypescriptParser.TypeBodyContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeMemberList}. * @param ctx the parse tree */ void enterTypeMemberList(GenScalaFacadesFromTypescriptParser.TypeMemberListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeMemberList}. * @param ctx the parse tree */ void exitTypeMemberList(GenScalaFacadesFromTypescriptParser.TypeMemberListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeMember}. * @param ctx the parse tree */ void enterTypeMember(GenScalaFacadesFromTypescriptParser.TypeMemberContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeMember}. * @param ctx the parse tree */ void exitTypeMember(GenScalaFacadesFromTypescriptParser.TypeMemberContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#tupleType}. 
* @param ctx the parse tree */ void enterTupleType(GenScalaFacadesFromTypescriptParser.TupleTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#tupleType}. * @param ctx the parse tree */ void exitTupleType(GenScalaFacadesFromTypescriptParser.TupleTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#tupleTypeElements}. * @param ctx the parse tree */ void enterTupleTypeElements(GenScalaFacadesFromTypescriptParser.TupleTypeElementsContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#tupleTypeElements}. * @param ctx the parse tree */ void exitTupleTypeElements(GenScalaFacadesFromTypescriptParser.TupleTypeElementsContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#functionType}. * @param ctx the parse tree */ void enterFunctionType(GenScalaFacadesFromTypescriptParser.FunctionTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#functionType}. * @param ctx the parse tree */ void exitFunctionType(GenScalaFacadesFromTypescriptParser.FunctionTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#constructorType}. * @param ctx the parse tree */ void enterConstructorType(GenScalaFacadesFromTypescriptParser.ConstructorTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#constructorType}. * @param ctx the parse tree */ void exitConstructorType(GenScalaFacadesFromTypescriptParser.ConstructorTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeQuery}. * @param ctx the parse tree */ void enterTypeQuery(GenScalaFacadesFromTypescriptParser.TypeQueryContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeQuery}. 
* @param ctx the parse tree */ void exitTypeQuery(GenScalaFacadesFromTypescriptParser.TypeQueryContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#thisType}. * @param ctx the parse tree */ void enterThisType(GenScalaFacadesFromTypescriptParser.ThisTypeContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#thisType}. * @param ctx the parse tree */ void exitThisType(GenScalaFacadesFromTypescriptParser.ThisTypeContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#propertySignature}. * @param ctx the parse tree */ void enterPropertySignature(GenScalaFacadesFromTypescriptParser.PropertySignatureContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#propertySignature}. * @param ctx the parse tree */ void exitPropertySignature(GenScalaFacadesFromTypescriptParser.PropertySignatureContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeAnnotation}. * @param ctx the parse tree */ void enterTypeAnnotation(GenScalaFacadesFromTypescriptParser.TypeAnnotationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#typeAnnotation}. * @param ctx the parse tree */ void exitTypeAnnotation(GenScalaFacadesFromTypescriptParser.TypeAnnotationContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#callSignature}. * @param ctx the parse tree */ void enterCallSignature(GenScalaFacadesFromTypescriptParser.CallSignatureContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#callSignature}. * @param ctx the parse tree */ void exitCallSignature(GenScalaFacadesFromTypescriptParser.CallSignatureContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#parameterList}. 
* @param ctx the parse tree */ void enterParameterList(GenScalaFacadesFromTypescriptParser.ParameterListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#parameterList}. * @param ctx the parse tree */ void exitParameterList(GenScalaFacadesFromTypescriptParser.ParameterListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#requiredParameterList}. * @param ctx the parse tree */ void enterRequiredParameterList(GenScalaFacadesFromTypescriptParser.RequiredParameterListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#requiredParameterList}. * @param ctx the parse tree */ void exitRequiredParameterList(GenScalaFacadesFromTypescriptParser.RequiredParameterListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#requiredParameter}. * @param ctx the parse tree */ void enterRequiredParameter(GenScalaFacadesFromTypescriptParser.RequiredParameterContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#requiredParameter}. * @param ctx the parse tree */ void exitRequiredParameter(GenScalaFacadesFromTypescriptParser.RequiredParameterContext ctx); /** * Enter a parse tree produced by the {@code publicModifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#accessibilityModifier}. * @param ctx the parse tree */ void enterPublicModifier(GenScalaFacadesFromTypescriptParser.PublicModifierContext ctx); /** * Exit a parse tree produced by the {@code publicModifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#accessibilityModifier}. * @param ctx the parse tree */ void exitPublicModifier(GenScalaFacadesFromTypescriptParser.PublicModifierContext ctx); /** * Enter a parse tree produced by the {@code privateModifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#accessibilityModifier}. 
* @param ctx the parse tree */ void enterPrivateModifier(GenScalaFacadesFromTypescriptParser.PrivateModifierContext ctx); /** * Exit a parse tree produced by the {@code privateModifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#accessibilityModifier}. * @param ctx the parse tree */ void exitPrivateModifier(GenScalaFacadesFromTypescriptParser.PrivateModifierContext ctx); /** * Enter a parse tree produced by the {@code protectedModifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#accessibilityModifier}. * @param ctx the parse tree */ void enterProtectedModifier(GenScalaFacadesFromTypescriptParser.ProtectedModifierContext ctx); /** * Exit a parse tree produced by the {@code protectedModifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#accessibilityModifier}. * @param ctx the parse tree */ void exitProtectedModifier(GenScalaFacadesFromTypescriptParser.ProtectedModifierContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#optionalParameterList}. * @param ctx the parse tree */ void enterOptionalParameterList(GenScalaFacadesFromTypescriptParser.OptionalParameterListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#optionalParameterList}. * @param ctx the parse tree */ void exitOptionalParameterList(GenScalaFacadesFromTypescriptParser.OptionalParameterListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#optionalParameter}. * @param ctx the parse tree */ void enterOptionalParameter(GenScalaFacadesFromTypescriptParser.OptionalParameterContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#optionalParameter}. * @param ctx the parse tree */ void exitOptionalParameter(GenScalaFacadesFromTypescriptParser.OptionalParameterContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#restParameter}. 
* @param ctx the parse tree */ void enterRestParameter(GenScalaFacadesFromTypescriptParser.RestParameterContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#restParameter}. * @param ctx the parse tree */ void exitRestParameter(GenScalaFacadesFromTypescriptParser.RestParameterContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#constructSignature}. * @param ctx the parse tree */ void enterConstructSignature(GenScalaFacadesFromTypescriptParser.ConstructSignatureContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#constructSignature}. * @param ctx the parse tree */ void exitConstructSignature(GenScalaFacadesFromTypescriptParser.ConstructSignatureContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#indexSignature}. * @param ctx the parse tree */ void enterIndexSignature(GenScalaFacadesFromTypescriptParser.IndexSignatureContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#indexSignature}. * @param ctx the parse tree */ void exitIndexSignature(GenScalaFacadesFromTypescriptParser.IndexSignatureContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#methodSignature}. * @param ctx the parse tree */ void enterMethodSignature(GenScalaFacadesFromTypescriptParser.MethodSignatureContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#methodSignature}. * @param ctx the parse tree */ void exitMethodSignature(GenScalaFacadesFromTypescriptParser.MethodSignatureContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#constExpression}. * @param ctx the parse tree */ void enterConstExpression(GenScalaFacadesFromTypescriptParser.ConstExpressionContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#constExpression}. 
* @param ctx the parse tree */ void exitConstExpression(GenScalaFacadesFromTypescriptParser.ConstExpressionContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#interfaceDeclaration}. * @param ctx the parse tree */ void enterInterfaceDeclaration(GenScalaFacadesFromTypescriptParser.InterfaceDeclarationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#interfaceDeclaration}. * @param ctx the parse tree */ void exitInterfaceDeclaration(GenScalaFacadesFromTypescriptParser.InterfaceDeclarationContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#interfaceName}. * @param ctx the parse tree */ void enterInterfaceName(GenScalaFacadesFromTypescriptParser.InterfaceNameContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#interfaceName}. * @param ctx the parse tree */ void exitInterfaceName(GenScalaFacadesFromTypescriptParser.InterfaceNameContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#extendsClause}. * @param ctx the parse tree */ void enterExtendsClause(GenScalaFacadesFromTypescriptParser.ExtendsClauseContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#extendsClause}. * @param ctx the parse tree */ void exitExtendsClause(GenScalaFacadesFromTypescriptParser.ExtendsClauseContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#classOrInterfaceTypeList}. * @param ctx the parse tree */ void enterClassOrInterfaceTypeList(GenScalaFacadesFromTypescriptParser.ClassOrInterfaceTypeListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#classOrInterfaceTypeList}. 
* @param ctx the parse tree */ void exitClassOrInterfaceTypeList(GenScalaFacadesFromTypescriptParser.ClassOrInterfaceTypeListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#classHeritage}. * @param ctx the parse tree */ void enterClassHeritage(GenScalaFacadesFromTypescriptParser.ClassHeritageContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#classHeritage}. * @param ctx the parse tree */ void exitClassHeritage(GenScalaFacadesFromTypescriptParser.ClassHeritageContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#implementsClause}. * @param ctx the parse tree */ void enterImplementsClause(GenScalaFacadesFromTypescriptParser.ImplementsClauseContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#implementsClause}. * @param ctx the parse tree */ void exitImplementsClause(GenScalaFacadesFromTypescriptParser.ImplementsClauseContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumDeclaration}. * @param ctx the parse tree */ void enterEnumDeclaration(GenScalaFacadesFromTypescriptParser.EnumDeclarationContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumDeclaration}. * @param ctx the parse tree */ void exitEnumDeclaration(GenScalaFacadesFromTypescriptParser.EnumDeclarationContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumBody}. * @param ctx the parse tree */ void enterEnumBody(GenScalaFacadesFromTypescriptParser.EnumBodyContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumBody}. * @param ctx the parse tree */ void exitEnumBody(GenScalaFacadesFromTypescriptParser.EnumBodyContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumMemberList}. 
* @param ctx the parse tree */ void enterEnumMemberList(GenScalaFacadesFromTypescriptParser.EnumMemberListContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumMemberList}. * @param ctx the parse tree */ void exitEnumMemberList(GenScalaFacadesFromTypescriptParser.EnumMemberListContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumMember}. * @param ctx the parse tree */ void enterEnumMember(GenScalaFacadesFromTypescriptParser.EnumMemberContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumMember}. * @param ctx the parse tree */ void exitEnumMember(GenScalaFacadesFromTypescriptParser.EnumMemberContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumValue}. * @param ctx the parse tree */ void enterEnumValue(GenScalaFacadesFromTypescriptParser.EnumValueContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#enumValue}. * @param ctx the parse tree */ void exitEnumValue(GenScalaFacadesFromTypescriptParser.EnumValueContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#identifierPath}. * @param ctx the parse tree */ void enterIdentifierPath(GenScalaFacadesFromTypescriptParser.IdentifierPathContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#identifierPath}. * @param ctx the parse tree */ void exitIdentifierPath(GenScalaFacadesFromTypescriptParser.IdentifierPathContext ctx); /** * Enter a parse tree produced by the {@code optionalModifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#optionalParam}. * @param ctx the parse tree */ void enterOptionalModifier(GenScalaFacadesFromTypescriptParser.OptionalModifierContext ctx); /** * Exit a parse tree produced by the {@code optionalModifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#optionalParam}. 
* @param ctx the parse tree */ void exitOptionalModifier(GenScalaFacadesFromTypescriptParser.OptionalModifierContext ctx); /** * Enter a parse tree produced by the {@code requiredParam} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#optionalParam}. * @param ctx the parse tree */ void enterRequiredParam(GenScalaFacadesFromTypescriptParser.RequiredParamContext ctx); /** * Exit a parse tree produced by the {@code requiredParam} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#optionalParam}. * @param ctx the parse tree */ void exitRequiredParam(GenScalaFacadesFromTypescriptParser.RequiredParamContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#optionalBindingIdentifier}. * @param ctx the parse tree */ void enterOptionalBindingIdentifier(GenScalaFacadesFromTypescriptParser.OptionalBindingIdentifierContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#optionalBindingIdentifier}. * @param ctx the parse tree */ void exitOptionalBindingIdentifier(GenScalaFacadesFromTypescriptParser.OptionalBindingIdentifierContext ctx); /** * Enter a parse tree produced by the {@code basicIdentifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#bindingIdentifier}. * @param ctx the parse tree */ void enterBasicIdentifier(GenScalaFacadesFromTypescriptParser.BasicIdentifierContext ctx); /** * Exit a parse tree produced by the {@code basicIdentifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#bindingIdentifier}. * @param ctx the parse tree */ void exitBasicIdentifier(GenScalaFacadesFromTypescriptParser.BasicIdentifierContext ctx); /** * Enter a parse tree produced by the {@code stringLiteralIdentifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#bindingIdentifier}. 
* @param ctx the parse tree */ void enterStringLiteralIdentifier(GenScalaFacadesFromTypescriptParser.StringLiteralIdentifierContext ctx); /** * Exit a parse tree produced by the {@code stringLiteralIdentifier} * labeled alternative in {@link GenScalaFacadesFromTypescriptParser#bindingIdentifier}. * @param ctx the parse tree */ void exitStringLiteralIdentifier(GenScalaFacadesFromTypescriptParser.StringLiteralIdentifierContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#identifier}. * @param ctx the parse tree */ void enterIdentifier(GenScalaFacadesFromTypescriptParser.IdentifierContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#identifier}. * @param ctx the parse tree */ void exitIdentifier(GenScalaFacadesFromTypescriptParser.IdentifierContext ctx); /** * Enter a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#lineEnd}. * @param ctx the parse tree */ void enterLineEnd(GenScalaFacadesFromTypescriptParser.LineEndContext ctx); /** * Exit a parse tree produced by {@link GenScalaFacadesFromTypescriptParser#lineEnd}. * @param ctx the parse tree */ void exitLineEnd(GenScalaFacadesFromTypescriptParser.LineEndContext ctx); }
/*L
 * Copyright Ekagra Software Technologies Ltd.
 * Copyright SAIC
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/cacore-sdk-pre411/LICENSE.txt for details.
 */

package test.gov.nih.nci.cacoresdk.domain.manytomany.bidirectional;

import java.util.Collection;
import java.util.Iterator;

import gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee;
import gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project;
import gov.nih.nci.system.applicationservice.ApplicationException;
import gov.nih.nci.system.query.cql.CQLAssociation;
import gov.nih.nci.system.query.cql.CQLAttribute;
import gov.nih.nci.system.query.cql.CQLObject;
import gov.nih.nci.system.query.cql.CQLPredicate;
import gov.nih.nci.system.query.cql.CQLQuery;

import test.gov.nih.nci.cacoresdk.SDKTestBase;

/**
 * Exercises the many-to-many bidirectional association between
 * {@link Employee} and {@link Project} through both nested-search
 * criteria and CQL queries.
 */
public class M2MBidirectionalTest extends SDKTestBase
{
	public static String getTestCaseName()
	{
		return "Many to Many Bidirectional Test Case";
	}

	/**
	 * Uses Nested Search Criteria for search
	 * Verifies that the results are returned
	 * Verifies size of the result set
	 * Verifies that none of the attribute is null
	 *
	 * @throws ApplicationException
	 */
	public void testEntireObjectNestedSearch1() throws ApplicationException
	{
		Employee searchObject = new Employee();
		Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee",searchObject );

		assertNotNull(results);
		assertEquals(10,results.size());

		for(Iterator i = results.iterator();i.hasNext();)
		{
			Employee result = (Employee)i.next();
			assertNotNull(result);
			assertNotNull(result.getId());
			assertNotNull(result.getName());
		}
	}

	/**
	 * Uses Nested Search Criteria for search
	 * Verifies that the results are returned
	 * Verifies size of the result set
	 * Verifies that none of the attribute is null
	 *
	 * @throws ApplicationException
	 */
	public void testEntireObjectNestedSearch2() throws ApplicationException
	{
		Project searchObject = new Project();
		Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project",searchObject );

		assertNotNull(results);
		assertEquals(10,results.size());

		for(Iterator i = results.iterator();i.hasNext();)
		{
			Project result = (Project)i.next();
			assertNotNull(result);
			assertNotNull(result.getId());
			assertNotNull(result.getName());
		}
	}

	/**
	 * Uses Nested Search Criteria for search
	 * Verifies that the results are returned
	 * Verifies size of the result set
	 * Verifies that the associated object is null
	 *
	 * @throws ApplicationException
	 */
	public void testZeroAssociatedObjectsNestedSearch1() throws ApplicationException
	{
		Employee searchObject = new Employee();
		// Employee 7 is seeded with no project memberships.
		searchObject.setId(Integer.valueOf(7));
		Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee",searchObject );

		assertNotNull(results);
		assertEquals(1,results.size());

		Iterator i = results.iterator();
		Employee result = (Employee)i.next();
		assertNotNull(result);
		assertNotNull(result.getId());
		assertNotNull(result.getName());

		Collection projectCollection = result.getProjectCollection();
		assertEquals(0,projectCollection.size());
	}

	/**
	 * Uses Nested Search Criteria for search to get associated object
	 * Verifies that the results are returned
	 * Verifies size of the result set is 0
	 *
	 * @throws ApplicationException
	 */
	public void testZeroAssociatedObjectsNestedSearch2() throws ApplicationException
	{
		Employee searchObject = new Employee();
		searchObject.setId(Integer.valueOf(7));
		Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project",searchObject );

		assertNotNull(results);
		assertEquals(0,results.size());
	}

	/**
	 * Uses Nested Search Criteria for search
	 * Verifies that the results are returned
	 * Verifies size of the result set
	 * Verifies that none of the attribute is null
	 * Verifies that the associated object has required Id
	 *
	 * @throws ApplicationException
	 */
	public void testOneAssociatedObjectNestedSearch1() throws ApplicationException
	{
		Employee searchObject = new Employee();
		searchObject.setId(Integer.valueOf(1));
		Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee",searchObject );

		assertNotNull(results);
		assertEquals(1,results.size());

		Iterator i = results.iterator();
		Employee result = (Employee)i.next();
		assertNotNull(result);
		assertNotNull(result.getId());
		assertNotNull(result.getName());

		Collection projectCollection = result.getProjectCollection();
		Iterator j = projectCollection.iterator();
		Project project = (Project)j.next();
		assertNotNull(project);
		assertNotNull(project.getId());
		assertNotNull(project.getName());
		assertEquals(Integer.valueOf(1),project.getId());

		// Bidirectional check: the project navigates back to exactly one employee.
		Collection employeeCollection = project.getEmployeeCollection();
		assertEquals(1,employeeCollection.size());
	}

	/**
	 * Uses Nested Search Criteria for search to get associated object
	 * Verifies that the results are returned
	 * Verifies size of the result set
	 * Verifies that none of the attribute is null
	 * Verified the Id attribute's value of the returned object
	 *
	 * @throws ApplicationException
	 */
	public void testOneAssociatedObjectNestedSearch2() throws ApplicationException
	{
		Employee searchObject = new Employee();
		searchObject.setId(Integer.valueOf(1));
		Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project",searchObject );

		assertNotNull(results);
		assertEquals(1,results.size());

		Iterator i = results.iterator();
		Project project = (Project)i.next();
		assertNotNull(project);
		assertNotNull(project.getId());
		assertNotNull(project.getName());
		assertEquals(Integer.valueOf(1),project.getId());
	}

	/**
	 * Uses Nested Search Criteria for search to get associated object
	 * Verifies that the results are returned
	 * Verifies size of the result set
	 * Verifies that none of the attribute is null
	 * Verified the Id attribute's value of the returned object
	 *
	 * @throws ApplicationException
	 */
	public void testOneAssociatedObjectNestedSearch3() throws ApplicationException
	{
		Project searchObject = new Project();
		searchObject.setId(Integer.valueOf(1));
		Collection results = getApplicationService().search("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee",searchObject );

		assertNotNull(results);
		assertEquals(1,results.size());

		Iterator i = results.iterator();
		Employee employee = (Employee)i.next();
		assertNotNull(employee);
		assertNotNull(employee.getId());
		assertNotNull(employee.getName());
		assertEquals(Integer.valueOf(1),employee.getId());
	}

	/**
	 * Uses CQL Criteria for search
	 * Verifies that the results are returned
	 * Verifies size of the result set
	 * Verifies that none of the attribute is null
	 * Verifies that the associated object has required Id
	 *
	 * @throws ApplicationException
	 */
	public void testOneAssociatedObjectCQL1() throws ApplicationException
	{
		CQLQuery cqlQuery = new CQLQuery();
		CQLObject target = new CQLObject();

		// Target Project rows whose employeeCollection contains employee id 4.
		CQLAssociation association = new CQLAssociation();
		association.setName("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee");
		association.setAttribute(new CQLAttribute("id",CQLPredicate.EQUAL_TO,"4"));
		association.setTargetRoleName("employeeCollection");

		target.setName("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project");
		target.setAssociation(association);
		cqlQuery.setTarget(target);

		Collection results = getApplicationService().query(cqlQuery,"gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project");

		assertNotNull(results);
		assertEquals(2,results.size());

		for(Iterator i = results.iterator();i.hasNext();)
		{
			Project project = (Project)i.next();
			assertNotNull(project);
			assertNotNull(project.getId());
			assertNotNull(project.getName());
			assertTrue(project.getId().intValue() > 1);
		}
	}

	/**
	 * Uses CQL Criteria for search
	 * Verifies that the results are returned
	 * Verifies size of the result set
	 * Verifies that none of the attribute is null
	 * Verifies that the associated object has required Id
	 *
	 * @throws ApplicationException
	 */
	public void testOneAssociatedObjectCQL2() throws ApplicationException
	{
		CQLQuery cqlQuery = new CQLQuery();
		CQLObject target = new CQLObject();

		// Target Employee rows whose projectCollection contains project id 4.
		CQLAssociation association = new CQLAssociation();
		association.setName("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project");
		association.setAttribute(new CQLAttribute("id",CQLPredicate.EQUAL_TO,"4"));
		association.setTargetRoleName("projectCollection");

		target.setName("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee");
		target.setAssociation(association);
		cqlQuery.setTarget(target);

		Collection results = getApplicationService().query(cqlQuery,"gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee");

		assertNotNull(results);
		assertEquals(1,results.size());

		Iterator i = results.iterator();
		Employee employee = (Employee)i.next();
		assertNotNull(employee);
		assertNotNull(employee.getId());
		assertNotNull(employee.getName());
		assertEquals(Integer.valueOf(4),employee.getId());
	}

	/**
	 * Uses CQL Criteria for search
	 * Verifies that the results are returned
	 * Verifies size of the result set is 0
	 *
	 * @throws ApplicationException
	 */
	public void testZeroAssociatedObjectCQL() throws ApplicationException
	{
		CQLQuery cqlQuery = new CQLQuery();
		CQLObject target = new CQLObject();

		CQLAssociation association = new CQLAssociation();
		association.setName("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Employee");
		association.setAttribute(new CQLAttribute("id",CQLPredicate.EQUAL_TO,"7"));
		association.setTargetRoleName("employeeCollection");

		target.setName("gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project");
		target.setAssociation(association);
		cqlQuery.setTarget(target);

		Collection results = getApplicationService().query(cqlQuery,"gov.nih.nci.cacoresdk.domain.manytomany.bidirectional.Project");

		assertNotNull(results);
		assertEquals(0,results.size());
	}
}
/*
 * [The "BSD license"]
 * Copyright (c) 2012 Terence Parr
 * Copyright (c) 2012 Sam Harwell
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 * derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.antlr.v4.test;

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.misc.Utils;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.LexerGrammar;
import org.junit.Test;

import java.util.List;

import static org.junit.Assert.*;

/**
 * Lexer rules are little quirky when it comes to wildcards. Problem
 * stems from the fact that we want the longest match to win among
 * several rules and even within a rule. However, that conflicts
 * with the notion of non-greedy, which by definition tries to match
 * the fewest possible. During ATN construction, non-greedy loops
 * have their entry and exit branches reversed so that the ATN
 * simulator will see the exit branch 1st, giving it a priority. The
 * 1st path to the stop state kills any other paths for that rule
 * that begin with the wildcard. In general, this does everything we
 * want, but occasionally there are some quirks as you'll see from
 * the tests below.
 */
public class TestATNLexerInterpreter extends BaseTest {
	// Each test builds a lexer grammar from a string, tokenizes an input
	// string via the ATN simulator, and compares the resulting token-name
	// sequence against an expected comma-separated list (see
	// checkLexerMatches at the bottom of this class).

	@Test public void testLexerTwoRules() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a' ;\n" +
			"B : 'b' ;\n");
		String expecting = "A, B, A, B, EOF";
		checkLexerMatches(lg, "abab", expecting);
	}

	@Test public void testShortLongRule() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'xy'\n" +
			" | 'xyz'\n" + // this alt is preferred since there are no non-greedy configs
			" ;\n" +
			"Z : 'z'\n" +
			" ;\n");
		checkLexerMatches(lg, "xy", "A, EOF");
		checkLexerMatches(lg, "xyz", "A, EOF");
	}

	@Test public void testShortLongRule2() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'xyz'\n" + // make sure nongreedy mech cut off doesn't kill this alt
			" | 'xy'\n" +
			" ;\n");
		checkLexerMatches(lg, "xy", "A, EOF");
		checkLexerMatches(lg, "xyz", "A, EOF");
	}

	@Test public void testWildOnEndFirstAlt() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'xy' .\n" + // should pursue '.' since xyz hits stop first, before 2nd alt
			" | 'xy'\n" +
			" ;\n" +
			"Z : 'z'\n" +
			" ;\n");
		checkLexerMatches(lg, "xy", "A, EOF");
		checkLexerMatches(lg, "xyz", "A, EOF");
	}

	@Test public void testWildOnEndLastAlt() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'xy'\n" +
			" | 'xy' .\n" + // this alt is preferred since there are no non-greedy configs
			" ;\n" +
			"Z : 'z'\n" +
			" ;\n");
		checkLexerMatches(lg, "xy", "A, EOF");
		checkLexerMatches(lg, "xyz", "A, EOF");
	}

	@Test public void testWildcardNonQuirkWhenSplitBetweenTwoRules() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'xy' ;\n" +
			"B : 'xy' . 'z' ;\n");
		checkLexerMatches(lg, "xy", "A, EOF");
		checkLexerMatches(lg, "xyqz", "B, EOF");
	}

	@Test public void testLexerLoops() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"INT : '0'..'9'+ ;\n" +
			"ID : 'a'..'z'+ ;\n");
		String expecting = "ID, INT, ID, INT, EOF";
		checkLexerMatches(lg, "a34bde3", expecting);
	}

	@Test public void testLexerNotSet() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"ID : ~('a'|'b')\n ;");
		String expecting = "ID, EOF";
		checkLexerMatches(lg, "c", expecting);
	}

	@Test public void testLexerKeywordIDAmbiguity() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"KEND : 'end' ;\n" +
			"ID : 'a'..'z'+ ;\n" +
			"WS : (' '|'\\n')+ ;");
		String expecting = "ID, EOF";
		//checkLexerMatches(lg, "e", expecting);
		expecting = "KEND, EOF";
		checkLexerMatches(lg, "end", expecting);
		expecting = "ID, EOF";
		checkLexerMatches(lg, "ending", expecting);
		expecting = "ID, WS, KEND, WS, ID, EOF";
		checkLexerMatches(lg, "a end bcd", expecting);
	}

	@Test public void testLexerRuleRef() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"INT : DIGIT+ ;\n" +
			"fragment DIGIT : '0'..'9' ;\n" +
			"WS : (' '|'\\n')+ ;");
		String expecting = "INT, WS, INT, EOF";
		checkLexerMatches(lg, "32 99", expecting);
	}

	@Test public void testRecursiveLexerRuleRef() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '/*' (CMT | ~'*')+ '*/' ;\n" +
			"WS : (' '|'\\n')+ ;");
		String expecting = "CMT, WS, CMT, EOF";
		checkLexerMatches(lg, "/* ick */\n/* /*nested*/ */", expecting);
	}

	@Test public void testRecursiveLexerRuleRefWithWildcard() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '/*' (CMT | .)*? '*/' ;\n" +
			"WS : (' '|'\\n')+ ;");
		String expecting = "CMT, WS, CMT, WS, EOF";
		checkLexerMatches(lg,
						  "/* ick */\n" +
						  "/* /* */\n" +
						  "/* /*nested*/ */\n",
						  expecting);
	}

	@Test public void testLexerWildcardGreedyLoopByDefault() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' .* '\\n' ;\n");
		// greedy .* consumes both comments as one token
		String expecting = "CMT, EOF";
		checkLexerMatches(lg, "//x\n//y\n", expecting);
	}

	@Test public void testLexerWildcardLoopExplicitNonGreedy() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' .*? '\\n' ;\n");
		// non-greedy .*? stops at the first '\n', yielding two tokens
		String expecting = "CMT, CMT, EOF";
		checkLexerMatches(lg, "//x\n//y\n", expecting);
	}

	@Test public void testLexerEscapeInString() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"STR : '[' ('~' ']' | .)* ']' ;\n");
		checkLexerMatches(lg, "[a~]b]", "STR, EOF");
		checkLexerMatches(lg, "[a]", "STR, EOF");
	}

	@Test public void testLexerWildcardGreedyPlusLoopByDefault() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' .+ '\\n' ;\n");
		String expecting = "CMT, EOF";
		checkLexerMatches(lg, "//x\n//y\n", expecting);
	}

	@Test public void testLexerWildcardExplicitNonGreedyPlusLoop() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' .+? '\\n' ;\n");
		String expecting = "CMT, CMT, EOF";
		checkLexerMatches(lg, "//x\n//y\n", expecting);
	}

	// does not fail since ('*/')? can't match and have rule succeed
	@Test public void testLexerGreedyOptionalShouldWorkAsWeExpect() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '/*' ('*/')? '*/' ;\n");
		String expecting = "CMT, EOF";
		checkLexerMatches(lg, "/**/", expecting);
	}

	@Test public void testGreedyBetweenRules() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : '<a>' ;\n" +
			"B : '<' .+ '>' ;\n");
		String expecting = "B, EOF";
		checkLexerMatches(lg, "<a><x>", expecting);
	}

	@Test public void testNonGreedyBetweenRules() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : '<a>' ;\n" +
			"B : '<' .+? '>' ;\n");
		String expecting = "A, B, EOF";
		checkLexerMatches(lg, "<a><x>", expecting);
	}

	@Test public void testEOFAtEndOfLineComment() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' ~('\\n')* ;\n");
		String expecting = "CMT, EOF";
		checkLexerMatches(lg, "//x", expecting);
	}

	@Test public void testEOFAtEndOfLineComment2() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' ~('\\n'|'\\r')* ;\n");
		String expecting = "CMT, EOF";
		checkLexerMatches(lg, "//x", expecting);
	}

	/** only positive sets like (EOF|'\n') can match EOF and not in wildcard or ~foo sets
	 * EOF matches but does not advance cursor.
	 */
	@Test public void testEOFInSetAtEndOfLineComment() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"CMT : '//' .* (EOF|'\\n') ;\n");
		String expecting = "CMT, EOF";
		checkLexerMatches(lg, "//", expecting);
	}

	@Test public void testEOFSuffixInSecondRule() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a' ;\n"+ // shorter than 'a' EOF, despite EOF being 0 width
			"B : 'a' EOF ;\n");
		String expecting = "B, EOF";
		checkLexerMatches(lg, "a", expecting);
	}

	@Test public void testEOFSuffixInFirstRule() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"A : 'a' EOF ;\n"+
			"B : 'a';\n");
		String expecting = "A, EOF";
		checkLexerMatches(lg, "a", expecting);
	}

	@Test public void testEOFByItself() throws Exception {
		LexerGrammar lg = new LexerGrammar(
			"lexer grammar L;\n"+
			"DONE : EOF ;\n"+
			"A : 'a';\n");
		String expecting = "A, DONE, EOF";
		checkLexerMatches(lg, "a", expecting);
	}

	// Shared driver: builds the lexer ATN, tokenizes inputString starting
	// from the default mode, and asserts the joined token-name list equals
	// `expecting`. The DOT dump and token print to stdout are diagnostic
	// output only; no assertion depends on them.
	protected void checkLexerMatches(LexerGrammar lg, String inputString, String expecting) {
		ATN atn = createATN(lg, true);
		CharStream input = new ANTLRInputStream(inputString);
		ATNState startState = atn.modeNameToStartState.get("DEFAULT_MODE");
		DOTGenerator dot = new DOTGenerator(lg);
		System.out.println(dot.getDOT(startState, true));

		List<String> tokenTypes = getTokenTypes(lg, atn, input);
		String result = Utils.join(tokenTypes.iterator(), ", ");
		System.out.println(tokenTypes);
		assertEquals(expecting, result);
	}
}
package metadata.etl.lhotse.extractor;

import metadata.etl.lhotse.LzExecMessage;
import metadata.etl.lhotse.LzTaskExecRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import wherehows.common.schemas.LineageRecord;
import wherehows.common.utils.ProcessUtils;
import wherehows.common.utils.XmlParser;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.LineNumberReader;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Extracts HDFS dataset lineage for MapReduce jobs submitted through Lhotse.
 *
 * <p>Flow: scan the task's local log for a Hadoop job id, locate the job's
 * {@code <jobId>_conf.xml} under the MR history "done" directory on HDFS, copy it to
 * {@code /tmp}, read the FileInputFormat/FileOutputFormat directories from it, expand
 * those directories into data files, and emit one {@link LineageRecord} per file.
 *
 * Created by jiahuilliu on 5/17/17.
 */
public class MRSubmitLineageExtractor implements BaseLineageExtractor {

    private static final Logger logger = LoggerFactory.getLogger(MRSubmitLineageExtractor.class);

    /**
     * Matches a Hadoop job id such as {@code job_1494990000000_0012} anywhere in a line.
     * The previous pattern used a possessive {@code .*+} prefix, which consumes the whole
     * line without backtracking and therefore never leaves input for the capture group —
     * it could never match. A greedy {@code .*} backtracks and captures the last job id.
     */
    private static final Pattern JOB_ID_PATTERN = Pattern.compile(".*(job_\\d+_\\d+)");

    /**
     * Length of the conf-xml file name "job_<13 digits>_<4 digits>_conf.xml".
     * NOTE(review): assumes fixed-width cluster timestamp and 4-digit sequence — TODO
     * confirm this holds for job ids with longer sequence numbers.
     */
    private static final int CONF_XML_NAME_LENGTH = 31;

    /**
     * Builds lineage records for one Lhotse task execution.
     *
     * @param logLocation       original log location (logged for diagnostics only)
     * @param message           execution message carrying the {@link LzTaskExecRecord}
     * @param defaultDatabaseId database id to attach to every dataset
     * @param logPath           local path of the task log file to scan for the job id
     * @return records for every input/output HDFS file; empty on any failure (never null)
     */
    @Override
    public List<LineageRecord> getLineageRecord(String logLocation, LzExecMessage message,
        int defaultDatabaseId, String logPath) {
        LzTaskExecRecord lzTaskExecRecord = message.lzTaskExecRecord;
        List<LineageRecord> lineageRecords = new ArrayList<>();
        if (lzTaskExecRecord.flowId == null) {
            return lineageRecords;
        }
        try {
            logger.info("start to parse the logLocation: {}", logLocation);
            logger.info("start to parse the logPath: {}", logPath);

            // Scan the whole log for the job id. The previous code read at most one line
            // ("while (...) { break; }") and then used a possibly-null job mark.
            String jobmark = findJobIdInLog(logPath);
            if (jobmark == null) {
                logger.error("no MR job id (job_<n>_<n>) found in log file: {}", logPath);
                return lineageRecords;
            }

            // Locate <jobId>_conf.xml under the MR history done dir on HDFS.
            String targetfile = "/mr-history/done/";
            String realcom = ".*" + jobmark + "_conf.xml";
            String targetRaw = execHdfsCommand(targetfile, realcom);
            if (targetRaw == null) {
                logger.error("conf xml matching {} not found under {}", realcom, targetfile);
                return lineageRecords;
            }

            // Copy the conf xml to /tmp, then parse it locally.
            String[] cmdsget = {"hdfs", "dfs", "-get", targetRaw, "/tmp"};
            ProcessUtils.exec(cmdsget);
            String localConf = "/tmp/" + targetRaw.substring(targetRaw.length() - CONF_XML_NAME_LENGTH);
            XmlParser xmlParser = new XmlParser(localConf);
            logger.info("get info xml : {}", localConf);

            String sourcePathdf =
                xmlParser.getExtPropertyMr("configuration/property/mapreduce.input.fileinputformat.inputdir");
            String destPathdf =
                xmlParser.getExtPropertyMr("configuration/property/mapreduce.output.fileoutputformat.outputdir");
            if (sourcePathdf == null || destPathdf == null) {
                logger.error("input/output dir missing in conf xml: {}", localConf);
                return lineageRecords;
            }
            logger.info("the sourcePathdf is : {}", sourcePathdf);
            // Strip the leading scheme+authority prefix. NOTE(review): assumes a fixed
            // 18-character prefix (e.g. "hdfs://<authority>") — TODO confirm against the
            // cluster's fs.defaultFS.
            sourcePathdf = sourcePathdf.substring(18);
            destPathdf = destPathdf.substring(18);
            logger.info("the destPathdf is : {}", destPathdf);

            // Expand the input dir into concrete files; best-effort (an empty source list
            // still lets target records be emitted, matching the original behavior).
            List<String> sourcePathfiles = new ArrayList<>();
            try {
                if (isHdfsFile(sourcePathdf)) {
                    sourcePathfiles.add(sourcePathdf);
                } else {
                    sourcePathfiles.addAll(getSubFiles(sourcePathdf));
                }
            } catch (Exception e) {
                logger.error("failed to expand source path {}", sourcePathdf, e);
            }

            // Expand the output dir; failure here aborts, as in the original code.
            List<String> destPathfiles = new ArrayList<>();
            try {
                if (isHdfsFile(destPathdf)) {
                    destPathfiles.add(destPathdf);
                } else {
                    destPathfiles.addAll(getSubFiles(destPathdf));
                }
                logger.info("the destPathfiles is : {}", destPathfiles);
            } catch (Exception e) {
                logger.error("failed to expand dest path {}", destPathdf, e);
                return lineageRecords;
            }

            long flowExecId = lzTaskExecRecord.flowId;
            long taskId = Long.parseLong(lzTaskExecRecord.taskId);
            String taskName = lzTaskExecRecord.taskName;
            String flowPath =
                String.format("%s:%s", lzTaskExecRecord.projectName, lzTaskExecRecord.workflowName);
            String operation = "MR command";

            logger.info("start to create the source record!");
            for (String sourcePath : sourcePathfiles) {
                lineageRecords.add(buildRecord(lzTaskExecRecord, defaultDatabaseId, flowExecId,
                    taskId, taskName, flowPath, operation, sourcePath, "source"));
            }
            logger.info("start to create the target record!");
            for (String destFilepath : destPathfiles) {
                lineageRecords.add(buildRecord(lzTaskExecRecord, defaultDatabaseId, flowExecId,
                    taskId, taskName, flowPath, operation, destFilepath, "target"));
            }
        } catch (Exception e) {
            // Best-effort extractor: log with the stack trace and return what we have.
            logger.error("error happened in collecting lineage record.", e);
        }
        return lineageRecords;
    }

    /** Builds one source/target lineage record for a single HDFS path. */
    private LineageRecord buildRecord(LzTaskExecRecord rec, int defaultDatabaseId, long flowExecId,
        long taskId, String taskName, String flowPath, String operation, String path,
        String sourceTargetType) {
        LineageRecord lineageRecord = new LineageRecord(rec.appId, flowExecId, taskName, taskId);
        long num = 0L; // record/byte counters are not available from the conf xml
        lineageRecord.setDatasetInfo(defaultDatabaseId, path, "hdfs");
        lineageRecord.setOperationInfo(sourceTargetType, operation, num, num, num, num,
            rec.taskStartTime, rec.taskEndTime, flowPath);
        lineageRecord.setAbstractObjectName(path);
        lineageRecord.setFullObjectName(path);
        logger.info("the {} record is: {}", sourceTargetType, lineageRecord.toDatabaseValue());
        return lineageRecord;
    }

    /**
     * Scans the log file line by line and returns the first Hadoop job id found,
     * or null when the file contains none. Readers are closed via try-with-resources
     * (the previous code leaked both readers).
     */
    private String findJobIdInLog(String logPath) throws IOException {
        File logfile = new File(logPath);
        try (LineNumberReader reader =
                 new LineNumberReader(new BufferedReader(new FileReader(logfile)))) {
            // NOTE(review): setLineNumber only renames subsequent line numbers; it does
            // NOT skip lines. Kept for parity with the original — confirm intent.
            reader.setLineNumber(60);
            String line;
            while ((line = reader.readLine()) != null) {
                String mark = getJobMark(line);
                if (mark != null) {
                    return mark;
                }
            }
        }
        return null;
    }

    /** Returns the job id captured from {@code str}, or null when absent or str is null. */
    private String getJobMark(String str) {
        if (str == null) {
            return null;
        }
        Matcher RLine = JOB_ID_PATTERN.matcher(str);
        if (RLine.find()) {
            return RLine.group(1);
        }
        return null;
    }

    /** True when the regex {@code com} matches anywhere in {@code str}. */
    private boolean findJobMark(String str, String com) {
        return Pattern.compile(com).matcher(str).find();
    }

    /**
     * Recursively lists {@code path} on HDFS and returns the first entry whose file path
     * matches the regex {@code com}, or null when listing fails or nothing matches
     * (the previous code returned the last listed path even on no match, and callers
     * then NPE'd on a null return).
     */
    private String execHdfsCommand(String path, String com) {
        String[] cmds = {"hdfs", "dfs", "-lsr", path};
        ArrayList<String> results = ProcessUtils.exec(cmds);
        if (results == null || results.size() == 0) {
            logger.error("process utils: no result get");
            return null;
        }
        for (String raw : results) {
            String[] tmps = raw.split(" ");
            String candidate = tmps[tmps.length - 1]; // path is the last ls column
            if (findJobMark(candidate, com)) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * True when {@code path} is a plain file: "hdfs dfs -ls <file>" prints the file itself
     * as the last 8-column row, whereas listing a directory prints its children.
     */
    private static boolean isHdfsFile(String path) throws Exception {
        String[] cmds = {"hdfs", "dfs", "-ls", path};
        ArrayList<String> results = ProcessUtils.exec(cmds);
        // for debug
        logger.info("the process utils result: {}", results);
        if (results == null || results.size() == 0) {
            throw new Exception("isHdfsFile: process utils no result get");
        }
        String[] arg = results.get(results.size() - 1).split("\\s+");
        return arg.length == 8 && arg[7].equals(path);
    }

    /**
     * Lists the direct children of {@code path}, keeping non-empty entries (size column
     * != "0") whose name does not start with "_" (skips _SUCCESS/_logs markers).
     */
    private static List<String> getSubFiles(String path) throws Exception {
        String[] cmds = {"hdfs", "dfs", "-ls", path};
        ArrayList<String> results = ProcessUtils.exec(cmds);
        List<String> dataPaths = new ArrayList<>();
        // for debug
        logger.info("the process utils result: {}", results);
        if (results == null || results.size() == 0) {
            throw new Exception("getSubFiles: process utils no result get");
        }
        for (String str : results) {
            String[] arg = str.split("\\s+");
            if (arg.length == 8 && !arg[4].equalsIgnoreCase("0") && !arg[7].startsWith("_")) {
                dataPaths.add(arg[7]);
            }
        }
        return dataPaths;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.NavigableMap;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.Import.KeyValueImporter;
import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LauncherSecurityManager;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Tests the table import and table export MR job functionality
 */
@Category({VerySlowMapReduceTests.class, MediumTests.class})
public class TestImportExport {
  private static final Log LOG = LogFactory.getLog(TestImportExport.class);
  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  // Row keys use a binary \x32 prefix so start/stop-row CLI args are exercised too.
  private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
  private static final byte[] ROW2 = Bytes.toBytesBinary("\\x32row2");
  private static final byte[] ROW3 = Bytes.toBytesBinary("\\x32row3");
  private static final String FAMILYA_STRING = "a";
  private static final String FAMILYB_STRING = "b";
  private static final byte[] FAMILYA = Bytes.toBytes(FAMILYA_STRING);
  private static final byte[] FAMILYB = Bytes.toBytes(FAMILYB_STRING);
  private static final byte[] QUAL = Bytes.toBytes("q");
  private static final String OUTPUT_DIR = "outputdir";
  // Fully-qualified form of OUTPUT_DIR; resolved once the mini cluster's FS is up.
  private static String FQ_OUTPUT_DIR;
  private static final String EXPORT_BATCH_SIZE = "100";

  // Shared base timestamp for all cell versions written by the tests.
  private static long now = System.currentTimeMillis();

  @BeforeClass
  public static void beforeClass() throws Exception {
    // Up the handlers; this test needs more than usual.
    UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
    UTIL.startMiniCluster();
    FQ_OUTPUT_DIR =
      new Path(OUTPUT_DIR).makeQualified(FileSystem.get(UTIL.getConfiguration())).toString();
  }

  @AfterClass
  public static void afterClass() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  // Gives each test a distinct table name derived from the method name.
  @Rule
  public final TestName name = new TestName();

  @Before
  public void announce() {
    LOG.info("Running " + name.getMethodName());
  }

  // Runs both before and after each test so a failed run can't poison the next one.
  @Before
  @After
  public void cleanup() throws Exception {
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    fs.delete(new Path(OUTPUT_DIR), true);
  }

  /**
   * Runs an export job with the specified command line args
   * @param args
   * @return true if job completed successfully
   * @throws IOException
   * @throws InterruptedException
   * @throws ClassNotFoundException
   */
  boolean runExport(String[] args) throws Exception {
    // need to make a copy of the configuration because to make sure different temp dirs are used.
    int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Export(), args);
    return status == 0;
  }

  /**
   * Runs an import job with the specified command line args
   * @param args
   * @return true if job completed successfully
   * @throws IOException
   * @throws InterruptedException
   * @throws ClassNotFoundException
   */
  boolean runImport(String[] args) throws Exception {
    // need to make a copy of the configuration because to make sure different temp dirs are used.
    int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args);
    return status == 0;
  }

  /**
   * Test simple replication case with column mapping
   * @throws Exception
   */
  @Test
  public void testSimpleCase() throws Exception {
    try (Table t = UTIL.createTable(TableName.valueOf(name.getMethodName()), FAMILYA, 3);) {
      Put p = new Put(ROW1);
      p.addColumn(FAMILYA, QUAL, now, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
      t.put(p);
      p = new Put(ROW2);
      p.addColumn(FAMILYA, QUAL, now, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
      t.put(p);
      p = new Put(ROW3);
      p.addColumn(FAMILYA, QUAL, now, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
      t.put(p);
    }

    String[] args = new String[] {
        // Only export row1 & row2.
        "-D" + TableInputFormat.SCAN_ROW_START + "=\\x32row1",
        "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3",
        name.getMethodName(),
        FQ_OUTPUT_DIR,
        "1000", // max number of key versions per key to export
    };
    assertTrue(runExport(args));

    final String IMPORT_TABLE = name.getMethodName() + "import";
    try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3);) {
      args = new String[] {
          // Remap family a to family b on import.
          "-D" + Import.CF_RENAME_PROP + "="+FAMILYA_STRING+":"+FAMILYB_STRING,
          IMPORT_TABLE,
          FQ_OUTPUT_DIR
      };
      assertTrue(runImport(args));

      Get g = new Get(ROW1);
      g.setMaxVersions();
      Result r = t.get(g);
      assertEquals(3, r.size());
      g = new Get(ROW2);
      g.setMaxVersions();
      r = t.get(g);
      assertEquals(3, r.size());
      // ROW3 was excluded by the stop row, so it must not appear in the imported table.
      g = new Get(ROW3);
      r = t.get(g);
      assertEquals(0, r.size());
    }
  }

  /**
   * Test export hbase:meta table
   *
   * @throws Exception
   */
  @Test
  public void testMetaExport() throws Exception {
    String EXPORT_TABLE = TableName.META_TABLE_NAME.getNameAsString();
    String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1", "0", "0" };
    assertTrue(runExport(args));
  }

  /**
   * Test import data from 0.94 exported file
   * @throws Exception
   */
  @Test
  public void testImport94Table() throws Exception {
    // NOTE: this local `name` shadows the TestName rule field above.
    final String name = "exportedTableIn94Format";
    URL url = TestImportExport.class.getResource(name);
    File f = new File(url.toURI());
    if (!f.exists()) {
      LOG.warn("FAILED TO FIND " + f + "; skipping out on test");
      return;
    }
    assertTrue(f.exists());
    LOG.info("FILE=" + f);
    Path importPath = new Path(f.toURI());
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR + name));
    String IMPORT_TABLE = name;
    try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), Bytes.toBytes("f1"), 3);) {
      String[] args = new String[] {
          "-Dhbase.import.version=0.94" ,
          IMPORT_TABLE, FQ_OUTPUT_DIR
      };
      assertTrue(runImport(args));
      /* exportedTableIn94Format contains 5 rows
      ROW         COLUMN+CELL
      r1          column=f1:c1, timestamp=1383766761171, value=val1
      r2          column=f1:c1, timestamp=1383766771642, value=val2
      r3          column=f1:c1, timestamp=1383766777615, value=val3
      r4          column=f1:c1, timestamp=1383766785146, value=val4
      r5          column=f1:c1, timestamp=1383766791506, value=val5
      */
      assertEquals(5, UTIL.countRows(t));
    }
  }

  /**
   * Test export scanner batching
   */
  @Test
  public void testExportScannerBatching() throws Exception {
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    // Max one version kept, but five versions written below: export must still succeed.
    desc.addFamily(new HColumnDescriptor(FAMILYA)
        .setMaxVersions(1)
    );
    UTIL.getAdmin().createTable(desc);
    try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
      Put p = new Put(ROW1);
      p.addColumn(FAMILYA, QUAL, now, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
      t.put(p);

      String[] args = new String[] {
          "-D" + Export.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE,  // added scanner batching arg.
          name.getMethodName(),
          FQ_OUTPUT_DIR
      };
      assertTrue(runExport(args));

      FileSystem fs = FileSystem.get(UTIL.getConfiguration());
      fs.delete(new Path(FQ_OUTPUT_DIR), true);
    }
  }

  @Test
  public void testWithDeletes() throws Exception {
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    // KEEP_DELETED_CELLS so the raw export below still sees the delete markers.
    desc.addFamily(new HColumnDescriptor(FAMILYA)
        .setMaxVersions(5)
        .setKeepDeletedCells(KeepDeletedCells.TRUE)
    );
    UTIL.getAdmin().createTable(desc);
    try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
      Put p = new Put(ROW1);
      p.addColumn(FAMILYA, QUAL, now, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
      p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
      t.put(p);

      Delete d = new Delete(ROW1, now+3);
      t.delete(d);
      d = new Delete(ROW1);
      d.addColumns(FAMILYA, QUAL, now+2);
      t.delete(d);
    }

    String[] args = new String[] {
        "-D" + Export.RAW_SCAN + "=true",
        name.getMethodName(),
        FQ_OUTPUT_DIR,
        "1000", // max number of key versions per key to export
    };
    assertTrue(runExport(args));

    final String IMPORT_TABLE = name.getMethodName() + "import";
    desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
    desc.addFamily(new HColumnDescriptor(FAMILYA)
        .setMaxVersions(5)
        .setKeepDeletedCells(KeepDeletedCells.TRUE)
    );
    UTIL.getAdmin().createTable(desc);
    try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
      args = new String[] {
          IMPORT_TABLE,
          FQ_OUTPUT_DIR
      };
      assertTrue(runImport(args));

      // Raw scan: delete markers and shadowed versions must all round-trip, in order.
      Scan s = new Scan();
      s.setMaxVersions();
      s.setRaw(true);
      ResultScanner scanner = t.getScanner(s);
      Result r = scanner.next();
      Cell[] res = r.rawCells();
      assertTrue(CellUtil.isDeleteFamily(res[0]));
      assertEquals(now+4, res[1].getTimestamp());
      assertEquals(now+3, res[2].getTimestamp());
      assertTrue(CellUtil.isDelete(res[3]));
      assertEquals(now+2, res[4].getTimestamp());
      assertEquals(now+1, res[5].getTimestamp());
      assertEquals(now, res[6].getTimestamp());
    }
  }

  @Test
  public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Exception {
    final TableName exportTable = TableName.valueOf(name.getMethodName());
    HTableDescriptor desc = new HTableDescriptor(exportTable);
    desc.addFamily(new HColumnDescriptor(FAMILYA)
        .setMaxVersions(5)
        .setKeepDeletedCells(KeepDeletedCells.TRUE)
    );
    UTIL.getAdmin().createTable(desc);

    Table exportT = UTIL.getConnection().getTable(exportTable);

    //Add first version of QUAL
    Put p = new Put(ROW1);
    p.addColumn(FAMILYA, QUAL, now, QUAL);
    exportT.put(p);

    //Add Delete family marker
    Delete d = new Delete(ROW1, now+3);
    exportT.delete(d);

    //Add second version of QUAL
    p = new Put(ROW1);
    p.addColumn(FAMILYA, QUAL, now + 5, "s".getBytes());
    exportT.put(p);

    //Add second Delete family marker
    d = new Delete(ROW1, now+7);
    exportT.delete(d);

    String[] args = new String[] {
        "-D" + Export.RAW_SCAN + "=true",
        exportTable.getNameAsString(),
        FQ_OUTPUT_DIR,
        "1000", // max number of key versions per key to export
    };
    assertTrue(runExport(args));

    final String importTable = name.getMethodName() + "import";
    desc = new HTableDescriptor(TableName.valueOf(importTable));
    desc.addFamily(new HColumnDescriptor(FAMILYA)
        .setMaxVersions(5)
        .setKeepDeletedCells(KeepDeletedCells.TRUE)
    );
    UTIL.getAdmin().createTable(desc);

    Table importT = UTIL.getConnection().getTable(TableName.valueOf(importTable));
    args = new String[] {
        importTable,
        FQ_OUTPUT_DIR
    };
    assertTrue(runImport(args));

    // Compare the first raw row of the exported and imported tables cell-for-cell.
    Scan s = new Scan();
    s.setMaxVersions();
    s.setRaw(true);

    ResultScanner importedTScanner = importT.getScanner(s);
    Result importedTResult = importedTScanner.next();

    ResultScanner exportedTScanner = exportT.getScanner(s);
    Result exportedTResult = exportedTScanner.next();
    try {
      Result.compareResults(exportedTResult, importedTResult);
    } catch (Exception e) {
      fail("Original and imported tables data comparision failed with error:"+e.getMessage());
    } finally {
      exportT.close();
      importT.close();
    }
  }

  /**
   * Create a simple table, run an Export Job on it, Import with filtering on,  verify counts,
   * attempt with invalid values.
   */
  @Test
  public void testWithFilter() throws Exception {
    // Create simple table to export
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
    UTIL.getAdmin().createTable(desc);
    Table exportTable = UTIL.getConnection().getTable(desc.getTableName());

    Put p1 = new Put(ROW1);
    p1.addColumn(FAMILYA, QUAL, now, QUAL);
    p1.addColumn(FAMILYA, QUAL, now + 1, QUAL);
    p1.addColumn(FAMILYA, QUAL, now + 2, QUAL);
    p1.addColumn(FAMILYA, QUAL, now + 3, QUAL);
    p1.addColumn(FAMILYA, QUAL, now + 4, QUAL);

    // Having another row would actually test the filter.
    Put p2 = new Put(ROW2);
    p2.addColumn(FAMILYA, QUAL, now, QUAL);

    exportTable.put(Arrays.asList(p1, p2));

    // Export the simple table
    String[] args = new String[] { name.getMethodName(), FQ_OUTPUT_DIR, "1000" };
    assertTrue(runExport(args));

    // Import to a new table
    final String IMPORT_TABLE = name.getMethodName() + "import";
    desc = new HTableDescriptor(TableName.valueOf(IMPORT_TABLE));
    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
    UTIL.getAdmin().createTable(desc);

    Table importTable = UTIL.getConnection().getTable(desc.getTableName());
    args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
        "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE,
        FQ_OUTPUT_DIR,
        "1000" };
    assertTrue(runImport(args));

    // get the count of the source table for that time range
    PrefixFilter filter = new PrefixFilter(ROW1);
    int count = getCount(exportTable, filter);

    Assert.assertEquals("Unexpected row count between export and import tables", count,
      getCount(importTable, null));

    // and then test that a broken command doesn't bork everything - easier here because we don't
    // need to re-run the export job
    // (Filter is abstract, so instantiating it as the import filter must fail the job.)
    args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(),
        "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", name.getMethodName(),
        FQ_OUTPUT_DIR, "1000" };
    assertFalse(runImport(args));

    // cleanup
    exportTable.close();
    importTable.close();
  }

  /**
   * Count the number of keyvalues in the specified table for the given timerange
   * @param start
   * @param end
   * @param table
   * @return
   * @throws IOException
   */
  private int getCount(Table table, Filter filter) throws IOException {
    Scan scan = new Scan();
    scan.setFilter(filter);
    ResultScanner results = table.getScanner(scan);
    int count = 0;
    for (Result res : results) {
      count += res.size();
    }
    results.close();
    return count;
  }

  /**
   * test main method. Import should print help and call System.exit
   */
  @Test
  public void testImportMain() throws Exception {
    PrintStream oldPrintStream = System.err;
    SecurityManager SECURITY_MANAGER = System.getSecurityManager();
    // LauncherSecurityManager turns System.exit into a catchable SecurityException.
    LauncherSecurityManager newSecurityManager= new LauncherSecurityManager();
    System.setSecurityManager(newSecurityManager);
    ByteArrayOutputStream data = new ByteArrayOutputStream();
    String[] args = {};
    System.setErr(new PrintStream(data));
    try {
      System.setErr(new PrintStream(data));
      Import.main(args);
      fail("should be SecurityException");
    } catch (SecurityException e) {
      assertEquals(-1, newSecurityManager.getExitCode());
      assertTrue(data.toString().contains("Wrong number of arguments:"));
      assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output"));
      assertTrue(data.toString().contains("-Dimport.filter.class=<name of filter class>"));
      // NOTE: duplicate of the bulk.output assertion above; kept as-is.
      assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output"));
      assertTrue(data.toString().contains("-Dmapreduce.reduce.speculative=false"));
    } finally {
      System.setErr(oldPrintStream);
      System.setSecurityManager(SECURITY_MANAGER);
    }
  }

  /**
   * test main method. Export should print help and call System.exit
   */
  @Test
  public void testExportMain() throws Exception {
    PrintStream oldPrintStream = System.err;
    SecurityManager SECURITY_MANAGER = System.getSecurityManager();
    LauncherSecurityManager newSecurityManager= new LauncherSecurityManager();
    System.setSecurityManager(newSecurityManager);
    ByteArrayOutputStream data = new ByteArrayOutputStream();
    String[] args = {};
    System.setErr(new PrintStream(data));
    try {
      System.setErr(new PrintStream(data));
      Export.main(args);
      fail("should be SecurityException");
    } catch (SecurityException e) {
      assertEquals(-1, newSecurityManager.getExitCode());
      String errMsg = data.toString();
      assertTrue(errMsg.contains("Wrong number of arguments:"));
      assertTrue(errMsg.contains(
          "Usage: Export [-D <property=value>]* <tablename> <outputdir> [<versions> " +
          "[<starttime> [<endtime>]] [^[regex pattern] or [Prefix] to filter]]"));
      assertTrue(
        errMsg.contains("-D hbase.mapreduce.scan.column.family=<family1>,<family2>, ..."));
      assertTrue(errMsg.contains("-D hbase.mapreduce.include.deleted.rows=true"));
      assertTrue(errMsg.contains("-Dhbase.client.scanner.caching=100"));
      assertTrue(errMsg.contains("-Dmapreduce.map.speculative=false"));
      assertTrue(errMsg.contains("-Dmapreduce.reduce.speculative=false"));
      assertTrue(errMsg.contains("-Dhbase.export.scanner.batch=10"));
    } finally {
      System.setErr(oldPrintStream);
      System.setSecurityManager(SECURITY_MANAGER);
    }
  }

  /**
   * Test map method of Importer
   */
  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Test
  public void testKeyValueImporter() throws Exception {
    KeyValueImporter importer = new KeyValueImporter();
    Configuration configuration = new Configuration();
    Context ctx = mock(Context.class);
    when(ctx.getConfiguration()).thenReturn(configuration);

    // Capture each write() and verify the emitted key/row instead of running a real job.
    doAnswer(new Answer<Void>() {

      @Override
      public Void answer(InvocationOnMock invocation) throws Throwable {
        ImmutableBytesWritable writer = (ImmutableBytesWritable) invocation.getArguments()[0];
        KeyValue key = (KeyValue) invocation.getArguments()[1];
        assertEquals("Key", Bytes.toString(writer.get()));
        assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
        return null;
      }
    }).when(ctx).write(any(ImmutableBytesWritable.class), any(KeyValue.class));

    importer.setup(ctx);
    Result value = mock(Result.class);
    KeyValue[] keys = {
        new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
            Bytes.toBytes("value")),
        new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
            Bytes.toBytes("value1")) };
    when(value.rawCells()).thenReturn(keys);
    importer.map(new ImmutableBytesWritable(Bytes.toBytes("Key")), value, ctx);

  }

  /**
   * Test addFilterAndArguments method of Import This method set couple
   * parameters into Configuration
   */
  @Test
  public void testAddFilterAndArguments() throws IOException {
    Configuration configuration = new Configuration();

    List<String> args = new ArrayList<>();
    args.add("param1");
    args.add("param2");

    Import.addFilterAndArguments(configuration, FilterBase.class, args);
    assertEquals("org.apache.hadoop.hbase.filter.FilterBase",
        configuration.get(Import.FILTER_CLASS_CONF_KEY));
    assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY));
  }

  @Test
  public void testDurability() throws Exception {
    // Create an export table.
    String exportTableName = name.getMethodName() + "export";
    try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);) {

      // Insert some data
      Put put = new Put(ROW1);
      put.addColumn(FAMILYA, QUAL, now, QUAL);
      put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
      put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
      exportTable.put(put);

      put = new Put(ROW2);
      put.addColumn(FAMILYA, QUAL, now, QUAL);
      put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
      put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
      exportTable.put(put);

      // Run the export
      String[] args = new String[] { exportTableName, FQ_OUTPUT_DIR, "1000"};
      assertTrue(runExport(args));

      // Create the table for import
      String importTableName = name.getMethodName() + "import1";
      Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);

      // Register the wal listener for the import table
      HRegionInfo region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
          .getOnlineRegions(importTable.getName()).get(0).getRegionInfo();
      TableWALActionListener walListener = new TableWALActionListener(region);
      WAL wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
      wal.registerWALActionsListener(walListener);

      // Run the import with SKIP_WAL
      args =
          new String[] { "-D" + Import.WAL_DURABILITY + "=" + Durability.SKIP_WAL.name(),
              importTableName, FQ_OUTPUT_DIR };
      assertTrue(runImport(args));
      //Assert that the wal is not visisted
      assertTrue(!walListener.isWALVisited());
      //Ensure that the count is 2 (only one version of key value is obtained)
      assertTrue(getCount(importTable, null) == 2);

      // Run the import with the default durability option
      importTableName = name.getMethodName() + "import2";
      importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
      region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
          .getOnlineRegions(importTable.getName()).get(0).getRegionInfo();
      wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
      walListener = new TableWALActionListener(region);
      wal.registerWALActionsListener(walListener);
      args = new String[] { importTableName, FQ_OUTPUT_DIR };
      assertTrue(runImport(args));
      //Assert that the wal is visisted
      assertTrue(walListener.isWALVisited());
      //Ensure that the count is 2 (only one version of key value is obtained)
      assertTrue(getCount(importTable, null) == 2);
    }
  }

  /**
   * This listens to the {@link #visitLogEntryBeforeWrite(HRegionInfo, WALKey, WALEdit)} to
   * identify that an entry is written to the Write Ahead Log for the given table.
   */
  private static class TableWALActionListener extends WALActionsListener.Base {

    private HRegionInfo regionInfo;
    // Flips to true on the first non-meta WAL edit for the watched table.
    private boolean isVisited = false;

    public TableWALActionListener(HRegionInfo region) {
      this.regionInfo = region;
    }

    @Override
    public void visitLogEntryBeforeWrite(WALKey logKey, WALEdit logEdit) {
      if (logKey.getTablename().getNameAsString().equalsIgnoreCase(
          this.regionInfo.getTable().getNameAsString()) && (!logEdit.isMetaEdit())) {
        isVisited = true;
      }
    }

    public boolean isWALVisited() {
      return isVisited;
    }
  }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master; import java.io.IOException; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.coprocessor.CoprocessorService; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.MasterObserver; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; import org.apache.hadoop.hbase.protobuf.generated.QuotaProtos.Quotas; /** * Provides the coprocessor framework and environment for master oriented * operations. 
{@link HMaster} interacts with the loaded coprocessors
 * through this class.
 */
@InterfaceAudience.Private
public class MasterCoprocessorHost
    extends CoprocessorHost<MasterCoprocessorHost.MasterEnvironment> {

  private static final Log LOG = LogFactory.getLog(MasterCoprocessorHost.class);

  /**
   * Coprocessor environment extension providing access to master related
   * services.
   */
  static class MasterEnvironment extends CoprocessorHost.Environment
      implements MasterCoprocessorEnvironment {
    // Handle to master services, exposed to loaded coprocessors via the environment.
    private MasterServices masterServices;

    public MasterEnvironment(final Class<?> implClass, final Coprocessor impl,
        final int priority, final int seq, final Configuration conf,
        final MasterServices services) {
      super(impl, priority, seq, conf);
      this.masterServices = services;
    }

    public MasterServices getMasterServices() {
      return masterServices;
    }
  }

  private MasterServices masterServices;

  public MasterCoprocessorHost(final MasterServices services, final Configuration conf) {
    super(services);
    this.conf = conf;
    this.masterServices = services;
    // Log the state of coprocessor loading here; should appear only once or
    // twice in the daemon log, depending on HBase version, because there is
    // only one MasterCoprocessorHost instance in the master process
    boolean coprocessorsEnabled = conf.getBoolean(COPROCESSORS_ENABLED_CONF_KEY,
        DEFAULT_COPROCESSORS_ENABLED);
    LOG.info("System coprocessor loading is " + (coprocessorsEnabled ?
        "enabled" : "disabled"));
    loadSystemCoprocessors(conf, MASTER_COPROCESSOR_CONF_KEY);
  }

  @Override
  public MasterEnvironment createEnvironment(final Class<?> implClass,
      final Coprocessor instance, final int priority, final int seq,
      final Configuration conf) {
    // If the coprocessor exports an RPC service, register it with the master
    // so it can be reached by clients.
    for (Class<?> c : implClass.getInterfaces()) {
      if (CoprocessorService.class.isAssignableFrom(c)) {
        masterServices.registerService(((CoprocessorService)instance).getService());
      }
    }
    return new MasterEnvironment(implClass, instance, priority, seq, conf,
        masterServices);
  }

  // Every hook below fans the call out to each loaded MasterObserver through
  // execOperation(); when no coprocessors are loaded a null operation is passed,
  // which execOperation() treats as a no-op. boolean-returning hooks report
  // whether some observer requested that the default action be bypassed.

  public boolean preCreateNamespace(final NamespaceDescriptor ns) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preCreateNamespace(ctx, ns);
      }
    });
  }

  public void postCreateNamespace(final NamespaceDescriptor ns) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postCreateNamespace(ctx, ns);
      }
    });
  }

  public boolean preDeleteNamespace(final String namespaceName) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preDeleteNamespace(ctx, namespaceName);
      }
    });
  }

  public void postDeleteNamespace(final String namespaceName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postDeleteNamespace(ctx, namespaceName);
      }
    });
  }

  public boolean preModifyNamespace(final NamespaceDescriptor ns) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preModifyNamespace(ctx, ns);
      }
    });
  }

  public void postModifyNamespace(final NamespaceDescriptor ns) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postModifyNamespace(ctx, ns);
      }
    });
  }

  public void preGetNamespaceDescriptor(final String namespaceName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preGetNamespaceDescriptor(ctx, namespaceName);
      }
    });
  }

  public void postGetNamespaceDescriptor(final NamespaceDescriptor ns) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postGetNamespaceDescriptor(ctx, ns);
      }
    });
  }

  public boolean preListNamespaceDescriptors(final List<NamespaceDescriptor> descriptors)
      throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preListNamespaceDescriptors(ctx, descriptors);
      }
    });
  }

  public void postListNamespaceDescriptors(final List<NamespaceDescriptor> descriptors)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postListNamespaceDescriptors(ctx, descriptors);
      }
    });
  }

  /* Implementation of hooks for invoking MasterObservers */

  public void preCreateTable(final HTableDescriptor htd, final HRegionInfo[] regions)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preCreateTable(ctx, htd, regions);
      }
    });
  }

  public void postCreateTable(final HTableDescriptor htd, final HRegionInfo[] regions)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postCreateTable(ctx, htd, regions);
      }
    });
  }

  public void preCreateTableHandler(final HTableDescriptor htd, final HRegionInfo[] regions)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preCreateTableHandler(ctx, htd, regions);
      }
    });
  }

  public void postCreateTableHandler(final HTableDescriptor htd, final HRegionInfo[] regions)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postCreateTableHandler(ctx, htd, regions);
      }
    });
  }

  public void preDeleteTable(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preDeleteTable(ctx, tableName);
      }
    });
  }

  public void postDeleteTable(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postDeleteTable(ctx, tableName);
      }
    });
  }

  public void preDeleteTableHandler(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preDeleteTableHandler(ctx, tableName);
      }
    });
  }

  public void postDeleteTableHandler(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postDeleteTableHandler(ctx, tableName);
      }
    });
  }

  public void preTruncateTable(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preTruncateTable(ctx, tableName);
      }
    });
  }

  public void postTruncateTable(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postTruncateTable(ctx, tableName);
      }
    });
  }

  public void preTruncateTableHandler(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preTruncateTableHandler(ctx, tableName);
      }
    });
  }

  public void postTruncateTableHandler(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postTruncateTableHandler(ctx, tableName);
      }
    });
  }

  public void preModifyTable(final TableName tableName, final HTableDescriptor htd)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preModifyTable(ctx, tableName, htd);
      }
    });
  }

  public void postModifyTable(final TableName tableName, final HTableDescriptor htd)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postModifyTable(ctx, tableName, htd);
      }
    });
  }

  public void preModifyTableHandler(final TableName tableName, final HTableDescriptor htd)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preModifyTableHandler(ctx, tableName, htd);
      }
    });
  }

  public void postModifyTableHandler(final TableName tableName, final HTableDescriptor htd)
      throws IOException {
    execOperation(coprocessors.isEmpty() ?
null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postModifyTableHandler(ctx, tableName, htd);
      }
    });
  }

  // The column-family hooks below invoke both callback variants on each observer
  // (e.g. preAddColumn and preAddColumnFamily) so observers implementing either
  // naming of the callback are notified.

  public boolean preAddColumn(final TableName tableName, final HColumnDescriptor columnFamily)
      throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preAddColumn(ctx, tableName, columnFamily);
        oserver.preAddColumnFamily(ctx, tableName, columnFamily);
      }
    });
  }

  public void postAddColumn(final TableName tableName, final HColumnDescriptor columnFamily)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postAddColumn(ctx, tableName, columnFamily);
        oserver.postAddColumnFamily(ctx, tableName, columnFamily);
      }
    });
  }

  public boolean preAddColumnHandler(final TableName tableName,
      final HColumnDescriptor columnFamily) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preAddColumnHandler(ctx, tableName, columnFamily);
        oserver.preAddColumnFamilyHandler(ctx, tableName, columnFamily);
      }
    });
  }

  public void postAddColumnHandler(final TableName tableName,
      final HColumnDescriptor columnFamily) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postAddColumnHandler(ctx, tableName, columnFamily);
        oserver.postAddColumnFamilyHandler(ctx, tableName, columnFamily);
      }
    });
  }

  public boolean preModifyColumn(final TableName tableName,
      final HColumnDescriptor columnFamily) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preModifyColumn(ctx, tableName, columnFamily);
        oserver.preModifyColumnFamily(ctx, tableName, columnFamily);
      }
    });
  }

  public void postModifyColumn(final TableName tableName,
      final HColumnDescriptor columnFamily) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postModifyColumn(ctx, tableName, columnFamily);
        oserver.postModifyColumnFamily(ctx, tableName, columnFamily);
      }
    });
  }

  public boolean preModifyColumnHandler(final TableName tableName,
      final HColumnDescriptor columnFamily) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preModifyColumnHandler(ctx, tableName, columnFamily);
        oserver.preModifyColumnFamilyHandler(ctx, tableName, columnFamily);
      }
    });
  }

  public void postModifyColumnHandler(final TableName tableName,
      final HColumnDescriptor columnFamily) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postModifyColumnHandler(ctx, tableName, columnFamily);
        oserver.postModifyColumnFamilyHandler(ctx, tableName, columnFamily);
      }
    });
  }

  public boolean preDeleteColumn(final TableName tableName, final byte[] columnFamily)
      throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preDeleteColumn(ctx, tableName, columnFamily);
        oserver.preDeleteColumnFamily(ctx, tableName, columnFamily);
      }
    });
  }

  public void postDeleteColumn(final TableName tableName, final byte[] columnFamily)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postDeleteColumn(ctx, tableName, columnFamily);
        oserver.postDeleteColumnFamily(ctx, tableName, columnFamily);
      }
    });
  }

  public boolean preDeleteColumnHandler(final TableName tableName, final byte[] columnFamily)
      throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preDeleteColumnHandler(ctx, tableName, columnFamily);
        oserver.preDeleteColumnFamilyHandler(ctx, tableName, columnFamily);
      }
    });
  }

  public void postDeleteColumnHandler(final TableName tableName, final byte[] columnFamily)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postDeleteColumnHandler(ctx, tableName, columnFamily);
        oserver.postDeleteColumnFamilyHandler(ctx, tableName, columnFamily);
      }
    });
  }

  public void preEnableTable(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preEnableTable(ctx, tableName);
      }
    });
  }

  public void postEnableTable(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postEnableTable(ctx, tableName);
      }
    });
  }

  public void preEnableTableHandler(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preEnableTableHandler(ctx, tableName);
      }
    });
  }

  public void postEnableTableHandler(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postEnableTableHandler(ctx, tableName);
      }
    });
  }

  public void preDisableTable(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preDisableTable(ctx, tableName);
      }
    });
  }

  public void postDisableTable(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postDisableTable(ctx, tableName);
      }
    });
  }

  public void preDisableTableHandler(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preDisableTableHandler(ctx, tableName);
      }
    });
  }

  public void postDisableTableHandler(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postDisableTableHandler(ctx, tableName);
      }
    });
  }

  public boolean preMove(final HRegionInfo region, final ServerName srcServer,
      final ServerName destServer) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preMove(ctx, region, srcServer, destServer);
      }
    });
  }

  public void postMove(final HRegionInfo region, final ServerName srcServer,
      final ServerName destServer) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postMove(ctx, region, srcServer, destServer);
      }
    });
  }

  public boolean preAssign(final HRegionInfo regionInfo) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preAssign(ctx, regionInfo);
      }
    });
  }

  public void postAssign(final HRegionInfo regionInfo) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postAssign(ctx, regionInfo);
      }
    });
  }

  public boolean preUnassign(final HRegionInfo regionInfo, final boolean force)
      throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preUnassign(ctx, regionInfo, force);
      }
    });
  }

  public void postUnassign(final HRegionInfo regionInfo, final boolean force)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postUnassign(ctx, regionInfo, force);
      }
    });
  }

  public void preRegionOffline(final HRegionInfo regionInfo) throws IOException {
    execOperation(coprocessors.isEmpty() ?
null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preRegionOffline(ctx, regionInfo);
      }
    });
  }

  public void postRegionOffline(final HRegionInfo regionInfo) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postRegionOffline(ctx, regionInfo);
      }
    });
  }

  public boolean preBalance() throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preBalance(ctx);
      }
    });
  }

  public void postBalance(final List<RegionPlan> plans) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postBalance(ctx, plans);
      }
    });
  }

  public boolean preBalanceSwitch(final boolean b) throws IOException {
    // Threads the requested switch value through each observer: every observer
    // sees the result produced by the previous one and may replace it.
    return execOperationWithResult(b, coprocessors.isEmpty() ? null :
        new CoprocessorOperationWithResult<Boolean>() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        setResult(oserver.preBalanceSwitch(ctx, getResult()));
      }
    });
  }

  public void postBalanceSwitch(final boolean oldValue, final boolean newValue)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postBalanceSwitch(ctx, oldValue, newValue);
      }
    });
  }

  public void preShutdown() throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preShutdown(ctx);
      }
      @Override
      public void postEnvCall(MasterEnvironment env) {
        // invoke coprocessor stop method
        shutdown(env);
      }
    });
  }

  public void preStopMaster() throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preStopMaster(ctx);
      }
      @Override
      public void postEnvCall(MasterEnvironment env) {
        // invoke coprocessor stop method
        shutdown(env);
      }
    });
  }

  public void preMasterInitialization() throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preMasterInitialization(ctx);
      }
    });
  }

  public void postStartMaster() throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postStartMaster(ctx);
      }
    });
  }

  public void preSnapshot(final SnapshotDescription snapshot,
      final HTableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preSnapshot(ctx, snapshot, hTableDescriptor);
      }
    });
  }

  public void postSnapshot(final SnapshotDescription snapshot,
      final HTableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postSnapshot(ctx, snapshot, hTableDescriptor);
      }
    });
  }

  public void preListSnapshot(final SnapshotDescription snapshot) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver observer,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        observer.preListSnapshot(ctx, snapshot);
      }
    });
  }

  public void postListSnapshot(final SnapshotDescription snapshot) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver observer,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        observer.postListSnapshot(ctx, snapshot);
      }
    });
  }

  public void preCloneSnapshot(final SnapshotDescription snapshot,
      final HTableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preCloneSnapshot(ctx, snapshot, hTableDescriptor);
      }
    });
  }

  public void postCloneSnapshot(final SnapshotDescription snapshot,
      final HTableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postCloneSnapshot(ctx, snapshot, hTableDescriptor);
      }
    });
  }

  public void preRestoreSnapshot(final SnapshotDescription snapshot,
      final HTableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preRestoreSnapshot(ctx, snapshot, hTableDescriptor);
      }
    });
  }

  public void postRestoreSnapshot(final SnapshotDescription snapshot,
      final HTableDescriptor hTableDescriptor) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postRestoreSnapshot(ctx, snapshot, hTableDescriptor);
      }
    });
  }

  public void preDeleteSnapshot(final SnapshotDescription snapshot) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preDeleteSnapshot(ctx, snapshot);
      }
    });
  }

  public void postDeleteSnapshot(final SnapshotDescription snapshot) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postDeleteSnapshot(ctx, snapshot);
      }
    });
  }

  public boolean preGetTableDescriptors(final List<TableName> tableNamesList,
      final List<HTableDescriptor> descriptors, final String regex) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preGetTableDescriptors(ctx, tableNamesList, descriptors, regex);
      }
    });
  }

  public void postGetTableDescriptors(final List<TableName> tableNamesList,
      final List<HTableDescriptor> descriptors, final String regex) throws IOException {
    execOperation(coprocessors.isEmpty() ?
null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postGetTableDescriptors(ctx, tableNamesList, descriptors, regex);
      }
    });
  }

  public boolean preGetTableNames(final List<HTableDescriptor> descriptors,
      final String regex) throws IOException {
    return execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preGetTableNames(ctx, descriptors, regex);
      }
    });
  }

  public void postGetTableNames(final List<HTableDescriptor> descriptors,
      final String regex) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postGetTableNames(ctx, descriptors, regex);
      }
    });
  }

  public void preTableFlush(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preTableFlush(ctx, tableName);
      }
    });
  }

  public void postTableFlush(final TableName tableName) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postTableFlush(ctx, tableName);
      }
    });
  }

  public void preSetUserQuota(final String user, final Quotas quotas) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preSetUserQuota(ctx, user, quotas);
      }
    });
  }

  public void postSetUserQuota(final String user, final Quotas quotas) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postSetUserQuota(ctx, user, quotas);
      }
    });
  }

  public void preSetUserQuota(final String user, final TableName table, final Quotas quotas)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preSetUserQuota(ctx, user, table, quotas);
      }
    });
  }

  public void postSetUserQuota(final String user, final TableName table, final Quotas quotas)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postSetUserQuota(ctx, user, table, quotas);
      }
    });
  }

  public void preSetUserQuota(final String user, final String namespace, final Quotas quotas)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preSetUserQuota(ctx, user, namespace, quotas);
      }
    });
  }

  public void postSetUserQuota(final String user, final String namespace, final Quotas quotas)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postSetUserQuota(ctx, user, namespace, quotas);
      }
    });
  }

  public void preSetTableQuota(final TableName table, final Quotas quotas) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preSetTableQuota(ctx, table, quotas);
      }
    });
  }

  public void postSetTableQuota(final TableName table, final Quotas quotas) throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postSetTableQuota(ctx, table, quotas);
      }
    });
  }

  public void preSetNamespaceQuota(final String namespace, final Quotas quotas)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.preSetNamespaceQuota(ctx, namespace, quotas);
      }
    });
  }

  public void postSetNamespaceQuota(final String namespace, final Quotas quotas)
      throws IOException {
    execOperation(coprocessors.isEmpty() ? null : new CoprocessorOperation() {
      @Override
      public void call(MasterObserver oserver,
          ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException {
        oserver.postSetNamespaceQuota(ctx, namespace, quotas);
      }
    });
  }

  /**
   * One observer invocation; subclasses implement call() to dispatch to the
   * appropriate MasterObserver hook. Extends ObserverContext so the same object
   * is passed to the observer as its context.
   */
  private static abstract class CoprocessorOperation
      extends ObserverContext<MasterCoprocessorEnvironment> {
    public CoprocessorOperation() {
    }

    public abstract void call(MasterObserver oserver,
        ObserverContext<MasterCoprocessorEnvironment> ctx) throws IOException;

    // Called once per environment after call(); default is a no-op. Overridden
    // by shutdown-type hooks to stop the coprocessor.
    public void postEnvCall(MasterEnvironment env) {
    }
  }

  /**
   * A CoprocessorOperation carrying a mutable result that is threaded through
   * successive observer invocations.
   */
  private static abstract class CoprocessorOperationWithResult<T> extends CoprocessorOperation {
    private T result = null;
    public void setResult(final T result) { this.result = result; }
    public T getResult() { return this.result; }
  }

  /**
   * Runs the operation seeded with defaultValue and returns the final result;
   * returns defaultValue unchanged when ctx is null (no coprocessors loaded).
   */
  private <T> T execOperationWithResult(final T defaultValue,
      final CoprocessorOperationWithResult<T> ctx) throws IOException {
    if (ctx == null) return defaultValue;
    ctx.setResult(defaultValue);
    execOperation(ctx);
    return ctx.getResult();
  }

  /**
   * Invokes the operation on every loaded MasterObserver in priority order.
   * Returns true if any observer requested a bypass; stops early if an observer
   * marks the operation complete. A null ctx is a no-op returning false.
   */
  private boolean execOperation(final CoprocessorOperation ctx) throws IOException {
    if (ctx == null) return false;
    boolean bypass = false;
    for (MasterEnvironment env: coprocessors) {
      if (env.getInstance() instanceof MasterObserver) {
        ctx.prepare(env);
        Thread currentThread = Thread.currentThread();
        ClassLoader cl = currentThread.getContextClassLoader();
        try {
          // Run the observer under its own classloader, restoring ours after.
          currentThread.setContextClassLoader(env.getClassLoader());
          ctx.call((MasterObserver)env.getInstance(), ctx);
        } catch (Throwable e) {
          handleCoprocessorThrowable(env, e);
        } finally {
          currentThread.setContextClassLoader(cl);
        }
        bypass |= ctx.shouldBypass();
        if (ctx.shouldComplete()) {
          break;
        }
      }
      // Runs for every environment visited, observer or not.
      ctx.postEnvCall(env);
    }
    return bypass;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.io.sstable; import java.io.File; import java.io.IOError; import java.io.IOException; import java.util.*; import java.util.regex.Pattern; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Objects; import com.google.common.base.Splitter; import org.apache.cassandra.db.Directories; import org.apache.cassandra.io.sstable.format.SSTableFormat; import org.apache.cassandra.io.sstable.format.Version; import org.apache.cassandra.io.sstable.metadata.IMetadataSerializer; import org.apache.cassandra.io.sstable.metadata.MetadataSerializer; import org.apache.cassandra.utils.Pair; import static org.apache.cassandra.io.sstable.Component.separator; /** * A SSTable is described by the keyspace and column family it contains data * for, a generation (where higher generations contain more recent data) and * an alphabetic version string. * * A descriptor can be marked as temporary, which influences generated filenames. 
*/ public class Descriptor { private final static String LEGACY_TMP_REGEX_STR = "^((.*)\\-(.*)\\-)?tmp(link)?\\-((?:l|k).)\\-(\\d)*\\-(.*)$"; private final static Pattern LEGACY_TMP_REGEX = Pattern.compile(LEGACY_TMP_REGEX_STR); public static String TMP_EXT = ".tmp"; private static final Splitter filenameSplitter = Splitter.on('-'); /** canonicalized path to the directory where SSTable resides */ public final File directory; /** version has the following format: <code>[a-z]+</code> */ public final Version version; public final String ksname; public final String cfname; public final int generation; public final SSTableFormat.Type formatType; private final int hashCode; /** * A descriptor that assumes CURRENT_VERSION. */ @VisibleForTesting public Descriptor(File directory, String ksname, String cfname, int generation) { this(SSTableFormat.Type.current().info.getLatestVersion(), directory, ksname, cfname, generation, SSTableFormat.Type.current()); } /** * Constructor for sstable writers only. 
*/ public Descriptor(File directory, String ksname, String cfname, int generation, SSTableFormat.Type formatType) { this(formatType.info.getLatestVersion(), directory, ksname, cfname, generation, formatType); } public Descriptor(Version version, File directory, String ksname, String cfname, int generation, SSTableFormat.Type formatType) { assert version != null && directory != null && ksname != null && cfname != null && formatType.info.getLatestVersion().getClass().equals(version.getClass()); this.version = version; try { this.directory = directory.getCanonicalFile(); } catch (IOException e) { throw new IOError(e); } this.ksname = ksname; this.cfname = cfname; this.generation = generation; this.formatType = formatType; hashCode = Objects.hashCode(version, this.directory, generation, ksname, cfname, formatType); } public Descriptor withGeneration(int newGeneration) { return new Descriptor(version, directory, ksname, cfname, newGeneration, formatType); } public Descriptor withFormatType(SSTableFormat.Type newType) { return new Descriptor(newType.info.getLatestVersion(), directory, ksname, cfname, generation, newType); } public String tmpFilenameFor(Component component) { return filenameFor(component) + TMP_EXT; } public String filenameFor(Component component) { return baseFilename() + separator + component.name(); } public String baseFilename() { StringBuilder buff = new StringBuilder(); buff.append(directory).append(File.separatorChar); appendFileName(buff); return buff.toString(); } private void appendFileName(StringBuilder buff) { buff.append(version).append(separator); buff.append(generation); buff.append(separator).append(formatType.name); } public String relativeFilenameFor(Component component) { final StringBuilder buff = new StringBuilder(); if (Directories.isSecondaryIndexFolder(directory)) { buff.append(directory.getName()).append(File.separator); } appendFileName(buff); buff.append(separator).append(component.name()); return buff.toString(); } public 
SSTableFormat getFormat() { return formatType.info; } /** Return any temporary files found in the directory */ public List<File> getTemporaryFiles() { File[] tmpFiles = directory.listFiles((dir, name) -> name.endsWith(Descriptor.TMP_EXT)); List<File> ret = new ArrayList<>(tmpFiles.length); for (File tmpFile : tmpFiles) ret.add(tmpFile); return ret; } public static boolean isValidFile(File file) { String filename = file.getName(); return filename.endsWith(".db") && !LEGACY_TMP_REGEX.matcher(filename).matches(); } /** * Parse a sstable filename into a Descriptor. * <p> * This is a shortcut for {@code fromFilename(new File(filename))}. * * @param filename the filename to a sstable component. * @return the descriptor for the parsed file. * * @throws IllegalArgumentException if the provided {@code file} does point to a valid sstable filename. This could * mean either that the filename doesn't look like a sstable file, or that it is for an old and unsupported * versions. */ public static Descriptor fromFilename(String filename) { return fromFilename(new File(filename)); } /** * Parse a sstable filename into a Descriptor. * <p> * SSTables files are all located within subdirectories of the form {@code <keyspace>/<table>/}. Normal sstables are * are directly within that subdirectory structure while 2ndary index, backups and snapshot are each inside an * additional subdirectory. The file themselves have the form: * {@code <version>-<gen>-<format>-<component>}. * <p> * Note that this method will only sucessfully parse sstable files of supported versions. * * @param file the {@code File} object for the filename to parse. * @return the descriptor for the parsed file. * * @throws IllegalArgumentException if the provided {@code file} does point to a valid sstable filename. This could * mean either that the filename doesn't look like a sstable file, or that it is for an old and unsupported * versions. 
*/ public static Descriptor fromFilename(File file) { return fromFilenameWithComponent(file).left; } /** * Parse a sstable filename, extracting both the {@code Descriptor} and {@code Component} part. * * @param file the {@code File} object for the filename to parse. * @return a pair of the descriptor and component corresponding to the provided {@code file}. * * @throws IllegalArgumentException if the provided {@code file} does point to a valid sstable filename. This could * mean either that the filename doesn't look like a sstable file, or that it is for an old and unsupported * versions. */ public static Pair<Descriptor, Component> fromFilenameWithComponent(File file) { // We need to extract the keyspace and table names from the parent directories, so make sure we deal with the // absolute path. if (!file.isAbsolute()) file = file.getAbsoluteFile(); String name = file.getName(); List<String> tokens = filenameSplitter.splitToList(name); int size = tokens.size(); if (size != 4) { // This is an invalid sstable file for this version. But to provide a more helpful error message, we detect // old format sstable, which had the format: // <keyspace>-<table>-(tmp-)?<version>-<gen>-<component> // Note that we assume it's an old format sstable if it has the right number of tokens: this is not perfect // but we're just trying to be helpful, not perfect. 
if (size == 5 || size == 6) throw new IllegalArgumentException(String.format("%s is of version %s which is now unsupported and cannot be read.", name, tokens.get(size - 3))); throw new IllegalArgumentException(String.format("Invalid sstable file %s: the name doesn't look like a supported sstable file name", name)); } String versionString = tokens.get(0); if (!Version.validate(versionString)) throw invalidSSTable(name, "invalid version %s", versionString); int generation; try { generation = Integer.parseInt(tokens.get(1)); } catch (NumberFormatException e) { throw invalidSSTable(name, "the 'generation' part of the name doesn't parse as a number"); } String formatString = tokens.get(2); SSTableFormat.Type format; try { format = SSTableFormat.Type.validate(formatString); } catch (IllegalArgumentException e) { throw invalidSSTable(name, "unknown 'format' part (%s)", formatString); } Component component = Component.parse(tokens.get(3)); Version version = format.info.getVersion(versionString); if (!version.isCompatible()) throw invalidSSTable(name, "incompatible sstable version (%s); you should have run upgradesstables before upgrading", versionString); File directory = parentOf(name, file); File tableDir = directory; // Check if it's a 2ndary index directory (not that it doesn't exclude it to be also a backup or snapshot) String indexName = ""; if (Directories.isSecondaryIndexFolder(tableDir)) { indexName = tableDir.getName(); tableDir = parentOf(name, tableDir); } // Then it can be a backup or a snapshot if (tableDir.getName().equals(Directories.BACKUPS_SUBDIR)) tableDir = tableDir.getParentFile(); else if (parentOf(name, tableDir).getName().equals(Directories.SNAPSHOT_SUBDIR)) tableDir = parentOf(name, parentOf(name, tableDir)); String table = tableDir.getName().split("-")[0] + indexName; String keyspace = parentOf(name, tableDir).getName(); return Pair.create(new Descriptor(version, directory, keyspace, table, generation, format), component); } private static File 
parentOf(String name, File file) { File parent = file.getParentFile(); if (parent == null) throw invalidSSTable(name, "cannot extract keyspace and table name; make sure the sstable is in the proper sub-directories"); return parent; } private static IllegalArgumentException invalidSSTable(String name, String msgFormat, Object... parameters) { throw new IllegalArgumentException(String.format("Invalid sstable file " + name + ": " + msgFormat, parameters)); } public IMetadataSerializer getMetadataSerializer() { return new MetadataSerializer(); } /** * @return true if the current Cassandra version can read the given sstable version */ public boolean isCompatible() { return version.isCompatible(); } @Override public String toString() { return baseFilename(); } @Override public boolean equals(Object o) { if (o == this) return true; if (!(o instanceof Descriptor)) return false; Descriptor that = (Descriptor)o; return that.directory.equals(this.directory) && that.generation == this.generation && that.ksname.equals(this.ksname) && that.cfname.equals(this.cfname) && that.formatType == this.formatType; } @Override public int hashCode() { return hashCode; } }
/* * MIT License * Copyright (c) 2016 Estonian Information System Authority (RIA) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package ee.ria.dumonitor.filter.config; import ee.ria.dumonitor.common.util.*; import ee.ria.dumonitor.filter.config.generated.FilterConfiguration; import ee.ria.dumonitor.filter.config.generated.LoggableFields; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.xml.bind.JAXBException; import javax.xml.namespace.NamespaceContext; import javax.xml.xpath.XPathExpression; import javax.xml.xpath.XPathExpressionException; import java.io.InputStream; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; /** * Class that loads the filter configuration file (XML) and precompiles the XPath expressions in it. 
*/
public final class ConfigurationLoader {

  private static final Logger LOG = LoggerFactory.getLogger(ConfigurationLoader.class);

  // Raw, unmarshalled configuration files in the order they were loaded; populated by
  // loadConfiguration() and read by precompile(). Later files can add to earlier ones.
  private static FilterConfiguration[] loadedConfigurations;

  // Static utility class; never instantiated.
  private ConfigurationLoader() {
    throw new UnsupportedOperationException();
  }

  /**
   * Loads the specified filter configuration files from the classpath and unmarshals the XML contents into
   * corresponding JAXB objects. This method must be called at application startup.
   * <p>
   * This method throws a RuntimeException if an XML file cannot be parsed or an XPath expression cannot be compiled
   *
   * @param filenames an array of filenames to load configurations from
   */
  public static void loadConfiguration(String... filenames) {
    loadedConfigurations = new FilterConfiguration[filenames.length];
    for (int i = 0; i < filenames.length; i++) {
      String filename = filenames[i];
      try {
        loadedConfigurations[i] = readConfigurationFile(filename);
      } catch (JAXBException e) {
        ExceptionUtil.uncheck("Failed to read XPath configuration from " + filename, e);
      }
    }
    precompile(); // Dry run to discover errors at application startup
  }

  /**
   * Compiles the XPath expressions from loaded configuration files and merges the loaded configurations (default,
   * custom, etc) into a single object.
   * <p>
   * Note that the XPathExpression class (instances of which the returned object contains) is not thread safe,
   * which is why callers recompile rather than cache a single instance.
   * <p>
   * This method throws a RuntimeException if an XPath expression cannot be compiled
   *
   * @return the filter configuration object with compiled XPath expressions
   */
  public static synchronized FilterConfig precompile() {
    try {
      List<Filter> filters = compileFilters(loadedConfigurations);
      List<Exclusion> exclusions = compileExclusions(loadedConfigurations);
      return new FilterConfig(filters, exclusions);
    } catch (XPathExpressionException e) {
      throw ExceptionUtil.toUnchecked("Failed to compile XPath expressions", e);
    }
  }

  // Unmarshals a single classpath XML resource into a FilterConfiguration,
  // closing the stream in all cases.
  private static FilterConfiguration readConfigurationFile(String filename) throws JAXBException {
    LOG.debug("Parsing configuration file {}", filename);
    InputStream inputStream = ResourceUtil.getClasspathResourceAsStream(filename);
    try {
      return XmlUtil.unmarshal(inputStream, FilterConfiguration.class);
    } finally {
      IOUtil.close(inputStream);
    }
  }

  // Compiles all <filter> elements from every configuration, with per-configuration
  // defaults merged in first (filter-level expressions override defaults by key).
  private static List<Filter> compileFilters(FilterConfiguration... configurations) throws XPathExpressionException {
    Map<String, XPathExpression> defaults = compileDefaults(configurations);
    return compileFilters(defaults, configurations);
  }

  // Collects and compiles the <defaults> loggable-field expressions across all
  // configurations; later configurations overwrite earlier ones on key collision.
  private static Map<String, XPathExpression> compileDefaults(FilterConfiguration... configurations)
      throws XPathExpressionException {
    Map<String, XPathExpression> defaults = new HashMap<String, XPathExpression>();
    for (FilterConfiguration configuration : configurations) {
      // Each configuration file carries its own namespace prefix declarations.
      XPathNamespaceContext nsCtx = new XPathNamespaceContext(configuration);
      if (configuration.getDefaults() != null) {
        defaults.putAll(compileExpressions(configuration.getDefaults(), nsCtx));
      }
    }
    return defaults;
  }

  // Compiles every declared filter in every configuration, applying the shared defaults.
  private static List<Filter> compileFilters(Map<String, XPathExpression> defaults,
                                             FilterConfiguration... configurations)
      throws XPathExpressionException {
    List<Filter> filters = new ArrayList<Filter>();
    for (FilterConfiguration configuration : configurations) {
      XPathNamespaceContext nsCtx = new XPathNamespaceContext(configuration);
      if (configuration.getFilters() != null) {
        for (ee.ria.dumonitor.filter.config.generated.Filter filter : configuration.getFilters().getFilter()) {
          filters.add(compileFilter(filter, defaults, nsCtx));
        }
      }
    }
    return filters;
  }

  // Builds one runtime Filter: starts from the default field expressions, overlays the
  // filter's own loggable-field expressions, and compiles the filter's match XPath.
  private static Filter compileFilter(ee.ria.dumonitor.filter.config.generated.Filter filter,
                                      Map<String, XPathExpression> defaults,
                                      XPathNamespaceContext nsCtx) throws XPathExpressionException {
    Map<String, XPathExpression> expressions = new HashMap<String, XPathExpression>(defaults);
    expressions.putAll(compileExpressions(filter.getLoggableFields(), nsCtx));
    List<LoggableField> loggableFields = new ArrayList<LoggableField>();
    for (Entry<String, XPathExpression> entry : expressions.entrySet()) {
      loggableFields.add(new LoggableField(entry.getKey(), entry.getValue()));
    }
    return new Filter(XPathUtil.compile(filter.getXpath(), nsCtx), loggableFields);
  }

  // Compiles every <exclusion> expression; exclusions share one BlacklistFunctionResolver,
  // so a restricted XPath function set applies to them (unlike filter expressions).
  private static List<Exclusion> compileExclusions(FilterConfiguration... configurations)
      throws XPathExpressionException {
    BlacklistFunctionResolver functionResolver = new BlacklistFunctionResolver();
    List<Exclusion> exclusions = new ArrayList<Exclusion>();
    for (FilterConfiguration configuration : configurations) {
      XPathNamespaceContext nsCtx = new XPathNamespaceContext(configuration);
      if (configuration.getExclusions() != null) {
        for (String exclusion : configuration.getExclusions().getExclusion()) {
          exclusions.add(new Exclusion(XPathUtil.compile(exclusion, nsCtx, functionResolver)));
        }
      }
    }
    return exclusions;
  }

  // Compiles each non-null loggable-field expression from the JAXB object into a map keyed
  // by the field's fixed name. The field list mirrors the generated LoggableFields schema.
  private static Map<String, XPathExpression> compileExpressions(LoggableFields fields, NamespaceContext nsCtx)
      throws XPathExpressionException {
    Map<String, XPathExpression> compiled = new HashMap<String, XPathExpression>();
    if (fields.getPersoncode() != null) {
      compiled.put("personcode", compile(fields.getPersoncode(), nsCtx));
    }
    if (fields.getAction() != null) {
      compiled.put("action", compile(fields.getAction(), nsCtx));
    }
    if (fields.getSender() != null) {
      compiled.put("sender", compile(fields.getSender(), nsCtx));
    }
    if (fields.getReceiver() != null) {
      compiled.put("receiver", compile(fields.getReceiver(), nsCtx));
    }
    if (fields.getRestrictions() != null) {
      compiled.put("restrictions", compile(fields.getRestrictions(), nsCtx));
    }
    if (fields.getSendercode() != null) {
      compiled.put("sendercode", compile(fields.getSendercode(), nsCtx));
    }
    if (fields.getReceivercode() != null) {
      compiled.put("receivercode", compile(fields.getReceivercode(), nsCtx));
    }
    if (fields.getActioncode() != null) {
      compiled.put("actioncode", compile(fields.getActioncode(), nsCtx));
    }
    if (fields.getXroadrequestid() != null) {
      compiled.put("xroadrequestid", compile(fields.getXroadrequestid(), nsCtx));
    }
    if (fields.getXroadservice() != null) {
      compiled.put("xroadservice", compile(fields.getXroadservice(), nsCtx));
    }
    if (fields.getUsercode() != null) {
      compiled.put("usercode", compile(fields.getUsercode(), nsCtx));
    }
    return compiled;
  }

  // Compiles a single XPath expression with debug logging.
  private static XPathExpression compile(String expression, NamespaceContext nsCtx)
      throws XPathExpressionException {
    LOG.debug("Compiling XPath expression: {}", expression);
    return XPathUtil.compile(expression, nsCtx);
  }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver14; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.stat.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.oxs.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.Set; import io.netty.buffer.ByteBuf; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFAsyncConfigPropPacketInSlaveVer14 implements OFAsyncConfigPropPacketInSlave { private static final Logger logger = LoggerFactory.getLogger(OFAsyncConfigPropPacketInSlaveVer14.class); // version: 1.4 final static byte WIRE_VERSION = 5; final static int LENGTH = 8; private final static long DEFAULT_MASK = 0x0L; // OF message fields private final long mask; // // Immutable default instance final static 
// NOTE(review): continuation of the "final static" field declaration begun on the previous
// line (the shared immutable default instance). This class is LoxiGen-generated; keep
// hand edits to comments only.
OFAsyncConfigPropPacketInSlaveVer14 DEFAULT = new OFAsyncConfigPropPacketInSlaveVer14(
    DEFAULT_MASK
);

// package private constructor - used by readers, builders, and factory
OFAsyncConfigPropPacketInSlaveVer14(long mask) {
    this.mask = mask;
}

// Accessors for OF message fields
@Override
public int getType() {
    return 0x0;
}

@Override
public long getMask() {
    return mask;
}

@Override
public OFVersion getVersion() {
    return OFVersion.OF_14;
}

public OFAsyncConfigPropPacketInSlave.Builder createBuilder() {
    return new BuilderWithParent(this);
}

// Builder seeded from an existing message: unset fields fall back to the parent's values.
static class BuilderWithParent implements OFAsyncConfigPropPacketInSlave.Builder {
    final OFAsyncConfigPropPacketInSlaveVer14 parentMessage;

    // OF message fields
    private boolean maskSet;
    private long mask;

    BuilderWithParent(OFAsyncConfigPropPacketInSlaveVer14 parentMessage) {
        this.parentMessage = parentMessage;
    }

    @Override
    public int getType() {
        return 0x0;
    }

    @Override
    public long getMask() {
        return mask;
    }

    @Override
    public OFAsyncConfigPropPacketInSlave.Builder setMask(long mask) {
        this.mask = mask;
        this.maskSet = true;
        return this;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    @Override
    public OFAsyncConfigPropPacketInSlave build() {
        long mask = this.maskSet ? this.mask : parentMessage.mask;
        //
        return new OFAsyncConfigPropPacketInSlaveVer14(
                    mask
                    );
    }
}

// Standalone builder: unset fields fall back to the wire defaults (DEFAULT_MASK).
static class Builder implements OFAsyncConfigPropPacketInSlave.Builder {
    // OF message fields
    private boolean maskSet;
    private long mask;

    @Override
    public int getType() {
        return 0x0;
    }

    @Override
    public long getMask() {
        return mask;
    }

    @Override
    public OFAsyncConfigPropPacketInSlave.Builder setMask(long mask) {
        this.mask = mask;
        this.maskSet = true;
        return this;
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    //
    @Override
    public OFAsyncConfigPropPacketInSlave build() {
        long mask = this.maskSet ? this.mask : DEFAULT_MASK;
        return new OFAsyncConfigPropPacketInSlaveVer14(
                    mask
                    );
    }
}

final static Reader READER = new Reader();

// Deserializes one 8-byte property from the buffer; returns null (and rewinds the
// reader index) when the buffer does not yet hold the full property.
static class Reader implements OFMessageReader<OFAsyncConfigPropPacketInSlave> {
    @Override
    public OFAsyncConfigPropPacketInSlave readFrom(ByteBuf bb) throws OFParseError {
        int start = bb.readerIndex();
        // fixed value property type == 0x0
        short type = bb.readShort();
        if(type != (short) 0x0)
            throw new OFParseError("Wrong type: Expected=0x0(0x0), got="+type);
        int length = U16.f(bb.readShort());
        if(length != 8)
            throw new OFParseError("Wrong length: Expected=8(8), got="+length);
        if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
            // Buffer does not have all data yet
            bb.readerIndex(start);
            return null;
        }
        if(logger.isTraceEnabled())
            logger.trace("readFrom - length={}", length);
        long mask = U32.f(bb.readInt());

        OFAsyncConfigPropPacketInSlaveVer14 asyncConfigPropPacketInSlaveVer14 = new OFAsyncConfigPropPacketInSlaveVer14(
                    mask
                    );
        if(logger.isTraceEnabled())
            logger.trace("readFrom - read={}", asyncConfigPropPacketInSlaveVer14);
        return asyncConfigPropPacketInSlaveVer14;
    }
}

public void putTo(PrimitiveSink sink) {
    FUNNEL.funnel(this, sink);
}

final static OFAsyncConfigPropPacketInSlaveVer14Funnel FUNNEL = new OFAsyncConfigPropPacketInSlaveVer14Funnel();

// Feeds the wire representation (type, length, mask) into a Guava PrimitiveSink for hashing.
static class OFAsyncConfigPropPacketInSlaveVer14Funnel implements Funnel<OFAsyncConfigPropPacketInSlaveVer14> {
    private static final long serialVersionUID = 1L;
    @Override
    public void funnel(OFAsyncConfigPropPacketInSlaveVer14 message, PrimitiveSink sink) {
        // fixed value property type = 0x0
        sink.putShort((short) 0x0);
        // fixed value property length = 8
        sink.putShort((short) 0x8);
        sink.putLong(message.mask);
    }
}

public void writeTo(ByteBuf bb) {
    WRITER.write(bb, this);
}

final static Writer WRITER = new Writer();

// Serializes the fixed 8-byte property: type(2) + length(2) + mask(4).
static class Writer implements OFMessageWriter<OFAsyncConfigPropPacketInSlaveVer14> {
    @Override
    public void write(ByteBuf bb, OFAsyncConfigPropPacketInSlaveVer14 message) {
        // fixed value property type = 0x0
        bb.writeShort((short) 0x0);
        // fixed value property length = 8
        bb.writeShort((short) 0x8);
        bb.writeInt(U32.t(message.mask));
    }
}

@Override
public String toString() {
    StringBuilder b = new StringBuilder("OFAsyncConfigPropPacketInSlaveVer14(");
    b.append("mask=").append(mask);
    b.append(")");
    return b.toString();
}

@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null)
        return false;
    if (getClass() != obj.getClass())
        return false;
    OFAsyncConfigPropPacketInSlaveVer14 other = (OFAsyncConfigPropPacketInSlaveVer14) obj;

    if( mask != other.mask)
        return false;
    return true;
}

@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;

    // NOTE(review): the usual generated pattern is `prime * result + (int)(...)`; here
    // `result` is dropped from the product. The equals/hashCode contract still holds
    // (hash depends only on mask), but this differs from sibling generated classes —
    // confirm against the LoxiGen template before relying on hash values.
    result = prime * (int) (mask ^ (mask >>> 32));
    return result;
}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.memory; import com.facebook.airlift.http.client.HttpClient; import com.facebook.airlift.json.JsonCodec; import com.facebook.airlift.log.Logger; import com.facebook.presto.execution.LocationFactory; import com.facebook.presto.execution.QueryExecution; import com.facebook.presto.execution.QueryIdGenerator; import com.facebook.presto.execution.scheduler.NodeSchedulerConfig; import com.facebook.presto.memory.LowMemoryKiller.QueryMemoryInfo; import com.facebook.presto.metadata.InternalNode; import com.facebook.presto.metadata.InternalNodeManager; import com.facebook.presto.server.BasicQueryInfo; import com.facebook.presto.server.InternalCommunicationConfig; import com.facebook.presto.server.ServerConfig; import com.facebook.presto.server.smile.Codec; import com.facebook.presto.server.smile.SmileCodec; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.QueryId; import com.facebook.presto.spi.memory.ClusterMemoryPoolManager; import com.facebook.presto.spi.memory.MemoryPoolId; import com.facebook.presto.spi.memory.MemoryPoolInfo; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Streams; import com.google.common.io.Closer; import io.airlift.units.DataSize; import 
io.airlift.units.Duration; import org.weakref.jmx.JmxException; import org.weakref.jmx.MBeanExporter; import org.weakref.jmx.Managed; import javax.annotation.PreDestroy; import javax.annotation.concurrent.GuardedBy; import javax.inject.Inject; import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Stream; import static com.facebook.presto.ExceededMemoryLimitException.exceededGlobalTotalLimit; import static com.facebook.presto.ExceededMemoryLimitException.exceededGlobalUserLimit; import static com.facebook.presto.SystemSessionProperties.RESOURCE_OVERCOMMIT; import static com.facebook.presto.SystemSessionProperties.getQueryMaxMemory; import static com.facebook.presto.SystemSessionProperties.getQueryMaxTotalMemory; import static com.facebook.presto.SystemSessionProperties.resourceOvercommit; import static com.facebook.presto.memory.LocalMemoryManager.GENERAL_POOL; import static com.facebook.presto.memory.LocalMemoryManager.RESERVED_POOL; import static com.facebook.presto.server.smile.JsonCodecWrapper.wrapJsonCodec; import static com.facebook.presto.spi.NodeState.ACTIVE; import static com.facebook.presto.spi.NodeState.SHUTTING_DOWN; import static com.facebook.presto.spi.StandardErrorCode.CLUSTER_OUT_OF_MEMORY; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static com.google.common.collect.MoreCollectors.toOptional; import static com.google.common.collect.Sets.difference; import static 
io.airlift.units.DataSize.succinctBytes;
import static io.airlift.units.Duration.nanosSince;
import static java.lang.Math.min;
import static java.lang.String.format;
import static java.util.AbstractMap.SimpleEntry;
import static java.util.Comparator.comparingLong;
import static java.util.Objects.requireNonNull;
import static org.weakref.jmx.ObjectNames.generatedNameOf;

/**
 * Coordinator-side manager of cluster-wide memory state.
 *
 * <p>On each call to {@link #process}, it detects possibly-leaked query reservations,
 * enforces per-query user/total memory limits, arms and (after a configured delay)
 * invokes the low-memory killer when the cluster is out of memory, recomputes memory
 * pool assignments (general/reserved), and pushes assignments to all worker nodes.
 *
 * <p>Thread-safety: all mutable state is guarded by {@code this}; the public entry
 * points are {@code synchronized}. Pool-change listeners are notified asynchronously
 * on a dedicated single-thread executor so listener code never runs under the lock.
 */
public class ClusterMemoryManager
        implements ClusterMemoryPoolManager
{
    private static final Logger log = Logger.get(ClusterMemoryManager.class);

    // Executor used solely to deliver MemoryPoolInfo updates to registered listeners.
    private final ExecutorService listenerExecutor = Executors.newSingleThreadExecutor();
    private final ClusterMemoryLeakDetector memoryLeakDetector = new ClusterMemoryLeakDetector();
    private final InternalNodeManager nodeManager;
    private final LocationFactory locationFactory;
    private final HttpClient httpClient;
    private final MBeanExporter exporter;
    // Codecs are either Smile (binary transport) or wrapped JSON, chosen once in the constructor.
    private final Codec<MemoryInfo> memoryInfoCodec;
    private final Codec<MemoryPoolAssignmentsRequest> assignmentsRequestCodec;
    private final DataSize maxQueryMemory;
    private final DataSize maxQueryTotalMemory;
    // True only on the coordinator; process() is a no-op otherwise.
    private final boolean enabled;
    private final LowMemoryKiller lowMemoryKiller;
    // How long the cluster must stay out of memory before the OOM killer may fire.
    private final Duration killOnOutOfMemoryDelay;
    private final String coordinatorId;
    // Monotonically increasing version attached to each assignments request.
    private final AtomicLong memoryPoolAssignmentsVersion = new AtomicLong();
    // Cluster-wide aggregates, exposed via @Managed getters below.
    private final AtomicLong clusterUserMemoryReservation = new AtomicLong();
    private final AtomicLong clusterTotalMemoryReservation = new AtomicLong();
    private final AtomicLong clusterMemoryBytes = new AtomicLong();
    private final AtomicLong queriesKilledDueToOutOfMemory = new AtomicLong();
    private final boolean isWorkScheduledOnCoordinator;
    private final boolean isBinaryTransportEnabled;

    // Live view of worker nodes, keyed by node identifier; refreshed in updateNodes().
    @GuardedBy("this")
    private final Map<String, RemoteNodeMemory> nodes = new HashMap<>();

    @GuardedBy("this")
    private final Map<MemoryPoolId, List<Consumer<MemoryPoolInfo>>> changeListeners = new HashMap<>();

    @GuardedBy("this")
    private final Map<MemoryPoolId, ClusterMemoryPool> pools;

    @GuardedBy("this")
    private long lastTimeNotOutOfMemory = System.nanoTime();

    // Last query the OOM killer chose; cleared once it is observed gone (see isLastKilledQueryGone).
    @GuardedBy("this")
    private QueryId lastKilledQuery;

    @Inject
    public ClusterMemoryManager(
            @ForMemoryManager HttpClient httpClient,
            InternalNodeManager nodeManager,
            LocationFactory locationFactory,
            MBeanExporter exporter,
            JsonCodec<MemoryInfo> memoryInfoJsonCodec,
            SmileCodec<MemoryInfo> memoryInfoSmileCodec,
            JsonCodec<MemoryPoolAssignmentsRequest> assignmentsRequestJsonCodec,
            SmileCodec<MemoryPoolAssignmentsRequest> assignmentsRequestSmileCodec,
            QueryIdGenerator queryIdGenerator,
            LowMemoryKiller lowMemoryKiller,
            ServerConfig serverConfig,
            MemoryManagerConfig config,
            NodeMemoryConfig nodeMemoryConfig,
            NodeSchedulerConfig schedulerConfig,
            InternalCommunicationConfig communicationConfig)
    {
        requireNonNull(config, "config is null");
        requireNonNull(nodeMemoryConfig, "nodeMemoryConfig is null");
        requireNonNull(serverConfig, "serverConfig is null");
        requireNonNull(schedulerConfig, "schedulerConfig is null");
        requireNonNull(communicationConfig, "communicationConfig is null");
        this.nodeManager = requireNonNull(nodeManager, "nodeManager is null");
        this.locationFactory = requireNonNull(locationFactory, "locationFactory is null");
        this.httpClient = requireNonNull(httpClient, "httpClient is null");
        this.exporter = requireNonNull(exporter, "exporter is null");
        this.lowMemoryKiller = requireNonNull(lowMemoryKiller, "lowMemoryKiller is null");
        this.maxQueryMemory = config.getMaxQueryMemory();
        this.maxQueryTotalMemory = config.getMaxQueryTotalMemory();
        this.coordinatorId = queryIdGenerator.getCoordinatorId();
        // Only the coordinator manages cluster memory.
        this.enabled = serverConfig.isCoordinator();
        this.killOnOutOfMemoryDelay = config.getKillOnOutOfMemoryDelay();
        this.isWorkScheduledOnCoordinator = schedulerConfig.isIncludeCoordinator();
        this.isBinaryTransportEnabled = communicationConfig.isBinaryTransportEnabled();

        // Pick the wire codec once based on the transport; both codec pairs are injected,
        // the unused pair is simply ignored.
        if (this.isBinaryTransportEnabled) {
            this.memoryInfoCodec = requireNonNull(memoryInfoSmileCodec, "memoryInfoSmileCodec is null");
            this.assignmentsRequestCodec = requireNonNull(assignmentsRequestSmileCodec, "assignmentsRequestSmileCodec is null");
        }
        else {
            // NOTE(review): wrapJsonCodec is not visible in this chunk — presumably a static
            // helper adapting JsonCodec to the Codec interface; confirm in the full file.
            this.memoryInfoCodec = wrapJsonCodec(requireNonNull(memoryInfoJsonCodec, "memoryInfoJsonCodec is null"));
            this.assignmentsRequestCodec = wrapJsonCodec(requireNonNull(assignmentsRequestJsonCodec, "assignmentsRequestJsonCodec is null"));
        }

        // Sanity-check configured limits: soft limits must not exceed hard limits,
        // and user memory must not exceed total memory.
        verify(maxQueryMemory.toBytes() <= maxQueryTotalMemory.toBytes(),
                "maxQueryMemory cannot be greater than maxQueryTotalMemory");
        verify(config.getSoftMaxQueryMemory().toBytes() <= maxQueryMemory.toBytes(),
                "Soft max query memory cannot be greater than hard limit");
        verify(config.getSoftMaxQueryTotalMemory().toBytes() <= maxQueryTotalMemory.toBytes(),
                "Soft max query total memory cannot be greater than hard limit");

        this.pools = createClusterMemoryPools(nodeMemoryConfig.isReservedPoolEnabled());
    }

    /**
     * Creates the cluster memory pools (always GENERAL, plus RESERVED when enabled)
     * and exports each one as a JMX MBean. Export failures are logged, not fatal.
     */
    private Map<MemoryPoolId, ClusterMemoryPool> createClusterMemoryPools(boolean reservedPoolEnabled)
    {
        Set<MemoryPoolId> memoryPools = new HashSet<>();
        memoryPools.add(GENERAL_POOL);
        if (reservedPoolEnabled) {
            memoryPools.add(RESERVED_POOL);
        }

        ImmutableMap.Builder<MemoryPoolId, ClusterMemoryPool> builder = ImmutableMap.builder();
        for (MemoryPoolId poolId : memoryPools) {
            ClusterMemoryPool pool = new ClusterMemoryPool(poolId);
            builder.put(poolId, pool);
            try {
                exporter.export(generatedNameOf(ClusterMemoryPool.class, poolId.toString()), pool);
            }
            catch (JmxException e) {
                log.error(e, "Error exporting memory pool %s", poolId);
            }
        }
        return builder.build();
    }

    /**
     * Registers a listener that is invoked (asynchronously, on the listener executor)
     * whenever the given pool's info is refreshed in updatePools().
     */
    @Override
    public synchronized void addChangeListener(MemoryPoolId poolId, Consumer<MemoryPoolInfo> listener)
    {
        verify(memoryPoolExists(poolId), "Memory pool does not exist: %s", poolId);
        changeListeners.computeIfAbsent(poolId, id -> new ArrayList<>()).add(listener);
    }

    public synchronized boolean memoryPoolExists(MemoryPoolId poolId)
    {
        return pools.containsKey(poolId);
    }

    /**
     * Main periodic entry point. Enforces per-query memory limits, drives the OOM
     * killer, refreshes pool state, and propagates pool assignments to workers.
     * No-op when this node is not the coordinator.
     */
    public synchronized void process(Iterable<QueryExecution> runningQueries, Supplier<List<BasicQueryInfo>> allQueryInfoSupplier)
    {
        if (!enabled) {
            return;
        }

        // TODO revocable memory reservations can also leak and may need to be detected in the future
        // We are only concerned about the leaks in general pool.
        memoryLeakDetector.checkForMemoryLeaks(allQueryInfoSupplier, pools.get(GENERAL_POOL).getQueryMemoryReservations());

        boolean outOfMemory = isClusterOutOfMemory();
        if (!outOfMemory) {
            // Reset the OOM-killer arming clock whenever the cluster is healthy.
            lastTimeNotOutOfMemory = System.nanoTime();
        }

        boolean queryKilled = false;
        long totalUserMemoryBytes = 0L;
        long totalMemoryBytes = 0L;
        for (QueryExecution query : runningQueries) {
            boolean resourceOvercommit = resourceOvercommit(query.getSession());
            long userMemoryReservation = query.getUserMemoryReservation().toBytes();
            long totalMemoryReservation = query.getTotalMemoryReservation().toBytes();

            if (resourceOvercommit && outOfMemory) {
                // If a query has requested resource overcommit, only kill it if the cluster has run out of memory
                DataSize memory = succinctBytes(getQueryMemoryReservation(query));
                query.fail(new PrestoException(CLUSTER_OUT_OF_MEMORY,
                        format("The cluster is out of memory and %s=true, so this query was killed. It was using %s of memory", RESOURCE_OVERCOMMIT, memory)));
                queryKilled = true;
            }

            if (!resourceOvercommit) {
                // Effective limit is the smaller of the cluster config and the session override.
                long userMemoryLimit = min(maxQueryMemory.toBytes(), getQueryMaxMemory(query.getSession()).toBytes());
                if (userMemoryReservation > userMemoryLimit) {
                    query.fail(exceededGlobalUserLimit(succinctBytes(userMemoryLimit)));
                    queryKilled = true;
                }
                long totalMemoryLimit = min(maxQueryTotalMemory.toBytes(), getQueryMaxTotalMemory(query.getSession()).toBytes());
                if (totalMemoryReservation > totalMemoryLimit) {
                    query.fail(exceededGlobalTotalLimit(succinctBytes(totalMemoryLimit)));
                    queryKilled = true;
                }
            }

            totalUserMemoryBytes += userMemoryReservation;
            totalMemoryBytes += totalMemoryReservation;
        }

        clusterUserMemoryReservation.set(totalUserMemoryBytes);
        clusterTotalMemoryReservation.set(totalMemoryBytes);

        // Fire the OOM killer only when: a killer is configured, the cluster has been
        // out of memory for longer than the configured delay, nothing was killed in
        // this pass already, and the previously killed query has actually gone away.
        boolean killOnOomDelayPassed = nanosSince(lastTimeNotOutOfMemory).compareTo(killOnOutOfMemoryDelay) > 0;
        boolean lastKilledQueryGone = isLastKilledQueryGone();
        boolean shouldCallOomKiller = !(lowMemoryKiller instanceof NoneLowMemoryKiller) &&
                outOfMemory &&
                !queryKilled &&
                killOnOomDelayPassed &&
                lastKilledQueryGone;

        if (shouldCallOomKiller) {
            callOomKiller(runningQueries);
        }
        else {
            // if the cluster is out of memory and we didn't trigger the oom killer we log the state to make debugging easier
            if (outOfMemory) {
                log.debug("The cluster is out of memory and the OOM killer is not called (query killed: %s, kill on OOM delay passed: %s, last killed query gone: %s).",
                        queryKilled, killOnOomDelayPassed, lastKilledQueryGone);
            }
        }

        // Count queries per pool for pool statistics.
        Map<MemoryPoolId, Integer> countByPool = new HashMap<>();
        for (QueryExecution query : runningQueries) {
            MemoryPoolId id = query.getMemoryPool().getId();
            countByPool.put(id, countByPool.getOrDefault(id, 0) + 1);
        }

        updatePools(countByPool);

        MemoryPoolAssignmentsRequest assignmentsRequest;
        if (pools.containsKey(RESERVED_POOL)) {
            assignmentsRequest = updateAssignments(runningQueries);
        }
        else {
            // If reserved pool is not enabled, we don't create a MemoryPoolAssignmentsRequest that puts all the queries
            // in the general pool (as they already are). In this case we create an effectively NOOP MemoryPoolAssignmentsRequest.
            // Once the reserved pool is removed we should get rid of the logic of putting queries into reserved pool including
            // this piece of code.
            assignmentsRequest = new MemoryPoolAssignmentsRequest(coordinatorId, Long.MIN_VALUE, ImmutableList.of());
        }
        updateNodes(assignmentsRequest);
    }

    /**
     * Asks the configured LowMemoryKiller to pick a victim among the running queries
     * and fails it. Records the victim in lastKilledQuery so we do not kill again
     * until it is observed gone.
     */
    private synchronized void callOomKiller(Iterable<QueryExecution> runningQueries)
    {
        List<QueryMemoryInfo> queryMemoryInfoList = Streams.stream(runningQueries)
                .map(this::createQueryMemoryInfo)
                .collect(toImmutableList());
        List<MemoryInfo> nodeMemoryInfos = nodes.values().stream()
                .map(RemoteNodeMemory::getInfo)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .collect(toImmutableList());
        Optional<QueryId> chosenQueryId = lowMemoryKiller.chooseQueryToKill(queryMemoryInfoList, nodeMemoryInfos);
        if (chosenQueryId.isPresent()) {
            log.debug("Low memory killer chose %s", chosenQueryId.get());
            Optional<QueryExecution> chosenQuery = Streams.stream(runningQueries).filter(query -> chosenQueryId.get().equals(query.getQueryId())).collect(toOptional());
            if (chosenQuery.isPresent()) {
                // See comments in isLastKilledQueryGone for why chosenQuery might be absent.
                chosenQuery.get().fail(new PrestoException(CLUSTER_OUT_OF_MEMORY, "Query killed because the cluster is out of memory. Please try again in a few minutes."));
                queriesKilledDueToOutOfMemory.incrementAndGet();
                lastKilledQuery = chosenQueryId.get();
                logQueryKill(chosenQueryId.get(), nodeMemoryInfos);
            }
        }
    }

    /**
     * Returns true when the previously killed query no longer holds reservations in
     * the general pool (or was flagged as leaked), i.e. the kill has taken effect.
     */
    @GuardedBy("this")
    private boolean isLastKilledQueryGone()
    {
        if (lastKilledQuery == null) {
            return true;
        }

        // If the lastKilledQuery is marked as leaked by the ClusterMemoryLeakDetector we consider the lastKilledQuery as gone,
        // so that the ClusterMemoryManager can continue to make progress even if there are leaks.
        // Even if the weak references to the leaked queries are GCed in the ClusterMemoryLeakDetector, it will mark the same queries
        // as leaked in its next run, and eventually the ClusterMemoryManager will make progress.
        if (memoryLeakDetector.wasQueryPossiblyLeaked(lastKilledQuery)) {
            lastKilledQuery = null;
            return true;
        }

        // pools fields is updated based on nodes field.
        // Therefore, if the query is gone from pools field, it should also be gone from nodes field.
        // However, since nodes can updated asynchronously, it has the potential of coming back after being gone.
        // Therefore, even if the query appears to be gone here, it might be back when one inspects nodes later.
        return !pools.get(GENERAL_POOL)
                .getQueryMemoryReservations()
                .containsKey(lastKilledQuery);
    }

    /**
     * Logs a per-node breakdown of general-pool usage (nodes sorted by total query
     * reservation, descending) to aid post-mortem analysis of a kill decision.
     */
    private void logQueryKill(QueryId killedQueryId, List<MemoryInfo> nodes)
    {
        if (!log.isInfoEnabled()) {
            return;
        }
        StringBuilder nodeDescription = new StringBuilder();
        nodeDescription.append("Query Kill Decision: Killed ").append(killedQueryId).append("\n");
        Comparator<Entry<MemoryPoolInfo, Long>> nodeMemoryComparator = comparingLong(Entry::getValue);
        nodes.stream()
                .filter(node -> node.getPools().get(GENERAL_POOL) != null)
                .map(node -> new SimpleEntry<MemoryPoolInfo, Long>(
                        node.getPools().get(GENERAL_POOL),
                        node.getPools().get(GENERAL_POOL).getQueryMemoryReservations().values().stream().mapToLong(l -> l).sum()))
                .sorted(nodeMemoryComparator.reversed())
                .map(Entry::getKey)
                .forEachOrdered(memoryPoolInfo -> {
                    nodeDescription.append("Query Kill Scenario: ");
                    nodeDescription.append("MaxBytes ").append(memoryPoolInfo.getMaxBytes()).append(' ');
                    nodeDescription.append("FreeBytes ").append(memoryPoolInfo.getFreeBytes() + memoryPoolInfo.getReservedRevocableBytes()).append(' ');
                    nodeDescription.append("Queries ");
                    Comparator<Entry<QueryId, Long>> queryMemoryComparator = comparingLong(Entry::getValue);
                    Stream<Entry<QueryId, Long>> sortedMemoryReservations = memoryPoolInfo.getQueryMemoryReservations().entrySet().stream()
                            .sorted(queryMemoryComparator.reversed());
                    Joiner.on(",").withKeyValueSeparator("=").appendTo(nodeDescription, (Iterable<Entry<QueryId, Long>>) sortedMemoryReservations::iterator);
                    nodeDescription.append('\n');
                });
        log.info(nodeDescription.toString());
    }

    @VisibleForTesting
    synchronized Map<MemoryPoolId, ClusterMemoryPool> getPools()
    {
        return ImmutableMap.copyOf(pools);
    }

    public synchronized Map<MemoryPoolId, MemoryPoolInfo> getMemoryPoolInfo()
    {
        ImmutableMap.Builder<MemoryPoolId, MemoryPoolInfo> builder = new ImmutableMap.Builder<>();
        pools.forEach((poolId, memoryPool) -> builder.put(poolId, memoryPool.getInfo()));
        return builder.build();
    }

    /**
     * The cluster is considered out of memory when the general pool is blocked on at
     * least one node; with the reserved pool enabled, additionally only when the
     * reserved pool is already occupied by a query.
     */
    private synchronized boolean isClusterOutOfMemory()
    {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        if (reservedPool == null) {
            return generalPool.getBlockedNodes() > 0;
        }
        return reservedPool.getAssignedQueries() > 0 && generalPool.getBlockedNodes() > 0;
    }

    // TODO once the reserved pool is removed we can remove this method. We can also update
    // RemoteNodeMemory as we don't need to POST anything.
    /**
     * Possibly promotes the single largest (non-overcommit) query into the reserved
     * pool, then builds the full assignment list for all running queries at a fresh
     * version number.
     */
    private synchronized MemoryPoolAssignmentsRequest updateAssignments(Iterable<QueryExecution> queries)
    {
        ClusterMemoryPool reservedPool = pools.get(RESERVED_POOL);
        ClusterMemoryPool generalPool = pools.get(GENERAL_POOL);
        verify(generalPool != null, "generalPool is null");
        verify(reservedPool != null, "reservedPool is null");
        long version = memoryPoolAssignmentsVersion.incrementAndGet();
        // Check that all previous assignments have propagated to the visible nodes. This doesn't account for temporary network issues,
        // and is more of a safety check than a guarantee
        if (allAssignmentsHavePropagated(queries)) {
            if (reservedPool.getAssignedQueries() == 0 && generalPool.getBlockedNodes() > 0) {
                QueryExecution biggestQuery = null;
                long maxMemory = -1;
                for (QueryExecution queryExecution : queries) {
                    if (resourceOvercommit(queryExecution.getSession())) {
                        // Don't promote queries that requested resource overcommit to the reserved pool,
                        // since their memory usage is unbounded.
                        continue;
                    }

                    long bytesUsed = getQueryMemoryReservation(queryExecution);
                    if (bytesUsed > maxMemory) {
                        biggestQuery = queryExecution;
                        maxMemory = bytesUsed;
                    }
                }
                if (biggestQuery != null) {
                    log.info("Moving query %s to the reserved pool", biggestQuery.getQueryId());
                    biggestQuery.setMemoryPool(new VersionedMemoryPoolId(RESERVED_POOL, version));
                }
            }
        }

        ImmutableList.Builder<MemoryPoolAssignment> assignments = ImmutableList.builder();
        for (QueryExecution queryExecution : queries) {
            assignments.add(new MemoryPoolAssignment(queryExecution.getQueryId(), queryExecution.getMemoryPool().getId()));
        }
        return new MemoryPoolAssignmentsRequest(coordinatorId, version, assignments.build());
    }

    private QueryMemoryInfo createQueryMemoryInfo(QueryExecution query)
    {
        return new QueryMemoryInfo(query.getQueryId(), query.getMemoryPool().getId(), query.getTotalMemoryReservation().toBytes());
    }

    private long getQueryMemoryReservation(QueryExecution query)
    {
        return query.getTotalMemoryReservation().toBytes();
    }

    /**
     * Returns true when every visible node has acknowledged an assignment version at
     * least as new as the oldest assignment version among the running queries.
     */
    private synchronized boolean allAssignmentsHavePropagated(Iterable<QueryExecution> queries)
    {
        if (nodes.isEmpty()) {
            // Assignments can't have propagated, if there are no visible nodes.
            return false;
        }

        // NOTE(review): despite the name, this is the MINIMUM (oldest) assignment
        // version among the queries; the comparison below is intentionally conservative.
        long newestAssignment = ImmutableList.copyOf(queries).stream()
                .map(QueryExecution::getMemoryPool)
                .mapToLong(VersionedMemoryPoolId::getVersion)
                .min()
                .orElse(-1);

        long mostOutOfDateNode = nodes.values().stream()
                .mapToLong(RemoteNodeMemory::getCurrentAssignmentVersion)
                .min()
                .orElse(Long.MAX_VALUE);

        return newestAssignment <= mostOutOfDateNode;
    }

    /**
     * Reconciles the tracked node set with the node manager's view (ACTIVE and
     * SHUTTING_DOWN nodes), then asynchronously pushes the given assignments and
     * refreshes memory info on every tracked node.
     */
    private synchronized void updateNodes(MemoryPoolAssignmentsRequest assignments)
    {
        ImmutableSet.Builder<InternalNode> builder = ImmutableSet.builder();
        Set<InternalNode> aliveNodes = builder
                .addAll(nodeManager.getNodes(ACTIVE))
                .addAll(nodeManager.getNodes(SHUTTING_DOWN))
                .build();

        ImmutableSet<String> aliveNodeIds = aliveNodes.stream()
                .map(InternalNode::getNodeIdentifier)
                .collect(toImmutableSet());

        // Remove nodes that don't exist anymore
        // Make a copy to materialize the set difference
        Set<String> deadNodes = ImmutableSet.copyOf(difference(nodes.keySet(), aliveNodeIds));
        nodes.keySet().removeAll(deadNodes);

        // Add new nodes
        for (InternalNode node : aliveNodes) {
            if (!nodes.containsKey(node.getNodeIdentifier())) {
                nodes.put(
                        node.getNodeIdentifier(),
                        new RemoteNodeMemory(
                                node,
                                httpClient,
                                memoryInfoCodec,
                                assignmentsRequestCodec,
                                locationFactory.createMemoryInfoLocation(node),
                                isBinaryTransportEnabled));
            }
        }

        // If work isn't scheduled on the coordinator (the current node) there is no point
        // in polling or updating (when moving queries to the reserved pool) its memory pools
        if (!isWorkScheduledOnCoordinator) {
            nodes.remove(nodeManager.getCurrentNode().getNodeIdentifier());
        }

        // Schedule refresh
        for (RemoteNodeMemory node : nodes.values()) {
            node.asyncRefresh(assignments);
        }
    }

    /**
     * Refreshes every pool from the latest per-node memory info, updates the cluster
     * total, and notifies registered listeners asynchronously.
     */
    private synchronized void updatePools(Map<MemoryPoolId, Integer> queryCounts)
    {
        // Update view of cluster memory and pools
        List<MemoryInfo> nodeMemoryInfos = nodes.values().stream()
                .map(RemoteNodeMemory::getInfo)
                .filter(Optional::isPresent)
                .map(Optional::get)
                .collect(toImmutableList());

        long totalClusterMemory = nodeMemoryInfos.stream()
                .map(MemoryInfo::getTotalNodeMemory)
                .mapToLong(DataSize::toBytes)
                .sum();
        clusterMemoryBytes.set(totalClusterMemory);

        for (ClusterMemoryPool pool : pools.values()) {
            pool.update(nodeMemoryInfos, queryCounts.getOrDefault(pool.getId(), 0));
            if (changeListeners.containsKey(pool.getId())) {
                MemoryPoolInfo info = pool.getInfo();
                for (Consumer<MemoryPoolInfo> listener : changeListeners.get(pool.getId())) {
                    // Deliver off the manager lock via the single-thread executor.
                    listenerExecutor.execute(() -> listener.accept(info));
                }
            }
        }
    }

    public synchronized Map<String, Optional<MemoryInfo>> getWorkerMemoryInfo()
    {
        Map<String, Optional<MemoryInfo>> memoryInfo = new HashMap<>();
        for (Entry<String, RemoteNodeMemory> entry : nodes.entrySet()) {
            // workerId is of the form "node_identifier [node_host]"
            String workerId = entry.getKey() + " [" + entry.getValue().getNode().getHost() + "]";
            memoryInfo.put(workerId, entry.getValue().getInfo());
        }
        return memoryInfo;
    }

    /**
     * Unexports all pool MBeans and shuts down the listener executor. The Closer
     * guarantees every unexport is attempted even if one of them throws.
     */
    @PreDestroy
    public synchronized void destroy()
            throws IOException
    {
        try (Closer closer = Closer.create()) {
            for (ClusterMemoryPool pool : pools.values()) {
                closer.register(() -> exporter.unexport(generatedNameOf(ClusterMemoryPool.class, pool.getId().toString())));
            }
            closer.register(listenerExecutor::shutdownNow);
        }
    }

    @Managed
    public int getNumberOfLeakedQueries()
    {
        return memoryLeakDetector.getNumberOfLeakedQueries();
    }

    @Managed
    public long getClusterUserMemoryReservation()
    {
        return clusterUserMemoryReservation.get();
    }

    @Managed
    public long getClusterTotalMemoryReservation()
    {
        return clusterTotalMemoryReservation.get();
    }

    @Managed
    public long getClusterMemoryBytes()
    {
        return clusterMemoryBytes.get();
    }

    @Managed
    public long getQueriesKilledDueToOutOfMemory()
    {
        return queriesKilledDueToOutOfMemory.get();
    }
}
/** * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.interestrate.swaption.method; import java.util.List; import java.util.Set; import com.opengamma.analytics.financial.interestrate.InstrumentDerivative; import com.opengamma.analytics.financial.interestrate.InterestRateCurveSensitivity; import com.opengamma.analytics.financial.interestrate.PresentValueSABRSensitivityDataBundle; import com.opengamma.analytics.financial.interestrate.YieldCurveBundle; import com.opengamma.analytics.financial.interestrate.method.PricingMethod; import com.opengamma.analytics.financial.interestrate.swaption.derivative.SwaptionPhysicalFixedIbor; import com.opengamma.analytics.financial.interestrate.swaption.provider.SwaptionPhysicalFixedIborSABRLMMLeastSquareMethod; import com.opengamma.analytics.financial.model.interestrate.definition.LiborMarketModelDisplacedDiffusionDataBundle; import com.opengamma.analytics.financial.model.interestrate.definition.LiborMarketModelDisplacedDiffusionParameters; import com.opengamma.analytics.financial.model.option.definition.SABRInterestRateDataBundle; import com.opengamma.analytics.math.matrix.CommonsMatrixAlgebra; import com.opengamma.analytics.math.matrix.DoubleMatrix1D; import com.opengamma.analytics.math.matrix.DoubleMatrix2D; import com.opengamma.util.ArgumentChecker; import com.opengamma.util.money.CurrencyAmount; import com.opengamma.util.tuple.DoublesPair; import com.opengamma.util.tuple.Triple; /** * Method to computes the present value and sensitivities of physical delivery European swaptions with a Libor Market Model calibrated exactly to SABR prices. * The LMM displacements and volatility weights are hard coded. * <p> Reference: M. Henrard, Algorithmic differentiation and calibration: optimization, September 2012. 
* @deprecated Use {@link SwaptionPhysicalFixedIborSABRLMMLeastSquareMethod}
*/
@Deprecated
public class SwaptionPhysicalFixedIborSABRLMMAtBestMethod implements PricingMethod {

  /**
   * The SABR method used for European swaptions with physical delivery.
   */
  private static final SwaptionPhysicalFixedIborSABRMethod METHOD_SWAPTION_SABR = SwaptionPhysicalFixedIborSABRMethod.getInstance();
  /**
   * The LMM method used for European swaptions with physical delivery.
   */
  private static final SwaptionPhysicalFixedIborLMMDDMethod METHOD_SWAPTION_LMM = SwaptionPhysicalFixedIborLMMDDMethod.getInstance();
  /**
   * The method used to create the calibration basket.
   */
  private static final SwaptionPhysicalFixedIborBasketMethod METHOD_BASKET = SwaptionPhysicalFixedIborBasketMethod.getInstance();
  /**
   * The matrix algebra used.
   */
  private static final CommonsMatrixAlgebra ALGEBRA = new CommonsMatrixAlgebra();

  /**
   * The moneyness of strikes used in the calibration basket. Difference between the swaption rate and the basket rates.
   */
  private final double[] _strikeMoneyness;
  /**
   * The initial value of the LMM parameters for calibration. The initial parameters are not modified by the calibration but a new copy is created for each calibration.
   */
  private final LiborMarketModelDisplacedDiffusionParameters _parametersInit;

  /**
   * Constructor.
   * @param strikeMoneyness The moneyness of strikes used in the calibration basket. Difference between the swaption rate and the basket rates.
   * @param parametersInit The initial value of the LMM parameters for calibration. The initial parameters are not modified by the calibration but a new copy is created for each calibration.
   */
  public SwaptionPhysicalFixedIborSABRLMMAtBestMethod(final double[] strikeMoneyness, final LiborMarketModelDisplacedDiffusionParameters parametersInit) {
    ArgumentChecker.notNull(strikeMoneyness, "strike moneyness");
    ArgumentChecker.notNull(parametersInit, "initial parameters");
    _strikeMoneyness = strikeMoneyness;
    _parametersInit = parametersInit;
  }

  /**
   * The method calibrates a LMM on a set of vanilla swaption priced with SABR. The set of vanilla swaptions is given by the CalibrationType.
   * The original swaption is priced with the calibrated LMM.
   * This should not be used for vanilla swaptions (the price is equal to the SABR price with a longer computation type and some approximation).
   * This is useful for non-standard swaptions like amortized swaptions.
   * @param swaption The swaption.
   * @param curves The curves and SABR data.
   * @return The present value.
   */
  public CurrencyAmount presentValue(final SwaptionPhysicalFixedIbor swaption, final SABRInterestRateDataBundle curves) {
    ArgumentChecker.notNull(swaption, "swaption");
    ArgumentChecker.notNull(curves, "curves");
    final int nbStrikes = _strikeMoneyness.length;
    // Work on a copy so the stored initial parameters are never mutated by calibration.
    final LiborMarketModelDisplacedDiffusionParameters lmmParameters = _parametersInit.copy();
    final SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationObjective objective = new SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationObjective(lmmParameters);
    final SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationEngine calibrationEngine = new SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationEngine(objective, nbStrikes);
    // Build the basket of vanilla swaptions at the configured moneyness offsets and calibrate the LMM to their SABR prices.
    final SwaptionPhysicalFixedIbor[] swaptionCalibration = METHOD_BASKET.calibrationBasketFixedLegPeriod(swaption, _strikeMoneyness);
    calibrationEngine.addInstrument(swaptionCalibration, METHOD_SWAPTION_SABR);
    calibrationEngine.calibrate(curves);
    final LiborMarketModelDisplacedDiffusionDataBundle lmmBundle = new LiborMarketModelDisplacedDiffusionDataBundle(lmmParameters, curves);
    // Price the original swaption with the calibrated LMM.
    final CurrencyAmount pv = METHOD_SWAPTION_LMM.presentValue(swaption, lmmBundle);
    return pv;
  }

  /**
   * {@inheritDoc}
   * Requires a physical-delivery swaption and a SABR data bundle; delegates to the typed overload.
   */
  @Override
  public CurrencyAmount presentValue(final InstrumentDerivative instrument, final YieldCurveBundle curves) {
    ArgumentChecker.isTrue(instrument instanceof SwaptionPhysicalFixedIbor, "Physical delivery swaption");
    ArgumentChecker.isTrue(curves instanceof SABRInterestRateDataBundle, "Bundle should contain SABR data");
    return presentValue((SwaptionPhysicalFixedIbor) instrument, (SABRInterestRateDataBundle) curves);
  }

  /**
   * Computes the present value SABR sensitivities of the swaption through the LMM calibration,
   * by algorithmic differentiation of the calibration (chain rule through the calibration
   * parameters Phi to the SABR parameters Theta).
   * @param swaption The swaption.
   * @param curves The curves and SABR data.
   * @return The SABR sensitivities (alpha, rho, nu) per calibration expiry/maturity.
   */
  public PresentValueSABRSensitivityDataBundle presentValueSABRSensitivity(final SwaptionPhysicalFixedIbor swaption, final SABRInterestRateDataBundle curves) {
    ArgumentChecker.notNull(swaption, "swaption");
    ArgumentChecker.notNull(curves, "curves");
    final int nbStrikes = _strikeMoneyness.length;
    // Calibration, identical to presentValue(...) above.
    final LiborMarketModelDisplacedDiffusionParameters lmmParameters = _parametersInit.copy();
    final SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationObjective objective = new SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationObjective(lmmParameters);
    final SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationEngine calibrationEngine = new SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationEngine(objective, nbStrikes);
    final SwaptionPhysicalFixedIbor[] swaptionCalibration = METHOD_BASKET.calibrationBasketFixedLegPeriod(swaption, _strikeMoneyness);
    calibrationEngine.addInstrument(swaptionCalibration, METHOD_SWAPTION_SABR);
    calibrationEngine.calibrate(curves);
    final LiborMarketModelDisplacedDiffusionDataBundle lmmBundle = new LiborMarketModelDisplacedDiffusionDataBundle(lmmParameters, curves);
    final int nbCalibrations = swaptionCalibration.length;
    final int nbPeriods = nbCalibrations / nbStrikes;
    final int nbFact = lmmParameters.getNbFactor();
    final List<Integer> instrumentIndex = calibrationEngine.getInstrumentIndex();
    final double[] dPvdPhi = new double[2 * nbPeriods];
    // Implementation note: Derivative of the priced swaptions wrt the calibration parameters (multiplicative factor and additive term)
    // Implementation note: Phi is a vector with the multiplicative factors on the volatility and then the additive terms on the displacements.
    final double[][] dPvdGamma = METHOD_SWAPTION_LMM.presentValueLMMSensitivity(swaption, lmmBundle);
    final double[] dPvdDis = METHOD_SWAPTION_LMM.presentValueDDSensitivity(swaption, lmmBundle);
    for (int loopperiod = 0; loopperiod < nbPeriods; loopperiod++) {
      for (int loopsub = instrumentIndex.get(loopperiod * nbStrikes); loopsub < instrumentIndex.get((loopperiod + 1) * nbStrikes); loopsub++) {
        for (int loopfact = 0; loopfact < nbFact; loopfact++) {
          dPvdPhi[loopperiod] += dPvdGamma[loopsub][loopfact] * lmmParameters.getVolatility()[loopsub][loopfact];
          dPvdPhi[nbPeriods + loopperiod] += dPvdDis[loopsub];
        }
      }
    }
    final double[][] dPvCaldPhi = new double[nbCalibrations][2 * nbPeriods];
    // Implementation note: Derivative of the calibration swaptions wrt the calibration parameters (multiplicative factor and additive term)
    final double[][][] dPvCaldGamma = new double[nbCalibrations][][];
    for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
      dPvCaldGamma[loopcal] = METHOD_SWAPTION_LMM.presentValueLMMSensitivity(swaptionCalibration[loopcal], lmmBundle);
    }
    final double[][] dPvCaldDis = new double[nbCalibrations][];
    for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
      dPvCaldDis[loopcal] = METHOD_SWAPTION_LMM.presentValueDDSensitivity(swaptionCalibration[loopcal], lmmBundle);
    }
    // Project the volatility/displacement sensitivities of each calibration instrument onto Phi.
    for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
      for (int loopperiod = 0; loopperiod < nbPeriods; loopperiod++) {
        for (int loopsub = instrumentIndex.get(loopperiod * nbStrikes); loopsub < instrumentIndex.get((loopperiod + 1) * nbStrikes); loopsub++) {
          for (int loopfact = 0; loopfact < nbFact; loopfact++) {
            dPvCaldPhi[loopcal][loopperiod] += dPvCaldGamma[loopcal][loopsub][loopfact] * lmmParameters.getVolatility()[loopsub][loopfact];
            dPvCaldPhi[loopcal][nbPeriods + loopperiod] += dPvCaldDis[loopcal][loopsub];
          }
        }
      }
    }
    final double[][] dPvCaldTheta = new double[nbCalibrations][3 * nbPeriods];
    // Implementation note: Derivative of the calibration swaptions wrt the SABR parameters as a unique array.
    // Implementation note: Theta is vector with first the Alpha, the the Rho and finally the Nu.
    for (int loopperiod = 0; loopperiod < nbPeriods; loopperiod++) {
      for (int loopstrike = 0; loopstrike < nbStrikes; loopstrike++) {
        final PresentValueSABRSensitivityDataBundle dPvCaldSABR = METHOD_SWAPTION_SABR.presentValueSABRSensitivity(swaptionCalibration[loopperiod * nbStrikes + loopstrike], curves);
        // NOTE(review): each sensitivity map is presumed to hold a single expiry/maturity key for these vanilla instruments — keys[0] is used; confirm.
        final Set<DoublesPair> keySet = dPvCaldSABR.getAlpha().getMap().keySet();
        final DoublesPair[] keys = keySet.toArray(new DoublesPair[keySet.size()]);
        dPvCaldTheta[loopperiod * nbStrikes + loopstrike][loopperiod] += dPvCaldSABR.getAlpha().getMap().get(keys[0]);
        dPvCaldTheta[loopperiod * nbStrikes + loopstrike][nbPeriods + loopperiod] = dPvCaldSABR.getRho().getMap().get(keys[0]);
        dPvCaldTheta[loopperiod * nbStrikes + loopstrike][2 * nbPeriods + loopperiod] = dPvCaldSABR.getNu().getMap().get(keys[0]);
      }
    }
    final double[][] dfdTheta = new double[2 * nbPeriods][3 * nbPeriods];
    // Implementation note: Derivative of f wrt the SABR parameters.
    for (int loopp = 0; loopp < 2 * nbPeriods; loopp++) {
      for (int loops = 0; loops < 3 * nbPeriods; loops++) {
        for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
          dfdTheta[loopp][loops] += -2 * dPvCaldPhi[loopcal][loopp] * dPvCaldTheta[loopcal][loops];
        }
      }
    }
    final double[][] dfdPhi = new double[2 * nbPeriods][2 * nbPeriods];
    // Implementation note: Derivative of f wrt the calibration parameters. This is an approximation: the second order derivative part are ignored.
    for (int loopp1 = 0; loopp1 < 2 * nbPeriods; loopp1++) {
      for (int loopp2 = 0; loopp2 < 2 * nbPeriods; loopp2++) {
        for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
          dfdPhi[loopp1][loopp2] += 2 * dPvCaldPhi[loopcal][loopp1] * dPvCaldPhi[loopcal][loopp2];
        }
      }
    }
    // Implicit-function step: dPhi/dTheta = -(df/dPhi)^{-1} (df/dTheta), then chain rule to get dPv/dTheta.
    final DoubleMatrix2D dfdThetaMat = new DoubleMatrix2D(dfdTheta);
    final DoubleMatrix2D dfdPhiMat = new DoubleMatrix2D(dfdPhi);
    final DoubleMatrix2D dPhidThetaMat = (DoubleMatrix2D) ALGEBRA.scale(ALGEBRA.multiply(ALGEBRA.getInverse(dfdPhiMat), dfdThetaMat), -1.0);
    final DoubleMatrix1D dPvdPhiMat = new DoubleMatrix1D(dPvdPhi);
    final DoubleMatrix2D dPvdThetaMat = ALGEBRA.getTranspose(ALGEBRA.multiply(ALGEBRA.getTranspose(dPhidThetaMat), dPvdPhiMat));
    final double[] dPvdTheta = dPvdThetaMat.getData()[0];
    // Storage in PresentValueSABRSensitivityDataBundle
    final PresentValueSABRSensitivityDataBundle sensi = new PresentValueSABRSensitivityDataBundle();
    for (int loopp = 0; loopp < nbPeriods; loopp++) {
      final DoublesPair expiryMaturity = DoublesPair.of(swaptionCalibration[loopp * nbStrikes].getTimeToExpiry(), swaptionCalibration[loopp * nbStrikes].getMaturityTime());
      sensi.addAlpha(expiryMaturity, dPvdTheta[loopp]);
      sensi.addRho(expiryMaturity, dPvdTheta[nbPeriods + loopp]);
      sensi.addNu(expiryMaturity, dPvdTheta[2 * nbPeriods + loopp]);
    }
    return sensi;
  }

  /**
   * Computes the present value, the SABR sensitivities and the curve sensitivity in a single pass,
   * sharing one LMM calibration. Mirrors presentValue(...) and presentValueSABRSensitivity(...).
   * @param swaption The swaption.
   * @param curves The curves and SABR data.
   * @return Triple of (present value, SABR sensitivities, curve sensitivity).
   */
  public Triple<CurrencyAmount, PresentValueSABRSensitivityDataBundle, InterestRateCurveSensitivity> presentValueAndSensitivity(final SwaptionPhysicalFixedIbor swaption, final SABRInterestRateDataBundle curves) {
    ArgumentChecker.notNull(swaption, "swaption");
    ArgumentChecker.notNull(curves, "curves");
    final int nbStrikes = _strikeMoneyness.length;
    final LiborMarketModelDisplacedDiffusionParameters lmmParameters = _parametersInit.copy();
    final SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationObjective objective = new SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationObjective(lmmParameters);
    final SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationEngine calibrationEngine = new SwaptionPhysicalLMMDDSuccessiveLeastSquareCalibrationEngine(objective, nbStrikes);
    final SwaptionPhysicalFixedIbor[] swaptionCalibration = METHOD_BASKET.calibrationBasketFixedLegPeriod(swaption, _strikeMoneyness);
    calibrationEngine.addInstrument(swaptionCalibration, METHOD_SWAPTION_SABR);
    calibrationEngine.calibrate(curves);
    final LiborMarketModelDisplacedDiffusionDataBundle lmmBundle = new LiborMarketModelDisplacedDiffusionDataBundle(lmmParameters, curves);
    // 1. PV
    final CurrencyAmount pv = METHOD_SWAPTION_LMM.presentValue(swaption, lmmBundle);
    final int nbCalibrations = swaptionCalibration.length;
    final int nbPeriods = nbCalibrations / nbStrikes;
    final int nbFact = lmmParameters.getNbFactor();
    final List<Integer> instrumentIndex = calibrationEngine.getInstrumentIndex();
    // 2. SABR sensitivities
    final double[] dPvdPhi = new double[2 * nbPeriods];
    // Implementation note: Derivative of the priced swaptions wrt the calibration parameters (multiplicative factor and additive term)
    final double[][] dPvdGamma = METHOD_SWAPTION_LMM.presentValueLMMSensitivity(swaption, lmmBundle);
    final double[] dPvdDis = METHOD_SWAPTION_LMM.presentValueDDSensitivity(swaption, lmmBundle);
    for (int loopperiod = 0; loopperiod < nbPeriods; loopperiod++) {
      for (int loopsub = instrumentIndex.get(loopperiod * nbStrikes); loopsub < instrumentIndex.get((loopperiod + 1) * nbStrikes); loopsub++) {
        for (int loopfact = 0; loopfact < nbFact; loopfact++) {
          dPvdPhi[loopperiod] += dPvdGamma[loopsub][loopfact] * lmmParameters.getVolatility()[loopsub][loopfact];
          dPvdPhi[nbPeriods + loopperiod] += dPvdDis[loopsub];
        }
      }
    }
    final double[][] dPvCaldPhi = new double[nbCalibrations][2 * nbPeriods];
    // Implementation note: Derivative of the calibration swaptions wrt the calibration parameters (multiplicative factor and additive term)
    final double[][][] dPvCaldGamma = new double[nbCalibrations][][];
    for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
      dPvCaldGamma[loopcal] = METHOD_SWAPTION_LMM.presentValueLMMSensitivity(swaptionCalibration[loopcal], lmmBundle);
    }
    final double[][] dPvCaldDis = new double[nbCalibrations][];
    for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
      dPvCaldDis[loopcal] = METHOD_SWAPTION_LMM.presentValueDDSensitivity(swaptionCalibration[loopcal], lmmBundle);
    }
    for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
      for (int loopperiod = 0; loopperiod < nbPeriods; loopperiod++) {
        for (int loopsub = instrumentIndex.get(loopperiod * nbStrikes); loopsub < instrumentIndex.get((loopperiod + 1) * nbStrikes); loopsub++) {
          for (int loopfact = 0; loopfact < nbFact; loopfact++) {
            dPvCaldPhi[loopcal][loopperiod] += dPvCaldGamma[loopcal][loopsub][loopfact] * lmmParameters.getVolatility()[loopsub][loopfact];
            dPvCaldPhi[loopcal][nbPeriods + loopperiod] += dPvCaldDis[loopcal][loopsub];
          }
        }
      }
    }
    final double[][] dPvCaldTheta = new double[nbCalibrations][3 * nbPeriods];
    // Implementation note: Derivative of the calibration swaptions wrt the SABR parameters as a unique array.
    for (int loopperiod = 0; loopperiod < nbPeriods; loopperiod++) {
      for (int loopstrike = 0; loopstrike < nbStrikes; loopstrike++) {
        final PresentValueSABRSensitivityDataBundle dPvCaldSABR = METHOD_SWAPTION_SABR.presentValueSABRSensitivity(swaptionCalibration[loopperiod * nbStrikes + loopstrike], curves);
        final Set<DoublesPair> keySet = dPvCaldSABR.getAlpha().getMap().keySet();
        final DoublesPair[] keys = keySet.toArray(new DoublesPair[keySet.size()]);
        dPvCaldTheta[loopperiod * nbStrikes + loopstrike][loopperiod] += dPvCaldSABR.getAlpha().getMap().get(keys[0]);
        dPvCaldTheta[loopperiod * nbStrikes + loopstrike][nbPeriods + loopperiod] = dPvCaldSABR.getRho().getMap().get(keys[0]);
        dPvCaldTheta[loopperiod * nbStrikes + loopstrike][2 * nbPeriods + loopperiod] = dPvCaldSABR.getNu().getMap().get(keys[0]);
      }
    }
    final double[][] dfdTheta = new double[2 * nbPeriods][3 * nbPeriods];
    // Implementation note: Derivative of f wrt the SABR parameters.
    for (int loopp = 0; loopp < 2 * nbPeriods; loopp++) {
      for (int loops = 0; loops < 3 * nbPeriods; loops++) {
        for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) {
          dfdTheta[loopp][loops] += -2 * dPvCaldPhi[loopcal][loopp] * dPvCaldTheta[loopcal][loops];
        }
      }
    }
    final double[][] dfdPhi = new double[2 * nbPeriods][2 * nbPeriods];
    // Implementation note: Derivative of f wrt the calibration parameters. This is an approximation: the second order derivative part are ignored.
for (int loopp1 = 0; loopp1 < 2 * nbPeriods; loopp1++) { for (int loopp2 = 0; loopp2 < 2 * nbPeriods; loopp2++) { for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) { dfdPhi[loopp1][loopp2] += 2 * dPvCaldPhi[loopcal][loopp1] * dPvCaldPhi[loopcal][loopp2]; } } } final DoubleMatrix2D dfdThetaMat = new DoubleMatrix2D(dfdTheta); final DoubleMatrix2D dfdPhiMat = new DoubleMatrix2D(dfdPhi); final DoubleMatrix2D dfdPhiInvMat = ALGEBRA.getInverse(dfdPhiMat); final DoubleMatrix2D dPhidThetaMat = (DoubleMatrix2D) ALGEBRA.scale(ALGEBRA.multiply(dfdPhiInvMat, dfdThetaMat), -1.0); final DoubleMatrix1D dPvdPhiMat = new DoubleMatrix1D(dPvdPhi); final DoubleMatrix2D dPvdThetaMat = ALGEBRA.getTranspose(ALGEBRA.multiply(ALGEBRA.getTranspose(dPhidThetaMat), dPvdPhiMat)); final double[] dPvdTheta = dPvdThetaMat.getData()[0]; // Storage in PresentValueSABRSensitivityDataBundle final PresentValueSABRSensitivityDataBundle sensiSABR = new PresentValueSABRSensitivityDataBundle(); for (int loopp = 0; loopp < nbPeriods; loopp++) { final DoublesPair expiryMaturity = DoublesPair.of(swaptionCalibration[loopp * nbStrikes].getTimeToExpiry(), swaptionCalibration[loopp * nbStrikes].getMaturityTime()); sensiSABR.addAlpha(expiryMaturity, dPvdTheta[loopp]); sensiSABR.addRho(expiryMaturity, dPvdTheta[nbPeriods + loopp]); sensiSABR.addNu(expiryMaturity, dPvdTheta[2 * nbPeriods + loopp]); } // 3. 
Curve sensitivities final InterestRateCurveSensitivity[] dPvCalBasedC = new InterestRateCurveSensitivity[nbCalibrations]; final InterestRateCurveSensitivity[] dPvCalLmmdC = new InterestRateCurveSensitivity[nbCalibrations]; final InterestRateCurveSensitivity[] dPvCalDiffdC = new InterestRateCurveSensitivity[nbCalibrations]; for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) { dPvCalBasedC[loopcal] = METHOD_SWAPTION_SABR.presentValueCurveSensitivity(swaptionCalibration[loopcal], curves); dPvCalLmmdC[loopcal] = METHOD_SWAPTION_LMM.presentValueCurveSensitivity(swaptionCalibration[loopcal], lmmBundle); dPvCalDiffdC[loopcal] = dPvCalBasedC[loopcal].plus(dPvCalLmmdC[loopcal].multipliedBy(-1.0)).cleaned(); } final InterestRateCurveSensitivity[] dfdC = new InterestRateCurveSensitivity[2 * nbPeriods]; // Implementation note: Derivative of f wrt the curves. This is an approximation: the second order derivative part are ignored. for (int loopp = 0; loopp < 2 * nbPeriods; loopp++) { dfdC[loopp] = new InterestRateCurveSensitivity(); for (int loopcal = 0; loopcal < nbCalibrations; loopcal++) { dfdC[loopp] = dfdC[loopp].plus(dPvCalDiffdC[loopcal].multipliedBy(-2 * dPvCaldPhi[loopcal][loopp])).cleaned(); } } final InterestRateCurveSensitivity[] dPhidC = new InterestRateCurveSensitivity[2 * nbPeriods]; for (int loopp1 = 0; loopp1 < 2 * nbPeriods; loopp1++) { dPhidC[loopp1] = new InterestRateCurveSensitivity(); for (int loopp2 = 0; loopp2 < 2 * nbPeriods; loopp2++) { dPhidC[loopp1] = dPhidC[loopp1].plus(dfdC[loopp2].multipliedBy(-dfdPhiInvMat.getEntry(loopp1, loopp2))).cleaned(); } } InterestRateCurveSensitivity dPvdC = METHOD_SWAPTION_LMM.presentValueCurveSensitivity(swaption, lmmBundle); for (int loopp = 0; loopp < 2 * nbPeriods; loopp++) { dPvdC = dPvdC.plus(dPhidC[loopp].multipliedBy(dPvdPhi[loopp])).cleaned(); } return new Triple<>(pv, sensiSABR, dPvdC); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.runtime.tasks;

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.broadcast.BroadcastVariableManager;
import org.apache.flink.runtime.checkpoint.CheckpointException;
import org.apache.flink.runtime.checkpoint.CheckpointMetaData;
import org.apache.flink.runtime.checkpoint.CheckpointMetricsBuilder;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.checkpoint.CheckpointType;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.deployment.InputGateDeploymentDescriptor;
import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.execution.librarycache.TestingClassLoaderLease;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.executiongraph.JobInformation;
import org.apache.flink.runtime.executiongraph.TaskInformation;
import org.apache.flink.runtime.externalresource.ExternalResourceInfoProvider;
import org.apache.flink.runtime.filecache.FileCache;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.network.NettyShuffleEnvironmentBuilder;
import org.apache.flink.runtime.io.network.TaskEventDispatcher;
import org.apache.flink.runtime.io.network.partition.NoOpResultPartitionConsumableNotifier;
import org.apache.flink.runtime.io.network.partition.ResultPartitionConsumableNotifier;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.jobgraph.tasks.InputSplitProvider;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.metrics.groups.TaskMetricGroup;
import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
import org.apache.flink.runtime.query.KvStateRegistry;
import org.apache.flink.runtime.shuffle.ShuffleEnvironment;
import org.apache.flink.runtime.state.CheckpointStorageLocationReference;
import org.apache.flink.runtime.state.TestTaskStateManager;
import org.apache.flink.runtime.taskexecutor.KvStateService;
import org.apache.flink.runtime.taskexecutor.PartitionProducerStateChecker;
import org.apache.flink.runtime.taskexecutor.TestGlobalAggregateManager;
import org.apache.flink.runtime.taskmanager.CheckpointResponder;
import org.apache.flink.runtime.taskmanager.NoOpTaskOperatorEventGateway;
import org.apache.flink.runtime.taskmanager.Task;
import org.apache.flink.runtime.taskmanager.TaskManagerActions;
import org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo;
import org.apache.flink.streaming.runtime.tasks.mailbox.MailboxDefaultAction;
import org.apache.flink.util.SerializedValue;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;

import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;

import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;

/**
 * Tests that the cached thread pool used by the {@link Task} allows synchronous checkpoints to
 * complete successfully.
 */
public class SynchronousCheckpointITCase {

    // A thread-safe queue to "log" and monitor events happening in the task's methods. Also, used
    // by the test thread
    // to synchronize actions with the task's threads.
    //
    // NOTE(review): static and shared across tests in this class; the single test drains it fully
    // (every take() is paired with an isEmpty() check), so no cross-test cleanup is needed here.
    private static LinkedBlockingQueue<Event> eventQueue = new LinkedBlockingQueue<>();

    // Hard per-test timeout so a missing event makes the blocking take() fail the test instead of
    // hanging the build.
    @Rule public final Timeout timeoutPerTest = Timeout.seconds(10);

    @Test
    public void taskDispatcherThreadPoolAllowsForSynchronousCheckpoints() throws Exception {
        final Task task = createTask(SynchronousCheckpointTestingTask.class);

        // TaskCleaner cancels the task and joins its thread even if an assertion below fails.
        try (TaskCleaner ignored = new TaskCleaner(task)) {
            task.startTaskThread();

            // Wait until the task's mailbox loop has actually started before triggering anything.
            assertThat(eventQueue.take(), is(Event.TASK_IS_RUNNING));
            assertTrue(eventQueue.isEmpty());

            assertEquals(ExecutionState.RUNNING, task.getExecutionState());

            // A SAVEPOINT_SUSPEND checkpoint is the "synchronous" case under test.
            task.triggerCheckpointBarrier(
                    42,
                    156865867234L,
                    new CheckpointOptions(
                            CheckpointType.SAVEPOINT_SUSPEND,
                            CheckpointStorageLocationReference.getDefault()));

            // The PRE/POST pair proves the trigger call completed rather than blocking forever.
            assertThat(eventQueue.take(), is(Event.PRE_TRIGGER_CHECKPOINT));
            assertThat(eventQueue.take(), is(Event.POST_TRIGGER_CHECKPOINT));
            assertTrue(eventQueue.isEmpty());

            task.notifyCheckpointComplete(42);
            assertThat(eventQueue.take(), is(Event.PRE_NOTIFY_CHECKPOINT_COMPLETE));
            assertThat(eventQueue.take(), is(Event.POST_NOTIFY_CHECKPOINT_COMPLETE));
            assertTrue(eventQueue.isEmpty());

            // The task must still be running after the synchronous checkpoint round-trip.
            assertEquals(ExecutionState.RUNNING, task.getExecutionState());
        }
    }

    /**
     * A {@link StreamTask} which makes sure that the different phases of a synchronous checkpoint
     * are reflected in the {@link SynchronousCheckpointITCase#eventQueue}.
     */
    public static class SynchronousCheckpointTestingTask extends StreamTask {
        // Flag to emit the first event only once.
        private boolean isRunning;

        public SynchronousCheckpointTestingTask(Environment environment) throws Exception {
            super(environment);
        }

        @Override
        protected void processInput(MailboxDefaultAction.Controller controller) throws Exception {
            if (!isRunning) {
                isRunning = true;
                eventQueue.put(Event.TASK_IS_RUNNING);
            }

            // Keep the mailbox loop alive (suspended) until the task is cancelled by TaskCleaner.
            if (isCanceled()) {
                controller.allActionsCompleted();
            } else {
                controller.suspendDefaultAction();
            }
        }

        @Override
        public Future<Boolean> triggerCheckpointAsync(
                CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions) {
            try {
                // Bracket the real trigger with PRE/POST events so the test can observe that the
                // call both started and returned.
                eventQueue.put(Event.PRE_TRIGGER_CHECKPOINT);
                Future<Boolean> result =
                        super.triggerCheckpointAsync(checkpointMetaData, checkpointOptions);
                eventQueue.put(Event.POST_TRIGGER_CHECKPOINT);
                return result;
            } catch (InterruptedException e) {
                // Restore the interrupt flag before surfacing the failure.
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            }
        }

        @Override
        public Future<Void> notifyCheckpointCompleteAsync(long checkpointId) {
            try {
                eventQueue.put(Event.PRE_NOTIFY_CHECKPOINT_COMPLETE);
                Future<Void> result = super.notifyCheckpointCompleteAsync(checkpointId);
                eventQueue.put(Event.POST_NOTIFY_CHECKPOINT_COMPLETE);
                return result;
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            }
        }

        @Override
        public Future<Void> notifyCheckpointAbortAsync(long checkpointId) {
            // Aborts are irrelevant to this test; complete immediately.
            return CompletableFuture.completedFuture(null);
        }

        @Override
        protected void init() {}

        @Override
        public void triggerCheckpointOnBarrier(
                CheckpointMetaData checkpointMetaData,
                CheckpointOptions checkpointOptions,
                CheckpointMetricsBuilder checkpointMetrics) {
            // This task has no input barriers; reaching here would indicate a test-setup bug.
            throw new UnsupportedOperationException("Should not be called");
        }

        @Override
        public void abortCheckpointOnBarrier(long checkpointId, CheckpointException cause) {
            throw new UnsupportedOperationException("Should not be called");
        }

        @Override
        protected void cleanup() {}
    }

    /**
     * The different state transitions during a synchronous checkpoint along with their expected
     * previous state.
     */
    private enum Event {
        TASK_IS_RUNNING,
        PRE_TRIGGER_CHECKPOINT,
        PRE_NOTIFY_CHECKPOINT_COMPLETE,
        POST_NOTIFY_CHECKPOINT_COMPLETE,
        POST_TRIGGER_CHECKPOINT,
    }

    // --------------------------			Boilerplate tools copied from the TaskAsyncCallTest
    // --------------------------

    // Builds a minimal runnable Task around the given invokable; everything not exercised by the
    // test is mocked or replaced with no-op/test implementations.
    private Task createTask(Class<? extends AbstractInvokable> invokableClass) throws Exception {
        ResultPartitionConsumableNotifier consumableNotifier =
                new NoOpResultPartitionConsumableNotifier();
        PartitionProducerStateChecker partitionProducerStateChecker =
                mock(PartitionProducerStateChecker.class);
        Executor executor = mock(Executor.class);
        ShuffleEnvironment<?, ?> shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build();

        TaskMetricGroup taskMetricGroup =
                UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();

        JobInformation jobInformation =
                new JobInformation(
                        new JobID(),
                        "Job Name",
                        new SerializedValue<>(new ExecutionConfig()),
                        new Configuration(),
                        Collections.emptyList(),
                        Collections.emptyList());

        TaskInformation taskInformation =
                new TaskInformation(
                        new JobVertexID(),
                        "Test Task",
                        1,
                        1,
                        invokableClass.getName(),
                        new Configuration());

        return new Task(
                jobInformation,
                taskInformation,
                new ExecutionAttemptID(),
                new AllocationID(),
                0,
                0,
                Collections.<ResultPartitionDeploymentDescriptor>emptyList(),
                Collections.<InputGateDeploymentDescriptor>emptyList(),
                mock(MemoryManager.class),
                mock(IOManager.class),
                shuffleEnvironment,
                new KvStateService(new KvStateRegistry(), null, null),
                mock(BroadcastVariableManager.class),
                new TaskEventDispatcher(),
                ExternalResourceInfoProvider.NO_EXTERNAL_RESOURCES,
                new TestTaskStateManager(),
                mock(TaskManagerActions.class),
                mock(InputSplitProvider.class),
                mock(CheckpointResponder.class),
                new NoOpTaskOperatorEventGateway(),
                new TestGlobalAggregateManager(),
                TestingClassLoaderLease.newBuilder().build(),
                mock(FileCache.class),
                new TestingTaskManagerRuntimeInfo(),
                taskMetricGroup,
                consumableNotifier,
                partitionProducerStateChecker,
                executor);
    }

    // Cancels the wrapped task and joins its executing thread on close(), so the test never leaks
    // a running task thread even when an assertion fails mid-test.
    private static class TaskCleaner implements AutoCloseable {

        private final Task task;

        private TaskCleaner(Task task) {
            this.task = task;
        }

        @Override
        public void close() throws Exception {
            task.cancelExecution();
            // Bounded join: don't hang teardown if the task thread is stuck.
            task.getExecutingThread().join(5000);
        }
    }
}
package org.jtrim2.swing.component;

import java.util.Arrays;
import java.util.EnumSet;
import java.util.Set;
import org.jtrim2.event.ListenerRef;
import org.jtrim2.image.transform.BasicImageTransformations;
import org.jtrim2.image.transform.ZoomToFitOption;
import org.jtrim2.property.MutableProperty;
import org.jtrim2.property.PropertySource;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.*;
import static org.mockito.Mockito.*;

/**
 * Tests for {@code BasicTransformationProperty}, the property-based view over a
 * {@code BasicTransformationModel}.
 *
 * Each non-"Mutate" test follows the same pattern:
 * 1. register a change listener on one property,
 * 2. mutate the corresponding model attribute and verify exactly one notification
 *    plus value agreement between model and property,
 * 3. mutate every OTHER model attribute and expect NO further notification
 *    (the property must only fire for its own attribute),
 * 4. unregister, mutate the attribute again, and expect silence.
 *
 * Each "Mutate" test checks the reverse direction: writing through the
 * MutableProperty notifies the listener and updates the underlying model.
 */
public class BasicTransformationPropertyTest {
    // Fresh model/view pair per test (recreated in setUp).
    private BasicTransformationModel model;
    private BasicTransformationProperty view;

    @Before
    public void setUp() {
        model = new BasicTransformationModel();
        view = new BasicTransformationProperty(model);
    }

    /**
     * Test of getOffsetX method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetOffsetX() {
        PropertySource<Double> offsetX = view.offsetX();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = offsetX.addChangeListener(listener);

        model.setOffset(3.0, model.getOffsetY());
        verify(listener).run();
        assertEquals(model.getOffsetX(), offsetX.getValue(), 0.0);

        // Changing only Y (and all unrelated attributes) must not notify the X listener.
        model.setOffset(model.getOffsetX(), 4.0);
        model.flipHorizontal();
        model.flipVertical();
        model.setRotateInRadians(9.0);
        model.setZoom(9.0, 9.0);
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        // After unregistering, even an X change stays silent.
        listenerRef.unregister();
        model.setOffset(9.0, model.getOffsetY());
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetOffsetXMutate() {
        MutableProperty<Double> offsetX = view.offsetX();
        Runnable listener = mock(Runnable.class);
        offsetX.addChangeListener(listener);

        offsetX.setValue(3.0);
        verify(listener).run();
        // The write must be reflected both in the model and when read back.
        assertEquals(3.0, model.getOffsetX(), 0.0);
        assertEquals(3.0, offsetX.getValue(), 0.0);
    }

    /**
     * Test of getOffsetY method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetOffsetY() {
        PropertySource<Double> offsetY = view.offsetY();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = offsetY.addChangeListener(listener);

        model.setOffset(model.getOffsetX(), 3.0);
        verify(listener).run();
        assertEquals(model.getOffsetY(), offsetY.getValue(), 0.0);

        // Unrelated mutations: no additional notification expected.
        model.setOffset(4.0, model.getOffsetY());
        model.flipHorizontal();
        model.flipVertical();
        model.setRotateInRadians(9.0);
        model.setZoom(9.0, 9.0);
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        listenerRef.unregister();
        model.setOffset(model.getOffsetX(), 9.0);
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetOffsetYMutate() {
        MutableProperty<Double> offsetY = view.offsetY();
        Runnable listener = mock(Runnable.class);
        offsetY.addChangeListener(listener);

        offsetY.setValue(3.0);
        verify(listener).run();
        assertEquals(3.0, model.getOffsetY(), 0.0);
        assertEquals(3.0, offsetY.getValue(), 0.0);
    }

    /**
     * Test of getZoomX method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetZoomX() {
        PropertySource<Double> zoomX = view.zoomX();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = zoomX.addChangeListener(listener);

        model.setZoomX(3.0);
        verify(listener).run();
        assertEquals(model.getZoomX(), zoomX.getValue(), 0.0);

        // Unrelated mutations: no additional notification expected.
        model.setOffset(9.0, 9.0);
        model.flipHorizontal();
        model.flipVertical();
        model.setRotateInRadians(9.0);
        model.setZoomY(9.0);
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        listenerRef.unregister();
        model.setZoomX(9.0);
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetZoomXMutate() {
        MutableProperty<Double> zoomX = view.zoomX();
        Runnable listener = mock(Runnable.class);
        zoomX.addChangeListener(listener);

        zoomX.setValue(3.0);
        verify(listener).run();
        assertEquals(3.0, model.getZoomX(), 0.0);
        assertEquals(3.0, zoomX.getValue(), 0.0);
    }

    /**
     * Test of getZoomY method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetZoomY() {
        PropertySource<Double> zoomY = view.zoomY();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = zoomY.addChangeListener(listener);

        model.setZoomY(3.0);
        verify(listener).run();
        assertEquals(model.getZoomY(), zoomY.getValue(), 0.0);

        // Unrelated mutations: no additional notification expected.
        model.setOffset(9.0, 9.0);
        model.flipHorizontal();
        model.flipVertical();
        model.setRotateInRadians(9.0);
        model.setZoomX(9.0);
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        listenerRef.unregister();
        model.setZoomY(9.0);
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetZoomYMutate() {
        MutableProperty<Double> zoomY = view.zoomY();
        Runnable listener = mock(Runnable.class);
        zoomY.addChangeListener(listener);

        zoomY.setValue(3.0);
        verify(listener).run();
        assertEquals(3.0, model.getZoomY(), 0.0);
        assertEquals(3.0, zoomY.getValue(), 0.0);
    }

    /**
     * Test of getRotateInRadians method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetRotateInRadians() {
        PropertySource<Double> rotateRad = view.rotateInRadians();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = rotateRad.addChangeListener(listener);

        model.setRotateInRadians(3.0);
        verify(listener).run();
        assertEquals(model.getRotateInRadians(), rotateRad.getValue(), 0.0);

        // Unrelated mutations: no additional notification expected.
        model.setOffset(9.0, 9.0);
        model.flipHorizontal();
        model.flipVertical();
        model.setZoom(9.0, 9.0);
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        listenerRef.unregister();
        model.setRotateInRadians(6.0);
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetRotateInRadiansMutate() {
        MutableProperty<Double> rotateInRad = view.rotateInRadians();
        Runnable listener = mock(Runnable.class);
        rotateInRad.addChangeListener(listener);

        rotateInRad.setValue(3.0);
        verify(listener).run();
        assertEquals(3.0, model.getRotateInRadians(), 0.0);
        assertEquals(3.0, rotateInRad.getValue(), 0.0);
    }

    /**
     * Test of getRotateInDegrees method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetRotateInDegrees() {
        PropertySource<Integer> rotateDeg = view.rotateInDegrees();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = rotateDeg.addChangeListener(listener);

        model.setRotateInDegrees(3);
        verify(listener).run();
        assertEquals(Integer.valueOf(model.getRotateInDegrees()), rotateDeg.getValue());

        // Unrelated mutations: no additional notification expected.
        model.setOffset(9.0, 9.0);
        model.flipHorizontal();
        model.flipVertical();
        model.setZoom(9.0, 9.0);
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        listenerRef.unregister();
        model.setRotateInDegrees(9);
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetRotateInDegreesMutate() {
        MutableProperty<Integer> rotateInDeg = view.rotateInDegrees();
        Runnable listener = mock(Runnable.class);
        rotateInDeg.addChangeListener(listener);

        rotateInDeg.setValue(3);
        verify(listener).run();
        assertEquals(3, model.getRotateInDegrees());
        assertEquals(3, rotateInDeg.getValue().intValue());
    }

    /**
     * Test of getFlipHorizontal method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetFlipHorizontal() {
        PropertySource<Boolean> flipHorizontal = view.flipHorizontal();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = flipHorizontal.addChangeListener(listener);

        model.flipHorizontal();
        verify(listener).run();
        assertEquals(model.isFlipHorizontal(), flipHorizontal.getValue());

        // Unrelated mutations: no additional notification expected.
        model.setOffset(9.0, 9.0);
        model.flipVertical();
        model.setRotateInRadians(6.0);
        model.setZoom(9.0, 9.0);
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        listenerRef.unregister();
        model.flipHorizontal();
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetFlipHorizontalMutate() {
        MutableProperty<Boolean> flipH = view.flipHorizontal();
        Runnable listener = mock(Runnable.class);
        flipH.addChangeListener(listener);

        flipH.setValue(true);
        verify(listener).run();
        assertEquals(true, model.isFlipHorizontal());
        assertEquals(true, flipH.getValue());
    }

    /**
     * Test of getFlipVertical method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetFlipVertical() {
        PropertySource<Boolean> flipVertical = view.flipVertical();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = flipVertical.addChangeListener(listener);

        model.flipVertical();
        verify(listener).run();
        assertEquals(model.isFlipVertical(), flipVertical.getValue());

        // Unrelated mutations: no additional notification expected.
        model.setOffset(9.0, 9.0);
        model.flipHorizontal();
        model.setRotateInRadians(6.0);
        model.setZoom(9.0, 9.0);
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        listenerRef.unregister();
        model.flipVertical();
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetFlipVerticalMutate() {
        MutableProperty<Boolean> flipV = view.flipVertical();
        Runnable listener = mock(Runnable.class);
        flipV.addChangeListener(listener);

        flipV.setValue(true);
        verify(listener).run();
        assertEquals(true, model.isFlipVertical());
        assertEquals(true, flipV.getValue());
    }

    /**
     * Test of getZoomToFit method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetZoomToFit() {
        PropertySource<Set<ZoomToFitOption>> zoomToFit = view.zoomToFit();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = zoomToFit.addChangeListener(listener);

        model.setZoomToFit(false, false);
        verify(listener).run();
        assertEquals(model.getZoomToFitOptions(), zoomToFit.getValue());

        // Clearing zoom-to-fit is itself a change of this property (second notification).
        model.clearZoomToFit();
        verify(listener, times(2)).run();
        assertEquals(model.getZoomToFitOptions(), zoomToFit.getValue());

        // Unrelated mutations: no additional notification expected.
        model.setOffset(9.0, 9.0);
        model.flipVertical();
        model.flipHorizontal();
        model.setRotateInRadians(6.0);
        model.setZoom(9.0, 9.0);

        listenerRef.unregister();
        model.setZoomToFit(true, true);
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetZoomToFitMutate() {
        // Cover empty, partial, full option sets and null (null clears zoom-to-fit).
        Set<ZoomToFitOption> value1 = EnumSet.noneOf(ZoomToFitOption.class);
        Set<ZoomToFitOption> value2 = EnumSet.of(
                ZoomToFitOption.FIT_HEIGHT, ZoomToFitOption.KEEP_ASPECT_RATIO);
        Set<ZoomToFitOption> value3 = EnumSet.allOf(ZoomToFitOption.class);

        for (Set<ZoomToFitOption> expected: Arrays.asList(value1, value2, value3, null)) {
            MutableProperty<Set<ZoomToFitOption>> zoomToFit = view.zoomToFit();
            Runnable listener = mock(Runnable.class);
            zoomToFit.addChangeListener(listener);

            zoomToFit.setValue(expected);
            verify(listener).run();
            assertEquals(expected, model.getZoomToFitOptions());
            assertEquals(expected, zoomToFit.getValue());
        }
    }

    /**
     * Test of getTransformations method, of class BasicTransformationProperty.
     */
    @Test
    public void testGetTransformations() {
        PropertySource<BasicImageTransformations> transformation = view.transformations();
        Runnable listener = mock(Runnable.class);
        ListenerRef listenerRef = transformation.addChangeListener(listener);

        // The aggregate transformations property fires on EVERY component change.
        model.flipVertical();
        verify(listener).run();
        assertEquals(model.getTransformations(), transformation.getValue());

        model.flipHorizontal();
        verify(listener, times(2)).run();
        assertEquals(model.getTransformations(), transformation.getValue());

        model.setOffset(3.0, 3.0);
        verify(listener, times(3)).run();
        assertEquals(model.getTransformations(), transformation.getValue());

        model.setRotateInRadians(3.0);
        verify(listener, times(4)).run();
        assertEquals(model.getTransformations(), transformation.getValue());

        model.setZoom(3.0, 3.0);
        verify(listener, times(5)).run();
        assertEquals(model.getTransformations(), transformation.getValue());

        // Zoom-to-fit changes are not part of BasicImageTransformations: no notification expected.
        model.setZoomToFit(false, false);
        model.clearZoomToFit();

        listenerRef.unregister();
        model.flipVertical();
        verifyNoMoreInteractions(listener);
    }

    @Test
    public void testGetTransformationsMutate() {
        MutableProperty<BasicImageTransformations> transformations = view.transformations();
        Runnable listener = mock(Runnable.class);
        transformations.addChangeListener(listener);

        BasicImageTransformations expected = BasicImageTransformations.newRotateTransformation(3.0);
        transformations.setValue(expected);
        verify(listener).run();
        assertEquals(expected, model.getTransformations());
        assertEquals(expected, transformations.getValue());
    }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInspection.reference; import com.intellij.codeInspection.BatchSuppressManager; import com.intellij.codeInspection.InspectionProfileEntry; import com.intellij.codeInspection.InspectionsBundle; import com.intellij.codeInspection.SuppressionUtil; import com.intellij.codeInspection.deadCode.UnusedDeclarationInspectionBase; import com.intellij.codeInspection.ex.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.psi.*; import com.intellij.psi.javadoc.PsiDocComment; import com.intellij.psi.javadoc.PsiDocTag; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.util.IncorrectOperationException; import gnu.trove.THashMap; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; /** * @author anna * Date: 20-Dec-2007 */ public class RefJavaManagerImpl extends RefJavaManager { private static final Logger LOG = Logger.getInstance("#" + RefJavaManagerImpl.class.getName()); private PsiMethod myAppMainPattern; private PsiMethod myAppPremainPattern; private PsiMethod myAppAgentmainPattern; private PsiClass myApplet; private PsiClass myServlet; private RefPackage myDefaultPackage; private THashMap<String, RefPackage> myPackages; private final 
RefManagerImpl myRefManager; private PsiElementVisitor myProjectIterator; private EntryPointsManager myEntryPointsManager; public RefJavaManagerImpl(@NotNull RefManagerImpl manager) { myRefManager = manager; final Project project = manager.getProject(); final PsiManager psiManager = PsiManager.getInstance(project); PsiElementFactory factory = JavaPsiFacade.getInstance(psiManager.getProject()).getElementFactory(); try { myAppMainPattern = factory.createMethodFromText("void main(String[] args);", null); myAppPremainPattern = factory.createMethodFromText("void premain(String[] args, java.lang.instrument.Instrumentation i);", null); myAppAgentmainPattern = factory.createMethodFromText("void agentmain(String[] args, java.lang.instrument.Instrumentation i);", null); } catch (IncorrectOperationException e) { LOG.error(e); } myApplet = JavaPsiFacade.getInstance(psiManager.getProject()).findClass("java.applet.Applet", GlobalSearchScope.allScope(project)); myServlet = JavaPsiFacade.getInstance(psiManager.getProject()).findClass("javax.servlet.Servlet", GlobalSearchScope.allScope(project)); } @Override public RefPackage getPackage(String packageName) { if (myPackages == null) { myPackages = new THashMap<String, RefPackage>(); } RefPackage refPackage = myPackages.get(packageName); if (refPackage == null) { refPackage = new RefPackageImpl(packageName, myRefManager); myPackages.put(packageName, refPackage); int dotIndex = packageName.lastIndexOf('.'); if (dotIndex >= 0) { ((RefPackageImpl)getPackage(packageName.substring(0, dotIndex))).add(refPackage); } else { ((RefProjectImpl)myRefManager.getRefProject()).add(refPackage); } } return refPackage; } public boolean isEntryPoint(final RefElement element) { UnusedDeclarationInspectionBase tool = getDeadCodeTool(element); return tool != null && tool.isEntryPoint(element); } @Nullable private UnusedDeclarationInspectionBase getDeadCodeTool(RefElement element) { PsiFile file = ((RefElementImpl)element).getContainingFile(); if (file == 
// NOTE(review): this chunk opens mid-method — the fragment below is the tail of a
// method whose declaration lies before this view; it is kept verbatim.
null) return null; return getDeadCodeTool(file); }

  // Per-PsiFile cache of the enabled "unused declaration" inspection tool.
  // The Ref wrapper allows a null result (tool absent/disabled) to be cached too.
  private static final UserDataCache<Ref<UnusedDeclarationInspectionBase>, PsiFile, RefManagerImpl> DEAD_CODE_TOOL =
    new UserDataCache<Ref<UnusedDeclarationInspectionBase>, PsiFile, RefManagerImpl>("DEAD_CODE_TOOL") {
    @Override
    protected Ref<UnusedDeclarationInspectionBase> compute(PsiFile file, RefManagerImpl refManager) {
      // Look up the tool registered under UnusedDeclarationInspectionBase.SHORT_NAME in the current context.
      Tools tools = ((GlobalInspectionContextBase)refManager.getContext()).getTools().get(UnusedDeclarationInspectionBase.SHORT_NAME);
      InspectionToolWrapper toolWrapper = tools == null ? null : tools.getEnabledTool(file);
      InspectionProfileEntry tool = toolWrapper == null ? null : toolWrapper.getTool();
      return Ref.create(tool instanceof UnusedDeclarationInspectionBase ? (UnusedDeclarationInspectionBase)tool : null);
    }
  };

  // Resolves the dead-code tool for the file containing the given element, or null when unavailable.
  @Nullable
  private UnusedDeclarationInspectionBase getDeadCodeTool(PsiElement element) {
    PsiFile file = element.getContainingFile();
    return file != null ? DEAD_CODE_TOOL.get(file, myRefManager).get() : null;
  }

  // Lazily creates the synthetic "default package" node (localized label from InspectionsBundle).
  @Override
  public RefPackage getDefaultPackage() {
    if (myDefaultPackage == null) {
      myDefaultPackage = getPackage(InspectionsBundle.message("inspection.reference.default.package"));
    }
    return myDefaultPackage;
  }

  // Pattern accessors: pre-built PSI templates used elsewhere to recognize well-known entry points.
  @Override
  public PsiMethod getAppMainPattern() {
    return myAppMainPattern;
  }

  @Override
  public PsiMethod getAppPremainPattern() {
    return myAppPremainPattern;
  }

  @Override
  public PsiMethod getAppAgentmainPattern() {
    return myAppAgentmainPattern;
  }

  @Override
  public PsiClass getApplet() {
    return myApplet;
  }

  @Override
  public PsiClass getServlet() {
    return myServlet;
  }

  // Returns (creating and caching on first use) the RefParameter for the index-th
  // parameter of a method; only valid while references may still be created.
  @Override
  public RefParameter getParameterReference(final PsiParameter param, final int index) {
    LOG.assertTrue(myRefManager.isValidPointForReference(), "References may become invalid after process is finished");
    return myRefManager.getFromRefTableOrCache(param, new NullableFactory<RefParameter>() {
      @Nullable
      @Override
      public RefParameter create() {
        RefParameter ref = new RefParameterImpl(param, index, myRefManager);
        ((RefParameterImpl)ref).initialize();
        return ref;
      }
    });
  }

  // Visits all known packages, then the implicit default constructors of visited classes.
  @Override
  public void iterate(@NotNull final RefVisitor visitor) {
    if (myPackages != null) {
      for (RefPackage refPackage : myPackages.values()) {
        refPackage.accept(visitor);
      }
    }
    for (RefElement refElement : myRefManager.getSortedElements()) {
      if (refElement instanceof RefClass) {
        RefClass refClass = (RefClass)refElement;
        RefMethod refDefaultConstructor = refClass.getDefaultConstructor();
        // Only compiler-generated (implicit) constructors are visited here; explicit
        // constructors are reached through the regular element iteration.
        if (refDefaultConstructor instanceof RefImplicitConstructor) {
          refClass.getDefaultConstructor().accept(visitor);
        }
      }
    }
  }

  // Drops all cached state between inspection runs; disposes the entry points manager.
  @Override
  public void cleanup() {
    if (myEntryPointsManager != null) {
      Disposer.dispose(myEntryPointsManager);
      myEntryPointsManager = null;
    }
    myPackages = null;
    myApplet = null;
    myAppMainPattern = null;
    myAppPremainPattern = null;
    myAppAgentmainPattern = null;
    myServlet = null;
    myDefaultPackage = null;
    myProjectIterator = null;
  }

  // When a method is removed, its parameter references must be removed with it.
  @Override
  public void removeReference(@NotNull final RefElement refElement) {
    if (refElement instanceof RefMethod) {
      RefMethod refMethod = (RefMethod)refElement;
      RefParameter[] params = refMethod.getParameters();
      for (RefParameter param : params) {
        myRefManager.removeReference(param);
      }
    }
  }

  // Factory for Java-specific RefElement implementations. Methods/fields require a
  // RefClass for their containing class; unsupported element kinds yield null.
  @Override
  @Nullable
  public RefElement createRefElement(final PsiElement elem) {
    if (elem instanceof PsiClass) {
      return new RefClassImpl((PsiClass)elem, myRefManager);
    }
    else if (elem instanceof PsiMethod) {
      final PsiMethod method = (PsiMethod)elem;
      final RefElement ref = myRefManager.getReference(method.getContainingClass(), true);
      if (ref instanceof RefClass) {
        return new RefMethodImpl((RefClass)ref, method, myRefManager);
      }
    }
    else if (elem instanceof PsiField) {
      final PsiField field = (PsiField)elem;
      final RefElement ref = myRefManager.getReference(field.getContainingClass(), true);
      if (ref instanceof RefClass) {
        return new RefFieldImpl((RefClass)ref, field, myRefManager);
      }
    }
    else if (elem instanceof PsiJavaFile) {
      return new RefJavaFileImpl((PsiJavaFile)elem, myRefManager);
    }
    return null;
  }

  // Deserialization: resolves a ref entity from its external (string) name for a given kind tag.
  @Override
  @Nullable
  public RefEntity getReference(final String type, final String fqName) {
    if (METHOD.equals(type)) {
      return RefMethodImpl.methodFromExternalName(myRefManager, fqName);
    }
    else if (CLASS.equals(type)) {
      return RefClassImpl.classFromExternalName(myRefManager, fqName);
    }
    else if (FIELD.equals(type)) {
      return RefFieldImpl.fieldFromExternalName(myRefManager, fqName);
    }
    else if (PARAMETER.equals(type)) {
      return RefParameterImpl.parameterFromExternalName(myRefManager, fqName);
    }
    else if (PACKAGE.equals(type)) {
      return RefPackageImpl.packageFromFQName(myRefManager, fqName);
    }
    return null;
  }

  // Serialization counterpart of getReference(): maps an entity to its kind tag.
  @Override
  @Nullable
  public String getType(final RefEntity ref) {
    if (ref instanceof RefMethod) {
      return METHOD;
    }
    else if (ref instanceof RefClass) {
      return CLASS;
    }
    else if (ref instanceof RefField) {
      return FIELD;
    }
    else if (ref instanceof RefParameter) {
      return PARAMETER;
    }
    else if (ref instanceof RefPackage) {
      return PACKAGE;
    }
    return null;
  }

  // Implicit constructors are reported against their owner class.
  @NotNull
  @Override
  public RefEntity getRefinedElement(@NotNull final RefEntity ref) {
    if (ref instanceof RefImplicitConstructor) {
      return ((RefImplicitConstructor)ref).getOwnerClass();
    }
    return ref;
  }

  // Feeds an element through the (lazily created) Java reference-building visitor.
  @Override
  public void visitElement(final PsiElement element) {
    if (myProjectIterator == null) {
      myProjectIterator = new MyJavaElementVisitor();
    }
    element.accept(myProjectIterator);
  }

  // Groups results by package name; non-Java files have no group.
  @Override
  @Nullable
  public String getGroupName(final RefEntity entity) {
    if (entity instanceof RefFile && !(entity instanceof RefJavaFileImpl)) return null;
    return RefJavaUtil.getInstance().getPackageName(entity);
  }

  // Type parameters are never part of the inspection scope.
  @Override
  public boolean belongsToScope(final PsiElement psiElement) {
    return !(psiElement instanceof PsiTypeParameter);
  }

  // Export: records the containing package of the exported element in the result XML.
  @Override
  public void export(@NotNull final RefEntity refEntity, @NotNull final Element element) {
    if (refEntity instanceof RefElement) {
      final SmartPsiElementPointer pointer = ((RefElement)refEntity).getPointer();
      if (pointer != null) {
        final PsiFile psiFile = pointer.getContainingFile();
        if (psiFile instanceof PsiJavaFile) {
          appendPackageElement(element, ((PsiJavaFile)psiFile).getPackageName());
        }
      }
    }
  }

  // Registers framework-implied entry points (annotation types, their methods, and enum
  // classes) right after a ref element is initialized; skipped for offline views.
  @Override
  public void onEntityInitialized(RefElement refElement, PsiElement psiElement) {
    if (myRefManager.isOfflineView()) return;
    if (isEntryPoint(refElement)) {
      getEntryPointsManager().addEntryPoint(refElement, false);
    }
    if (psiElement instanceof PsiClass) {
      PsiClass psiClass = (PsiClass)psiElement;
      EntryPointsManager entryPointsManager = getEntryPointsManager();
      if (psiClass.isAnnotationType()){
        entryPointsManager.addEntryPoint(refElement, false);
        for (PsiMethod psiMethod : psiClass.getMethods()) {
          entryPointsManager.addEntryPoint(myRefManager.getReference(psiMethod), false);
        }
      }
      else if (psiClass.isEnum()) {
        entryPointsManager.addEntryPoint(refElement, false);
      }
    }
  }

  // Writes a <package> child element; an empty name is exported as the localized "default" label.
  private static void appendPackageElement(final Element element, final String packageName) {
    final Element packageElement = new Element("package");
    packageElement.addContent(packageName.isEmpty() ? InspectionsBundle.message("inspection.export.results.default") : packageName);
    element.addContent(packageElement);
  }

  // Lazily creates a UI-less entry points manager seeded with the project's persistent entries.
  @Override
  public EntryPointsManager getEntryPointsManager() {
    if (myEntryPointsManager == null) {
      final Project project = myRefManager.getProject();
      myEntryPointsManager = new EntryPointsManagerBase(project) {
        @Override
        public void configureAnnotations() {
        }

        @Override
        public JButton createConfigureAnnotationsBtn() {
          return null;
        }
      };
      ((EntryPointsManagerBase)myEntryPointsManager).addAllPersistentEntries(EntryPointsManagerBase.getInstance(project));
    }
    return myEntryPointsManager;
  }

  // PSI visitor that builds the reference graph: class/method/field/parameter references,
  // type references from generics and instanceof/this expressions, and inspection
  // suppressions from javadoc tags and @SuppressWarnings annotations.
  private class MyJavaElementVisitor extends JavaElementVisitor {
    private final RefJavaUtil myRefUtil;

    public MyJavaElementVisitor() {
      myRefUtil = RefJavaUtil.getInstance();
    }

    @Override
    public void visitReferenceExpression(PsiReferenceExpression expression) {
      visitElement(expression);
    }

    // Intentionally empty — presumably plain code references are accounted for by the
    // owning declarations' buildReferences() calls; confirm before adding logic here.
    @Override
    public void visitReferenceElement(PsiJavaCodeReferenceElement reference) {
    }

    // Records type references contributed by generic argument lists, including the
    // extends-bounds of type parameters (attributed to the type parameter's owner).
    @Override
    public void visitReferenceParameterList(final PsiReferenceParameterList list) {
      super.visitReferenceParameterList(list);
      final PsiMember member = PsiTreeUtil.getParentOfType(list, PsiMember.class);
      if (member instanceof PsiTypeParameter) {
        final PsiMember owner = ((PsiTypeParameter)member).getOwner();
        if (owner != null) {
          for (PsiClassType type : ((PsiTypeParameter)member).getExtendsListTypes()) {
            myRefUtil.addTypeReference(owner, type, myRefManager);
          }
        }
      }
      final PsiType[] typeArguments = list.getTypeArguments();
      for (PsiType type : typeArguments) {
        myRefUtil.addTypeReference(member, type, myRefManager);
      }
    }

    // Type parameters are excluded from the graph (see belongsToScope above).
    @Override
    public void visitClass(PsiClass aClass) {
      if (!(aClass instanceof PsiTypeParameter)) {
        super.visitClass(aClass);
        RefElement refClass = myRefManager.getReference(aClass);
        if (refClass != null) {
          ((RefClassImpl)refClass).buildReferences();
        }
      }
    }

    @Override
    public void visitMethod(final PsiMethod method) {
      super.visitMethod(method);
      final RefElement refElement = myRefManager.getReference(method);
      if (refElement instanceof RefMethodImpl) {
        ((RefMethodImpl)refElement).buildReferences();
      }
    }

    @Override
    public void visitField(final PsiField field) {
      super.visitField(field);
      final RefElement refElement = myRefManager.getReference(field);
      if (refElement instanceof RefFieldImpl) {
        ((RefFieldImpl)refElement).buildReferences();
      }
    }

    // Collects suppressions written as javadoc tags (e.g. the noinspection tag).
    @Override
    public void visitDocComment(PsiDocComment comment) {
      super.visitDocComment(comment);
      final PsiDocTag[] tags = comment.getTags();
      for (PsiDocTag tag : tags) {
        if (Comparing.strEqual(tag.getName(), SuppressionUtil.SUPPRESS_INSPECTIONS_TAG_NAME)) {
          final PsiElement[] dataElements = tag.getDataElements();
          if (dataElements != null && dataElements.length > 0) {
            final PsiModifierListOwner listOwner = PsiTreeUtil.getParentOfType(comment, PsiModifierListOwner.class);
            if (listOwner != null) {
              final RefElementImpl element = (RefElementImpl)myRefManager.getReference(listOwner);
              if (element != null) {
                String suppression = "";
                for (PsiElement dataElement : dataElements) {
                  // NOTE(review): the accumulated string keeps a leading "," here, whereas
                  // visitAnnotation below strips it via substring(1) — presumably tolerated
                  // by addSuppression; confirm before unifying.
                  suppression += "," + dataElement.getText();
                }
                element.addSuppression(suppression);
              }
            }
          }
        }
      }
    }

    // Collects suppressions from @SuppressWarnings-style annotations.
    @Override
    public void visitAnnotation(PsiAnnotation annotation) {
      super.visitAnnotation(annotation);
      if (Comparing.strEqual(annotation.getQualifiedName(), BatchSuppressManager.SUPPRESS_INSPECTIONS_ANNOTATION_NAME)) {
        final PsiModifierListOwner listOwner = PsiTreeUtil.getParentOfType(annotation, PsiModifierListOwner.class);
        if (listOwner != null) {
          final RefElementImpl element = (RefElementImpl)myRefManager.getReference(listOwner);
          if (element != null) {
            StringBuilder buf = new StringBuilder();
            final PsiNameValuePair[] nameValuePairs = annotation.getParameterList().getAttributes();
            for (PsiNameValuePair nameValuePair : nameValuePairs) {
              // Strip braces and quotes from array/string attribute values.
              buf.append(",").append(nameValuePair.getText().replaceAll("[{}\"\"]", ""));
            }
            if (buf.length() > 0) {
              // substring(1) drops the leading comma added in the loop.
              element.addSuppression(buf.substring(1));
            }
          }
        }
      }
    }

    // Every variable contributes a reference to its declared type; parameters also
    // build their own reference sets.
    @Override
    public void visitVariable(PsiVariable variable) {
      super.visitVariable(variable);
      myRefUtil.addTypeReference(variable, variable.getType(), myRefManager);
      if (variable instanceof PsiParameter) {
        final RefElement reference = myRefManager.getReference(variable);
        if (reference instanceof RefParameterImpl) {
          ((RefParameterImpl)reference).buildReferences();
        }
      }
    }

    @Override
    public void visitInstanceOfExpression(PsiInstanceOfExpression expression) {
      super.visitInstanceOfExpression(expression);
      final PsiTypeElement typeElement = expression.getCheckType();
      if (typeElement != null) {
        myRefUtil.addTypeReference(expression, typeElement.getType(), myRefManager);
      }
    }

    // Qualified this-expressions (Outer.this) add an instance reference from the
    // qualifier's class to the class owning the expression.
    @Override
    public void visitThisExpression(PsiThisExpression expression) {
      super.visitThisExpression(expression);
      final PsiJavaCodeReferenceElement qualifier = expression.getQualifier();
      if (qualifier != null) {
        myRefUtil.addTypeReference(expression, expression.getType(), myRefManager);
        RefClass ownerClass = myRefUtil.getOwnerClass(myRefManager, expression);
        if (ownerClass != null) {
          RefClassImpl refClass = (RefClassImpl)myRefManager.getReference(qualifier.resolve());
          if (refClass != null) {
            refClass.addInstanceReference(ownerClass);
          }
        }
      }
    }
  }
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org)
 * Copyright (C) 2011-2012 Eugene Fradkin (eugene.fradkin@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.ext.mysql.tools;

import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.*;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.ext.mysql.MySQLMessages;
import org.jkiss.dbeaver.ext.mysql.model.MySQLCatalog;
import org.jkiss.dbeaver.ext.mysql.model.MySQLDataSource;
import org.jkiss.dbeaver.ext.mysql.model.MySQLTableBase;
import org.jkiss.dbeaver.model.DBIcon;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.runtime.AbstractJob;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.struct.DBSObject;
import org.jkiss.dbeaver.runtime.DBWorkbench;
import org.jkiss.dbeaver.ui.DBeaverIcons;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.controls.CustomSashForm;

import java.util.*;
import java.util.List;

/**
 * Wizard page for the MySQL export tool that lets the user pick which
 * schemas (catalogs) and which of their tables/views to export.
 */
class MySQLExportWizardPageObjects extends MySQLWizardPageSettings<MySQLExportWizard> {

    private Table catalogTable;          // upper table: one row per catalog, checkbox-selectable
    private Table tablesTable;           // lower table: tables/views of the selected catalog
    // Per-catalog subset of checked tables. A catalog absent from this map means
    // "all of its tables" (see updateCheckedTables / loadTables).
    private Map<MySQLCatalog, Set<MySQLTableBase>> checkedObjects = new HashMap<>();
    private MySQLCatalog curCatalog;     // catalog currently shown in tablesTable

    protected MySQLExportWizardPageObjects(MySQLExportWizard wizard) {
        super(wizard, "Schemas/tables");
        setTitle("Choose objects to export");
        setDescription("Schemas/tables/views which will be exported");
    }

    @Override
    public boolean isPageComplete() {
        return super.isPageComplete();
    }

    /**
     * Builds the two-pane UI (catalogs on top, tables below), pre-checks the
     * objects the wizard was launched with, and loads the first active catalog.
     */
    @Override
    public void createControl(Composite parent) {
        Composite composite = UIUtils.createPlaceholder(parent, 1);
        Group objectsGroup = UIUtils.createControlGroup(composite, MySQLMessages.tools_db_export_wizard_page_settings_group_objects, 1, GridData.FILL_HORIZONTAL, 0);
        objectsGroup.setLayoutData(new GridData(GridData.FILL_BOTH));
        SashForm sash = new CustomSashForm(objectsGroup, SWT.VERTICAL);
        sash.setLayoutData(new GridData(GridData.FILL_BOTH));
        {
            // Catalog (schema) list with check boxes.
            Composite catPanel = UIUtils.createPlaceholder(sash, 1);
            catPanel.setLayoutData(new GridData(GridData.FILL_BOTH));
            catalogTable = new Table(catPanel, SWT.BORDER | SWT.CHECK);
            catalogTable.addListener(SWT.Selection, new Listener() {
                public void handleEvent(Event event) {
                    TableItem item = (TableItem) event.item;
                    if (item != null) {
                        MySQLCatalog catalog = (MySQLCatalog) item.getData();
                        if (event.detail == SWT.CHECK) {
                            // Checking/unchecking a catalog selects its row and resets
                            // any table subset to "all tables".
                            catalogTable.select(catalogTable.indexOf(item));
                            checkedObjects.remove(catalog);
                        }
                        loadTables(catalog);
                        updateState();
                    }
                }
            });
            GridData gd = new GridData(GridData.FILL_BOTH);
            gd.heightHint = 50;
            catalogTable.setLayoutData(gd);
            Composite buttonsPanel = UIUtils.createPlaceholder(catPanel, 3, 5);
            buttonsPanel.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
            new Label(buttonsPanel, SWT.NONE).setLayoutData(new GridData(GridData.GRAB_HORIZONTAL));
            createCheckButtons(buttonsPanel, catalogTable);
        }

        final Button exportViewsCheck;
        {
            // Table/view list for the currently selected catalog.
            Composite tablesPanel = UIUtils.createPlaceholder(sash, 1);
            tablesPanel.setLayoutData(new GridData(GridData.FILL_BOTH));
            tablesTable = new Table(tablesPanel, SWT.BORDER | SWT.CHECK);
            GridData gd = new GridData(GridData.FILL_BOTH);
            gd.heightHint = 50;
            tablesTable.setLayoutData(gd);
            tablesTable.addListener(SWT.Selection, new Listener() {
                public void handleEvent(Event event) {
                    if (event.detail == SWT.CHECK) {
                        updateCheckedTables();
                        updateState();
                    }
                }
            });
            Composite buttonsPanel = UIUtils.createPlaceholder(tablesPanel, 3, 5);
            buttonsPanel.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
            exportViewsCheck = UIUtils.createCheckbox(buttonsPanel, "Show views", false);
            exportViewsCheck.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    // Toggling views re-populates the table list for the current catalog.
                    wizard.showViews = exportViewsCheck.getSelection();
                    loadTables(null);
                }
            });
            exportViewsCheck.setLayoutData(new GridData(GridData.GRAB_HORIZONTAL));
            createCheckButtons(buttonsPanel, tablesTable);
        }

        // Derive the data source and the initially-checked catalogs/tables from the
        // objects the wizard was invoked on.
        MySQLDataSource dataSource = null;
        Set<MySQLCatalog> activeCatalogs = new LinkedHashSet<>();
        for (DBSObject object : wizard.getDatabaseObjects()) {
            if (object instanceof MySQLCatalog) {
                activeCatalogs.add((MySQLCatalog) object);
                dataSource = ((MySQLCatalog) object).getDataSource();
            } else if (object instanceof MySQLTableBase) {
                MySQLCatalog catalog = ((MySQLTableBase) object).getContainer();
                dataSource = catalog.getDataSource();
                activeCatalogs.add(catalog);
                Set<MySQLTableBase> tables = checkedObjects.get(catalog);
                if (tables == null) {
                    tables = new HashSet<>();
                    checkedObjects.put(catalog, tables);
                }
                tables.add((MySQLTableBase) object);
                if (((MySQLTableBase) object).isView()) {
                    // A pre-selected view forces the "Show views" mode on.
                    wizard.showViews = true;
                    exportViewsCheck.setSelection(true);
                }
            } else if (object.getDataSource() instanceof MySQLDataSource) {
                dataSource = (MySQLDataSource) object.getDataSource();
            }
        }

        if (dataSource != null) {
            boolean tablesLoaded = false;
            for (MySQLCatalog catalog : dataSource.getCatalogs()) {
                TableItem item = new TableItem(catalogTable, SWT.NONE);
                item.setImage(DBeaverIcons.getImage(DBIcon.TREE_DATABASE));
                item.setText(0, catalog.getName());
                item.setData(catalog);
                if (activeCatalogs.contains(catalog)) {
                    item.setChecked(true);
                    catalogTable.select(catalogTable.indexOf(item));
                    // Only the first active catalog's tables are shown initially.
                    if (!tablesLoaded) {
                        loadTables(catalog);
                        tablesLoaded = true;
                    }
                }
            }
        }
        updateState();
        setControl(composite);
    }

    /**
     * Syncs checkedObjects with the check boxes of the tables table. "All tables
     * checked" and "none checked" are both stored as an absent map entry.
     */
    private void updateCheckedTables() {
        Set<MySQLTableBase> checkedTables = new HashSet<>();
        TableItem[] tableItems = tablesTable.getItems();
        for (TableItem item : tableItems) {
            if (item.getChecked()) {
                checkedTables.add((MySQLTableBase) item.getData());
            }
        }
        // NOTE(review): getSelectionIndex() returns -1 when nothing is selected, which
        // would make getItem() throw — presumably a selection always exists when table
        // check events fire; confirm.
        TableItem catalogItem = catalogTable.getItem(catalogTable.getSelectionIndex());
        catalogItem.setChecked(!checkedTables.isEmpty());
        if (checkedTables.isEmpty() || checkedTables.size() == tableItems.length) {
            checkedObjects.remove(curCatalog);
        } else {
            checkedObjects.put(curCatalog, checkedTables);
        }
    }

    // Returns the checkbox state of the given catalog's row, or false if not present.
    private boolean isChecked(MySQLCatalog catalog) {
        for (TableItem item : catalogTable.getItems()) {
            if (item.getData() == catalog) {
                return item.getChecked();
            }
        }
        return false;
    }

    /**
     * Loads tables (and views, when enabled) of the given catalog into the lower
     * table asynchronously. Passing null reloads the current catalog.
     */
    private void loadTables(final MySQLCatalog catalog) {
        if (catalog != null) {
            curCatalog = catalog;
        }
        if (curCatalog == null) {
            return;
        }
        final boolean isCatalogChecked = isChecked(curCatalog);
        final Set<MySQLTableBase> checkedObjects = this.checkedObjects.get(curCatalog);
        new AbstractJob("Load '" + curCatalog.getName() + "' tables") {
            {
                setUser(true);
            }
            @Override
            protected IStatus run(DBRProgressMonitor monitor) {
                try {
                    final List<MySQLTableBase> objects = new ArrayList<>();
                    objects.addAll(curCatalog.getTables(monitor));
                    if (wizard.showViews) {
                        objects.addAll(curCatalog.getViews(monitor));
                    }
                    Collections.sort(objects, DBUtils.nameComparator());
                    // UI updates must happen on the display thread.
                    UIUtils.syncExec(new Runnable() {
                        @Override
                        public void run() {
                            tablesTable.removeAll();
                            for (MySQLTableBase table : objects) {
                                TableItem item = new TableItem(tablesTable, SWT.NONE);
                                item.setImage(DBeaverIcons.getImage(table.isView() ? DBIcon.TREE_VIEW : DBIcon.TREE_TABLE));
                                item.setText(0, table.getName());
                                item.setData(table);
                                // null subset means "all tables of this catalog".
                                item.setChecked(isCatalogChecked && (checkedObjects == null || checkedObjects.contains(table)));
                            }
                        }
                    });
                } catch (DBException e) {
                    DBWorkbench.getPlatformUI().showError("Table list", "Can't read table list", e);
                }
                return Status.OK_STATUS;
            }
        }.schedule();
    }

    // Publishes the chosen catalogs (with optional table subsets) to the wizard.
    public void saveState() {
        wizard.objects.clear();
        for (TableItem item : catalogTable.getItems()) {
            if (item.getChecked()) {
                MySQLCatalog catalog = (MySQLCatalog) item.getData();
                MySQLDatabaseExportInfo info = new MySQLDatabaseExportInfo(catalog, checkedObjects.get(catalog));
                wizard.objects.add(info);
            }
        }
    }

    // The page is complete once at least one catalog is checked (or a table subset exists).
    @Override
    protected void updateState() {
        boolean complete = false;
        if (!checkedObjects.isEmpty()) {
            complete = true;
        }
        for (TableItem item : catalogTable.getItems()) {
            if (item.getChecked()) {
                complete = true;
                break;
            }
        }
        setPageComplete(complete);
    }

}
package com.dm.wallpaper.board.fragments; import android.content.res.Configuration; import android.graphics.Color; import android.os.AsyncTask; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.support.v4.view.MenuItemCompat; import android.support.v4.view.ViewCompat; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.widget.DefaultItemAnimator; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.SearchView; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.inputmethod.EditorInfo; import android.widget.TextView; import com.danimahardhika.android.helpers.core.ColorHelper; import com.danimahardhika.android.helpers.core.DrawableHelper; import com.danimahardhika.android.helpers.core.SoftKeyboardHelper; import com.danimahardhika.android.helpers.core.ViewHelper; import com.dm.wallpaper.board.R; import com.dm.wallpaper.board.R2; import com.dm.wallpaper.board.adapters.WallpapersAdapter; import com.dm.wallpaper.board.databases.Database; import com.dm.wallpaper.board.items.Wallpaper; import com.dm.wallpaper.board.preferences.Preferences; import com.dm.wallpaper.board.utils.LogUtil; import com.dm.wallpaper.board.utils.listeners.WallpaperListener; import java.util.ArrayList; import java.util.List; import butterknife.BindView; import butterknife.ButterKnife; import static com.dm.wallpaper.board.helpers.ViewHelper.resetViewBottomPadding; /* * Wallpaper Board * * Copyright (c) 2017 Dani Mahardhika * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ public class WallpaperSearchFragment extends Fragment implements WallpaperListener { @BindView(R2.id.recyclerview) RecyclerView mRecyclerView; @BindView(R2.id.swipe) SwipeRefreshLayout mSwipe; @BindView(R2.id.search_result) TextView mSearchResult; private SearchView mSearchView; private WallpapersAdapter mAdapter; private AsyncTask<Void, Void, Boolean> mGetWallpapers; @Nullable @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_wallpapers, container, false); ButterKnife.bind(this, view); if (!Preferences.get(getActivity()).isShadowEnabled()) { View shadow = ButterKnife.findById(view, R.id.shadow); if (shadow != null) shadow.setVisibility(View.GONE); } return view; } @Override public void onActivityCreated(@Nullable Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); setHasOptionsMenu(true); ViewCompat.setNestedScrollingEnabled(mRecyclerView, false); resetViewBottomPadding(mRecyclerView, false); mSwipe.setEnabled(false); mRecyclerView.setItemAnimator(new DefaultItemAnimator()); mRecyclerView.setLayoutManager(new GridLayoutManager(getActivity(), getActivity().getResources().getInteger(R.integer.wallpapers_column_count))); mRecyclerView.setHasFixedSize(false); getWallpapers(); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.menu_wallpaper_search, menu); MenuItem search = menu.findItem(R.id.menu_search); int color = 
ColorHelper.getAttributeColor(getActivity(), R.attr.toolbar_icon); search.setIcon(DrawableHelper.getTintedDrawable(getActivity(), R.drawable.ic_toolbar_search, color)); mSearchView = (SearchView) MenuItemCompat.getActionView(search); mSearchView.setImeOptions(EditorInfo.IME_FLAG_NO_EXTRACT_UI | EditorInfo.IME_ACTION_SEARCH); mSearchView.setQueryHint(getActivity().getResources().getString(R.string.menu_search)); mSearchView.setMaxWidth(Integer.MAX_VALUE); MenuItemCompat.expandActionView(search); mSearchView.setIconifiedByDefault(false); mSearchView.clearFocus(); ViewHelper.setSearchViewTextColor(mSearchView, color); ViewHelper.setSearchViewBackgroundColor(mSearchView, Color.TRANSPARENT); ViewHelper.setSearchViewCloseIcon(mSearchView, R.drawable.ic_toolbar_close); ViewHelper.setSearchViewSearchIcon(mSearchView, null); mSearchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextChange(String string) { filterSearch(string); return true; } @Override public boolean onQueryTextSubmit(String string) { mSearchView.clearFocus(); return true; } }); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); ViewHelper.resetSpanCount(mRecyclerView, getActivity().getResources().getInteger( R.integer.wallpapers_column_count)); resetViewBottomPadding(mRecyclerView, true); } @Override public void onDestroy() { if (mGetWallpapers != null) mGetWallpapers.cancel(true); super.onDestroy(); } @Override public void onWallpaperSelected(int position) { if (mAdapter == null) return; if (position < 0 || position > mAdapter.getItemCount()) return; mRecyclerView.scrollToPosition(position); } private void filterSearch(String query) { try { mAdapter.search(query); if (mAdapter.getItemCount()==0) { String text = String.format(getActivity().getResources().getString( R.string.search_result_empty), query); mSearchResult.setText(text); mSearchResult.setVisibility(View.VISIBLE); } else 
mSearchResult.setVisibility(View.GONE); } catch (Exception e) { LogUtil.e(Log.getStackTraceString(e)); } } private void getWallpapers() { mGetWallpapers = new AsyncTask<Void, Void, Boolean>() { List<Wallpaper> wallpapers; @Override protected void onPreExecute() { super.onPreExecute(); wallpapers = new ArrayList<>(); } @Override protected Boolean doInBackground(Void... voids) { while (!isCancelled()) { try { Thread.sleep(1); Database database = new Database(getActivity()); wallpapers = database.getFilteredWallpapers(); return true; } catch (Exception e) { LogUtil.e(Log.getStackTraceString(e)); return false; } } return false; } @Override protected void onPostExecute(Boolean aBoolean) { super.onPostExecute(aBoolean); if (aBoolean) { mAdapter = new WallpapersAdapter(getActivity(), wallpapers, false, true); mRecyclerView.setAdapter(mAdapter); if (mSearchView != null) mSearchView.requestFocus(); SoftKeyboardHelper.openKeyboard(getActivity()); } } }.execute(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.data.input.impl.prefetch; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.io.CountingOutputStream; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.TrueFileFilter; import org.apache.druid.data.input.FiniteFirehoseFactory; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.InputSplit; import org.apache.druid.data.input.Row; import org.apache.druid.data.input.impl.CSVParseSpec; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.StringInputRowParser; import org.apache.druid.data.input.impl.TimestampSpec; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.RetryUtils; import org.apache.druid.java.util.common.StringUtils; import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.io.InputStream; 
import java.io.OutputStreamWriter; import java.io.Writer; import java.net.SocketException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeoutException; public class PrefetchableTextFilesFirehoseFactoryTest { private static long FILE_SIZE = -1; private static final StringInputRowParser parser = new StringInputRowParser( new CSVParseSpec( new TimestampSpec( "timestamp", "auto", null ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("timestamp", "a", "b")), new ArrayList<>(), new ArrayList<>() ), ",", Arrays.asList("timestamp", "a", "b"), false, 0 ), StandardCharsets.UTF_8.name() ); @ClassRule public static TemporaryFolder tempDir = new TemporaryFolder(); private static File TEST_DIR; @Rule public ExpectedException expectedException = ExpectedException.none(); @BeforeClass public static void setup() throws IOException { TEST_DIR = tempDir.newFolder(); for (int i = 0; i < 100; i++) { try ( CountingOutputStream cos = new CountingOutputStream( Files.newOutputStream(new File(TEST_DIR, "test_" + i).toPath()) ); Writer writer = new BufferedWriter(new OutputStreamWriter(cos, StandardCharsets.UTF_8)) ) { for (int j = 0; j < 100; j++) { final String a = StringUtils.format("%d,%03d,%03d\n", (20171220 + i), i, j); writer.write(a); } writer.flush(); // Every file size must be same if (FILE_SIZE == -1) { FILE_SIZE = cos.getCount(); } else { Assert.assertEquals(FILE_SIZE, cos.getCount()); } } } } private static void assertResult(List<Row> rows) { Assert.assertEquals(10000, rows.size()); rows.sort((r1, r2) -> { int c = r1.getTimestamp().compareTo(r2.getTimestamp()); if (c != 0) { return c; } c = Integer.valueOf(r1.getDimension("a").get(0)).compareTo(Integer.valueOf(r2.getDimension("a").get(0))); if (c != 0) { return c; } return 
Integer.valueOf(r1.getDimension("b").get(0)).compareTo(Integer.valueOf(r2.getDimension("b").get(0))); }); for (int i = 0; i < 100; i++) { for (int j = 0; j < 100; j++) { final Row row = rows.get(i * 100 + j); Assert.assertEquals(DateTimes.utc(20171220 + i), row.getTimestamp()); Assert.assertEquals(i, Integer.valueOf(row.getDimension("a").get(0)).intValue()); Assert.assertEquals(j, Integer.valueOf(row.getDimension("b").get(0)).intValue()); } } } private static void assertNumRemainingCacheFiles(File firehoseTmpDir, int expectedNumFiles) { final String[] files = firehoseTmpDir.list(); Assert.assertNotNull(files); Assert.assertEquals(expectedNumFiles, files.length); } private static File createFirehoseTmpDir(String dirPrefix) throws IOException { return Files.createTempDirectory(tempDir.getRoot().toPath(), dirPrefix).toFile(); } @Test public void testWithoutCacheAndFetch() throws IOException { final TestPrefetchableTextFilesFirehoseFactory factory = TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 0, 0); final List<Row> rows = new ArrayList<>(); final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch"); try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) { while (firehose.hasMore()) { rows.add(firehose.nextRow()); } } Assert.assertEquals(0, factory.getCacheManager().getTotalCachedBytes()); assertResult(rows); assertNumRemainingCacheFiles(firehoseTmpDir, 0); } @Test public void testWithoutCacheAndFetchAgainstConnectionReset() throws IOException { final TestPrefetchableTextFilesFirehoseFactory factory = TestPrefetchableTextFilesFirehoseFactory.withConnectionResets(TEST_DIR, 0, 0, 2); final List<Row> rows = new ArrayList<>(); final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch"); try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) { while (firehose.hasMore()) { rows.add(firehose.nextRow()); } } Assert.assertEquals(0, factory.getCacheManager().getTotalCachedBytes()); assertResult(rows); 
// Tail of the preceding test method (its opening lines are above this chunk).
assertNumRemainingCacheFiles(firehoseTmpDir, 0);
}

// Cache capacity 0: nothing may be cached after reading, fetch (2048) alone serves the data.
@Test
public void testWithoutCache() throws IOException
{
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 0, 2048);

  final List<Row> rows = new ArrayList<>();
  final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCache");
  try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }

  // With zero cache capacity the cache manager must remain empty.
  Assert.assertEquals(0, factory.getCacheManager().getTotalCachedBytes());
  assertResult(rows);
  assertNumRemainingCacheFiles(firehoseTmpDir, 0);
}

// Fetch capacity 0: data is still cached (capacity 2048), so two cache files remain on disk.
@Test
public void testWithZeroFetchCapacity() throws IOException
{
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 0);

  final List<Row> rows = new ArrayList<>();
  final File firehoseTmpDir = createFirehoseTmpDir("testWithZeroFetchCapacity");
  try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }

  assertResult(rows);
  assertNumRemainingCacheFiles(firehoseTmpDir, 2);
}

// Default factory: both caching and prefetching enabled (2048/2048).
@Test
public void testWithCacheAndFetch() throws IOException
{
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.of(TEST_DIR);

  final List<Row> rows = new ArrayList<>();
  final File firehoseTmpDir = createFirehoseTmpDir("testWithCacheAndFetch");
  try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }

  assertResult(rows);
  assertNumRemainingCacheFiles(firehoseTmpDir, 2);
}

// Cache (2048) larger than fetch (1024): both files still end up cached.
@Test
public void testWithLargeCacheAndSmallFetch() throws IOException
{
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 1024);

  final List<Row> rows = new ArrayList<>();
  final File firehoseTmpDir = createFirehoseTmpDir("testWithLargeCacheAndSmallFetch");
  try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }

  assertResult(rows);
  assertNumRemainingCacheFiles(firehoseTmpDir, 2);
}

// Cache (1024) smaller than fetch (2048): only one file fits in the cache.
@Test
public void testWithSmallCacheAndLargeFetch() throws IOException
{
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 1024, 2048);

  final List<Row> rows = new ArrayList<>();
  final File firehoseTmpDir = createFirehoseTmpDir("testWithSmallCacheAndLargeFetch");
  try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }

  assertResult(rows);
  assertNumRemainingCacheFiles(firehoseTmpDir, 1);
}

// One injected open failure is below the retry limit, so reading must still succeed.
@Test
public void testRetry() throws IOException
{
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 1);

  final List<Row> rows = new ArrayList<>();
  final File firehoseTmpDir = createFirehoseTmpDir("testRetry");
  try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
    while (firehose.hasMore()) {
      rows.add(firehose.nextRow());
    }
  }

  assertResult(rows);
  assertNumRemainingCacheFiles(firehoseTmpDir, 2);
}

// Five injected open failures exceed the retry limit (3); the fetch must fail with
// the injected exception wrapped in an ExecutionException.
@Test
public void testMaxRetry() throws IOException
{
  expectedException.expect(RuntimeException.class);
  expectedException.expectCause(CoreMatchers.instanceOf(ExecutionException.class));
  expectedException.expectMessage("Exception for retry test");

  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 5);

  try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testMaxRetry"))) {
    while (firehose.hasMore()) {
      firehose.nextRow();
    }
  }
}

// The factory sleeps 1000 ms per open while the fetch timeout is only 100 ms
// (see withSleepMillis below), so the fetch must time out.
@Test
public void testTimeout() throws IOException
{
  expectedException.expect(RuntimeException.class);
  expectedException.expectCause(CoreMatchers.instanceOf(TimeoutException.class));

  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.withSleepMillis(TEST_DIR, 1000);

  try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testTimeout"))) {
    while (firehose.hasMore()) {
      firehose.nextRow();
    }
  }
}

// Reconnecting with the same factory: from the second connect on, both files
// (FILE_SIZE * 2 bytes) must already be served from the cache.
@Test
public void testReconnectWithCacheAndPrefetch() throws IOException
{
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.of(TEST_DIR);
  final File firehoseTmpDir = createFirehoseTmpDir("testReconnectWithCacheAndPrefetch");

  for (int i = 0; i < 5; i++) {
    final List<Row> rows = new ArrayList<>();
    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
      if (i > 0) {
        Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
      }
      while (firehose.hasMore()) {
        rows.add(firehose.nextRow());
      }
    }
    assertResult(rows);
    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
  }
}

// Same as above but with prefetching disabled (fetch capacity 0): the cache alone
// must satisfy subsequent connects.
@Test
public void testReconnectWithCache() throws IOException
{
  final TestPrefetchableTextFilesFirehoseFactory factory =
      TestPrefetchableTextFilesFirehoseFactory.with(TEST_DIR, 2048, 0);
  final File firehoseTmpDir = createFirehoseTmpDir("testReconnectWithCache");

  for (int i = 0; i < 5; i++) {
    final List<Row> rows = new ArrayList<>();
    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
      if (i > 0) {
        Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
      }
      while (firehose.hasMore()) {
        rows.add(firehose.nextRow());
      }
    }
    assertResult(rows);
    assertNumRemainingCacheFiles(firehoseTmpDir, 2);
  }
}

/**
 * Test double for {@link PrefetchableTextFilesFirehoseFactory} reading local files.
 * Can inject failures: open-time IOExceptions (retry tests), per-open sleeps
 * (timeout test), and mid-read SocketExceptions (connection-reset tests).
 */
static class TestPrefetchableTextFilesFirehoseFactory extends PrefetchableTextFilesFirehoseFactory<File>
{
  private final long sleepMillis;         // sleep before each open; 0 disables
  private final File baseDir;             // directory containing the test files
  private int numOpenExceptions;          // remaining opens that throw IOException
  private int maxConnectionResets;        // max simulated resets per factory

  // Custom cache/fetch capacities with a generous fetch timeout.
  static TestPrefetchableTextFilesFirehoseFactory with(File baseDir, long cacheCapacity, long fetchCapacity)
  {
    return new TestPrefetchableTextFilesFirehoseFactory(
        baseDir,
        1024,
        cacheCapacity,
        fetchCapacity,
        60_000, // fetch timeout
        3,
        0,
        0,
        0
    );
  }

  // Default configuration: 2048-byte cache and fetch capacities, no injected failures.
  static TestPrefetchableTextFilesFirehoseFactory of(File baseDir)
  {
    return new TestPrefetchableTextFilesFirehoseFactory(
        baseDir,
        1024,
        2048,
        2048,
        3,
        0,
        0,
        0
    );
  }

  // First {@code count} calls to openObjectStream will throw IOException.
  static TestPrefetchableTextFilesFirehoseFactory withOpenExceptions(File baseDir, int count)
  {
    return new TestPrefetchableTextFilesFirehoseFactory(
        baseDir,
        1024,
        2048,
        2048,
        3,
        count,
        0,
        0
    );
  }

  // Streams will throw SocketException up to numConnectionResets times mid-read.
  static TestPrefetchableTextFilesFirehoseFactory withConnectionResets(
      File baseDir,
      long cacheCapacity,
      long fetchCapacity,
      int numConnectionResets
  )
  {
    return new TestPrefetchableTextFilesFirehoseFactory(
        baseDir,
        fetchCapacity / 2,
        cacheCapacity,
        fetchCapacity,
        3,
        0,
        numConnectionResets,
        0
    );
  }

  // Sleeps {@code ms} before each open; fetch timeout is a tight 100 ms so a
  // long sleep forces a TimeoutException.
  static TestPrefetchableTextFilesFirehoseFactory withSleepMillis(File baseDir, long ms)
  {
    return new TestPrefetchableTextFilesFirehoseFactory(
        baseDir,
        1024,
        2048,
        2048,
        100,
        3,
        0,
        0,
        ms
    );
  }

  // Upper bound of the total retry back-off, used as the default fetch timeout.
  private static long computeTimeout(int maxRetry)
  {
    // See RetryUtils.nextRetrySleepMillis()
    final double maxFuzzyMultiplier = 2.;
    return (long) Math.min(
        RetryUtils.MAX_SLEEP_MILLIS,
        RetryUtils.BASE_SLEEP_MILLIS * Math.pow(2, maxRetry - 1) * maxFuzzyMultiplier
    );
  }

  // Convenience constructor: derives the fetch timeout from the retry count.
  TestPrefetchableTextFilesFirehoseFactory(
      File baseDir,
      long prefetchTriggerThreshold,
      long maxCacheCapacityBytes,
      long maxFetchCapacityBytes,
      int maxRetry,
      int numOpenExceptions,
      int numConnectionResets,
      long sleepMillis
  )
  {
    this(
        baseDir,
        prefetchTriggerThreshold,
        maxCacheCapacityBytes,
        maxFetchCapacityBytes,
        computeTimeout(maxRetry),
        maxRetry,
        numOpenExceptions,
        numConnectionResets,
        sleepMillis
    );
  }

  TestPrefetchableTextFilesFirehoseFactory(
      File baseDir,
      long prefetchTriggerThreshold,
      long maxCacheCapacityBytes,
      long maxFetchCapacityBytes,
      long fetchTimeout,
      int maxRetry,
      int numOpenExceptions,
      int maxConnectionResets,
      long sleepMillis
  )
  {
    super(
        maxCacheCapacityBytes,
        maxFetchCapacityBytes,
        prefetchTriggerThreshold,
        fetchTimeout,
        maxRetry
    );
    this.numOpenExceptions = numOpenExceptions;
    this.maxConnectionResets = maxConnectionResets;
    this.sleepMillis = sleepMillis;
    this.baseDir = baseDir;
  }

  // Recursively lists every file under baseDir as the firehose's object set.
  @Override
  protected Collection<File> initObjects()
  {
    return FileUtils.listFiles(
        Preconditions.checkNotNull(baseDir).getAbsoluteFile(),
        TrueFileFilter.INSTANCE,
        TrueFileFilter.INSTANCE
    );
  }

  // Opens a file stream, first applying any configured failure injection
  // (IOException while numOpenExceptions > 0, then an optional sleep).
  @Override
  protected InputStream openObjectStream(File object) throws IOException
  {
    if (numOpenExceptions > 0) {
      numOpenExceptions--;
      throw new IOException("Exception for retry test");
    }
    if (sleepMillis > 0) {
      try {
        Thread.sleep(sleepMillis);
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
    }
    return maxConnectionResets > 0 ?
           new TestInputStream(FileUtils.openInputStream(object), maxConnectionResets) :
           FileUtils.openInputStream(object);
  }

  // Plain text files: no decompression wrapping needed.
  @Override
  protected InputStream wrapObjectStream(File object, InputStream stream)
  {
    return stream;
  }

  // Retry on any IOException (matches the injected open failures above).
  @Override
  protected Predicate<Throwable> getRetryCondition()
  {
    return e -> e instanceof IOException;
  }

  // Resume variant: same failure injection, then skips to the given offset.
  @Override
  protected InputStream openObjectStream(File object, long start) throws IOException
  {
    if (numOpenExceptions > 0) {
      numOpenExceptions--;
      throw new IOException("Exception for retry test");
    }
    if (sleepMillis > 0) {
      try {
        Thread.sleep(sleepMillis);
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
    }
    final InputStream in = FileUtils.openInputStream(object);
    // NOTE(review): skip() may skip fewer than 'start' bytes and the return
    // value is ignored here — acceptable for local files in a test, but worth confirming.
    in.skip(start);
    return maxConnectionResets > 0 ?
           new TestInputStream(in, maxConnectionResets) :
           in;
  }

  // Shared across all TestInputStreams of this factory so resets are counted
  // per factory, not per stream (a resumed stream continues the count).
  private int readCount;
  private int numConnectionResets;

  @Override
  public FiniteFirehoseFactory<StringInputRowParser, File> withSplit(InputSplit<File> split)
  {
    throw new UnsupportedOperationException();
  }

  /**
   * Delegating stream that throws a SocketException every
   * NUM_READ_COUNTS_BEFORE_ERROR reads until maxConnectionResets is reached,
   * simulating flaky network connections.
   */
  private class TestInputStream extends InputStream
  {
    private static final int NUM_READ_COUNTS_BEFORE_ERROR = 10;

    private final InputStream delegate;
    private final int maxConnectionResets;

    TestInputStream(
        InputStream delegate,
        int maxConnectionResets
    )
    {
      this.delegate = delegate;
      this.maxConnectionResets = maxConnectionResets;
    }

    @Override
    public int read() throws IOException
    {
      if (readCount++ % NUM_READ_COUNTS_BEFORE_ERROR == 0) {
        if (numConnectionResets++ < maxConnectionResets) {
          // Simulate connection reset
          throw new SocketException("Test Connection reset");
        }
      }
      return delegate.read();
    }

    @Override
    public int read(byte b[], int off, int len) throws IOException
    {
      if (readCount++ % NUM_READ_COUNTS_BEFORE_ERROR == 0) {
        if (numConnectionResets++ < maxConnectionResets) {
          // Simulate connection reset
          throw new SocketException("Test Connection reset");
        }
      }
      return delegate.read(b, off, len);
    }
  }
}
}
package de.rwthaachen.jobimservice.rest;

import de.rwthaachen.jobimservice.jbt.JoBimSqlConnector;
import de.rwthaachen.jobimservice.status.StatusMonitor;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import java.util.HashMap;
import java.util.Map;

/**
 * REST interface for StatusMonitor and JBT.
 *
 * Every endpoint takes a {@code backend} query parameter naming one of the
 * connectors registered in the constructor; unknown backends yield an empty
 * result ("" for plain text, "{}" / "[]" for JSON) rather than an error.
 */
@Path("/jbt")
public class RestInterface {

    // To include the experimental status monitor uncomment this line, the first line in the constructor
    // as well as the section under listBackendsJson()
    //private StatusMonitor statusMonitor;

    // Registered JBT backends, keyed by the name expected in the "backend" query parameter.
    private Map<String, JoBimSqlConnector> joBimBackendMap = new HashMap<>();

    /**
     * Constructor that creates StatusMonitor and JBT backends
     */
    public RestInterface() {
        //statusMonitor = new StatusMonitor();

        String config = "conf_mysql_wikipedia_stanford.xml";
        JoBimSqlConnector joBimSqlConnector = new JoBimSqlConnector();
        joBimSqlConnector.init(config);
        joBimBackendMap.put("mysql_wikipedia_stanford", joBimSqlConnector);

        config = "conf_web_wikipedia_trigram.xml";
        joBimSqlConnector = new JoBimSqlConnector();
        joBimSqlConnector.initWeb(config);
        joBimBackendMap.put("web_wikipedia_trigram", joBimSqlConnector);
    }

    /**
     * Resolves a backend by name.
     * Feel free to use getOrDefault here (but then we need to settle on a default).
     *
     * @param backend Backend name from the {@code backend} query parameter
     * @return The connector, or {@code null} if the name is unknown
     */
    private JoBimSqlConnector getBackend(String backend) {
        return joBimBackendMap.get(backend);
    }

    /**
     * Prints list of JBT backends
     * @return List of JBT backends, one name per line
     */
    @GET
    @Path("listBackends") //http://127.0.0.1:8080/jbt/listBackends
    @Produces(MediaType.TEXT_PLAIN)
    public String listBackends() {
        StringBuilder stringBuilder = new StringBuilder();
        for (String key : joBimBackendMap.keySet()) {
            stringBuilder.append(key);
            stringBuilder.append("\n");
        }
        return stringBuilder.toString();
    }

    /**
     * Prints list of JBT backends
     * @return List of JBT backends as a JSON array of names
     */
    @GET
    @Path("listBackendsJson") //http://127.0.0.1:8080/jbt/listBackendsJson
    @Produces(MediaType.APPLICATION_JSON)
    public String listBackendsJson() {
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("[");
        String delimiter = "";
        for (String key : joBimBackendMap.keySet()) {
            stringBuilder.append(delimiter).append("\"").append(key).append("\"");
            delimiter = ",";
        }
        stringBuilder.append("]");
        return stringBuilder.toString();
    }

    // //<editor-fold desc=".:: Status Reports ::.">
    // /**
    //  * Returns status report in plain text
    //  * @return Status report in plain text
    //  */
    // @GET
    // @Path("statusText") //http://127.0.0.1:8080/jbt/statusText
    // @Produces(MediaType.TEXT_PLAIN)
    // public String test() {
    //     return statusMonitor.reportPlaintext();
    // }
    //
    // /**
    //  * Returns status report in HTML
    //  * @return Status report in HTML
    //  */
    // @GET
    // @Path("status") //http://127.0.0.1:8080/jbt/status
    // @Produces(MediaType.TEXT_HTML)
    // public String status() {
    //     return statusMonitor.reportHtml();
    // }
    //
    // /**
    //  * Returns status report in JSON (via marshalling)
    //  * @return Status report in JSON
    //  */
    // @GET
    // @Path("statusJson") //http://127.0.0.1:8080/jbt/statusJson
    // @Produces(MediaType.APPLICATION_JSON)
    // public StatusMonitor.HealthReport statusJson() {
    //     return statusMonitor.reportObject();
    // }
    // //</editor-fold>

    //<editor-fold desc=".:: Distributional Semantics ::.">
    //BEWARE If you use a Stanford model you need to look up tagged terms, e.g. exceptionally#RB
    // To be able to use the REST interface they need to be URL encoded, e.g. exceptionally%23RB
    // Full example: http://127.0.0.1:8080/jbt/similarTerms?term=exceptionally%23RB&backend=mysql_wikipedia_stanford

    //<editor-fold desc="1) Counts">
    /**
     * Returns count for a term
     * @param term Term to count
     * @return Term count
     */
    @GET
    @Path("countTerm")
    @Produces(MediaType.TEXT_PLAIN)
    public String countTerm(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getCountTerm(term);
        }
        return "";
    }

    /**
     * Returns count for a term
     * @param term Term to count
     * @return Term count as JSON value
     */
    @GET
    @Path("countTermJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String countTermJson(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            // NOTE(review): assumes getCountTerm returns a bare number — if it can
            // return arbitrary text, the resulting JSON value would be unquoted; confirm.
            return "{\"count\":" + joBimSqlConnector.getCountTerm(term) + "}";
        }
        return "{}";
    }

    /**
     * Returns context count for a term
     * @param term Term whose contexts to count
     * @return Context count
     */
    @GET
    @Path("countContext")
    @Produces(MediaType.TEXT_PLAIN)
    public String countContext(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getCountContext(term);
        }
        return "";
    }

    /**
     * Returns context count for a term
     * @param term Term whose contexts to count
     * @return Context Count as JSON value
     */
    @GET
    @Path("countContextJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String countContextJson(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return "{\"count\":" + joBimSqlConnector.getCountContext(term) + "}";
        }
        return "{}";
    }
    //</editor-fold>

    //<editor-fold desc="2) Similar Terms">
    /**
     * Determines similar terms for term
     * @param term Term to find similar terms for
     * @return All similar terms
     */
    @GET
    @Path("similarTerms")
    @Produces(MediaType.TEXT_PLAIN)
    public String similarTerms(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getSimilarTerms(term);
        }
        return "";
    }

    /**
     * Converts a list returned by JBT into valid JSON according to RFC 7159
     * cmp. rfc-editor.org/rfc/rfc7159.txt
     * Surrounds each entry with quotes, escaping backslash and quote characters
     * so the output is always well-formed JSON. A blank/empty JBT list yields
     * {@code []} (previously this produced the invalid-looking {@code [""]}).
     *
     * @param jbtList List returned from JBT, e.g. {@code [a, b, c]}
     * @return Same list in JSON, e.g. {@code ["a","b","c"]}
     */
    public String convertJbtListToJson(String jbtList) {
        // Strip whitespace and the surrounding JBT brackets before splitting on ",".
        String stripped = jbtList == null ? "" : jbtList.replaceAll("[\\s\\[\\]]", "");
        if (stripped.isEmpty()) {
            // Bug fix: "".split(",") yields [""], which used to serialize as [""].
            return "[]";
        }
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("[");
        String delimiter = "";
        for (String i : stripped.split(",")) {
            // Escape characters that are illegal inside a JSON string literal.
            String escaped = i.replace("\\", "\\\\").replace("\"", "\\\"");
            stringBuilder.append(delimiter).append("\"").append(escaped).append("\"");
            delimiter = ",";
        }
        stringBuilder.append("]");
        return stringBuilder.toString();
    }

    /**
     * Determines similar terms for term
     * @param term Term to find similar terms for
     * @return All similar terms as JSON array
     */
    @GET
    @Path("similarTermsJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String similarTermsJson(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return convertJbtListToJson(joBimSqlConnector.getSimilarTerms(term));
        }
        return "[]";
    }

    /**
     * Determines n highest ranked similar terms for term
     * @param term Term to find similar terms for
     * @param n How many highest ranked similar terms are returned
     * @return Filtered similar terms
     */
    @GET
    @Path("similarTermsTopN")
    @Produces(MediaType.TEXT_PLAIN)
    public String similarTermsTopN(@QueryParam("term") String term, @QueryParam("n") int n, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getSimilarTerms(term, n);
        }
        return "";
    }

    /**
     * Determines n highest ranked similar terms for term
     * @param term Term to find similar terms for
     * @param n How many highest ranked similar terms are returned
     * @return Filtered similar terms as JSON array
     */
    @GET
    @Path("similarTermsTopNJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String similarTermsTopNJson(@QueryParam("term") String term, @QueryParam("n") int n, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return convertJbtListToJson(joBimSqlConnector.getSimilarTerms(term, n));
        }
        return "[]";
    }

    /**
     * Finds similar terms with thresholding (for score above 20 use 81.0)
     * @param term Term to find similar terms for
     * @param d Threshold
     * @return Filtered similar terms
     */
    @GET
    @Path("similarTermsThresholded")
    @Produces(MediaType.TEXT_PLAIN)
    public String similarTermsThresholded(@QueryParam("term") String term, @QueryParam("d") double d, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getSimilarTerms(term, d);
        }
        return "";
    }

    /**
     * Finds similar terms with thresholding (for score above 20 use 81.0)
     * @param term Term to find similar terms for
     * @param d Threshold
     * @return Filtered similar terms as JSON array
     */
    @GET
    @Path("similarTermsThresholdedJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String similarTermsThresholdedJson(@QueryParam("term") String term, @QueryParam("d") double d, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return convertJbtListToJson(joBimSqlConnector.getSimilarTerms(term, d));
        }
        return "[]";
    }

    /**
     * Performs similar term search for an entire sentence (words separated by spaces)
     * Note: will return empty lists for Stanford backend, because tokens are not tagged
     * @param sentence The sentence to analyze
     * @return Similar terms for each word
     */
    @GET
    @Path("similarTermsFromSentence")
    @Produces(MediaType.TEXT_PLAIN)
    public String similarTermsFromSentence(@QueryParam("sentence") String sentence, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getSimilarTermsFromSentence(sentence);
        }
        return "";
    }
    //</editor-fold>

    //<editor-fold desc="3) Contexts">
    /**
     * Determines contexts for a term
     * @param term Term to find contexts for
     * @return All contexts found
     */
    @GET
    @Path("contexts")
    @Produces(MediaType.TEXT_PLAIN)
    public String contexts(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getContextsForTerm(term);
        }
        return "";
    }

    /**
     * Determines contexts for a term
     * @param term Term to find contexts for
     * @return All contexts found as JSON array
     */
    @GET
    @Path("contextsJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String contextsJson(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return convertJbtListToJson(joBimSqlConnector.getContextsForTerm(term));
        }
        return "[]";
    }

    /**
     * Determines contexts for a term
     * @param term Term to find contexts for
     * @param n How many highest ranked contexts are returned
     * @return Filtered contexts
     */
    @GET
    @Path("contextsTopN")
    @Produces(MediaType.TEXT_PLAIN)
    public String contextsTopN(@QueryParam("term") String term, @QueryParam("n") int n, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getContextsForTerm(term, n);
        }
        return "";
    }

    /**
     * Determines contexts for a term
     * @param term Term to find contexts for
     * @param n How many highest ranked contexts are returned
     * @return Filtered contexts as JSON array
     */
    @GET
    @Path("contextsTopNJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String contextsTopNJson(@QueryParam("term") String term, @QueryParam("n") int n, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return convertJbtListToJson(joBimSqlConnector.getContextsForTerm(term, n));
        }
        return "[]";
    }

    /**
     * Determines contexts for a term with thresholding
     * @param term Term to find contexts for
     * @param d Threshold
     * @return Filtered contexts
     */
    @GET
    @Path("contextsThresholded")
    @Produces(MediaType.TEXT_PLAIN)
    public String contextsThresholded(@QueryParam("term") String term, @QueryParam("d") double d, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getContextsForTerm(term, d);
        }
        return "";
    }

    /**
     * Determines contexts for a term with thresholding
     * @param term Term to find contexts for
     * @param d Threshold
     * @return Filtered contexts as JSON array
     */
    @GET
    @Path("contextsThresholdedJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String contextsThresholdedJson(@QueryParam("term") String term, @QueryParam("d") double d, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return convertJbtListToJson(joBimSqlConnector.getContextsForTerm(term, d));
        }
        return "[]";
    }
    //</editor-fold>

    //<editor-fold desc="4) Sense Clusters">
    /**
     * Determines senses for a term
     * @param term Term to find senses for
     * @return Found senses
     */
    @GET
    @Path("getSenses")
    @Produces(MediaType.TEXT_PLAIN)
    public String getSenses(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            return joBimSqlConnector.getSensesVerbose(term);
        }
        return "";
    }

    /**
     * Determines senses for a term
     * @param term Term to find senses for
     * @return Found senses as a JSON array of arrays (one inner array per sense cluster)
     */
    @GET
    @Path("getSensesJson")
    @Produces(MediaType.APPLICATION_JSON)
    public String getSensesJson(@QueryParam("term") String term, @QueryParam("backend") String backend) {
        JoBimSqlConnector joBimSqlConnector = getBackend(backend);
        if (joBimSqlConnector != null) {
            // Senses arrive as ";"-separated JBT lists; convert each to a JSON array.
            String[] tempArray = joBimSqlConnector.getSenses(term).split(";");
            StringBuilder stringBuilder = new StringBuilder();
            stringBuilder.append("[");
            String delimiter = "";
            for (String i : tempArray) {
                stringBuilder.append(delimiter).append(convertJbtListToJson(i));
                delimiter = ",";
            }
            stringBuilder.append("]");
            return stringBuilder.toString();
        }
        return "[]";
    }
    //</editor-fold>
    //</editor-fold>
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.fileChooser.ex; import com.intellij.icons.AllIcons; import com.intellij.ide.IdeEventQueue; import com.intellij.ide.SaveAndSyncHandler; import com.intellij.ide.SaveAndSyncHandlerImpl; import com.intellij.ide.util.PropertiesComponent; import com.intellij.ide.util.treeView.NodeRenderer; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationActivationListener; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.fileChooser.*; import com.intellij.openapi.fileChooser.impl.FileChooserFactoryImpl; import com.intellij.openapi.fileChooser.impl.FileChooserUtil; import com.intellij.openapi.project.DumbModePermission; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.ui.DialogWrapper; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Iconable; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import 
com.intellij.openapi.wm.IdeFrame; import com.intellij.ui.*; import com.intellij.ui.components.JBList; import com.intellij.ui.components.labels.LinkLabel; import com.intellij.util.ArrayUtil; import com.intellij.util.Consumer; import com.intellij.util.IconUtil; import com.intellij.util.containers.HashMap; import com.intellij.util.ui.JBUI; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.update.MergingUpdateQueue; import com.intellij.util.ui.update.UiNotifyConnector; import com.intellij.util.ui.update.Update; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.TreeExpansionEvent; import javax.swing.event.TreeExpansionListener; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.KeyEvent; import java.awt.event.MouseEvent; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; public class FileChooserDialogImpl extends DialogWrapper implements FileChooserDialog, PathChooserDialog, FileLookup { @NonNls public static final String FILE_CHOOSER_SHOW_PATH_PROPERTY = "FileChooser.ShowPath"; public static final String RECENT_FILES_KEY = "file.chooser.recent.files"; private final FileChooserDescriptor myChooserDescriptor; protected FileSystemTreeImpl myFileSystemTree; private Project myProject; private VirtualFile[] myChosenFiles = VirtualFile.EMPTY_ARRAY; private JPanel myNorthPanel; private TextFieldAction myTextFieldAction; protected FileTextFieldImpl myPathTextField; private JComponent myPathTextFieldWrapper; private MergingUpdateQueue myUiUpdater; private boolean myTreeIsUpdating; public static DataKey<PathField> PATH_FIELD = DataKey.create("PathField"); public FileChooserDialogImpl(@NotNull final 
FileChooserDescriptor descriptor, @Nullable Project project) { super(project, true); myChooserDescriptor = descriptor; myProject = project; setTitle(getChooserTitle(descriptor)); } public FileChooserDialogImpl(@NotNull final FileChooserDescriptor descriptor, @NotNull Component parent) { this(descriptor, parent, null); } public FileChooserDialogImpl(@NotNull final FileChooserDescriptor descriptor, @NotNull Component parent, @Nullable Project project) { super(parent, true); myChooserDescriptor = descriptor; myProject = project; setTitle(getChooserTitle(descriptor)); } private static String getChooserTitle(final FileChooserDescriptor descriptor) { final String title = descriptor.getTitle(); return title != null ? title : UIBundle.message("file.chooser.default.title"); } @Override @NotNull public VirtualFile[] choose(@Nullable final Project project, @NotNull final VirtualFile... toSelect) { init(); if ((myProject == null) && (project != null)) { myProject = project; } if (toSelect.length == 1) { restoreSelection(toSelect[0]); } else if (toSelect.length == 0) { restoreSelection(null); // select last opened file } else { selectInTree(toSelect, true); } DumbService.allowStartingDumbModeInside(DumbModePermission.MAY_START_MODAL, new Runnable() { @Override public void run() { show(); } }); return myChosenFiles; } @NotNull @Override public VirtualFile[] choose(@Nullable final VirtualFile toSelect, @Nullable final Project project) { if (toSelect == null) { return choose(project); } return choose(project, toSelect); } @Override public void choose(@Nullable VirtualFile toSelect, @NotNull Consumer<List<VirtualFile>> callback) { init(); restoreSelection(toSelect); show(); if (myChosenFiles.length > 0) { callback.consume(Arrays.asList(myChosenFiles)); } else if (callback instanceof FileChooser.FileChooserConsumer) { ((FileChooser.FileChooserConsumer)callback).cancelled(); } } protected void restoreSelection(@Nullable VirtualFile toSelect) { final VirtualFile lastOpenedFile = 
FileChooserUtil.getLastOpenedFile(myProject); final VirtualFile file = FileChooserUtil.getFileToSelect(myChooserDescriptor, myProject, toSelect, lastOpenedFile); if (file != null && file.isValid()) { myFileSystemTree.select(file, new Runnable() { public void run() { if (!file.equals(myFileSystemTree.getSelectedFile())) { VirtualFile parent = file.getParent(); if (parent != null) { myFileSystemTree.select(parent, null); } } else if (file.isDirectory()) { myFileSystemTree.expand(file, null); } } }); } } protected void storeSelection(@Nullable VirtualFile file) { FileChooserUtil.setLastOpenedFile(myProject, file); if (file != null && file.getFileSystem() instanceof LocalFileSystem) { saveRecent(file.getPath()); } } protected void saveRecent(String path) { final List<String> files = new ArrayList<String>(Arrays.asList(getRecentFiles())); files.remove(path); files.add(0, path); while (files.size() > 30) { files.remove(files.size() - 1); } PropertiesComponent.getInstance().setValues(RECENT_FILES_KEY, ArrayUtil.toStringArray(files)); } @NotNull private String[] getRecentFiles() { final String[] recent = PropertiesComponent.getInstance().getValues(RECENT_FILES_KEY); if (recent != null) { if (recent.length > 0 && myPathTextField.getField().getText().replace('\\', '/').equals(recent[0])) { final String[] pathes = new String[recent.length - 1]; System.arraycopy(recent, 1, pathes, 0, recent.length - 1); return pathes; } return recent; } return ArrayUtil.EMPTY_STRING_ARRAY; } protected JComponent createHistoryButton() { JLabel label = new JLabel(AllIcons.Actions.Get); label.setToolTipText("Recent files"); new ClickListener() { @Override public boolean onClick(@NotNull MouseEvent event, int clickCount) { showRecentFilesPopup(); return true; } }.installOn(label); new AnAction() { @Override public void actionPerformed(AnActionEvent e) { showRecentFilesPopup(); } @Override public void update(AnActionEvent e) { 
// NOTE(review): tail of an anonymous AnAction whose declaration starts before this chunk —
// presumably the "recent files" history button; it is disabled while a popup is already open
// and bound to the Down arrow inside the path text field. TODO confirm against the full file.
e.getPresentation().setEnabled(!IdeEventQueue.getInstance().isPopupActive());
  }
}.registerCustomShortcutSet(KeyEvent.VK_DOWN, 0, myPathTextField.getField());
return label;
}

/** Shows a popup under the path text field listing the recent file paths. */
private void showRecentFilesPopup() {
  final JBList files = new JBList(getRecentFiles()) {
    @Override
    public Dimension getPreferredSize() {
      // Match the popup width to the path field so the list lines up with it visually.
      return new Dimension(myPathTextField.getField().getWidth(), super.getPreferredSize().height);
    }
  };
  files.setCellRenderer(new ColoredListCellRenderer() {
    @Override
    protected void customizeCellRenderer(JList list, Object value, int index, boolean selected, boolean hasFocus) {
      final String path = value.toString();
      append(path);
      // Decorate existing paths with their file-type icon; missing paths render as plain text.
      final VirtualFile file = LocalFileSystem.getInstance().findFileByIoFile(new File(path));
      if (file != null) {
        setIcon(IconUtil.getIcon(file, Iconable.ICON_FLAG_READ_STATUS, null));
      }
    }
  });
  JBPopupFactory.getInstance()
    .createListPopupBuilder(files)
    .setItemChoosenCallback(new Runnable() {
      @Override
      public void run() {
        // Choosing an entry replaces the path field text with the selected path.
        myPathTextField.getField().setText(files.getSelectedValue().toString());
      }
    }).createPopup().showUnderneathOf(myPathTextField.getField());
}

/** Registers chooser-local shortcuts and returns the shared "FileChooserToolbar" action group. */
protected DefaultActionGroup createActionGroup() {
  registerFileChooserShortcut(IdeActions.ACTION_DELETE, "FileChooser.Delete");
  registerFileChooserShortcut(IdeActions.ACTION_SYNCHRONIZE, "FileChooser.Refresh");
  return (DefaultActionGroup)ActionManager.getInstance().getAction("FileChooserToolbar");
}

// Copies the shortcut set of a base IDE action onto the chooser-specific action,
// scoped to the chooser's tree and released with myDisposable.
private void registerFileChooserShortcut(@NonNls final String baseActionId, @NonNls final String fileChooserActionId) {
  final JTree tree = myFileSystemTree.getTree();
  final AnAction syncAction = ActionManager.getInstance().getAction(fileChooserActionId);
  AnAction original = ActionManager.getInstance().getAction(baseActionId);
  syncAction.registerCustomShortcutSet(original.getShortcutSet(), tree, myDisposable);
}

/**
 * Builds the optional title label from the descriptor's description.
 *
 * @return the label, or {@code null} when the description is blank
 */
@Nullable
protected final JComponent createTitlePane() {
  final String description = myChooserDescriptor.getDescription();
  if (StringUtil.isEmptyOrSpaces(description)) return null;
  final JLabel label = new JLabel(description);
  label.setBorder(BorderFactory.createCompoundBorder(
    new SideBorder(UIUtil.getPanelBackground().darker(), SideBorder.BOTTOM),
    JBUI.Borders.empty(0, 5, 10, 5)));
  return label;
}

/** Assembles the dialog's center: toolbar, optional path field, file tree, and drag-drop hint. */
protected JComponent createCenterPanel() {
  JPanel panel = new MyPanel();
  // Coalesces bursts of UI updates (e.g. while typing a path) into one queued update after 200ms.
  myUiUpdater = new MergingUpdateQueue("FileChooserUpdater", 200, false, panel);
  Disposer.register(myDisposable, myUiUpdater);
  new UiNotifyConnector(panel, myUiUpdater);
  panel.setBorder(JBUI.Borders.empty());
  createTree();
  final DefaultActionGroup group = createActionGroup();
  ActionToolbar toolBar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true);
  toolBar.setTargetComponent(panel);
  final JPanel toolbarPanel = new JPanel(new BorderLayout());
  toolbarPanel.add(toolBar.getComponent(), BorderLayout.CENTER);
  // Link on the toolbar's right edge that toggles the path text field on/off.
  myTextFieldAction = new TextFieldAction() {
    public void linkSelected(final LinkLabel aSource, final Object aLinkData) {
      toggleShowTextField();
    }
  };
  toolbarPanel.add(myTextFieldAction, BorderLayout.EAST);
  myPathTextFieldWrapper = new JPanel(new BorderLayout());
  myPathTextFieldWrapper.setBorder(JBUI.Borders.emptyBottom(2));
  myPathTextField = new FileTextFieldImpl.Vfs(
    FileChooserFactoryImpl.getMacroMap(), getDisposable(),
    new LocalFsFinder.FileChooserFilter(myChooserDescriptor, myFileSystemTree)) {
    protected void onTextChanged(final String newValue) {
      // Typing invalidates any pending tree sync and schedules a new one for this path.
      myUiUpdater.cancelAllUpdates();
      updateTreeFromPath(newValue);
    }
  };
  Disposer.register(myDisposable, myPathTextField);
  myPathTextFieldWrapper.add(myPathTextField.getField(), BorderLayout.CENTER);
  if (getRecentFiles().length > 0) {
    myPathTextFieldWrapper.add(createHistoryButton(), BorderLayout.EAST);
  }
  myNorthPanel = new JPanel(new BorderLayout());
  myNorthPanel.add(toolbarPanel, BorderLayout.NORTH);
  updateTextFieldShowing();
  panel.add(myNorthPanel, BorderLayout.NORTH);
  registerMouseListener(group);
  JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(myFileSystemTree.getTree());
  //scrollPane.setBorder(BorderFactory.createLineBorder(new Color(148, 154, 156)));
  panel.add(scrollPane, BorderLayout.CENTER);
  panel.setPreferredSize(JBUI.size(400));
  panel.add(new JLabel(
    "<html><center><small><font color=gray>Drag and drop a file into the space above to quickly locate it in the tree.</font></small></center></html>",
    SwingConstants.CENTER), BorderLayout.SOUTH);
  // Re-sync the VFS whenever the IDE regains focus so externally changed files show up.
  ApplicationManager.getApplication().getMessageBus().connect(getDisposable())
    .subscribe(ApplicationActivationListener.TOPIC, new ApplicationActivationListener.Adapter() {
      @Override
      public void applicationActivated(IdeFrame ideFrame) {
        ((SaveAndSyncHandlerImpl)SaveAndSyncHandler.getInstance()).maybeRefresh(ModalityState.current());
      }
    });
  return panel;
}

/** Focuses the path field when it is shown, otherwise the tree. */
public JComponent getPreferredFocusedComponent() {
  if (isToShowTextField()) {
    return myPathTextField != null ? myPathTextField.getField() : null;
  }
  else {
    return myFileSystemTree != null ? myFileSystemTree.getTree() : null;
  }
}

public final void dispose() {
  // Release every recursive watch added by FileTreeExpansionListener.
  LocalFileSystem.getInstance().removeWatchedRoots(myRequests.values());
  super.dispose();
}

// The path field is "active" when it is currently attached to a visible component hierarchy.
private boolean isTextFieldActive() {
  return myPathTextField.getField().getRootPane() != null;
}

/** Validates the typed path / tree selection, stores the chosen files, and closes the dialog. */
protected void doOKAction() {
  if (!isOKActionEnabled()) {
    return;
  }
  if (isTextFieldActive()) {
    final String text = myPathTextField.getTextFieldText();
    final LookupFile file = myPathTextField.getFile();
    if (text == null || file == null || !file.exists()) {
      setErrorText("Specified path cannot be found");
      return;
    }
  }
  final List<VirtualFile> selectedFiles = Arrays.asList(getSelectedFilesInt());
  final VirtualFile[] files = VfsUtilCore.toVirtualFileArray(FileChooserUtil.getChosenFiles(myChooserDescriptor, selectedFiles));
  if (files.length == 0) {
    // Nothing selectable survived the descriptor's filtering: treat as cancel.
    myChosenFiles = VirtualFile.EMPTY_ARRAY;
    close(CANCEL_EXIT_CODE);
    return;
  }
  try {
    myChooserDescriptor.validateSelectedFiles(files);
  }
  catch (Exception e) {
    Messages.showErrorDialog(getContentPane(), e.getMessage(), getTitle());
    return;
  }
  myChosenFiles = files;
  storeSelection(files[files.length - 1]);
  super.doOKAction();
}

public final void doCancelAction() {
  myChosenFiles = VirtualFile.EMPTY_ARRAY;
  super.doCancelAction();
}

/** Creates and wires the file-system tree (renderer, listeners, drag-and-drop target). */
protected JTree createTree() {
  myFileSystemTree = new FileSystemTreeImpl(myProject, myChooserDescriptor);
  Disposer.register(myDisposable, myFileSystemTree);
  myFileSystemTree.addOkAction(new Runnable() {
    public void run() {
      doOKAction();
    }
  });
  JTree tree = myFileSystemTree.getTree();
  tree.setCellRenderer(new NodeRenderer());
  tree.getSelectionModel().addTreeSelectionListener(new FileTreeSelectionListener());
  tree.addTreeExpansionListener(new FileTreeExpansionListener());
  setOKActionEnabled(false);
  myFileSystemTree.addListener(new FileSystemTree.Listener() {
    public void selectionChanged(final List<VirtualFile> selection) {
      updatePathFromTree(selection, false);
    }
  }, myDisposable);
  // Dropping files onto the tree jumps to (and selects) them.
  new FileDrop(tree, new FileDrop.Target() {
    public FileChooserDescriptor getDescriptor() {
      return myChooserDescriptor;
    }
    public boolean isHiddenShown() {
      return myFileSystemTree.areHiddensShown();
    }
    public void dropFiles(final List<VirtualFile> files) {
      if (!myChooserDescriptor.isChooseMultiple() && files.size() > 0) {
        selectInTree(new VirtualFile[]{files.get(0)}, true);
      }
      else {
        selectInTree(VfsUtilCore.toVirtualFileArray(files), true);
      }
    }
  });
  return tree;
}

protected final void registerMouseListener(final ActionGroup group) {
  myFileSystemTree.registerMouseListener(group);
}

// Returns the effective selection. While the tree is still syncing to a typed path,
// fall back to the file named in the path field so OK uses what the user typed.
private VirtualFile[] getSelectedFilesInt() {
  if (myTreeIsUpdating || !myUiUpdater.isEmpty()) {
    if (isTextFieldActive() && !StringUtil.isEmpty(myPathTextField.getTextFieldText())) {
      LookupFile toFind = myPathTextField.getFile();
      if (toFind instanceof LocalFsFinder.VfsFile && toFind.exists()) {
        VirtualFile file = ((LocalFsFinder.VfsFile)toFind).getFile();
        if (file != null) {
          return new VirtualFile[]{file};
        }
      }
    }
    return VirtualFile.EMPTY_ARRAY;
  }
  return myFileSystemTree.getSelectedFiles();
}

// Root path -> active recursive watch; cleared in dispose().
private final Map<String, LocalFileSystem.WatchRequest> myRequests = new HashMap<String, LocalFileSystem.WatchRequest>();

// Persisted user preference: show the path text field (defaults to true).
private static boolean isToShowTextField() {
  return PropertiesComponent.getInstance().getBoolean(FILE_CHOOSER_SHOW_PATH_PROPERTY, true);
}

private static void setToShowTextField(boolean toShowTextField) {
  PropertiesComponent.getInstance().setValue(FILE_CHOOSER_SHOW_PATH_PROPERTY, Boolean.toString(toShowTextField));
}

/** Starts a recursive VFS watch on a disk root the first time its top node is expanded. */
private final class FileTreeExpansionListener implements TreeExpansionListener {
  public void treeExpanded(TreeExpansionEvent event) {
    final Object[] path = event.getPath().getPath();
    if (path.length == 2) {
      // top node has been expanded => watch disk recursively
      final DefaultMutableTreeNode node = (DefaultMutableTreeNode)path[1];
      Object userObject = node.getUserObject();
      if (userObject instanceof FileNodeDescriptor) {
        final VirtualFile file = ((FileNodeDescriptor)userObject).getElement().getFile();
        if (file != null && file.isDirectory()) {
          final String rootPath = file.getPath();
          if (myRequests.get(rootPath) == null) {
            final LocalFileSystem.WatchRequest watchRequest = LocalFileSystem.getInstance().addRootToWatch(rootPath, true);
            myRequests.put(rootPath, watchRequest);
          }
        }
      }
    }
  }

  public void treeCollapsed(TreeExpansionEvent event) {
  }
}

/** Enables OK only while every newly-selected tree path is a selectable file node. */
private final class FileTreeSelectionListener implements TreeSelectionListener {
  public void valueChanged(TreeSelectionEvent e) {
    TreePath[] paths = e.getPaths();
    boolean enabled = true;
    for (TreePath treePath : paths) {
      if (!e.isAddedPath(treePath)) {
        continue;
      }
      DefaultMutableTreeNode node = (DefaultMutableTreeNode)treePath.getLastPathComponent();
      Object userObject = node.getUserObject();
      if (!(userObject instanceof FileNodeDescriptor)) {
        enabled = false;
        break;
      }
      FileElement descriptor = ((FileNodeDescriptor)userObject).getElement();
      VirtualFile file = descriptor.getFile();
      enabled = file != null && myChooserDescriptor.isFileSelectable(file);
    }
    setOKActionEnabled(enabled);
  }
}

/** Center panel that also answers DataProvider queries (selection, path field, tree). */
protected final class MyPanel extends JPanel implements DataProvider {
  public MyPanel() {
    super(new BorderLayout(0, 0));
  }

  public Object getData(String dataId) {
    if (CommonDataKeys.VIRTUAL_FILE_ARRAY.is(dataId)) {
      return myFileSystemTree.getSelectedFiles();
    }
    else if (PATH_FIELD.is(dataId)) {
      return new PathField() {
        public void toggleVisible() {
          toggleShowTextField();
        }
      };
    }
    else if (FileSystemTree.DATA_KEY.is(dataId)) {
      return myFileSystemTree;
    }
    // Unknown keys are delegated to the descriptor's user data.
    return myChooserDescriptor.getUserData(dataId);
  }
}

public void toggleShowTextField() {
  setToShowTextField(!isToShowTextField());
  updateTextFieldShowing();
}

// Adds/removes the path-field row and seeds it from the current tree selection.
private void updateTextFieldShowing() {
  myTextFieldAction.update();
  myNorthPanel.remove(myPathTextFieldWrapper);
  if (isToShowTextField()) {
    final ArrayList<VirtualFile> selection = new ArrayList<VirtualFile>();
    if (myFileSystemTree.getSelectedFile() != null) {
      selection.add(myFileSystemTree.getSelectedFile());
    }
    updatePathFromTree(selection, true);
    myNorthPanel.add(myPathTextFieldWrapper, BorderLayout.CENTER);
  }
  else {
    setErrorText(null);
  }
  myPathTextField.getField().requestFocus();
  myNorthPanel.revalidate();
  myNorthPanel.repaint();
}

// Tree selection -> path field. Skipped while the tree itself is being driven by the field.
private void updatePathFromTree(final List<VirtualFile> selection, boolean now) {
  if (!isToShowTextField() || myTreeIsUpdating) return;
  String text = "";
  if (selection.size() > 0) {
    text = VfsUtil.getReadableUrl(selection.get(0));
  }
  else {
    final List<VirtualFile> roots = myChooserDescriptor.getRoots();
    if (!myFileSystemTree.getTree().isRootVisible() && roots.size() == 1) {
      text = VfsUtil.getReadableUrl(roots.get(0));
    }
  }
  myPathTextField.setText(text, now, new Runnable() {
    public void run() {
      myPathTextField.getField().selectAll();
      setErrorText(null);
    }
  });
}

// Path field -> tree. Resolves the path off the EDT, then queues the selection back
// through myUiUpdater so stale lookups are dropped.
private void updateTreeFromPath(final String text) {
  if (!isToShowTextField()) return;
  if (myPathTextField.isPathUpdating()) return;
  if (text == null) return;
  myUiUpdater.queue(new Update("treeFromPath.1") {
    public void run() {
      ApplicationManager.getApplication().executeOnPooledThread(new Runnable() {
        public void run() {
          final LocalFsFinder.VfsFile toFind = (LocalFsFinder.VfsFile)myPathTextField.getFile();
          if (toFind == null || !toFind.exists()) return;
          myUiUpdater.queue(new Update("treeFromPath.2") {
            public void run() {
              selectInTree(toFind.getFile(), text);
            }
          });
        }
      });
    }
  });
}

// Selects vFile only if the field still shows the text the lookup was started for.
private void selectInTree(final VirtualFile vFile, String fromText) {
  if (vFile != null && vFile.isValid()) {
    if (fromText == null || fromText.equalsIgnoreCase(myPathTextField.getTextFieldText())) {
      selectInTree(new VirtualFile[]{vFile}, false);
    }
  }
  else {
    reportFileNotFound();
  }
}

// Selects the files in the tree; if the selection is hidden, reveals hidden files once and retries.
private void selectInTree(final VirtualFile[] array, final boolean requestFocus) {
  myTreeIsUpdating = true;
  final List<VirtualFile> fileList = Arrays.asList(array);
  if (!Arrays.asList(myFileSystemTree.getSelectedFiles()).containsAll(fileList)) {
    myFileSystemTree.select(array, new Runnable() {
      public void run() {
        if (!myFileSystemTree.areHiddensShown() && !Arrays.asList(myFileSystemTree.getSelectedFiles()).containsAll(fileList)) {
          myFileSystemTree.showHiddens(true);
          selectInTree(array, requestFocus);
          return;
        }
        myTreeIsUpdating = false;
        setErrorText(null);
        if (requestFocus) {
          //noinspection SSBasedInspection
          SwingUtilities.invokeLater(new Runnable() {
            public void run() {
              myFileSystemTree.getTree().requestFocus();
            }
          });
        }
      }
    });
  }
  else {
    myTreeIsUpdating = false;
    setErrorText(null);
  }
}

private void reportFileNotFound() {
  myTreeIsUpdating = false;
  setErrorText(null);
}

@Override
protected String getDimensionServiceKey() {
  return "FileChooserDialogImpl";
}

@Override
protected String getHelpId() {
  return "select.path.dialog";
}
}
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.convention.daycount; import java.util.Arrays; import javax.time.calendar.LocalDate; import javax.time.calendar.LocalDateTime; import javax.time.calendar.TimeZone; import javax.time.calendar.ZonedDateTime; import org.apache.commons.lang.Validate; import com.opengamma.financial.convention.StubCalculator; import com.opengamma.financial.convention.StubType; import com.opengamma.financial.convention.calendar.Calendar; /** * Utility to calculate the accrued interest. */ public final class AccruedInterestCalculator { /** * Restricted constructor. */ private AccruedInterestCalculator() { } /** * Calculates the accrued interest for a {@code ZonedDateTime}. * * @param dayCount the day count convention, not null * @param settlementDate the settlement date, not null * @param nominalDates the nominalDates, not null, no null elements * @param coupon the coupon value * @param paymentsPerYear the number of payments per year, one, two, three, four, six or twelve * @param isEndOfMonthConvention whether to use end of month rules * @param exDividendDays the number of ex-dividend days * @param calendar The working day calendar to be used in calculating ex-dividend dates, not null * @return the accrued interest */ public static double getAccruedInterest(final DayCount dayCount, final ZonedDateTime settlementDate, final ZonedDateTime[] nominalDates, final double coupon, final int paymentsPerYear, final boolean isEndOfMonthConvention, final int exDividendDays, final Calendar calendar) { Validate.notNull(dayCount, "day-count"); Validate.notNull(settlementDate, "date"); Validate.noNullElements(nominalDates, "nominalDates"); Validate.notNull(calendar, "calendar"); Validate.isTrue(paymentsPerYear > 0); Validate.isTrue(exDividendDays >= 0); final int i = Arrays.binarySearch(nominalDates, settlementDate); if (i > 0) { return 0; } 
final int index = -i - 2; final int length = nominalDates.length; Validate.isTrue(index >= 0, "Settlement date is before first accrual date"); Validate.isTrue(index < length, "Settlement date is after maturity date"); final double accruedInterest = getAccruedInterest(dayCount, index, length, nominalDates[index], settlementDate, nominalDates[index + 1], coupon, paymentsPerYear, isEndOfMonthConvention); ZonedDateTime exDividendDate = nominalDates[index + 1]; for (int j = 0; j < exDividendDays; j++) { while (!calendar.isWorkingDay(exDividendDate.toLocalDate())) { exDividendDate = exDividendDate.minusDays(1); } exDividendDate = exDividendDate.minusDays(1); } if (exDividendDays != 0 && exDividendDate.isBefore(settlementDate)) { return accruedInterest - coupon; } return accruedInterest; } /** * Calculates the accrued interest for a {@code ZonedDateTime}. * * @param dayCount the day count convention, not null * @param settlementDate the settlement date, not null * @param nominalDates the nominalDates, not null, no null elements * @param coupon the coupon value * @param paymentsPerYear the number of payments per year, one, two, three, four, six or twelve * @param isEndOfMonthConvention whether to use end of month rules * @param exDividendDays the number of ex-dividend days * @param index The index of the previous coupon in the nominalDates array * @param calendar The working day calendar to be used in calculating ex-dividend dates, not null * @return the accrued interest */ public static double getAccruedInterest(final DayCount dayCount, final ZonedDateTime settlementDate, final ZonedDateTime[] nominalDates, final double coupon, final double paymentsPerYear, final boolean isEndOfMonthConvention, final int exDividendDays, final int index, final Calendar calendar) { Validate.notNull(dayCount, "day-count"); Validate.notNull(settlementDate, "date"); Validate.noNullElements(nominalDates, "nominalDates"); Validate.notNull(calendar, "calendar"); Validate.isTrue(paymentsPerYear > 
0); Validate.isTrue(exDividendDays >= 0); final int length = nominalDates.length; Validate.isTrue(index >= 0 && index < length); final double accruedInterest = getAccruedInterest(dayCount, index, length, nominalDates[index], settlementDate, nominalDates[index + 1], coupon, paymentsPerYear, isEndOfMonthConvention); ZonedDateTime exDividendDate = nominalDates[index + 1]; for (int i = 0; i < exDividendDays; i++) { while (!calendar.isWorkingDay(exDividendDate.toLocalDate())) { exDividendDate = exDividendDate.minusDays(1); } exDividendDate = exDividendDate.minusDays(1); } if (exDividendDays != 0 && exDividendDate.isBefore(settlementDate)) { return accruedInterest - coupon; } return accruedInterest; } /** * Calculates the accrued interest for a {@code LocalDate}. * * @param dayCount the day count convention, not null * @param settlementDate the settlement date, not null * @param nominalDates the nominalDates, not null, no null elements * @param coupon the coupon value * @param paymentsPerYear the number of payments per year, one, two, three, four, six or twelve * @param isEndOfMonthConvention whether to use end of month rules * @param exDividendDays the number of ex-dividend days * @param calendar The working day calendar to be used in calculating ex-dividend dates, not null * @return the accrued interest */ //TODO one where you can pass in array of coupons public static double getAccruedInterest(final DayCount dayCount, final LocalDate settlementDate, final LocalDate[] nominalDates, final double coupon, final double paymentsPerYear, final boolean isEndOfMonthConvention, final int exDividendDays, final Calendar calendar) { Validate.notNull(dayCount, "day-count"); Validate.notNull(settlementDate, "date"); Validate.noNullElements(nominalDates, "nominalDates"); Validate.notNull(calendar, "calendar"); Validate.isTrue(paymentsPerYear > 0); Validate.isTrue(exDividendDays >= 0); final int i = Arrays.binarySearch(nominalDates, settlementDate); if (i > 0) { return 0; } final int 
index = -i - 2; final int length = nominalDates.length; if (index < 0) { throw new IllegalArgumentException("Settlement date is before first accrual date"); } if (index == length) { throw new IllegalArgumentException("Settlement date is after maturity date"); } final ZonedDateTime previousCouponDate = ZonedDateTime.of(LocalDateTime.ofMidnight(nominalDates[index]), TimeZone.UTC); final ZonedDateTime date = ZonedDateTime.of(LocalDateTime.ofMidnight(settlementDate), TimeZone.UTC); final ZonedDateTime nextCouponDate = ZonedDateTime.of(LocalDateTime.ofMidnight(nominalDates[index + 1]), TimeZone.UTC); final double accruedInterest = getAccruedInterest(dayCount, index, length, previousCouponDate, date, nextCouponDate, coupon, paymentsPerYear, isEndOfMonthConvention); LocalDate exDividendDate = nominalDates[index + 1]; for (int j = 0; j < exDividendDays; j++) { while (!calendar.isWorkingDay(exDividendDate)) { exDividendDate = exDividendDate.minusDays(1); } exDividendDate = exDividendDate.minusDays(1); } if (exDividendDays != 0 && exDividendDate.isBefore(settlementDate)) { return accruedInterest - coupon; } return accruedInterest; } /** * Calculates the accrued interest for a {@code LocalDate}. 
* * @param dayCount the day count convention, not null * @param settlementDate the settlement date, not null * @param nominalDates the nominalDates, not null, no null elements * @param coupon the coupon value * @param paymentsPerYear the number of payments per year, one, two, three, four, six or twelve * @param isEndOfMonthConvention whether to use end of month rules * @param exDividendDays the number of ex-dividend days * @param index The index of the previous coupon in the nominalDates * @param calendar The working day calendar to be used in calculating ex-dividend dates, not null * @return the accrued interest */ public static double getAccruedInterest(final DayCount dayCount, final LocalDate settlementDate, final LocalDate[] nominalDates, final double coupon, final double paymentsPerYear, final boolean isEndOfMonthConvention, final int exDividendDays, final int index, final Calendar calendar) { Validate.notNull(dayCount, "day-count"); Validate.notNull(settlementDate, "date"); Validate.noNullElements(nominalDates, "nominalDates"); Validate.notNull(calendar, "calendar"); Validate.isTrue(paymentsPerYear > 0); Validate.isTrue(exDividendDays >= 0); final int length = nominalDates.length; Validate.isTrue(index >= 0 && index < length); final ZonedDateTime previousCouponDate = ZonedDateTime.of(LocalDateTime.ofMidnight(nominalDates[index]), TimeZone.UTC); final ZonedDateTime date = ZonedDateTime.of(LocalDateTime.ofMidnight(settlementDate), TimeZone.UTC); final ZonedDateTime nextCouponDate = ZonedDateTime.of(LocalDateTime.ofMidnight(nominalDates[index + 1]), TimeZone.UTC); double accruedInterest; if (date.isAfter(nextCouponDate)) { accruedInterest = 0; } else { accruedInterest = getAccruedInterest(dayCount, index, length, previousCouponDate, date, nextCouponDate, coupon, paymentsPerYear, isEndOfMonthConvention); } LocalDate exDividendDate = nominalDates[index + 1]; for (int i = 0; i < exDividendDays; i++) { while (!calendar.isWorkingDay(exDividendDate)) { exDividendDate 
= exDividendDate.minusDays(1); } exDividendDate = exDividendDate.minusDays(1); } if (exDividendDays != 0 && exDividendDate.isBefore(settlementDate)) { return accruedInterest - coupon; } return accruedInterest; } /** * Calculates the accrued interest for a {@code LocalDate}. * * @param dayCount the day count convention, not null * @param settlementDate the settlement date, not null * @param nominalDates the nominalDates, not null, no null elements * @param settlementDates the settlement dates, not null, no null elements * @param coupon the coupon value * @param paymentsPerYear the number of payments per year, one, two, three, four, six or twelve * @param isEndOfMonthConvention whether to use end of month rules * @param exDividendDays the number of ex-dividend days * @param index The index of the previous coupon in the nominalDates * @param calendar The working day calendar used to calculate the ex-dividend date, not null * @return the accrued interest */ public static double getAccruedInterest(final DayCount dayCount, final LocalDate settlementDate, final LocalDate[] nominalDates, final LocalDate[] settlementDates, final double coupon, final double paymentsPerYear, final boolean isEndOfMonthConvention, final int exDividendDays, final int index, final Calendar calendar) { Validate.notNull(dayCount, "day-count"); Validate.notNull(settlementDate, "date"); Validate.notNull(calendar, "calendar"); Validate.noNullElements(nominalDates, "nominalDates"); Validate.noNullElements(settlementDates, "settlementDates"); Validate.isTrue(paymentsPerYear > 0); Validate.isTrue(exDividendDays >= 0); final int length = nominalDates.length; Validate.isTrue(index >= 0 && index < length); final LocalDate previousCouponDate = nominalDates[index]; final LocalDate nextCouponDate = nominalDates[index + 1]; double accruedInterest; if (settlementDate.isAfter(nextCouponDate)) { if (settlementDate.isBefore(settlementDates[index + 1])) { accruedInterest = coupon; } else { accruedInterest = 0; } } 
else { accruedInterest = getAccruedInterest(dayCount, index, length, previousCouponDate, settlementDate, nextCouponDate, coupon, paymentsPerYear, isEndOfMonthConvention); } LocalDate exDividendDate = nominalDates[index + 1]; for (int i = 0; i < exDividendDays; i++) { while (!calendar.isWorkingDay(exDividendDate)) { exDividendDate = exDividendDate.minusDays(1); } exDividendDate = exDividendDate.minusDays(1); } if (exDividendDays != 0 && exDividendDate.isBefore(settlementDate)) { return accruedInterest - coupon; } return accruedInterest; } public static double getAccruedInterest(final DayCount dayCount, final int index, final int length, final ZonedDateTime previousCouponDate, final ZonedDateTime date, final ZonedDateTime nextCouponDate, final double coupon, final double paymentsPerYear, final boolean isEndOfMonthConvention) { if (dayCount instanceof ActualActualICMANormal) { if (isEndOfMonthConvention) { throw new IllegalArgumentException("Inconsistent definition; asked for accrual with EOM convention but are not using Actual/Actual ICMA"); } final StubType stubType = getStubType(index, length, previousCouponDate, nextCouponDate, paymentsPerYear, isEndOfMonthConvention); return ((ActualActualICMANormal) dayCount).getAccruedInterest(previousCouponDate, date, nextCouponDate, coupon, paymentsPerYear, stubType); } else if (dayCount instanceof ActualActualICMA) { final StubType stubType = getStubType(index, length, previousCouponDate, nextCouponDate, paymentsPerYear, isEndOfMonthConvention); return ((ActualActualICMA) dayCount).getAccruedInterest(previousCouponDate, date, nextCouponDate, coupon, paymentsPerYear, stubType); } else if (dayCount instanceof ThirtyUThreeSixty) { return ((ThirtyUThreeSixty) dayCount).getAccruedInterest(previousCouponDate, date, coupon, isEndOfMonthConvention); } return dayCount.getAccruedInterest(previousCouponDate, date, nextCouponDate, coupon, paymentsPerYear); } public static double getAccruedInterest(final DayCount dayCount, final int 
index, final int length, final LocalDate previousCouponDate, final LocalDate date, final LocalDate nextCouponDate, final double coupon, final double paymentsPerYear, final boolean isEndOfMonthConvention) { if (dayCount instanceof ActualActualICMANormal) { if (isEndOfMonthConvention) { throw new IllegalArgumentException("Inconsistent definition; asked for accrual with EOM convention but are not using Actual/Actual ICMA"); } final StubType stubType = getStubType(index, length, previousCouponDate, nextCouponDate, paymentsPerYear, isEndOfMonthConvention); return ((ActualActualICMANormal) dayCount).getAccruedInterest(previousCouponDate, date, nextCouponDate, coupon, paymentsPerYear, stubType); } else if (dayCount instanceof ActualActualICMA) { final StubType stubType = getStubType(index, length, previousCouponDate, nextCouponDate, paymentsPerYear, isEndOfMonthConvention); return ((ActualActualICMA) dayCount).getAccruedInterest(previousCouponDate, date, nextCouponDate, coupon, paymentsPerYear, stubType); } else if (dayCount instanceof ThirtyUThreeSixty) { return ((ThirtyUThreeSixty) dayCount).getAccruedInterest(previousCouponDate, date, coupon, isEndOfMonthConvention); } return dayCount.getAccruedInterest(previousCouponDate, date, nextCouponDate, coupon, paymentsPerYear); } private static StubType getStubType(final int index, final int length, final ZonedDateTime previousCouponDate, final ZonedDateTime nextCouponDate, final double paymentsPerYear, final boolean isEndOfMonthConvention) { StubType stubType; if (index == 0) { stubType = StubCalculator.getStartStubType(new ZonedDateTime[] {previousCouponDate, nextCouponDate}, paymentsPerYear, isEndOfMonthConvention); } else if (index == length - 2) { stubType = StubCalculator.getEndStubType(new ZonedDateTime[] {previousCouponDate, nextCouponDate}, paymentsPerYear, isEndOfMonthConvention); } else { stubType = StubType.NONE; } return stubType; } private static StubType getStubType(final int index, final int length, final 
LocalDate previousCouponDate, final LocalDate nextCouponDate, final double paymentsPerYear, final boolean isEndOfMonthConvention) { StubType stubType; if (index == 0) { stubType = StubCalculator.getStartStubType(new LocalDate[] {previousCouponDate, nextCouponDate}, paymentsPerYear, isEndOfMonthConvention); } else if (index == length - 2) { stubType = StubCalculator.getEndStubType(new LocalDate[] {previousCouponDate, nextCouponDate}, paymentsPerYear, isEndOfMonthConvention); } else { stubType = StubType.NONE; } return stubType; } }
/* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.options; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory.JsonIgnorePredicate; import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory.Registration; import com.google.cloud.dataflow.sdk.options.ValueProvider.RuntimeValueProvider; import com.google.cloud.dataflow.sdk.options.ValueProvider.StaticValueProvider; import com.google.cloud.dataflow.sdk.transforms.display.DisplayData; import com.google.cloud.dataflow.sdk.transforms.display.HasDisplayData; import com.google.cloud.dataflow.sdk.util.InstanceBuilder; import com.google.cloud.dataflow.sdk.util.common.ReflectHelpers; import com.google.common.base.Defaults; import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.collect.ClassToInstanceMap; import com.google.common.collect.FluentIterable; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.MutableClassToInstanceMap; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.core.JsonGenerator; import 
com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.JsonDeserializer; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.JsonSerializer; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.node.ObjectNode; import java.beans.PropertyDescriptor; import java.io.IOException; import java.lang.annotation.Annotation; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Proxy; import java.lang.reflect.Type; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import javax.annotation.Nullable; import javax.annotation.concurrent.ThreadSafe; /** * Represents and {@link InvocationHandler} for a {@link Proxy}. The invocation handler uses bean * introspection of the proxy class to store and retrieve values based off of the property name. * * <p>Unset properties use the {@code @Default} metadata on the getter to return values. If there * is no {@code @Default} annotation on the getter, then a <a * href="https://docs.oracle.com/javase/tutorial/java/nutsandbolts/datatypes.html">default</a> as * per the Java Language Specification for the expected return type is returned. * * <p>In addition to the getter/setter pairs, this proxy invocation handler supports * {@link Object#equals(Object)}, {@link Object#hashCode()}, {@link Object#toString()} and * {@link PipelineOptions#as(Class)}. 
*/
@ThreadSafe
class ProxyInvocationHandler implements InvocationHandler, HasDisplayData {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  /**
   * No two instances of this class are considered equivalent hence we generate a random hash code
   * between 0 and {@link Integer#MAX_VALUE}.
   */
  private final int hashCode = (int) (Math.random() * Integer.MAX_VALUE);

  // PipelineOptions sub-interfaces seen so far (seeded from the factory's registry).
  private final Set<Class<? extends PipelineOptions>> knownInterfaces;
  // One proxy instance per requested interface, all sharing this handler.
  private final ClassToInstanceMap<PipelineOptions> interfaceToProxyCache;
  // Property name -> bound value (explicitly set or lazily-bound default).
  private final Map<String, BoundValue> options;
  // Property name -> raw JSON node, deserialized on first getter access.
  private final Map<String, JsonNode> jsonOptions;
  // Getter/setter method name -> bean property name, populated by as(Class).
  private final Map<String, String> gettersToPropertyNames;
  private final Map<String, String> settersToPropertyNames;

  ProxyInvocationHandler(Map<String, Object> options) {
    this(bindOptions(options), Maps.<String, JsonNode>newHashMap());
  }

  // Wraps every supplied value as an explicitly-set BoundValue.
  private static Map<String, BoundValue> bindOptions(Map<String, Object> inputOptions) {
    HashMap<String, BoundValue> options = Maps.newHashMap();
    for (Map.Entry<String, Object> entry : inputOptions.entrySet()) {
      options.put(entry.getKey(), BoundValue.fromExplicitOption(entry.getValue()));
    }
    return options;
  }

  private ProxyInvocationHandler(
      Map<String, BoundValue> options, Map<String, JsonNode> jsonOptions) {
    this.options = options;
    this.jsonOptions = jsonOptions;
    this.knownInterfaces = new HashSet<>(PipelineOptionsFactory.getRegisteredOptions());
    gettersToPropertyNames = Maps.newHashMap();
    settersToPropertyNames = Maps.newHashMap();
    interfaceToProxyCache = MutableClassToInstanceMap.create();
  }

  /**
   * Dispatches proxy calls: Object-protocol and PipelineOptions meta-methods are handled
   * specially; everything else is resolved as a bean getter or setter on the options map.
   */
  @Override
  public Object invoke(Object proxy, Method method, Object[] args) {
    if (args == null && "toString".equals(method.getName())) {
      return toString();
    } else if (args != null && args.length == 1 && "equals".equals(method.getName())) {
      return equals(args[0]);
    } else if (args == null && "hashCode".equals(method.getName())) {
      return hashCode();
    } else if (args == null && "outputRuntimeOptions".equals(method.getName())) {
      return outputRuntimeOptions((PipelineOptions) proxy);
    } else if (args != null && "as".equals(method.getName()) && args[0] instanceof Class) {
      @SuppressWarnings("unchecked")
      Class<? extends PipelineOptions> clazz = (Class<? extends PipelineOptions>) args[0];
      return as(clazz);
    } else if (args != null && "cloneAs".equals(method.getName()) && args[0] instanceof Class) {
      @SuppressWarnings("unchecked")
      Class<? extends PipelineOptions> clazz = (Class<? extends PipelineOptions>) args[0];
      return cloneAs(proxy, clazz);
    } else if (args != null && "populateDisplayData".equals(method.getName())
        && args[0] instanceof DisplayData.Builder) {
      @SuppressWarnings("unchecked")
      DisplayData.Builder builder = (DisplayData.Builder) args[0];
      // Explicitly set display data namespace so thrown exceptions will have sensible type.
      builder.include(this, PipelineOptions.class);
      return Void.TYPE;
    }
    String methodName = method.getName();
    // Getter/setter access mutates shared maps, so it is guarded by this handler's monitor.
    synchronized (this) {
      if (gettersToPropertyNames.containsKey(methodName)) {
        String propertyName = gettersToPropertyNames.get(methodName);
        if (!options.containsKey(propertyName)) {
          // Lazy bind the default to the method.
          Object value = jsonOptions.containsKey(propertyName)
              ? getValueFromJson(propertyName, method)
              : getDefault((PipelineOptions) proxy, method);
          options.put(propertyName, BoundValue.fromDefault(value));
        }
        return options.get(propertyName).getValue();
      } else if (settersToPropertyNames.containsKey(methodName)) {
        options.put(settersToPropertyNames.get(methodName),
            BoundValue.fromExplicitOption(args[0]));
        return Void.TYPE;
      }
    }
    throw new RuntimeException("Unknown method [" + method + "] invoked with args ["
        + Arrays.toString(args) + "].");
  }

  /**
   * Track whether options values are explicitly set, or retrieved from defaults.
*/
static final class BoundValue {
  @Nullable private final Object value;
  private final boolean isDefault;

  private BoundValue(@Nullable Object value, boolean isDefault) {
    this.value = value;
    this.isDefault = isDefault;
  }

  /** Wraps a value that the user set explicitly (constructor map or setter call). */
  static BoundValue fromExplicitOption(@Nullable Object value) {
    return new BoundValue(value, false);
  }

  /** Wraps a value that was lazily resolved from the getter's default. */
  static BoundValue fromDefault(@Nullable Object value) {
    return new BoundValue(value, true);
  }

  /** The wrapped option value; may be {@code null}. */
  @Nullable
  Object getValue() {
    return value;
  }

  /** Whether this value came from a default rather than an explicit assignment. */
  boolean isDefault() {
    return isDefault;
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(BoundValue.class)
        .add("value", value)
        .add("isDefault", isDefault)
        .toString();
  }

  @Override
  public int hashCode() {
    return Objects.hash(value, isDefault);
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof BoundValue)) {
      return false;
    }
    BoundValue other = (BoundValue) obj;
    return Objects.equals(this.value, other.value) && this.isDefault == other.isDefault;
  }
}

/**
 * Backing implementation for {@link PipelineOptions#as(Class)}.
 *
 * @param iface The interface that the returned object needs to implement.
 * @return An object that implements the interface {@code <T>}.
*/
synchronized <T extends PipelineOptions> T as(Class<T> iface) {
  checkNotNull(iface);
  checkArgument(iface.isInterface(), "Not an interface: %s", iface);
  if (!interfaceToProxyCache.containsKey(iface)) {
    // First request for this interface: validate it against everything already registered,
    // learn its getter/setter -> property mappings, then build and cache a proxy backed by
    // this same handler (so all views share one options map).
    Registration<T> registration =
        PipelineOptionsFactory.validateWellFormed(iface, knownInterfaces);
    List<PropertyDescriptor> propertyDescriptors = registration.getPropertyDescriptors();
    Class<T> proxyClass = registration.getProxyClass();
    gettersToPropertyNames.putAll(generateGettersToPropertyNames(propertyDescriptors));
    settersToPropertyNames.putAll(generateSettersToPropertyNames(propertyDescriptors));
    knownInterfaces.add(iface);
    interfaceToProxyCache.putInstance(iface,
        InstanceBuilder.ofType(proxyClass)
            .fromClass(proxyClass)
            .withArg(InvocationHandler.class, this)
            .build());
  }
  return interfaceToProxyCache.getInstance(iface);
}

/**
 * Backing implementation for {@link PipelineOptions#cloneAs(Class)}. Performs a deep copy by
 * round-tripping the proxy through JSON, then re-registers every known interface on the clone
 * so it exposes the same views as the original.
 *
 * @return A copy of the PipelineOptions.
 */
synchronized <T extends PipelineOptions> T cloneAs(Object proxy, Class<T> iface) {
  PipelineOptions clonedOptions;
  try {
    clonedOptions = MAPPER.readValue(MAPPER.writeValueAsBytes(proxy), PipelineOptions.class);
  } catch (IOException e) {
    throw new IllegalStateException("Failed to serialize the pipeline options to JSON.", e);
  }
  for (Class<? extends PipelineOptions> knownIface : knownInterfaces) {
    clonedOptions.as(knownIface);
  }
  return clonedOptions.as(iface);
}

/**
 * Returns true if the other object is a ProxyInvocationHandler or is a Proxy object and has the
 * same ProxyInvocationHandler as this.
 *
 * @param obj The object to compare against this.
 * @return true iff the other object is a ProxyInvocationHandler or is a Proxy object and has the
 *     same ProxyInvocationHandler as this.
*/
@Override
public boolean equals(Object obj) {
  // Identity semantics: equal only to itself, or to a proxy whose handler is this instance.
  return obj != null
      && ((obj instanceof ProxyInvocationHandler && this == obj)
          || (Proxy.isProxyClass(obj.getClass()) && this == Proxy.getInvocationHandler(obj)));
}

/**
 * Each instance of this ProxyInvocationHandler is unique and has a random hash code.
 *
 * @return A hash code that was generated randomly.
 */
@Override
public int hashCode() {
  return hashCode;
}

/**
 * Returns a map of properties which correspond to {@link RuntimeValueProvider}.
 */
public Map<String, Map<String, Object>> outputRuntimeOptions(PipelineOptions options) {
  Set<PipelineOptionSpec> optionSpecs = PipelineOptionsReflector.getOptionSpecs(knownInterfaces);
  Map<String, Map<String, Object>> properties = Maps.newHashMap();
  for (PipelineOptionSpec spec : optionSpecs) {
    if (spec.getGetterMethod().getReturnType().equals(ValueProvider.class)) {
      Object vp = invoke(options, spec.getGetterMethod(), null);
      if (((ValueProvider) vp).isAccessible()) {
        // Accessible providers already have a concrete value; only the runtime-resolved
        // ones are reported here.
        continue;
      }
      Map<String, Object> property = Maps.newHashMap();
      // Record the ValueProvider's type argument (the eventual value type) for this property.
      property.put("type", ((ParameterizedType) spec.getGetterMethod()
          .getGenericReturnType()).getActualTypeArguments()[0]);
      properties.put(spec.getName(), property);
    }
  }
  return properties;
}

/**
 * Populate display data. See {@link HasDisplayData#populateDisplayData}. All explicitly set
 * pipeline options will be added as display data.
 */
public void populateDisplayData(DisplayData.Builder builder) {
  Set<PipelineOptionSpec> optionSpecs = PipelineOptionsReflector.getOptionSpecs(knownInterfaces);
  Multimap<String, PipelineOptionSpec> optionsMap = buildOptionNameToSpecMap(optionSpecs);
  // Pass 1: options set programmatically on this handler (defaults are skipped).
  for (Map.Entry<String, BoundValue> option : options.entrySet()) {
    BoundValue boundValue = option.getValue();
    if (boundValue.isDefault()) {
      continue;
    }
    // Null values are rendered as the empty string.
    Object value = boundValue.getValue() == null ? "" : boundValue.getValue();
    DisplayData.Type type = DisplayData.inferType(value);
    HashSet<PipelineOptionSpec> specs = new HashSet<>(optionsMap.get(option.getKey()));
    for (PipelineOptionSpec optionSpec : specs) {
      if (!optionSpec.shouldSerialize()) {
        // Options that are excluded for serialization (i.e. those with @JsonIgnore) are also
        // excluded from display data. These options are generally not useful for display.
        continue;
      }
      Class<?> pipelineInterface = optionSpec.getDefiningInterface();
      if (type != null) {
        builder.add(DisplayData.item(option.getKey(), type, value)
            .withNamespace(pipelineInterface));
      } else {
        // Type could not be inferred; fall back to a string rendering.
        builder.add(DisplayData.item(option.getKey(), displayDataString(value))
            .withNamespace(pipelineInterface));
      }
    }
  }
  // Pass 2: options that arrived via deserialization and were not overridden since.
  for (Map.Entry<String, JsonNode> jsonOption : jsonOptions.entrySet()) {
    if (options.containsKey(jsonOption.getKey())) {
      // Option overwritten since deserialization; don't re-write
      continue;
    }
    HashSet<PipelineOptionSpec> specs = new HashSet<>(optionsMap.get(jsonOption.getKey()));
    if (specs.isEmpty()) {
      // No known interface declares this option; attribute it to the marker interface.
      builder.add(DisplayData.item(jsonOption.getKey(), jsonOption.getValue().toString())
          .withNamespace(UnknownPipelineOptions.class));
    } else {
      for (PipelineOptionSpec spec : specs) {
        if (!spec.shouldSerialize()) {
          continue;
        }
        Object value = getValueFromJson(jsonOption.getKey(), spec.getGetterMethod());
        value = value == null ? "" : value;
        DisplayData.Type type = DisplayData.inferType(value);
        if (type != null) {
          builder.add(DisplayData.item(jsonOption.getKey(), type, value)
              .withNamespace(spec.getDefiningInterface()));
        } else {
          builder.add(DisplayData.item(jsonOption.getKey(), displayDataString(value))
              .withNamespace(spec.getDefiningInterface()));
        }
      }
    }
  }
}

/**
 * {@link Object#toString()} wrapper to extract display data values for various types.
*/
private String displayDataString(Object value) {
  checkNotNull(value, "value cannot be null");
  if (!value.getClass().isArray()) {
    return value.toString();
  }
  if (!value.getClass().getComponentType().isPrimitive()) {
    return Arrays.deepToString((Object[]) value);
  }
  // At this point, we have some type of primitive array. Arrays.deepToString(..) requires an
  // Object array, but will unwrap nested primitive arrays.
  String wrapped = Arrays.deepToString(new Object[]{value});
  // Strip the outer "[" and "]" added by wrapping the array in a one-element Object[].
  return wrapped.substring(1, wrapped.length() - 1);
}

/**
 * Marker interface used when the original {@link PipelineOptions} interface is not known at
 * runtime. This can occur if {@link PipelineOptions} are deserialized from JSON.
 *
 * <p>Pipeline authors can ensure {@link PipelineOptions} type information is available at
 * runtime by registering their {@link PipelineOptions options} interfaces. See the "Registration"
 * section of {@link PipelineOptions} documentation.
 */
interface UnknownPipelineOptions extends PipelineOptions {}

/**
 * Construct a mapping from an option name to its {@link PipelineOptions} interface(s)
 * declarations. An option may be declared in multiple interfaces. If it is overridden in a
 * type hierarchy, only the overriding interface will be included.
 */
private Multimap<String, PipelineOptionSpec> buildOptionNameToSpecMap(
    Set<PipelineOptionSpec> props) {
  Multimap<String, PipelineOptionSpec> optionsMap = HashMultimap.create();
  for (PipelineOptionSpec prop : props) {
    optionsMap.put(prop.getName(), prop);
  }
  // Filter out overridden options
  for (Map.Entry<String, Collection<PipelineOptionSpec>> entry : optionsMap.asMap().entrySet()) {
    /* Compare all interfaces for an option pairwise (iface1, iface2) to look for type
    hierarchies. If one is the base-class of the other, remove it from the output and continue
    iterating. This is an N^2 operation per-option, but the number of interfaces defining an
    option should always be small (usually 1).
    */
    List<PipelineOptionSpec> specs = Lists.newArrayList(entry.getValue());
    if (specs.size() < 2) {
      // Only one known implementing interface, no need to check for inheritance
      continue;
    }
    for (int i = 0; i < specs.size() - 1; i++) {
      Class<?> iface1 = specs.get(i).getDefiningInterface();
      for (int j = i + 1; j < specs.size(); j++) {
        Class<?> iface2 = specs.get(j).getDefiningInterface();
        if (iface1.isAssignableFrom(iface2)) {
          // iface1 is a supertype of iface2 -> drop the overridden (base) declaration.
          optionsMap.remove(entry.getKey(), specs.get(i));
          specs.remove(i);
          // Removed element at current "i" index. Set iterators to re-evaluate
          // new "i" element in outer loop.
          i--;
          j = specs.size();
        } else if (iface2.isAssignableFrom(iface1)) {
          optionsMap.remove(entry.getKey(), specs.get(j));
          specs.remove(j);
          // Removed element at current "j" index. Set iterator to re-evaluate
          // new "j" element in inner-loop.
          j--;
        }
      }
    }
  }
  return optionsMap;
}

/**
 * This will output all the currently set values. This is a relatively costly function
 * as it will call {@code toString()} on each object that has been set and format
 * the results in a readable format.
 *
 * @return A pretty printed string representation of this.
 */
@Override
public synchronized String toString() {
  SortedMap<String, Object> sortedOptions = new TreeMap<>();
  // Add the options that we received from deserialization
  sortedOptions.putAll(jsonOptions);
  // Override with any programmatically set options.
  for (Map.Entry<String, BoundValue> entry : options.entrySet()) {
    sortedOptions.put(entry.getKey(), entry.getValue().getValue());
  }
  StringBuilder b = new StringBuilder();
  b.append("Current Settings:\n");
  for (Map.Entry<String, Object> entry : sortedOptions.entrySet()) {
    b.append(" " + entry.getKey() + ": " + entry.getValue() + "\n");
  }
  return b.toString();
}

/**
 * Uses a Jackson {@link ObjectMapper} to attempt type conversion.
 *
 * @param method The method whose return type you would like to return.
 * @param propertyName The name of the property that is being returned.
 * @return An object matching the return type of the method passed in.
 */
private Object getValueFromJson(String propertyName, Method method) {
  try {
    // Convert the stored JsonNode into the getter's (generic) return type.
    JavaType type = MAPPER.getTypeFactory().constructType(method.getGenericReturnType());
    JsonNode jsonNode = jsonOptions.get(propertyName);
    return MAPPER.readValue(jsonNode.toString(), type);
  } catch (IOException e) {
    throw new RuntimeException("Unable to parse representation", e);
  }
}

/**
 * Returns a default value for the method based upon {@code @Default} metadata on the getter
 * to return values. If there is no {@code @Default} annotation on the getter, then a <a
 * href="https://docs.oracle.com/javase/tutorial/java/nutsandbolts/datatypes.html">default</a> as
 * per the Java Language Specification for the expected return type is returned.
 *
 * @param proxy The proxy object for which we are attempting to get the default.
 * @param method The getter method that was invoked.
 * @return The default value from an {@link Default} annotation if present, otherwise a default
 *     value as per the Java Language Specification.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
private Object getDefault(PipelineOptions proxy, Method method) {
  // Getters must be declared with the ValueProvider interface, never a concrete implementation.
  if (method.getReturnType().equals(RuntimeValueProvider.class)) {
    throw new RuntimeException(String.format(
        "Method %s should not have return type "
        + "RuntimeValueProvider, use ValueProvider instead.", method.getName()));
  }
  if (method.getReturnType().equals(StaticValueProvider.class)) {
    throw new RuntimeException(String.format(
        "Method %s should not have return type "
        + "StaticValueProvider, use ValueProvider instead.", method.getName()));
  }
  // First @Default-style annotation that yields a value wins.
  @Nullable Object defaultObject = null;
  for (Annotation annotation : method.getAnnotations()) {
    defaultObject = returnDefaultHelper(annotation, proxy, method);
    if (defaultObject != null) {
      break;
    }
  }
  if (method.getReturnType().equals(ValueProvider.class)) {
    // ValueProvider getters default to a runtime-resolved provider, optionally seeded with the
    // annotation-supplied default.
    String propertyName = gettersToPropertyNames.get(method.getName());
    return defaultObject == null
        ? new RuntimeValueProvider(
            method.getName(), propertyName,
            (Class<? extends PipelineOptions>) method.getDeclaringClass(),
            proxy.getOptionsId())
        : new RuntimeValueProvider(
            method.getName(), propertyName,
            (Class<? extends PipelineOptions>) method.getDeclaringClass(),
            defaultObject, proxy.getOptionsId());
  } else if (defaultObject != null) {
    return defaultObject;
  }
  /*
   * We need to make sure that we return something appropriate for the return type. Thus we return
   * a default value as defined by the JLS.
   */
  return Defaults.defaultValue(method.getReturnType());
}

/**
 * Helper method to return standard Default cases.
 */
@Nullable
private Object returnDefaultHelper(
    Annotation annotation, PipelineOptions proxy, Method method) {
  if (annotation instanceof Default.Class) {
    return ((Default.Class) annotation).value();
  } else if (annotation instanceof Default.String) {
    return ((Default.String) annotation).value();
  } else if (annotation instanceof Default.Boolean) {
    return ((Default.Boolean) annotation).value();
  } else if (annotation instanceof Default.Character) {
    return ((Default.Character) annotation).value();
  } else if (annotation instanceof Default.Byte) {
    return ((Default.Byte) annotation).value();
  } else if (annotation instanceof Default.Short) {
    return ((Default.Short) annotation).value();
  } else if (annotation instanceof Default.Integer) {
    return ((Default.Integer) annotation).value();
  } else if (annotation instanceof Default.Long) {
    return ((Default.Long) annotation).value();
  } else if (annotation instanceof Default.Float) {
    return ((Default.Float) annotation).value();
  } else if (annotation instanceof Default.Double) {
    return ((Default.Double) annotation).value();
  } else if (annotation instanceof Default.Enum) {
    // Resolve the named enum constant against the getter's declared enum return type.
    return Enum.valueOf((Class<Enum>) method.getReturnType(),
        ((Default.Enum) annotation).value());
  } else if (annotation instanceof Default.InstanceFactory) {
    return InstanceBuilder.ofType(((Default.InstanceFactory) annotation).value())
        .build()
        .create(proxy);
  }
  // Not a @Default-family annotation; caller keeps scanning.
  return null;
}

/**
 * Returns a map from the getters method name to the name of the property based upon the passed in
 * {@link PropertyDescriptor}s property descriptors.
 *
 * @param propertyDescriptors A list of {@link PropertyDescriptor}s to use when generating the
 *     map.
 * @return A map of getter method name to property name.
 */
private static Map<String, String> generateGettersToPropertyNames(
    List<PropertyDescriptor> propertyDescriptors) {
  ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
  for (PropertyDescriptor descriptor : propertyDescriptors) {
    if (descriptor.getReadMethod() != null) {
      builder.put(descriptor.getReadMethod().getName(), descriptor.getName());
    }
  }
  return builder.build();
}

/**
 * Returns a map from the setters method name to its matching getters method name based upon the
 * passed in {@link PropertyDescriptor}s property descriptors.
 *
 * @param propertyDescriptors A list of {@link PropertyDescriptor}s to use when generating the
 *     map.
 * @return A map of setter method name to getter method name.
 */
private static Map<String, String> generateSettersToPropertyNames(
    List<PropertyDescriptor> propertyDescriptors) {
  ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
  for (PropertyDescriptor descriptor : propertyDescriptors) {
    if (descriptor.getWriteMethod() != null) {
      builder.put(descriptor.getWriteMethod().getName(), descriptor.getName());
    }
  }
  return builder.build();
}

/** Jackson serializer: emits {"options": {...}, "display_data": [...]}. */
static class Serializer extends JsonSerializer<PipelineOptions> {
  @Override
  public void serialize(PipelineOptions value, JsonGenerator jgen, SerializerProvider provider)
      throws IOException, JsonProcessingException {
    ProxyInvocationHandler handler = (ProxyInvocationHandler) Proxy.getInvocationHandler(value);
    synchronized (handler) {
      // We first filter out any properties that have been modified since
      // the last serialization of this PipelineOptions and then verify that
      // they are all serializable.
      Map<String, BoundValue> filteredOptions = Maps.newHashMap(handler.options);
      removeIgnoredOptions(handler.knownInterfaces, filteredOptions);
      ensureSerializable(handler.knownInterfaces, filteredOptions);
      // Now we create the map of serializable options by taking the original
      // set of serialized options (if any) and updating them with any properties
      // instances that have been modified since the previous serialization.
      Map<String, Object> serializableOptions =
          Maps.<String, Object>newHashMap(handler.jsonOptions);
      for (Map.Entry<String, BoundValue> entry : filteredOptions.entrySet()) {
        serializableOptions.put(entry.getKey(), entry.getValue().getValue());
      }
      jgen.writeStartObject();
      jgen.writeFieldName("options");
      jgen.writeObject(serializableOptions);
      List<Map<String, Object>> serializedDisplayData = Lists.newArrayList();
      DisplayData displayData = DisplayData.from(value);
      for (DisplayData.Item<?> item : displayData.items()) {
        @SuppressWarnings("unchecked")
        Map<String, Object> serializedItem = MAPPER.convertValue(item, Map.class);
        serializedDisplayData.add(serializedItem);
      }
      jgen.writeFieldName("display_data");
      jgen.writeObject(serializedDisplayData);
      jgen.writeEndObject();
    }
  }

  /**
   * We remove all properties within the passed in options where there getter is annotated with
   * {@link JsonIgnore @JsonIgnore} from the passed in options using the passed in interfaces.
   */
  private void removeIgnoredOptions(
      Set<Class<? extends PipelineOptions>> interfaces, Map<String, ?> options) {
    // Find all the method names that are annotated with JSON ignore.
    Set<String> jsonIgnoreMethodNames = FluentIterable.from(
        ReflectHelpers.getClosureOfMethodsOnInterfaces(interfaces))
        .filter(JsonIgnorePredicate.INSTANCE).transform(new Function<Method, String>() {
          @Override
          public String apply(Method input) {
            return input.getName();
          }
        }).toSet();
    // Remove all options that have the same method name as the descriptor.
    for (PropertyDescriptor descriptor
        : PipelineOptionsFactory.getPropertyDescriptors(interfaces)) {
      if (jsonIgnoreMethodNames.contains(descriptor.getReadMethod().getName())) {
        options.remove(descriptor.getName());
      }
    }
  }

  /**
   * We use an {@link ObjectMapper} to verify that the passed in options are serializable
   * and deserializable.
   */
  private void ensureSerializable(Set<Class<? extends PipelineOptions>> interfaces,
      Map<String, BoundValue> options) throws IOException {
    // Construct a map from property name to the return type of the getter.
    Map<String, Type> propertyToReturnType = Maps.newHashMap();
    for (PropertyDescriptor descriptor
        : PipelineOptionsFactory.getPropertyDescriptors(interfaces)) {
      if (descriptor.getReadMethod() != null) {
        propertyToReturnType.put(descriptor.getName(),
            descriptor.getReadMethod().getGenericReturnType());
      }
    }
    // Attempt to serialize and deserialize each property.
    for (Map.Entry<String, BoundValue> entry : options.entrySet()) {
      try {
        String serializedValue = MAPPER.writeValueAsString(entry.getValue().getValue());
        JavaType type = MAPPER.getTypeFactory()
            .constructType(propertyToReturnType.get(entry.getKey()));
        MAPPER.readValue(serializedValue, type);
      } catch (Exception e) {
        throw new IOException(String.format(
            "Failed to serialize and deserialize property '%s' with value '%s'",
            entry.getKey(), entry.getValue().getValue()), e);
      }
    }
  }
}

/** Jackson deserializer: rebuilds a PipelineOptions proxy from the "options" object. */
static class Deserializer extends JsonDeserializer<PipelineOptions> {
  @Override
  public PipelineOptions deserialize(JsonParser jp, DeserializationContext ctxt)
      throws IOException, JsonProcessingException {
    ObjectNode objectNode = (ObjectNode) jp.readValueAsTree();
    ObjectNode optionsNode = (ObjectNode) objectNode.get("options");
    // Keep the raw JsonNodes; values are converted lazily on first getter access.
    Map<String, JsonNode> fields = Maps.newHashMap();
    for (Iterator<Map.Entry<String, JsonNode>> iterator = optionsNode.fields();
        iterator.hasNext(); ) {
      Map.Entry<String, JsonNode> field = iterator.next();
      fields.put(field.getKey(), field.getValue());
    }
    PipelineOptions options =
        new ProxyInvocationHandler(Maps.<String, BoundValue>newHashMap(), fields)
            .as(PipelineOptions.class);
    ValueProvider.RuntimeValueProvider.setRuntimeOptions(options);
    return options;
  }
}
}
package com.usst.app.store.advertise.action;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;

import com.usst.app.component.file.service.FileUploadService;
import com.usst.app.component.serialNumber.service.SerialNumberService;
import com.usst.app.good.goodType.model.GoodType;
import com.usst.app.good.goodType.service.GoodTypeService;
import com.usst.app.store.advertise.model.Advertise;
import com.usst.app.store.advertise.service.AdvertiseService;
import com.usst.app.system.user.model.SysUser;
import com.usst.code.struct.BaseAction;
import com.usst.code.util.PageInfo;

/**
 * Struts-style action handling CRUD operations for store advertisements:
 * paged JSON listing, edit-form preparation, create/update and deletion.
 * Collaborators (services) are injected via the setters below.
 */
public class AdvertiseAction extends BaseAction {
  private static final long serialVersionUID = -1299499640055184711L;
  private static final Logger logger = Logger.getLogger(AdvertiseAction.class);

  private AdvertiseService advertiseService;
  private Advertise advertise;
  // Comma-separated ids of uploaded images to re-parent onto the saved advertise.
  private String imgIdStr;
  private FileUploadService fileUploadService;
  private SerialNumberService serialNumberService;
  private List<GoodType> goodTypeList;
  private GoodTypeService goodTypeService;
  private GoodType goodType;
  private List<GoodType> goodTypeList1;

  /**
   * Pages through advertisements and exposes the result via {@code jsonMap} as
   * {@code {"total": n, "rows": [...]}}. Always returns "success"; failures are
   * logged and produce an empty row set.
   */
  public String listJson() {
    logger.info("start list advertise!");
    List<Advertise> resultList = null;
    int totalRows = 0;
    try {
      PageInfo pageInfo = createPageInfo();
      if (this.advertise == null) {
        this.advertise = new Advertise();
      }
      resultList = this.advertiseService.pageList(pageInfo, this.advertise, true);
      totalRows = pageInfo.getCount();
    } catch (Exception e) {
      logger.error("error occur when list advertise!", e);
    }
    if (resultList == null) {
      // Was a raw ArrayList; parameterize to match the declared element type.
      resultList = new ArrayList<Advertise>();
    }
    this.jsonMap = new HashMap();
    this.jsonMap.put("total", Integer.valueOf(totalRows));
    this.jsonMap.put("rows", resultList);
    logger.info("finish to list all data!");
    return "success";
  }

  /** Forwards to the advertise list view. */
  public String list() {
    logger.info("start to query Advertise information");
    return "list_advertise";
  }

  /**
   * Prepares the edit form: loads the level-1 good-type tree and either initializes
   * a fresh advertise (state "c") or loads the existing one by id.
   */
  public String edit() throws Exception {
    logger.info("start to query Advertise information");
    if (this.goodType == null) {
      this.goodType = new GoodType();
    }
    GoodType goodtype = new GoodType();
    goodtype.setLevel(Integer.valueOf(1));
    this.goodTypeList = this.goodTypeService.queryTypeTree(goodtype);
    SysUser loginMan = getSessionUserInfo();
    if (this.advertise == null) {
      this.advertise = new Advertise();
    }
    if (StringUtils.isBlank(this.advertise.getId())) {
      // No id -> creating: seed state and init as a new model.
      this.advertise.setState("c");
      initModel(true, this.advertise, loginMan);
    } else {
      this.advertise = (Advertise) this.advertiseService.getModel(this.advertise.getId());
      initModel(false, this.advertise, loginMan);
    }
    return "edit_advertise";
  }

  /**
   * Creates or updates the advertise (insert when no id is set), then re-parents any
   * uploaded picture/image files onto it and writes a success/failure flag to the response.
   */
  public void save() {
    logger.info("start to update advertise information");
    HttpServletRequest request = getRequest();
    String picId = request.getParameter("picId");
    try {
      if (this.advertise == null) {
        this.advertise = new Advertise();
      }
      // Both branches of the original duplicated this assignment; hoisted once.
      this.advertise.setPicId(picId);
      if (StringUtils.isBlank(this.advertise.getId())) {
        // New record: generate id and serial code, stamp timestamps, insert.
        this.advertise.setId(this.advertiseService.makeId());
        this.advertise.setCode(this.serialNumberService.getSerialNumberByDate("AD", "advertise"));
        this.advertise.setCreateTime(new Date());
        this.advertise.setModifyTime(new Date());
        this.advertiseService.insertAdvertise(this.advertise);
      } else {
        this.advertise.setModifyTime(new Date());
        this.advertiseService.update(this.advertise);
        if (StringUtils.isNotBlank(picId)) {
          this.fileUploadService.updateAppId(picId, this.advertise.getId());
        }
      }
      if (StringUtils.isNotBlank(this.imgIdStr)) {
        for (String imgId : this.imgIdStr.split(",")) {
          this.fileUploadService.updateAppId(imgId, this.advertise.getId());
        }
        if (StringUtils.isNotBlank(picId)) {
          // NOTE(review): binds picId to the advertise's picId (== picId) rather than to
          // the advertise id as the branches above do — confirm intended owner id.
          this.fileUploadService.updateAppId(picId, this.advertise.getPicId());
        }
      }
      responseFlag(true);
    } catch (Exception e) {
      // Was logger.info(...) plus e.printStackTrace(): log at ERROR with the cause attached
      // so the stack trace lands in the application log instead of stderr.
      logger.error("error occur when save advertise information", e);
      responseFlag(false);
    }
    logger.info("finish to save advertise information");
  }

  /** Deletes the advertise and detaches its uploaded files, flagging the outcome. */
  public void delete() {
    logger.info("start delete a good:" + this.advertise.getId());
    try {
      this.fileUploadService.cleanAppId(this.advertise.getId());
      this.advertiseService.delete(this.advertise.getId());
      responseFlag(true);
    } catch (Exception e) {
      responseFlag(false);
      logger.error("error occur when delete a advertise!", e);
    }
  }

  // Legacy accessor pair (lower-case "service") kept for backward compatibility with
  // existing wiring/configuration.
  public AdvertiseService getAdvertiseservice() {
    return this.advertiseService;
  }

  public void setAdvertiseservice(AdvertiseService advertiseservice) {
    this.advertiseService = advertiseservice;
  }

  public Advertise getAdvertise() {
    return this.advertise;
  }

  public void setAdvertise(Advertise advertise) {
    this.advertise = advertise;
  }

  public static long getSerialVersionUID() {
    return -1299499640055184711L;
  }

  public static Logger getLogger() {
    return logger;
  }

  public String getImgIdStr() {
    return this.imgIdStr;
  }

  public void setImgIdStr(String imgIdStr) {
    this.imgIdStr = imgIdStr;
  }

  public void setAdvertiseService(AdvertiseService advertiseService) {
    this.advertiseService = advertiseService;
  }

  public SerialNumberService getSerialNumberService() {
    return this.serialNumberService;
  }

  public void setSerialNumberService(SerialNumberService serialNumberService) {
    this.serialNumberService = serialNumberService;
  }

  public List<GoodType> getGoodTypeList() {
    return this.goodTypeList;
  }

  public void setGoodTypeList(List<GoodType> goodTypeList) {
    this.goodTypeList = goodTypeList;
  }

  public GoodTypeService getGoodTypeService() {
    return this.goodTypeService;
  }

  public void setGoodTypeService(GoodTypeService goodTypeService) {
    this.goodTypeService = goodTypeService;
  }

  public GoodType getGoodType() {
    return this.goodType;
  }

  public void setGoodType(GoodType goodType) {
    this.goodType = goodType;
  }

  public List<GoodType> getGoodTypeList1() {
    return this.goodTypeList1;
  }

  public void setGoodTypeList1(List<GoodType> goodTypeList1) {
    this.goodTypeList1 = goodTypeList1;
  }

  public AdvertiseService getAdvertiseService() {
    return this.advertiseService;
  }

  public FileUploadService getFileUploadService() {
    return this.fileUploadService;
  }

  public void setFileUploadService(FileUploadService fileUploadService) {
    this.fileUploadService = fileUploadService;
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.compute.model;

/**
 * Represents a Target SSL Proxy resource. A target SSL proxy is a component of a SSL Proxy load
 * balancer. Global forwarding rules reference a target SSL proxy, and the target proxy then
 * references an external backend service. For more information, read Using Target Proxies.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
// NOTE(review): machine-generated model class — prefer regenerating from the service
// definition over hand edits; any manual change will be lost on the next generation.
@SuppressWarnings("javadoc")
public final class TargetSslProxy extends com.google.api.client.json.GenericJson {

  /**
   * URL of a certificate map that identifies a certificate map associated with the given target
   * proxy. This field can only be set for global target proxies. If set, sslCertificates will be
   * ignored.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String certificateMap;

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String creationTimestamp;

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String description;

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.math.BigInteger id;

  /**
   * [Output Only] Type of the resource. Always compute#targetSslProxy for target SSL proxies.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * Name of the resource. Provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Specifies the type of proxy header to append before sending data to the backend, either NONE or
   * PROXY_V1. The default is NONE.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String proxyHeader;

  /**
   * [Output Only] Server-defined URL for the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;

  /**
   * URL to the BackendService resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String service;

  /**
   * URLs to SslCertificate resources that are used to authenticate connections to Backends. At
   * least one SSL certificate must be specified. Currently, you may specify up to 15 SSL
   * certificates. sslCertificates do not apply when the load balancing scheme is set to
   * INTERNAL_SELF_MANAGED.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> sslCertificates;

  /**
   * URL of SslPolicy resource that will be associated with the TargetSslProxy resource. If not set,
   * the TargetSslProxy resource will not have any SSL policy configured.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sslPolicy;

  /**
   * URL of a certificate map that identifies a certificate map associated with the given target
   * proxy. This field can only be set for global target proxies. If set, sslCertificates will be
   * ignored.
   * @return value or {@code null} for none
   */
  public java.lang.String getCertificateMap() {
    return certificateMap;
  }

  /**
   * URL of a certificate map that identifies a certificate map associated with the given target
   * proxy. This field can only be set for global target proxies. If set, sslCertificates will be
   * ignored.
   * @param certificateMap certificateMap or {@code null} for none
   */
  public TargetSslProxy setCertificateMap(java.lang.String certificateMap) {
    this.certificateMap = certificateMap;
    return this;
  }

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * @return value or {@code null} for none
   */
  public java.lang.String getCreationTimestamp() {
    return creationTimestamp;
  }

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * @param creationTimestamp creationTimestamp or {@code null} for none
   */
  public TargetSslProxy setCreationTimestamp(java.lang.String creationTimestamp) {
    this.creationTimestamp = creationTimestamp;
    return this;
  }

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * @param description description or {@code null} for none
   */
  public TargetSslProxy setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * @return value or {@code null} for none
   */
  public java.math.BigInteger getId() {
    return id;
  }

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * @param id id or {@code null} for none
   */
  public TargetSslProxy setId(java.math.BigInteger id) {
    this.id = id;
    return this;
  }

  /**
   * [Output Only] Type of the resource. Always compute#targetSslProxy for target SSL proxies.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * [Output Only] Type of the resource. Always compute#targetSslProxy for target SSL proxies.
   * @param kind kind or {@code null} for none
   */
  public TargetSslProxy setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * Name of the resource. Provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Name of the resource. Provided by the client when the resource is created. The name must be
   * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
   * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
   * character must be a lowercase letter, and all following characters must be a dash, lowercase
   * letter, or digit, except the last character, which cannot be a dash.
   * @param name name or {@code null} for none
   */
  public TargetSslProxy setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * Specifies the type of proxy header to append before sending data to the backend, either NONE or
   * PROXY_V1. The default is NONE.
   * @return value or {@code null} for none
   */
  public java.lang.String getProxyHeader() {
    return proxyHeader;
  }

  /**
   * Specifies the type of proxy header to append before sending data to the backend, either NONE or
   * PROXY_V1. The default is NONE.
   * @param proxyHeader proxyHeader or {@code null} for none
   */
  public TargetSslProxy setProxyHeader(java.lang.String proxyHeader) {
    this.proxyHeader = proxyHeader;
    return this;
  }

  /**
   * [Output Only] Server-defined URL for the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelfLink() {
    return selfLink;
  }

  /**
   * [Output Only] Server-defined URL for the resource.
   * @param selfLink selfLink or {@code null} for none
   */
  public TargetSslProxy setSelfLink(java.lang.String selfLink) {
    this.selfLink = selfLink;
    return this;
  }

  /**
   * URL to the BackendService resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getService() {
    return service;
  }

  /**
   * URL to the BackendService resource.
   * @param service service or {@code null} for none
   */
  public TargetSslProxy setService(java.lang.String service) {
    this.service = service;
    return this;
  }

  /**
   * URLs to SslCertificate resources that are used to authenticate connections to Backends. At
   * least one SSL certificate must be specified. Currently, you may specify up to 15 SSL
   * certificates.
sslCertificates do not apply when the load balancing scheme is set to * INTERNAL_SELF_MANAGED. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getSslCertificates() { return sslCertificates; } /** * URLs to SslCertificate resources that are used to authenticate connections to Backends. At * least one SSL certificate must be specified. Currently, you may specify up to 15 SSL * certificates. sslCertificates do not apply when the load balancing scheme is set to * INTERNAL_SELF_MANAGED. * @param sslCertificates sslCertificates or {@code null} for none */ public TargetSslProxy setSslCertificates(java.util.List<java.lang.String> sslCertificates) { this.sslCertificates = sslCertificates; return this; } /** * URL of SslPolicy resource that will be associated with the TargetSslProxy resource. If not set, * the TargetSslProxy resource will not have any SSL policy configured. * @return value or {@code null} for none */ public java.lang.String getSslPolicy() { return sslPolicy; } /** * URL of SslPolicy resource that will be associated with the TargetSslProxy resource. If not set, * the TargetSslProxy resource will not have any SSL policy configured. * @param sslPolicy sslPolicy or {@code null} for none */ public TargetSslProxy setSslPolicy(java.lang.String sslPolicy) { this.sslPolicy = sslPolicy; return this; } @Override public TargetSslProxy set(String fieldName, Object value) { return (TargetSslProxy) super.set(fieldName, value); } @Override public TargetSslProxy clone() { return (TargetSslProxy) super.clone(); } }
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.stepsmetrics; import java.util.List; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaInteger; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import 
org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;

/*
 * Created on 30-06-2008
 *
 */

/**
 * Metadata for the "Steps Metrics" step, which emits runtime metrics (lines input/output/read/
 * updated/written/errors and elapsed time) for a configurable list of other steps in the
 * transformation. An empty output-field name suppresses the corresponding output field.
 */
public class StepsMetricsMeta extends BaseStepMeta implements StepMetaInterface {
  private static Class<?> PKG = StepsMetrics.class; // for i18n purposes, needed by Translator2!!

  /** Localized yes/no labels shown in the dialog; parallel to {@link #RequiredStepsCode}. */
  public static final String[] RequiredStepsDesc = new String[] {
    BaseMessages.getString( PKG, "System.Combo.No" ), BaseMessages.getString( PKG, "System.Combo.Yes" ) };
  /** Persisted yes/no codes; parallel to {@link #RequiredStepsDesc}. */
  public static final String[] RequiredStepsCode = new String[] { "N", "Y" };

  public static final String YES = "Y";
  public static final String NO = "N";

  /** by which steps to display? */
  private String[] stepName;
  /** Copy number (as a string) of each monitored step; parallel to {@link #stepName}. */
  private String[] stepCopyNr;
  /** Array of boolean values as string ("Y"/"N"), indicating if a step is required. */
  private String[] stepRequired;

  // Output field names; an empty or null name means "do not emit that field".
  private String stepnamefield;
  private String stepidfield;
  private String steplinesinputfield;
  private String steplinesoutputfield;
  private String steplinesreadfield;
  private String steplinesupdatedfield;
  private String steplineswrittentfield;
  private String steplineserrorsfield;
  private String stepsecondsfield;

  public StepsMetricsMeta() {
    super(); // allocate BaseStepMeta
  }

  public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
    readData( stepnode );
  }

  @Override
  public Object clone() {
    StepsMetricsMeta retval = (StepsMetricsMeta) super.clone();
    int nrfields = stepName.length;
    retval.allocate( nrfields );
    System.arraycopy( stepName, 0, retval.stepName, 0, nrfields );
    System.arraycopy( stepCopyNr, 0, retval.stepCopyNr, 0, nrfields );
    System.arraycopy( stepRequired, 0, retval.stepRequired, 0, nrfields );
    return retval;
  }

  /**
   * (Re)allocates the three parallel per-step arrays to the given size.
   *
   * @param nrfields number of monitored steps
   */
  public void allocate( int nrfields ) {
    stepName = new String[nrfields];
    stepCopyNr = new String[nrfields];
    stepRequired = new String[nrfields];
  }

  /**
   * @return Returns the stepName.
   */
  public String[] getStepName() {
    return stepName;
  }

  /**
   * @return Returns the stepCopyNr.
   */
  public String[] getStepCopyNr() {
    return stepCopyNr;
  }

  /**
   * @param stepName
   *          The stepName to set.
   */
  public void setStepName( String[] stepName ) {
    this.stepName = stepName;
  }

  /**
   * @param stepCopyNr
   *          The stepCopyNr to set.
   */
  public void setStepCopyNr( String[] stepCopyNr ) {
    this.stepCopyNr = stepCopyNr;
  }

  /**
   * Maps a persisted yes/no code ("Y"/"N") to its localized description. Unknown or null input
   * maps to the "No" description.
   *
   * @param tt yes/no code, may be null
   * @return localized description, never null
   */
  public String getRequiredStepsDesc( String tt ) {
    if ( tt == null ) {
      return RequiredStepsDesc[0];
    }
    if ( tt.equals( RequiredStepsCode[1] ) ) {
      return RequiredStepsDesc[1];
    } else {
      return RequiredStepsDesc[0];
    }
  }

  /**
   * Builds the output row layout. Every configured (non-empty, after variable substitution)
   * field name contributes one value meta; all others are skipped.
   */
  public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
    r.clear();
    String stepname = space.environmentSubstitute( stepnamefield );
    if ( !Const.isEmpty( stepname ) ) {
      ValueMetaInterface v = new ValueMetaString( stepname );
      v.setOrigin( name );
      r.addValueMeta( v );
    }
    String stepid = space.environmentSubstitute( stepidfield );
    if ( !Const.isEmpty( stepid ) ) {
      // Historically a String field that is nevertheless sized like an integer; kept as-is
      // to preserve the existing row layout.
      ValueMetaInterface v = new ValueMetaString( stepid );
      v.setOrigin( name );
      v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 );
      r.addValueMeta( v );
    }
    addIntegerFieldIfSet( r, name, space, steplinesinputfield );
    addIntegerFieldIfSet( r, name, space, steplinesoutputfield );
    addIntegerFieldIfSet( r, name, space, steplinesreadfield );
    addIntegerFieldIfSet( r, name, space, steplinesupdatedfield );
    addIntegerFieldIfSet( r, name, space, steplineswrittentfield );
    addIntegerFieldIfSet( r, name, space, steplineserrorsfield );
    String stepseconds = space.environmentSubstitute( stepsecondsfield );
    if ( !Const.isEmpty( stepseconds ) ) {
      // Note: no explicit length here, matching the historical output of this field.
      ValueMetaInterface v = new ValueMetaInteger( stepseconds );
      v.setOrigin( name );
      r.addValueMeta( v );
    }
  }

  /**
   * Adds an Integer value meta (default integer length) for the variable-substituted field name,
   * when that name is non-empty.
   */
  private static void addIntegerFieldIfSet( RowMetaInterface r, String origin, VariableSpace space,
    String fieldname ) {
    String substituted = space.environmentSubstitute( fieldname );
    if ( !Const.isEmpty( substituted ) ) {
      ValueMetaInterface v = new ValueMetaInteger( substituted );
      v.setOrigin( origin );
      v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 );
      r.addValueMeta( v );
    }
  }

  /** Reads this step's settings from the XML node produced by {@link #getXML()}. */
  private void readData( Node stepnode ) throws KettleXMLException {
    try {
      Node steps = XMLHandler.getSubNode( stepnode, "steps" );
      int nrsteps = XMLHandler.countNodes( steps, "step" );

      allocate( nrsteps );

      for ( int i = 0; i < nrsteps; i++ ) {
        Node fnode = XMLHandler.getSubNodeByNr( steps, "step", i );
        stepName[i] = XMLHandler.getTagValue( fnode, "name" );
        stepCopyNr[i] = XMLHandler.getTagValue( fnode, "copyNr" );
        stepRequired[i] = XMLHandler.getTagValue( fnode, "stepRequired" );
      }
      stepnamefield = XMLHandler.getTagValue( stepnode, "stepnamefield" );
      stepidfield = XMLHandler.getTagValue( stepnode, "stepidfield" );
      steplinesinputfield = XMLHandler.getTagValue( stepnode, "steplinesinputfield" );
      steplinesoutputfield = XMLHandler.getTagValue( stepnode, "steplinesoutputfield" );
      steplinesreadfield = XMLHandler.getTagValue( stepnode, "steplinesreadfield" );
      steplinesupdatedfield = XMLHandler.getTagValue( stepnode, "steplinesupdatedfield" );
      steplineswrittentfield = XMLHandler.getTagValue( stepnode, "steplineswrittentfield" );
      steplineserrorsfield = XMLHandler.getTagValue( stepnode, "steplineserrorsfield" );
      stepsecondsfield = XMLHandler.getTagValue( stepnode, "stepsecondsfield" );
    } catch ( Exception e ) {
      throw new KettleXMLException( "Unable to load step info from XML", e );
    }
  }

  /** Serializes this step's settings to XML; the inverse of {@link #readData(Node)}. */
  public String getXML() {
    StringBuilder retval = new StringBuilder();
    retval.append( "    <steps>" ).append( Const.CR );
    for ( int i = 0; i < stepName.length; i++ ) {
      retval.append( "      <step>" ).append( Const.CR );
      retval.append( "        " ).append( XMLHandler.addTagValue( "name", stepName[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "copyNr", stepCopyNr[i] ) );
      retval.append( "        " ).append( XMLHandler.addTagValue( "stepRequired", stepRequired[i] ) );
      retval.append( "      </step>" ).append( Const.CR );
    }
    retval.append( "      </steps>" ).append( Const.CR );
    retval.append( "    " ).append( XMLHandler.addTagValue( "stepnamefield", stepnamefield ) );
    retval.append( "    " ).append( XMLHandler.addTagValue( "stepidfield", stepidfield ) );
    retval.append( "    " ).append( XMLHandler.addTagValue( "steplinesinputfield", steplinesinputfield ) );
    retval.append( "    " ).append( XMLHandler.addTagValue( "steplinesoutputfield", steplinesoutputfield ) );
    retval.append( "    " ).append( XMLHandler.addTagValue( "steplinesreadfield", steplinesreadfield ) );
    retval.append( "    " ).append( XMLHandler.addTagValue( "steplinesupdatedfield", steplinesupdatedfield ) );
    retval.append( "    " ).append( XMLHandler.addTagValue( "steplineswrittentfield", steplineswrittentfield ) );
    retval.append( "    " ).append( XMLHandler.addTagValue( "steplineserrorsfield", steplineserrorsfield ) );
    retval.append( "    " ).append( XMLHandler.addTagValue( "stepsecondsfield", stepsecondsfield ) );
    return retval.toString();
  }

  public void setDefault() {
    int nrsteps = 0;

    allocate( nrsteps );

    for ( int i = 0; i < nrsteps; i++ ) {
      stepName[i] = "step" + i;
      stepCopyNr[i] = "CopyNr" + i;
      stepRequired[i] = NO;
    }
    stepnamefield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Stepname" );
    stepidfield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Stepid" );
    steplinesinputfield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Linesinput" );
    steplinesoutputfield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Linesoutput" );
    steplinesreadfield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Linesread" );
    steplinesupdatedfield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Linesupdated" );
    steplineswrittentfield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Lineswritten" );
    steplineserrorsfield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Lineserrors" );
    stepsecondsfield = BaseMessages.getString( PKG, "StepsMetricsDialog.Label.Time" );
  }

  /**
   * Sets the required flags from localized descriptions (e.g. dialog input), converting each to
   * its persisted "Y"/"N" code.
   *
   * <p>Fix: the array is reallocated to the input length. The previous implementation wrote into
   * the existing {@code stepRequired} array and threw {@link ArrayIndexOutOfBoundsException}
   * (or {@link NullPointerException}) when the input was longer than the current allocation.
   *
   * @param stepRequiredin localized yes/no descriptions, one per monitored step
   */
  public void setStepRequired( String[] stepRequiredin ) {
    this.stepRequired = new String[stepRequiredin.length];
    for ( int i = 0; i < stepRequiredin.length; i++ ) {
      this.stepRequired[i] = getRequiredStepsCode( stepRequiredin[i] );
    }
  }

  /**
   * Maps a localized yes/no description to the persisted "Y"/"N" code. Unknown or null input
   * maps to "N".
   *
   * @param tt localized description, may be null
   * @return "Y" or "N", never null
   */
  public String getRequiredStepsCode( String tt ) {
    if ( tt == null ) {
      return RequiredStepsCode[0];
    }
    if ( tt.equals( RequiredStepsDesc[1] ) ) {
      return RequiredStepsCode[1];
    } else {
      return RequiredStepsCode[0];
    }
  }

  public String[] getStepRequired() {
    return stepRequired;
  }

  public String getStepNameFieldName() {
    return this.stepnamefield;
  }

  public void setStepNameFieldName( String stepnamefield ) {
    this.stepnamefield = stepnamefield;
  }

  public String getStepIdFieldName() {
    return this.stepidfield;
  }

  public void setStepIdFieldName( String stepidfield ) {
    this.stepidfield = stepidfield;
  }

  public String getStepLinesInputFieldName() {
    return this.steplinesinputfield;
  }

  public void setStepLinesInputFieldName( String steplinesinputfield ) {
    this.steplinesinputfield = steplinesinputfield;
  }

  public String getStepLinesOutputFieldName() {
    return this.steplinesoutputfield;
  }

  public void setStepLinesOutputFieldName( String steplinesoutputfield ) {
    this.steplinesoutputfield = steplinesoutputfield;
  }

  public String getStepLinesReadFieldName() {
    return this.steplinesreadfield;
  }

  public void setStepLinesReadFieldName( String steplinesreadfield ) {
    this.steplinesreadfield = steplinesreadfield;
  }

  public String getStepLinesWrittenFieldName() {
    return this.steplineswrittentfield;
  }

  public void setStepLinesWrittenFieldName( String steplineswrittentfield ) {
    this.steplineswrittentfield = steplineswrittentfield;
  }

  public String getStepLinesErrorsFieldName() {
    return this.steplineserrorsfield;
  }

  public String getStepSecondsFieldName() {
    return this.stepsecondsfield;
  }

  public void setStepSecondsFieldName( String fieldname ) {
    this.stepsecondsfield = fieldname;
  }

  public void setStepLinesErrorsFieldName( String steplineserrorsfield ) {
    this.steplineserrorsfield = steplineserrorsfield;
  }

  public String getStepLinesUpdatedFieldName() {
    return this.steplinesupdatedfield;
  }

  public void setStepLinesUpdatedFieldName( String steplinesupdatedfield ) {
    this.steplinesupdatedfield = steplinesupdatedfield;
  }

  /** Reads this step's settings from the repository; the inverse of {@link #saveRep}. */
  public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step,
    List<DatabaseMeta> databases ) throws KettleException {
    try {
      int nrsteps = rep.countNrStepAttributes( id_step, "step_name" );

      allocate( nrsteps );

      for ( int i = 0; i < nrsteps; i++ ) {
        stepName[i] = rep.getStepAttributeString( id_step, i, "step_name" );
        stepCopyNr[i] = rep.getStepAttributeString( id_step, i, "step_CopyNr" );
        stepRequired[i] = rep.getStepAttributeString( id_step, i, "step_required" );
        // Normalize anything that is not exactly "Y" (case-insensitive) to "N".
        if ( !YES.equalsIgnoreCase( stepRequired[i] ) ) {
          stepRequired[i] = NO;
        }
      }
      stepnamefield = rep.getStepAttributeString( id_step, "stepnamefield" );
      stepidfield = rep.getStepAttributeString( id_step, "stepidfield" );
      steplinesinputfield = rep.getStepAttributeString( id_step, "steplinesinputfield" );
      steplinesoutputfield = rep.getStepAttributeString( id_step, "steplinesoutputfield" );
      steplinesreadfield = rep.getStepAttributeString( id_step, "steplinesreadfield" );
      steplineswrittentfield = rep.getStepAttributeString( id_step, "steplineswrittentfield" );
      steplinesupdatedfield = rep.getStepAttributeString( id_step, "steplinesupdatedfield" );
      steplineserrorsfield = rep.getStepAttributeString( id_step, "steplineserrorsfield" );
      stepsecondsfield = rep.getStepAttributeString( id_step, "stepsecondsfield" );
    } catch ( Exception e ) {
      throw new KettleException( "Unexpected error reading step information from the repository", e );
    }
  }

  /** Persists this step's settings to the repository; the inverse of {@link #readRep}. */
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation,
    ObjectId id_step ) throws KettleException {
    try {
      for ( int i = 0; i < stepName.length; i++ ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "step_name", stepName[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "step_CopyNr", stepCopyNr[i] );
        rep.saveStepAttribute( id_transformation, id_step, i, "step_required", stepRequired[i] );
      }
      rep.saveStepAttribute( id_transformation, id_step, "stepnamefield", stepnamefield );
      rep.saveStepAttribute( id_transformation, id_step, "stepidfield", stepidfield );
      rep.saveStepAttribute( id_transformation, id_step, "steplinesinputfield", steplinesinputfield );
      rep.saveStepAttribute( id_transformation, id_step, "steplinesoutputfield", steplinesoutputfield );
      rep.saveStepAttribute( id_transformation, id_step, "steplinesreadfield", steplinesreadfield );
      rep.saveStepAttribute( id_transformation, id_step, "steplineswrittentfield", steplineswrittentfield );
      rep.saveStepAttribute( id_transformation, id_step, "steplinesupdatedfield", steplinesupdatedfield );
      rep.saveStepAttribute( id_transformation, id_step, "steplineserrorsfield", steplineserrorsfield );
      rep.saveStepAttribute( id_transformation, id_step, "stepsecondsfield", stepsecondsfield );
    } catch ( Exception e ) {
      throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e );
    }
  }

  /**
   * Design-time validation. This step is a metrics source: receiving input fields is flagged as
   * an error, while having no incoming fields is the expected (OK) configuration.
   */
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
    CheckResult cr;
    if ( prev == null || prev.size() == 0 ) {
      cr =
        new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString(
          PKG, "StepsMetricsMeta.CheckResult.NotReceivingFields" ), stepMeta );
      remarks.add( cr );
      if ( stepName.length > 0 ) {
        cr =
          new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString(
            PKG, "StepsMetricsMeta.CheckResult.AllStepsFound" ), stepMeta );
      } else {
        cr =
          new CheckResult( CheckResult.TYPE_RESULT_WARNING, BaseMessages.getString(
            PKG, "StepsMetricsMeta.CheckResult.NoStepsEntered" ), stepMeta );
      }
      remarks.add( cr );
    } else {
      cr =
        new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(
          PKG, "StepsMetricsMeta.CheckResult.ReceivingFields" ), stepMeta );
      remarks.add( cr );
    }

    // See if we have input streams leading to this step!
    if ( input.length > 0 ) {
      cr =
        new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString(
          PKG, "StepsMetricsMeta.CheckResult.StepRecevingData2" ), stepMeta );
    } else {
      cr =
        new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(
          PKG, "StepsMetricsMeta.CheckResult.NoInputReceivedFromOtherSteps" ), stepMeta );
    }
    remarks.add( cr );
  }

  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
    TransMeta tr, Trans trans ) {
    return new StepsMetrics( stepMeta, stepDataInterface, cnr, tr, trans );
  }

  public StepDataInterface getStepData() {
    return new StepsMetricsData();
  }

  @Override
  public TransMeta.TransformationType[] getSupportedTransformationTypes() {
    return new TransMeta.TransformationType[] { TransMeta.TransformationType.Normal };
  }
}
/* * Copyright 2000-2013 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.psi.impl.source; import com.intellij.formatting.FormatTextRanges; import com.intellij.lang.ASTNode; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationAdapter; import com.intellij.openapi.application.ApplicationListener; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.fileTypes.FileTypeManager; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.PomManager; import com.intellij.pom.PomModelAspect; import com.intellij.pom.event.PomModelEvent; import com.intellij.pom.tree.TreeAspect; import com.intellij.pom.tree.events.ChangeInfo; import com.intellij.pom.tree.events.TreeChange; import com.intellij.pom.tree.events.TreeChangeEvent; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.psi.impl.PsiTreeDebugBuilder; import 
com.intellij.psi.impl.source.codeStyle.CodeEditUtil; import com.intellij.psi.impl.source.codeStyle.CodeFormatterFacade; import com.intellij.psi.impl.source.codeStyle.IndentHelperImpl; import com.intellij.psi.impl.source.tree.*; import com.intellij.util.LocalTimeCounter; import com.intellij.util.containers.ContainerUtilRt; import com.intellij.util.text.CharArrayUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.TestOnly; import java.util.*; import java.util.concurrent.atomic.AtomicInteger; public class PostprocessReformattingAspect implements PomModelAspect { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.PostprocessReformattingAspect"); private final Project myProject; private final PsiManager myPsiManager; private final TreeAspect myTreeAspect; private final Map<FileViewProvider, List<ASTNode>> myReformatElements = new HashMap<FileViewProvider, List<ASTNode>>(); private volatile int myDisabledCounter = 0; private final Set<FileViewProvider> myUpdatedProviders = new HashSet<FileViewProvider>(); private final AtomicInteger myPostponedCounter = new AtomicInteger(); public PostprocessReformattingAspect(Project project, PsiManager psiManager, TreeAspect treeAspect,final CommandProcessor processor) { myProject = project; myPsiManager = psiManager; myTreeAspect = treeAspect; PomManager.getModel(psiManager.getProject()) .registerAspect(PostprocessReformattingAspect.class, this, Collections.singleton((PomModelAspect)treeAspect)); ApplicationListener applicationListener = new ApplicationAdapter() { @Override public void writeActionStarted(final Object action) { if (processor != null) { final Project project = processor.getCurrentCommandProject(); if (project == myProject) { incrementPostponedCounter(); } } } @Override public void writeActionFinished(final Object action) { if (processor != null) { final Project project = processor.getCurrentCommandProject(); if (project == myProject) { 
decrementPostponedCounter(); } } } }; ApplicationManager.getApplication().addApplicationListener(applicationListener, project); } public void disablePostprocessFormattingInside(final Runnable runnable) { disablePostprocessFormattingInside(new NullableComputable<Object>() { @Override public Object compute() { runnable.run(); return null; } }); } public <T> T disablePostprocessFormattingInside(Computable<T> computable) { try { myDisabledCounter++; return computable.compute(); } finally { myDisabledCounter--; LOG.assertTrue(myDisabledCounter > 0 || !isDisabled()); } } public void postponeFormattingInside(final Runnable runnable) { postponeFormattingInside(new NullableComputable<Object>() { @Override public Object compute() { runnable.run(); return null; } }); } public <T> T postponeFormattingInside(Computable<T> computable) { Application application = ApplicationManager.getApplication(); application.assertIsDispatchThread(); try { incrementPostponedCounter(); return computable.compute(); } finally { decrementPostponedCounter(); } } private void incrementPostponedCounter() { myPostponedCounter.incrementAndGet(); } private void decrementPostponedCounter() { Application application = ApplicationManager.getApplication(); application.assertIsDispatchThread(); if (myPostponedCounter.decrementAndGet() == 0) { if (application.isWriteAccessAllowed()) { doPostponedFormatting(); } else { application.runWriteAction(new Runnable() { @Override public void run() { doPostponedFormatting(); } }); } } } private static void atomic(@NotNull Runnable r) { ProgressManager.getInstance().executeNonCancelableSection(r); } @Override public void update(final PomModelEvent event) { atomic(new Runnable() { @Override public void run() { if (isDisabled() || myPostponedCounter.get() == 0 && !ApplicationManager.getApplication().isUnitTestMode()) return; final TreeChangeEvent changeSet = (TreeChangeEvent)event.getChangeSet(myTreeAspect); if (changeSet == null) return; final PsiElement psiElement = 
changeSet.getRootElement().getPsi(); if (psiElement == null) return; PsiFile containingFile = InjectedLanguageManager.getInstance(psiElement.getProject()).getTopLevelFile(psiElement); final FileViewProvider viewProvider = containingFile.getViewProvider(); if (!viewProvider.isEventSystemEnabled()) return; myUpdatedProviders.add(viewProvider); for (final ASTNode node : changeSet.getChangedElements()) { final TreeChange treeChange = changeSet.getChangesByElement(node); for (final ASTNode affectedChild : treeChange.getAffectedChildren()) { final ChangeInfo childChange = treeChange.getChangeByChild(affectedChild); switch (childChange.getChangeType()) { case ChangeInfo.ADD: case ChangeInfo.REPLACE: postponeFormatting(viewProvider, affectedChild); break; case ChangeInfo.CONTENTS_CHANGED: if (!CodeEditUtil.isNodeGenerated(affectedChild)) { ((TreeElement)affectedChild).acceptTree(new RecursiveTreeElementWalkingVisitor() { @Override protected void visitNode(TreeElement element) { if (CodeEditUtil.isNodeGenerated(element) && CodeEditUtil.isSuspendedNodesReformattingAllowed()) { postponeFormatting(viewProvider, element); return; } super.visitNode(element); } }); } break; } } } } }); } public void doPostponedFormatting() { atomic(new Runnable() { @Override public void run() { if (isDisabled()) return; try { FileViewProvider[] viewProviders = myUpdatedProviders.toArray(new FileViewProvider[myUpdatedProviders.size()]); for (final FileViewProvider viewProvider : viewProviders) { doPostponedFormatting(viewProvider); } } catch (Exception e) { LOG.error(e); } finally { LOG.assertTrue(myReformatElements.isEmpty(), myReformatElements); } } }); } public void postponedFormatting(final FileViewProvider viewProvider) { postponedFormattingImpl(viewProvider, true); } public void doPostponedFormatting(final FileViewProvider viewProvider) { postponedFormattingImpl(viewProvider, false); } private void postponedFormattingImpl(final FileViewProvider viewProvider, final boolean check) { atomic(new 
// NOTE(review): this chunk begins mid-definition — the tokens below are the tail of an
// anonymous Runnable passed to a method whose opening lines precede this chunk.
Runnable() {
  @Override
  public void run() {
    // Bail out when postprocessing is disabled, or when 'check' is requested and this
    // provider was never registered as updated.
    if (isDisabled() || check && !myUpdatedProviders.contains(viewProvider)) return;
    try {
      // Run the postponed formatting with postprocessing disabled so the formatting
      // itself cannot re-register new postponed work re-entrantly.
      disablePostprocessFormattingInside(new Runnable() {
        @Override
        public void run() {
          doPostponedFormattingInner(viewProvider);
        }
      });
    }
    finally {
      // Always clear bookkeeping for this provider, even if formatting threw.
      myUpdatedProviders.remove(viewProvider);
      myReformatElements.remove(viewProvider);
    }
  }
});
}

/** @return true if there is postponed reformat work still pending for the given view provider. */
public boolean isViewProviderLocked(final FileViewProvider fileViewProvider) {
  return myReformatElements.containsKey(fileViewProvider);
}

/**
 * Called before a document change; refuses the change while postponed PSI work is pending,
 * otherwise flushes postponed formatting first.
 */
public void beforeDocumentChanged(FileViewProvider viewProvider) {
  if (isViewProviderLocked(viewProvider)) {
    throw new RuntimeException("Document is locked by write PSI operations. " +
                               "Use PsiDocumentManager.doPostponedOperationsAndUnblockDocument() to commit PSI changes to the document.");
  }
  postponedFormatting(viewProvider);
}

/** Project-component accessor. */
public static PostprocessReformattingAspect getInstance(Project project) {
  return project.getComponent(PostprocessReformattingAspect.class);
}

/**
 * Registers {@code child} for postponed reformatting under its view provider.
 * Non-generated, non-whitespace nodes must carry a recorded old indentation.
 */
private void postponeFormatting(final FileViewProvider viewProvider, final ASTNode child) {
  if (!CodeEditUtil.isNodeGenerated(child) && child.getElementType() != TokenType.WHITE_SPACE) {
    final int oldIndent = CodeEditUtil.getOldIndentation(child);
    LOG.assertTrue(oldIndent >= 0,
                   "for not generated items old indentation must be defined: element=" + child + ", text=" + child.getText());
  }
  List<ASTNode> list = myReformatElements.get(viewProvider);
  if (list == null) {
    list = new ArrayList<ASTNode>();
    myReformatElements.put(viewProvider, list);
  }
  list.add(child);
}

/**
 * Performs all postponed formatting work for one view provider: collects reformat markers
 * and changed-node ranges, normalizes them into non-intersecting actions, and executes them.
 */
private void doPostponedFormattingInner(final FileViewProvider key) {
  final List<ASTNode> astNodes = myReformatElements.remove(key);
  final Document document = key.getDocument();
  // Sort ranges by end offsets so that we won't need any offset adjustment after reformat or reindent
  if (document == null) return;
  final VirtualFile virtualFile = key.getVirtualFile();
  if (!virtualFile.isValid()) return;

  final TreeSet<PostprocessFormattingTask> postProcessTasks = new TreeSet<PostprocessFormattingTask>();
  Collection<Disposable> toDispose = ContainerUtilRt.newArrayList();
  try {
    // process all roots in viewProvider to find marked for reformat before elements and create appropriate range markers
    handleReformatMarkers(key, postProcessTasks);
    toDispose.addAll(postProcessTasks);

    // then we create ranges by changed nodes. One per node. There ranges can intersect. Ranges are sorted by end offset.
    if (astNodes != null) createActionsMap(astNodes, key, postProcessTasks);

    if (Boolean.getBoolean("check.psi.is.valid") && ApplicationManager.getApplication().isUnitTestMode()) {
      checkPsiIsCorrect(key);
    }

    while (!postProcessTasks.isEmpty()) {
      // now we have to normalize actions so that they not intersect and ordered in most appropriate way
      // (free reformatting -> reindent -> formatting under reindent)
      final List<PostponedAction> normalizedActions = normalizeAndReorderPostponedActions(postProcessTasks, document);
      toDispose.addAll(normalizedActions);

      // only in following loop real changes in document are made
      for (final PostponedAction normalizedAction : normalizedActions) {
        // Temporarily disable javadoc formatting so doc comments are not rewrapped
        // as a side effect of the postponed reformat.
        CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(myPsiManager.getProject());
        boolean old = settings.ENABLE_JAVADOC_FORMATTING;
        settings.ENABLE_JAVADOC_FORMATTING = false;
        try {
          normalizedAction.execute(key);
        }
        finally {
          settings.ENABLE_JAVADOC_FORMATTING = old;
        }
      }
    }
  }
  finally {
    // Dispose every task/action created above, including their range markers.
    for (Disposable disposable : toDispose) {
      //noinspection SSBasedInspection
      disposable.dispose();
    }
  }
}

/**
 * Debug check (unit-test mode + "check.psi.is.valid" only): re-parses the file text and
 * asserts the in-memory PSI tree matches the freshly parsed one.
 */
private void checkPsiIsCorrect(final FileViewProvider key) {
  PsiFile actualPsi = key.getPsi(key.getBaseLanguage());

  PsiTreeDebugBuilder treeDebugBuilder = new PsiTreeDebugBuilder().setShowErrorElements(false).setShowWhiteSpaces(false);

  String actualPsiTree = treeDebugBuilder.psiToString(actualPsi);

  String fileName = key.getVirtualFile().getName();
  PsiFile psi = PsiFileFactory.getInstance(myProject)
    .createFileFromText(fileName, FileTypeManager.getInstance().getFileTypeByFileName(fileName),
                        actualPsi.getNode().getText(), LocalTimeCounter.currentTime(), false);

  if (actualPsi.getClass().equals(psi.getClass())) {
    String expectedPsi = treeDebugBuilder.psiToString(psi);

    if (!expectedPsi.equals(actualPsiTree)) {
      // Drop pending work before failing so subsequent tests are not poisoned.
      myReformatElements.clear();
      assert expectedPsi.equals(actualPsiTree) : "Refactored psi should be the same as result of parsing";
    }
  }
}

/**
 * Merges/splits the sorted task set into non-intersecting postponed actions, ordered
 * free-reformat first, then reindent. Consumes {@code rangesToProcess}.
 */
private List<PostponedAction> normalizeAndReorderPostponedActions(TreeSet<PostprocessFormattingTask> rangesToProcess, Document document) {
  final List<PostprocessFormattingTask> freeFormattingActions = new ArrayList<PostprocessFormattingTask>();
  final List<ReindentTask> indentActions = new ArrayList<ReindentTask>();

  PostprocessFormattingTask accumulatedTask = null;
  Iterator<PostprocessFormattingTask> iterator = rangesToProcess.iterator();
  while (iterator.hasNext()) {
    final PostprocessFormattingTask currentTask = iterator.next();
    if (accumulatedTask == null) {
      accumulatedTask = currentTask;
      iterator.remove();
    }
    else if (accumulatedTask.getStartOffset() > currentTask.getEndOffset() ||
             accumulatedTask.getStartOffset() == currentTask.getEndOffset() &&
             !canStickActionsTogether(accumulatedTask, currentTask)) {
      // action can be pushed
      if (accumulatedTask instanceof ReindentTask) {
        indentActions.add((ReindentTask) accumulatedTask);
      }
      else {
        freeFormattingActions.add(accumulatedTask);
      }

      accumulatedTask = currentTask;
      iterator.remove();
    }
    else if (accumulatedTask instanceof ReformatTask && currentTask instanceof ReindentTask) {
      // split accumulated reformat range into two
      if (accumulatedTask.getStartOffset() < currentTask.getStartOffset()) {
        final RangeMarker endOfRange = document.createRangeMarker(accumulatedTask.getStartOffset(), currentTask.getStartOffset());
        // add heading reformat part
        rangesToProcess.add(new ReformatTask(endOfRange));
        // and manage heading whitespace because formatter does not edit it in previous action
        iterator = rangesToProcess.iterator();
        //noinspection StatementWithEmptyBody
        while (iterator.next().getRange() != currentTask.getRange()) ;
      }
      final RangeMarker rangeToProcess = document.createRangeMarker(currentTask.getEndOffset(), accumulatedTask.getEndOffset());
      freeFormattingActions.add(new ReformatWithHeadingWhitespaceTask(rangeToProcess));
      accumulatedTask = currentTask;
      iterator.remove();
    }
    else {
      if (!(accumulatedTask instanceof ReindentTask)) {
        iterator.remove();

        boolean withLeadingWhitespace = accumulatedTask instanceof ReformatWithHeadingWhitespaceTask;
        if (accumulatedTask instanceof ReformatTask &&
            currentTask instanceof ReformatWithHeadingWhitespaceTask &&
            accumulatedTask.getStartOffset() == currentTask.getStartOffset()) {
          withLeadingWhitespace = true;
        }
        else if (accumulatedTask instanceof ReformatWithHeadingWhitespaceTask &&
                 currentTask instanceof ReformatTask &&
                 accumulatedTask.getStartOffset() < currentTask.getStartOffset()) {
          withLeadingWhitespace = false;
        }
        int newStart = Math.min(accumulatedTask.getStartOffset(), currentTask.getStartOffset());
        int newEnd = Math.max(accumulatedTask.getEndOffset(), currentTask.getEndOffset());
        RangeMarker rangeMarker;

        // Reuse an existing marker when one of the two tasks already spans the merged range.
        if (accumulatedTask.getStartOffset() == newStart && accumulatedTask.getEndOffset() == newEnd) {
          rangeMarker = accumulatedTask.getRange();
        }
        else if (currentTask.getStartOffset() == newStart && currentTask.getEndOffset() == newEnd) {
          rangeMarker = currentTask.getRange();
        }
        else {
          rangeMarker = document.createRangeMarker(newStart, newEnd);
        }

        if (withLeadingWhitespace) {
          accumulatedTask = new ReformatWithHeadingWhitespaceTask(rangeMarker);
        }
        else {
          accumulatedTask = new ReformatTask(rangeMarker);
        }
      }
      else if (currentTask instanceof ReindentTask) {
        iterator.remove();
      } // TODO[ik]: need to be fixed to correctly process indent inside indent
    }
  }

  if (accumulatedTask != null) {
    if (accumulatedTask instanceof ReindentTask) {
      indentActions.add((ReindentTask) accumulatedTask);
    }
    else {
      freeFormattingActions.add(accumulatedTask);
    }
  }

  final List<PostponedAction> result = new ArrayList<PostponedAction>();
  Collections.reverse(freeFormattingActions);
  Collections.reverse(indentActions);

  if (!freeFormattingActions.isEmpty()) {
    FormatTextRanges ranges = new FormatTextRanges();
    for (PostprocessFormattingTask action : freeFormattingActions) {
      TextRange range = TextRange.create(action);
      ranges.add(range, action instanceof ReformatWithHeadingWhitespaceTask);
    }
    result.add(new ReformatRangesAction(ranges));
  }

  if (!indentActions.isEmpty()) {
    ReindentRangesAction reindentRangesAction = new ReindentRangesAction();
    for (ReindentTask action : indentActions) {
      reindentRangesAction.add(action.getRange(), action.getOldIndent());
    }
    result.add(reindentRangesAction);
  }

  return result;
}

/** @return true if the two adjacent tasks may be merged into one action. */
private static boolean canStickActionsTogether(final PostprocessFormattingTask currentTask,
                                               final PostprocessFormattingTask nextTask) {
  // empty reformat markers can't be stuck together with any action
  if (nextTask instanceof ReformatWithHeadingWhitespaceTask && nextTask.getStartOffset() == nextTask.getEndOffset()) return false;
  if (currentTask instanceof ReformatWithHeadingWhitespaceTask && currentTask.getStartOffset() == currentTask.getEndOffset()) {
    return false;
  }
  // reindent actions can't be be stuck at all
  return !(currentTask instanceof ReindentTask);
}

/**
 * Walks each changed node's subtree and turns transitions between generated and
 * non-generated content into Reformat/Reindent tasks. Also clears the "generated"
 * flag and recorded old indentation on visited elements.
 */
private static void createActionsMap(final List<ASTNode> astNodes,
                                     final FileViewProvider provider,
                                     final TreeSet<PostprocessFormattingTask> rangesToProcess) {
  final Set<ASTNode> nodesToProcess = new HashSet<ASTNode>(astNodes);
  final Document document = provider.getDocument();
  if (document == null) {
    return;
  }
  for (final ASTNode node : astNodes) {
    nodesToProcess.remove(node);
    final FileElement fileElement = TreeUtil.getFileElement((TreeElement)node);
    if (fileElement == null || ((PsiFile)fileElement.getPsi()).getViewProvider() != provider) continue;
    final boolean isGenerated = CodeEditUtil.isNodeGenerated(node);

    ((TreeElement)node).acceptTree(new RecursiveTreeElementVisitor() {
      boolean inGeneratedContext = !isGenerated;

      @Override
      protected boolean visitNode(TreeElement element) {
        // Stop at nodes that will be processed as roots of their own traversal.
        if (nodesToProcess.contains(element)) return false;

        final boolean currentNodeGenerated = CodeEditUtil.isNodeGenerated(element);
        CodeEditUtil.setNodeGenerated(element, false);
        if (currentNodeGenerated && !inGeneratedContext) {
          rangesToProcess.add(new ReformatTask(document.createRangeMarker(element.getTextRange())));
          inGeneratedContext = true;
        }
        if (!currentNodeGenerated && inGeneratedContext) {
          if (element.getElementType() == TokenType.WHITE_SPACE) return false;
          final int oldIndent = CodeEditUtil.getOldIndentation(element);
          CodeEditUtil.setOldIndentation(element, -1);
          LOG.assertTrue(oldIndent >= 0,
                         "for not generated items old indentation must be defined: element " + element);
          rangesToProcess.add(new ReindentTask(document.createRangeMarker(element.getTextRange()), oldIndent));
          inGeneratedContext = false;
        }
        return true;
      }

      @Override
      public void visitComposite(CompositeElement composite) {
        // Restore the generated-context flag after descending into children.
        boolean oldGeneratedContext = inGeneratedContext;
        super.visitComposite(composite);
        inGeneratedContext = oldGeneratedContext;
      }

      @Override
      public void visitLeaf(LeafElement leaf) {
        boolean oldGeneratedContext = inGeneratedContext;
        super.visitLeaf(leaf);
        inGeneratedContext = oldGeneratedContext;
      }
    });
  }
}

/**
 * Converts "reformat" / "reformat-before" marks on tree elements into
 * ReformatWithHeadingWhitespaceTask entries (clearing the marks as it goes).
 */
private static void handleReformatMarkers(final FileViewProvider key, final TreeSet<PostprocessFormattingTask> rangesToProcess) {
  final Document document = key.getDocument();
  if (document == null) {
    return;
  }
  for (final FileElement fileElement : ((SingleRootFileViewProvider)key).getKnownTreeRoots()) {
    fileElement.acceptTree(new RecursiveTreeElementWalkingVisitor() {
      @Override
      protected void visitNode(TreeElement element) {
        if (CodeEditUtil.isMarkedToReformatBefore(element)) {
          CodeEditUtil.markToReformatBefore(element, false);
          // Empty range at the element start: only the heading whitespace is reformatted.
          rangesToProcess.add(new ReformatWithHeadingWhitespaceTask(
            document.createRangeMarker(element.getStartOffset(), element.getStartOffset()))
          );
        }
        else if (CodeEditUtil.isMarkedToReformat(element)) {
          CodeEditUtil.markToReformat(element, false);
          rangesToProcess.add(new ReformatWithHeadingWhitespaceTask(
            document.createRangeMarker(element.getStartOffset(), element.getStartOffset() + element.getTextLength()))
          );
        }
        super.visitNode(element);
      }
    });
  }
}

/**
 * Rewrites each whitespace range so its indent is shifted by {@code indentAdjustment}
 * (clamped at zero). The +1 skips the newline character at the start of each range.
 */
private static void adjustIndentationInRange(final PsiFile file, final Document document,
                                             final TextRange[] indents, final int indentAdjustment) {
  final CharSequence charsSequence = document.getCharsSequence();
  for (final TextRange indent : indents) {
    final String oldIndentStr = charsSequence.subSequence(indent.getStartOffset() + 1, indent.getEndOffset()).toString();
    final int oldIndent = IndentHelperImpl.getIndent(file.getProject(), file.getFileType(), oldIndentStr, true);
    final String newIndentStr = IndentHelperImpl
      .fillIndent(file.getProject(), file.getFileType(), Math.max(oldIndent + indentAdjustment, 0));
    document.replaceString(indent.getStartOffset() + 1, indent.getEndOffset(), newIndentStr);
  }
}

/** Computes the indent of the line containing {@code firstWhitespace} in document units. */
@SuppressWarnings("StatementWithEmptyBody")
private static int getNewIndent(final PsiFile psiFile, final int firstWhitespace) {
  final Document document = psiFile.getViewProvider().getDocument();
  assert document != null;
  final int startOffset = document.getLineStartOffset(document.getLineNumber(firstWhitespace));
  int endOffset = startOffset;
  final CharSequence charsSequence = document.getCharsSequence();
  while (Character.isWhitespace(charsSequence.charAt(endOffset++))) ;
  final String newIndentStr = charsSequence.subSequence(startOffset, endOffset - 1).toString();
  return IndentHelperImpl.getIndent(psiFile.getProject(), psiFile.getFileType(), newIndentStr, true);
}

/** @return true while postprocessing is disabled (nested disable calls are counted). */
public boolean isDisabled() {
  return myDisabledCounter > 0;
}

/** Commits the provider's document and returns a formatter facade for the current settings. */
private CodeFormatterFacade getFormatterFacade(final FileViewProvider viewProvider) {
  final CodeStyleSettings styleSettings = CodeStyleSettingsManager.getSettings(myPsiManager.getProject());
  final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(myPsiManager.getProject());
  final Document document = viewProvider.getDocument();
  assert document != null;
  final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(styleSettings);

  documentManager.commitDocument(document);
  return codeFormatter;
}

/**
 * A postponed unit of work over a document range. Ordering: larger end offset first;
 * on ties, empty ranges first, then smaller start offset first.
 */
private abstract static class PostprocessFormattingTask implements Comparable<PostprocessFormattingTask>, Segment, Disposable {
  @NotNull private final RangeMarker myRange;

  public PostprocessFormattingTask(@NotNull RangeMarker rangeMarker) {
    myRange = rangeMarker;
  }

  @Override
  public int compareTo(@NotNull PostprocessFormattingTask o) {
    RangeMarker o1 = myRange;
    RangeMarker o2 = o.myRange;
    if (o1.equals(o2)) return 0;
    final int diff = o2.getEndOffset() - o1.getEndOffset();
    if (diff == 0) {
      if (o1.getStartOffset() == o2.getStartOffset()) return 0;
      if (o1.getStartOffset() == o1.getEndOffset()) return -1; // empty ranges first
      if (o2.getStartOffset() == o2.getEndOffset()) return 1; // empty ranges first
      return o1.getStartOffset() - o2.getStartOffset();
    }
    return diff;
  }

  @NotNull
  public RangeMarker getRange() {
    return myRange;
  }

  @Override
  public int getStartOffset() {
    return myRange.getStartOffset();
  }

  @Override
  public int getEndOffset() {
    return myRange.getEndOffset();
  }

  @Override
  public void dispose() {
    if (myRange.isValid()) {
      myRange.dispose();
    }
  }
}

/** Reformat the range, excluding its heading whitespace. */
private static class ReformatTask extends PostprocessFormattingTask {
  public ReformatTask(RangeMarker rangeMarker) {
    super(rangeMarker);
  }
}

/** Reformat the range, including its heading whitespace. */
private static class ReformatWithHeadingWhitespaceTask extends PostprocessFormattingTask {
  public ReformatWithHeadingWhitespaceTask(RangeMarker rangeMarker) {
    super(rangeMarker);
  }
}

/** Reindent the range, shifting relative to the indentation recorded at postpone time. */
private static class ReindentTask extends PostprocessFormattingTask {
  private final int myOldIndent;

  public ReindentTask(RangeMarker rangeMarker, int oldIndent) {
    super(rangeMarker);
    myOldIndent = oldIndent;
  }

  public int getOldIndent() {
    return myOldIndent;
  }
}

/** A normalized, executable unit of postponed work. */
private interface PostponedAction extends Disposable {
  void execute(FileViewProvider viewProvider);
}

/** Runs the code formatter over a set of collected text ranges. */
private class ReformatRangesAction implements PostponedAction {
  private final FormatTextRanges myRanges;

  public ReformatRangesAction(FormatTextRanges ranges) {
    myRanges = ranges;
  }

  @Override
  public void execute(FileViewProvider viewProvider) {
    final CodeFormatterFacade codeFormatter = getFormatterFacade(viewProvider);
    codeFormatter.processText(viewProvider.getPsi(viewProvider.getBaseLanguage()), myRanges.ensureNonEmpty(), false);
  }

  @Override
  public void dispose() {
  }
}

/** Applies indent adjustments for a set of (oldIndent, range) pairs. */
private static class ReindentRangesAction implements PostponedAction {
  private final List<Pair<Integer, RangeMarker>> myRangesToReindent = new ArrayList<Pair<Integer, RangeMarker>>();

  public void add(RangeMarker rangeMarker, int oldIndent) {
    myRangesToReindent.add(new Pair<Integer, RangeMarker>(oldIndent, rangeMarker));
  }

  @Override
  public void execute(FileViewProvider viewProvider) {
    final Document document = viewProvider.getDocument();
    assert document != null;
    final PsiFile psiFile = viewProvider.getPsi(viewProvider.getBaseLanguage());
    for (Pair<Integer, RangeMarker> integerRangeMarkerPair : myRangesToReindent) {
      RangeMarker marker = integerRangeMarkerPair.second;
      final CharSequence charsSequence = document.getCharsSequence().subSequence(marker.getStartOffset(), marker.getEndOffset());
      final int oldIndent = integerRangeMarkerPair.first;
      final TextRange[] whitespaces = CharArrayUtil.getIndents(charsSequence, marker.getStartOffset());
      final int indentAdjustment = getNewIndent(psiFile, marker.getStartOffset()) - oldIndent;
      if (indentAdjustment != 0) adjustIndentationInRange(psiFile, document, whitespaces, indentAdjustment);
    }
  }

  @Override
  public void dispose() {
    for (Pair<Integer, RangeMarker> pair : myRangesToReindent) {
      RangeMarker marker = pair.second;
      if (marker.isValid()) {
        marker.dispose();
      }
    }
  }
}

@TestOnly
public void clear() {
  myReformatElements.clear();
}
}
/* * Copyright 2018-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.dataflow.server.controller; import java.util.Arrays; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase; import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase.Replace; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.dataflow.core.AppRegistration; import org.springframework.cloud.dataflow.core.StreamDefinition; import org.springframework.cloud.dataflow.registry.repository.AppRegistrationRepository; import org.springframework.cloud.dataflow.server.configuration.TestDependencies; import org.springframework.cloud.dataflow.server.repository.StreamDefinitionRepository; import org.springframework.cloud.dataflow.server.stream.SkipperStreamDeployer; import org.springframework.cloud.deployer.spi.app.AppStatus; import org.springframework.cloud.deployer.spi.app.DeploymentState; import org.springframework.cloud.skipper.client.SkipperClient; import org.springframework.cloud.skipper.domain.Info; import org.springframework.cloud.skipper.domain.Status; import org.springframework.cloud.skipper.domain.StatusCode; import org.springframework.http.MediaType; import org.springframework.test.annotation.DirtiesContext; 
import org.springframework.test.annotation.DirtiesContext.ClassMode; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.web.context.WebApplicationContext; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.hasSize; import static org.mockito.Mockito.when; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; /** * @author Ilayaperumal Gopinathan * @author Christian Tzolov */ @RunWith(SpringRunner.class) @SpringBootTest(classes = TestDependencies.class) @DirtiesContext(classMode = ClassMode.BEFORE_EACH_TEST_METHOD) @AutoConfigureTestDatabase(replace = Replace.ANY) public class RuntimeAppsControllerTests { private MockMvc mockMvc; @Autowired private WebApplicationContext wac; @Autowired private AppRegistrationRepository appRegistrationRepository; @Autowired private StreamDefinitionRepository streamDefinitionRepository; @Autowired private SkipperClient skipperClient; @Autowired private SkipperStreamDeployer skipperStreamDeployer; @Before public void setupMocks() { this.mockMvc = MockMvcBuilders.webAppContextSetup(wac) .defaultRequest(get("/").accept(MediaType.APPLICATION_JSON)).build(); for (AppRegistration appRegistration : this.appRegistrationRepository.findAll()) { this.appRegistrationRepository.deleteAll(); } StreamDefinition streamDefinition3 = new StreamDefinition("ticktock3", "time|log"); StreamDefinition streamDefinition4 = new StreamDefinition("ticktock4", "time|log"); streamDefinitionRepository.save(streamDefinition3); 
streamDefinitionRepository.save(streamDefinition4); Info ticktock3Info = new Info(); Status ticktock3Status = new Status(); ticktock3Status.setStatusCode(StatusCode.DEPLOYED); ticktock3Status.setPlatformStatus("[{\"deploymentId\":\"ticktock3.log-v1\"," + "\"instances\":{\"ticktock3.log-v1-0\":{\"instanceNumber\":0,\"id\":\"ticktock3.log-v1-0\",\"state\":\"deployed\"}},\"state\":\"deployed\"}," + "{\"deploymentId\":\"ticktock3.time-v1\",\"instances\":{\"ticktock3.time-v1-0\":{\"instanceNumber\":0,\"baseUrl\":\"https://192.168.1.100:32451\"," + "\"process\":{\"alive\":true,\"inputStream\":{},\"outputStream\":{},\"errorStream\":{}}," + "\"attributes\":{\"guid\":\"32451\",\"pid\":\"53492\",\"port\":\"32451\"}," + "\"id\":\"ticktock3.time-v1-0\",\"state\":\"deployed\"}},\"state\":\"deployed\"}]"); ticktock3Info.setStatus(ticktock3Status); Info ticktock4Info = new Info(); Status ticktock4Status = new Status(); ticktock4Status.setStatusCode(StatusCode.DEPLOYED); ticktock4Status.setPlatformStatus("[{\"deploymentId\":\"ticktock4.log-v1\"," + "\"instances\":{\"ticktock4.log-v1-0\":{\"instanceNumber\":0,\"id\":\"ticktock4.log-v1-0\"," + "\"state\":\"deployed\"}},\"state\":\"deployed\"}," + "{\"deploymentId\":\"ticktock4.time-v1\",\"instances\":{\"ticktock4.time-v1-0\":{\"instanceNumber\":0," + "\"baseUrl\":\"https://192.168.1.100:32451\"," + "\"process\":{\"alive\":true,\"inputStream\":{},\"outputStream\":{},\"errorStream\":{}}," + "\"attributes\":{\"guid\":\"32451\",\"pid\":\"53492\",\"port\":\"32451\"}," + "\"id\":\"ticktock4.time-v1-0\",\"state\":\"deployed\"}},\"state\":\"deployed\"}]"); ticktock4Info.setStatus(ticktock4Status); when(this.skipperClient.status("ticktock3")).thenReturn(ticktock3Info); when(this.skipperClient.status("ticktock4")).thenReturn(ticktock4Info); } @Test public void testFindNonExistentApp() throws Exception { mockMvc.perform(get("/runtime/apps/foo").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().is4xxClientError()) 
.andExpect(jsonPath("$[0].logref", is("NoSuchAppException"))); } @Test public void testFindNonExistentAppUnknownState() throws Exception { Info info = new Info(); info.setStatus(new Status()); info.getStatus().setStatusCode(StatusCode.UNKNOWN); info.getStatus().setPlatformStatusAsAppStatusList( Arrays.asList(AppStatus.of("ticktock5.log2-v1").generalState(DeploymentState.unknown).build())); when(this.skipperClient.status("ticktock5")).thenReturn(info); streamDefinitionRepository.save(new StreamDefinition("ticktock5", "time2|log2")); mockMvc.perform(get("/runtime/apps/ticktock5.log2-v1.").accept(MediaType.APPLICATION_JSON)).andDo(print()) .andExpect(status().isNotFound()) .andExpect(jsonPath("$[0].logref", is("NoSuchAppException"))); } @Test public void testFindNonExistentAppInstance() throws Exception { Info info = new Info(); info.setStatus(new Status()); info.getStatus().setStatusCode(StatusCode.UNKNOWN); info.getStatus().setPlatformStatusAsAppStatusList( Arrays.asList(AppStatus.of("ticktock5.log2-v1").generalState(DeploymentState.unknown).build())); when(this.skipperClient.status("ticktock5")).thenReturn(info); streamDefinitionRepository.save(new StreamDefinition("ticktock5", "time2|log2")); mockMvc.perform(get("/runtime/apps/ticktock5.log2-v1/instances/log2-0").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().is4xxClientError()) .andExpect(jsonPath("$[0].logref", is("NoSuchAppException"))); info.getStatus().setPlatformStatusAsAppStatusList( Arrays.asList(AppStatus.of("ticktock5.log2-v1").generalState(DeploymentState.deployed).build())); mockMvc.perform(get("/runtime/apps/ticktock5.log2-v1/instances/log2-0").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().is4xxClientError()) .andExpect(jsonPath("$[0].logref", is("NoSuchAppInstanceException"))); } @Test public void testFindNonExistentAppInstance2() throws Exception { mockMvc.perform( 
get("/runtime/apps/ticktock4.log-v1/instances/ticktock4.log-v1-0.").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().isOk()) .andExpect(jsonPath("$.instanceId", is("ticktock4.log-v1-0"))) .andExpect(jsonPath("$.state", is("deployed"))) .andExpect(jsonPath("$.attributes").value(nullValue())) .andExpect(jsonPath("$.links.*", hasSize(1))) .andExpect(jsonPath("$.links.[0].rel", is("self"))) .andExpect(jsonPath("$.links.[0].href", is("http://localhost/runtime/apps/ticktock4.log-v1/instances/ticktock4.log-v1-0"))); } @Test public void testListRuntimeApps() throws Exception { mockMvc.perform(get("/runtime/apps").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().isOk()) .andExpect(jsonPath("$.content[0].deploymentId", is("ticktock3.log-v1"))) .andExpect(jsonPath("$.content[1].deploymentId", is("ticktock3.time-v1"))) .andExpect(jsonPath("$.content[2].deploymentId", is("ticktock4.log-v1"))) .andExpect(jsonPath("$.content[3].deploymentId", is("ticktock4.time-v1"))) .andExpect(jsonPath("$.content[0].instances.content[0].instanceId", is("ticktock3.log-v1-0"))) .andExpect(jsonPath("$.content[1].instances.content[0].instanceId", is("ticktock3.time-v1-0"))) .andExpect(jsonPath("$.content[2].instances.content[0].instanceId", is("ticktock4.log-v1-0"))) .andExpect(jsonPath("$.content[3].instances.content[0].instanceId", is("ticktock4.time-v1-0"))); } @Test public void testListRuntimeAppsPageSizes() throws Exception { mockMvc.perform(get("/runtime/apps?page=0&size=1").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().isOk()) .andExpect(jsonPath("$.content.*", hasSize(1))) .andExpect(jsonPath("$.content[0].deploymentId", is("ticktock3.log-v1"))); mockMvc.perform(get("/runtime/apps?page=0&size=2").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().isOk()) .andExpect(jsonPath("$.content.*", hasSize(2))) .andExpect(jsonPath("$.content[0].deploymentId", is("ticktock3.log-v1"))) 
.andExpect(jsonPath("$.content[1].deploymentId", is("ticktock3.time-v1"))); mockMvc.perform(get("/runtime/apps?page=0&size=3").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().isOk()) .andExpect(jsonPath("$.content.*", hasSize(3))) .andExpect(jsonPath("$.content[0].deploymentId", is("ticktock3.log-v1"))) .andExpect(jsonPath("$.content[1].deploymentId", is("ticktock3.time-v1"))) .andExpect(jsonPath("$.content[2].deploymentId", is("ticktock4.log-v1"))); mockMvc.perform(get("/runtime/apps?page=0&size=4").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().isOk()) .andExpect(jsonPath("$.content.*", hasSize(4))) .andExpect(jsonPath("$.content[0].deploymentId", is("ticktock3.log-v1"))) .andExpect(jsonPath("$.content[1].deploymentId", is("ticktock3.time-v1"))) .andExpect(jsonPath("$.content[2].deploymentId", is("ticktock4.log-v1"))) .andExpect(jsonPath("$.content[3].deploymentId", is("ticktock4.time-v1"))); mockMvc.perform(get("/runtime/apps?page=1&size=2").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().isOk()) .andExpect(jsonPath("$.content.*", hasSize(2))) .andExpect(jsonPath("$.content[0].deploymentId", is("ticktock4.log-v1"))) .andExpect(jsonPath("$.content[1].deploymentId", is("ticktock4.time-v1"))); mockMvc.perform(get("/runtime/apps?page=1&size=4").accept(MediaType.APPLICATION_JSON)) .andDo(print()) .andExpect(status().isOk()) .andExpect(jsonPath("$.content.*", hasSize(0))); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.streaming;

import java.io.*;
import java.util.regex.*;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;

/** A way to interpret XML fragments as Mapper input records.
 *  Values are XML subtrees delimited by configurable tags.
 *  Keys could be the value of a certain attribute in the XML subtree,
 *  but this is left to the stream processor application.
 *
 *  The name-value properties that StreamXmlRecordReader understands are:
 *    String begin (chars marking beginning of record)
 *    String end (chars marking end of record)
 *    int maxrec (maximum record size)
 *    int lookahead(maximum lookahead to sync CDATA)
 *    boolean slowmatch
 */
public class StreamXmlRecordReader extends StreamBaseRecordReader {

  /**
   * Reads the begin/end markers and matching options from the job configuration.
   * "begin" and "end" are required; missing values raise IOException via checkJobGet.
   */
  public StreamXmlRecordReader(FSDataInputStream in, FileSplit split, Reporter reporter,
                               JobConf job, FileSystem fs) throws IOException {
    super(in, split, reporter, job, fs);

    //Debug.writeTime();
    //Debug.writeDebug("In StreamXmlRecordReader::constructor!\n");
    beginMark_ = checkJobGet(CONF_NS + "begin");
    endMark_ = checkJobGet(CONF_NS + "end");
    //Debug.writeDebug("The begin mark is: " + beginMark_ + "\n");
    //Debug.writeDebug("The end mark is: " + endMark_ + "\n");

    maxRecSize_ = job_.getInt(CONF_NS + "maxrec", 50 * 1000);
    lookAhead_ = job_.getInt(CONF_NS + "lookahead", 2 * maxRecSize_);
    synched_ = false;

    // slowmatch enables the regex/CDATA-aware state machine instead of raw byte matching.
    slowMatch_ = job_.getBoolean(CONF_NS + "slowmatch", false);
    if (slowMatch_) {
      beginPat_ = makePatternCDataOrMark(beginMark_);
      endPat_ = makePatternCDataOrMark(endMark_);
    }
    init();
  }

  /** Seeks to the split start and positions the stream at the first record boundary. */
  public void init() throws IOException {
    LOG.info("StreamBaseRecordReader.init: " + " start_=" + start_ + " end_=" + end_ + " length_=" + length_ +
             " start_ > in_.getPos() =" + (start_ > in_.getPos()) + " " + start_ + " > " + in_.getPos());
    if (start_ > in_.getPos()) {
      in_.seek(start_);
    }
    pos_ = start_;
    bin_ = new BufferedInputStream(in_);
    seekNextRecordBoundary();
  }

  // Number of next() calls served so far (diagnostic counter only).
  int numNext = 0;

  /**
   * Reads the next record: everything from the begin marker through the end marker
   * goes into {@code key}; {@code value} is always set to the empty string.
   */
  public synchronized boolean next(Text key, Text value) throws IOException {
    numNext++;
    if (pos_ >= end_) {
      return false;
    }

    DataOutputBuffer buf = new DataOutputBuffer();
    if (!readUntilMatchBegin()) {
      return false;
    }
    if (!readUntilMatchEnd(buf)) {
      return false;
    }

    // There is only one elem..key/value splitting is not done here.
    byte[] record = new byte[buf.getLength()];
    System.arraycopy(buf.getData(), 0, record, 0, record.length);

    numRecStats(record, 0, record.length);

    key.set(record);
    value.set("");

    //key.set("");
    //value.set(record);

    return true;
  }

  public void seekNextRecordBoundary() throws IOException {
    readUntilMatchBegin();
  }

  /** Advances past the next begin marker, without buffering the skipped bytes. */
  boolean readUntilMatchBegin() throws IOException {
    if (slowMatch_) {
      return slowReadUntilMatch(beginPat_, false, null);
    } else {
      return fastReadUntilMatch(beginMark_, false, null);
    }
  }

  /** Copies bytes up to and including the end marker into {@code buf}. */
  private boolean readUntilMatchEnd(DataOutputBuffer buf) throws IOException {
    if (slowMatch_) {
      return slowReadUntilMatch(endPat_, true, buf);
    } else {
      return fastReadUntilMatch(endMark_, true, buf);
    }
  }

  /**
   * CDATA-aware marker search: reads up to max(lookahead, maxrec) bytes, runs the
   * three-group pattern (CDATA begin / CDATA end / record marker) through the
   * nextState() machine, then rewinds the stream to just after (or before) the match.
   *
   * @param includePat    when true, the matched marker itself is consumed and copied
   * @param outBufOrNull  destination for the consumed bytes, or null to discard them
   */
  private boolean slowReadUntilMatch(Pattern markPattern, boolean includePat,
                                     DataOutputBuffer outBufOrNull) throws IOException {
    byte[] buf = new byte[Math.max(lookAhead_, maxRecSize_)];
    int read = 0;
    bin_.mark(Math.max(lookAhead_, maxRecSize_) + 2); //mark to invalidate if we read more
    read = bin_.read(buf);
    if (read == -1) return false;

    // NOTE(review): bytes are decoded as UTF-8 and match offsets are then used as
    // byte skip counts below — assumes marker text is effectively single-byte; confirm.
    String sbuf = new String(buf, 0, read, "UTF-8");
    Matcher match = markPattern.matcher(sbuf);

    firstMatchStart_ = NA;
    firstMatchEnd_ = NA;
    int bufPos = 0;
    int state = synched_ ? CDATA_OUT : CDATA_UNK;
    int s = 0;

    while (match.find(bufPos)) {
      int input;
      if (match.group(1) != null) {
        input = CDATA_BEGIN;
      } else if (match.group(2) != null) {
        input = CDATA_END;
        firstMatchStart_ = NA; // |<DOC CDATA[ </DOC> ]]> should keep it
      } else {
        input = RECORD_MAYBE;
      }
      if (input == RECORD_MAYBE) {
        // Remember only the first candidate record boundary.
        if (firstMatchStart_ == NA) {
          firstMatchStart_ = match.start();
          firstMatchEnd_ = match.end();
        }
      }
      state = nextState(state, input, match.start());
      if (state == RECORD_ACCEPT) {
        break;
      }
      bufPos = match.end();
      s++;
    }
    if (state != CDATA_UNK) {
      synched_ = true;
    }
    boolean matched = (firstMatchStart_ != NA) && (state == RECORD_ACCEPT || state == CDATA_UNK);
    if (matched) {
      int endPos = includePat ? firstMatchEnd_ : firstMatchStart_;
      bin_.reset();

      for (long skiplen = endPos; skiplen > 0; ) {
        skiplen -= bin_.skip(skiplen); // Skip succeeds as we have read this buffer
      }

      pos_ += endPos;
      if (outBufOrNull != null) {
        outBufOrNull.writeBytes(sbuf.substring(0, endPos));
      }
    }
    return matched;
  }

  // states
  final static int CDATA_IN = 10;
  final static int CDATA_OUT = 11;
  final static int CDATA_UNK = 12;
  final static int RECORD_ACCEPT = 13;
  // inputs
  final static int CDATA_BEGIN = 20;
  final static int CDATA_END = 21;
  final static int RECORD_MAYBE = 22;

  /* also updates firstMatchStart_;*/
  // Transition function for the CDATA tracking state machine: a record marker is
  // only accepted when we are provably outside a CDATA section.
  int nextState(int state, int input, int bufPos) {
    switch (state) {
    case CDATA_UNK:
    case CDATA_OUT:
      switch (input) {
      case CDATA_BEGIN:
        return CDATA_IN;
      case CDATA_END:
        if (state == CDATA_OUT) {
          //System.out.println("buggy XML " + bufPos);
        }
        return CDATA_OUT;
      case RECORD_MAYBE:
        return (state == CDATA_UNK) ? CDATA_UNK : RECORD_ACCEPT;
      }
      break;
    case CDATA_IN:
      return (input == CDATA_END) ? CDATA_OUT : CDATA_IN;
    }
    throw new IllegalStateException(state + " " + input + " " + bufPos + " " + splitName_);
  }

  /** Builds the alternation (CDATA[)|(]]>)|(mark); group order must match nextState inputs. */
  Pattern makePatternCDataOrMark(String escapedMark) {
    StringBuffer pat = new StringBuffer();
    addGroup(pat, StreamUtil.regexpEscape("CDATA[")); // CDATA_BEGIN
    addGroup(pat, StreamUtil.regexpEscape("]]>"));    // CDATA_END
    addGroup(pat, escapedMark);                       // RECORD_MAYBE
    return Pattern.compile(pat.toString());
  }

  /** Appends "(escapedGroup)" to the alternation, separated by '|' when non-empty. */
  void addGroup(StringBuffer pat, String escapedGroup) {
    if (pat.length() > 0) {
      pat.append("|");
    }
    pat.append("(");
    pat.append(escapedGroup);
    pat.append(")");
  }

  /**
   * Byte-by-byte search for the literal marker. Compares each input byte against the
   * UTF-8 bytes of the pattern; on mismatch, flushes the partial match into the
   * output buffer (if any) and restarts the match.
   */
  boolean fastReadUntilMatch(String textPat, boolean includePat, DataOutputBuffer outBufOrNull) throws IOException {
    byte[] cpat = textPat.getBytes("UTF-8");
    int m = 0;
    boolean match = false;
    int msup = cpat.length;
    int LL = 120000 * 10;

    bin_.mark(LL); // large number to invalidate mark
    while (true) {
      int b = bin_.read();
      if (b == -1) break;

      byte c = (byte) b; // this assumes eight-bit matching. OK with UTF-8
      if (c == cpat[m]) {
        m++;
        if (m == msup) {
          match = true;
          break;
        }
      } else {
        // NOTE(review): the mismatched byte is not re-tested against cpat[0], and
        // pos_ only advances when an output buffer is supplied — both look
        // intentional-legacy but should be confirmed before relying on pos_.
        bin_.mark(LL); // reset mark so we could jump back if we found a match
        if (outBufOrNull != null) {
          outBufOrNull.write(cpat, 0, m);
          outBufOrNull.write(c);
          pos_ += m;
        }
        m = 0;
      }
    }
    if (!includePat && match) {
      bin_.reset();
    } else if (outBufOrNull != null) {
      outBufOrNull.write(cpat);
      pos_ += msup;
    }
    return match;
  }

  /** Fetches a required job property, failing with IOException when absent. */
  String checkJobGet(String prop) throws IOException {
    String val = job_.get(prop);
    if (val == null) {
      throw new IOException("JobConf: missing required property: " + prop);
    }
    return val;
  }

  String beginMark_;  // literal text opening a record
  String endMark_;    // literal text closing a record

  Pattern beginPat_;  // slowmatch-only compiled patterns
  Pattern endPat_;

  boolean slowMatch_;
  int lookAhead_; // bytes to read to try to synch CDATA/non-CDATA. Should be more than max record size
  int maxRecSize_;

  BufferedInputStream bin_; // Wrap FSDataInputStream for efficient backward seeks
  long pos_; // Keep track on position with respect encapsulated FSDataInputStream

  final static int NA = -1;
  int firstMatchStart_ = 0; // candidate record boundary. Might just be CDATA.
  int firstMatchEnd_ = 0;

  boolean synched_;

}
/*
 * Copyright (C) ${year} Omry Yadan <${email}>
 * All rights reserved.
 *
 * See https://github.com/omry/banana/blob/master/BSD-LICENSE for licensing information
 */
package net.yadan.banana.map;

import gnu.trove.map.TLongLongMap;
import gnu.trove.map.hash.TLongLongHashMap;
import it.unimi.dsi.fastutil.longs.Long2LongOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap;
import net.yadan.banana.memory.Buffer;
import net.yadan.banana.memory.IBlockAllocator;
import net.yadan.banana.memory.IBuffer;
import net.yadan.banana.memory.IMemAllocator;
import net.yadan.banana.memory.block.BigBlockAllocator;
import net.yadan.banana.memory.block.BlockAllocator;
import net.yadan.banana.memory.initializers.NullInitializer;
import net.yadan.banana.memory.malloc.ChainedAllocator;
import net.yadan.banana.memory.malloc.MultiSizeAllocator;
import net.yadan.banana.memory.malloc.TreeAllocator;
import net.yadan.utils.Util;

/**
 * Micro-benchmark comparing insert and lookup throughput of several map
 * implementations over the same shuffled key set: java.util.HashMap, FastUtil
 * Long2LongOpenHashMap / Object2LongOpenHashMap, Trove TLongLongHashMap, and
 * banana's HashMap / VarKeyHashMap. Each {@code *Rate} method inserts all
 * keys, reads them all back verifying values, prints progress every 10% of
 * the key set, and returns the final map size.
 */
public class MapInsertRate {

  public static void main(String[] args) throws InterruptedException {
    float lf = 0.96f;
    int max = 1 * 1000 * 1000 * 1000; // 1b
    max = 50 * 1000 * 1000; // actual benchmark size: 50M keys
    System.out.print("Initializing " + Util.formatNum(max) + " keys sequence...");
    int keys[] = new int[max];
    for (int i = 0; i < max; i++) {
      keys[i] = i;
    }
    // Shuffle so inserts/lookups hit the maps in random order rather than
    // sequentially.
    System.out.print("Shuffling...");
    Util.shuffleArray(keys);
    System.out.println("Done");

    // Individual benchmarks are toggled by (un)commenting the calls below;
    // the gc + sleep pairs keep runs from interfering with each other.
    // bananaRateString2Long(keys, 1f, false);
    // System.gc();
    // Thread.sleep(5000);
    // fastUtilRateString2Long(keys, 1f);
    bananaRate(keys, lf);
    // System.gc();
    // Thread.sleep(5000);
    //
    // fastUtilRate(keys, lf);
    // System.gc();
    // Thread.sleep(5000);
    // javaMapRate(keys, lf);
    //
    // System.gc();
    // Thread.sleep(5000);
    //
    // troveMapRate(keys, lf);
  }

  /**
   * Benchmarks java.util.HashMap&lt;Long, Long&gt; (boxed keys/values).
   *
   * @param keys shuffled key set; value stored for keys[i] is i
   * @param loadFactor map load factor
   * @return the final map size
   */
  public static int javaMapRate(int keys[], float loadFactor) {
    int max = keys.length;
    long start = System.currentTimeMillis();
    // NOTE(review): "max" is passed as initialCapacity, not capacity/loadFactor,
    // so the map may still rehash while filling — confirm if intended.
    java.util.HashMap<Long, Long> map = new java.util.HashMap<Long, Long>(max, loadFactor);
    System.out.println("java.util.HashMap init : " + (System.currentTimeMillis() - start));

    // SET
    int PRINT_BLOCK = max / 10; // progress is reported every 10%
    start = System.currentTimeMillis();
    long last_print = -1;
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format(
              "java.util.HashMap : Inserted %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      map.put((long) keys[i], (long) i);
    }
    long elapsed = System.currentTimeMillis() - start;
    System.out.printf("java.util.HashMap : Insert time %d, Avg rate %s / sec\n", elapsed,
        Util.formatNum((long) (max / (elapsed / 1000f))));

    // GET — every key must map back to its insertion index.
    last_print = -1;
    start = System.currentTimeMillis();
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format(
              "java.util.HashMap : Got %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      long n = map.get((long) keys[i]);
      if (i != n) {
        throw new RuntimeException("java.util.HashMap : Invalid value in map");
      }
    }
    elapsed = System.currentTimeMillis() - start;
    System.out.printf("java.util.HashMap : Get time %d, Avg rate %s / sec\n", elapsed,
        Util.formatNum((long) (max / (elapsed / 1000f))));

    System.gc();
    System.out.println("java.util.HashMap : used memory "
        + Util.formatSize((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())));
    return map.size();
  }

  /**
   * Benchmarks FastUtil's primitive Long2LongOpenHashMap.
   *
   * @param keys shuffled key set; value stored for keys[i] is i
   * @param loadFactor map load factor
   * @return the final map size
   */
  public static int fastUtilRate(int keys[], float loadFactor) {
    int max = keys.length;
    long start = System.currentTimeMillis();
    Long2LongOpenHashMap map = new Long2LongOpenHashMap(max, loadFactor);
    System.out.println("FastUtil init : " + (System.currentTimeMillis() - start));

    // SET
    int PRINT_BLOCK = max / 10;
    start = System.currentTimeMillis();
    long last_print = -1;
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format("FastUtil : Inserted %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      map.put(keys[i], i);
    }
    long elapsed = System.currentTimeMillis() - start;
    System.out.printf("FastUtil : Insert time %d, Avg rate %s / sec\n", elapsed,
        Util.formatNum((long) (max / (elapsed / 1000f))));

    // GET — every key must map back to its insertion index.
    start = System.currentTimeMillis();
    last_print = -1;
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format("FastUtil : Got %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      long n = map.get(keys[i]);
      if (i != n) {
        throw new RuntimeException("FastUtil : Invalid value in map");
      }
    }
    elapsed = System.currentTimeMillis() - start;
    System.out.printf("FastUtil : Get time %d, Avg rate %s / sec\n", elapsed,
        Util.formatNum((long) (max / (elapsed / 1000f))));

    System.gc();
    System.out.println("FastUtil : used memory "
        + Util.formatSize((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())));
    return map.size();
  }

  /**
   * Benchmarks Trove's primitive TLongLongHashMap.
   *
   * @param keys shuffled key set; value stored for keys[i] is i
   * @param loadFactor map load factor
   * @return the final map size
   */
  public static int troveMapRate(int keys[], float loadFactor) {
    int max = keys.length;
    long start = System.currentTimeMillis();
    TLongLongMap map = new TLongLongHashMap(max, loadFactor);
    System.out.println("TLongLongHashMap init : " + (System.currentTimeMillis() - start));

    // SET
    int PRINT_BLOCK = max / 10;
    start = System.currentTimeMillis();
    long last_print = -1;
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format(
              "TLongLongHashMap : Inserted %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      map.put(keys[i], i);
    }
    long elapsed = System.currentTimeMillis() - start;
    System.out.printf("TLongLongHashMap : Insert time %d, Avg rate %s / sec\n", elapsed,
        Util.formatNum((long) (max / (elapsed / 1000f))));

    // GET — every key must map back to its insertion index.
    last_print = -1;
    start = System.currentTimeMillis();
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format(
              "TLongLongHashMap : Got %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      long n = map.get(keys[i]);
      if (i != n) {
        throw new RuntimeException("TLongLongHashMap : Invalid value in map");
      }
    }
    elapsed = System.currentTimeMillis() - start;
    System.out.printf("TLongLongHashMap : Get time %d, Avg rate %s / sec\n", elapsed,
        Util.formatNum((long) (max / (elapsed / 1000f))));

    System.gc();
    System.out.println("TLongLongHashMap : used memory "
        + Util.formatSize((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())));
    return map.size();
  }

  /**
   * Benchmarks banana's HashMap backed by a block allocator. Each entry is a
   * 2-int record holding one long value (set via setLong / read via getLong).
   *
   * @param keys shuffled key set; value stored for keys[i] is i
   * @param lf map load factor (map capacity is sized as max / lf)
   * @return the final map size
   */
  public static int bananaRate(int keys[], float lf) {
    int max = keys.length;
    long start = System.currentTimeMillis();
    int mapCap = (int) Math.ceil(max * (1 / lf));
    // Choose the block allocator implementation by total int footprint:
    // BigBlockAllocator is needed once capacity * blockSize overflows int.
    IBlockAllocator blocks;
    if ((long) mapCap * (HashMap.RESERVED_SIZE + 2) > Integer.MAX_VALUE) {
      System.out.println("Using BigBlockAllocator");
      blocks = new BigBlockAllocator(mapCap, HashMap.RESERVED_SIZE + 2, 0);
    } else {
      System.out.println("Using BlockAllocator");
      blocks = new BlockAllocator(mapCap, HashMap.RESERVED_SIZE + 2, 0);
    }
    IMemAllocator memory = new ChainedAllocator(blocks);
    HashMap map = new HashMap(memory, mapCap, lf);
    System.out.println("Banana init : " + (System.currentTimeMillis() - start));

    // SET
    int PRINT_BLOCK = max / 10;
    start = System.currentTimeMillis();
    long last_print = -1;
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format("Banana : Inserted %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      // n identifies the newly created 2-int record for this key.
      int n = map.createRecord(keys[i], 2);
      map.setLong(n, 0, i);
    }
    long elapsed = System.currentTimeMillis() - start;
    System.out.printf("Banana : Insert time %d, Avg rate %s / sec\n", elapsed,
        Util.formatNum((long) (max / (elapsed / 1000f))));

    // GET — every key's record must hold its insertion index.
    last_print = -1;
    start = System.currentTimeMillis();
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format("Banana : Got %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      int n = map.findRecord(keys[i]);
      if (i != map.getLong(n, 0)) {
        throw new RuntimeException("Invalid value in map");
      }
    }
    elapsed = System.currentTimeMillis() - start;
    System.out.printf("Banana : Get time %d, Avg rate %s / sec\n", elapsed,
        Util.formatNum((long) (max / (elapsed / 1000f))));

    System.gc();
    System.out.println("Banana : used memory "
        + Util.formatSize((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())));
    System.out.println("Banana : reported memory usage "
        + Util.formatSize(map.computeMemoryUsage()));
    return map.size();
  }

  /**
   * Benchmarks FastUtil's Object2LongOpenHashMap with String keys (each key is
   * the decimal string of the int key).
   *
   * @param keys shuffled key set; value stored for keys[i] is i
   * @param loadFactor map load factor
   * @return the final map size
   */
  public static int fastUtilRateString2Long(int keys[], float loadFactor) {
    int max = keys.length;
    long start = System.currentTimeMillis();
    Object2LongOpenHashMap<String> map = new Object2LongOpenHashMap<String>(max, loadFactor);
    // NOTE(review): missing space in the label below ("...HashMapinit") —
    // runtime string intentionally left untouched.
    System.out.println("FastUtil Object2LongOpenHashMapinit : "
        + (System.currentTimeMillis() - start));

    // SET
    int PRINT_BLOCK = max / 10;
    start = System.currentTimeMillis();
    long last_print = -1;
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format(
              "FastUtil Object2LongOpenHashMap: Inserted %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      map.put(String.valueOf(keys[i]), i);
    }
    long elapsed = System.currentTimeMillis() - start;
    System.out.printf("FastUtil Object2LongOpenHashMap: Insert time %d, Avg rate %s / sec\n",
        elapsed, Util.formatNum((long) (max / (elapsed / 1000f))));

    // GET — every key must map back to its insertion index.
    start = System.currentTimeMillis();
    last_print = -1;
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          System.out.println(String.format(
              "FastUtil Object2LongOpenHashMap: Got %s items in %d ms, rate %s/sec ",
              Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      long n = map.get(String.valueOf(keys[i]));
      if (i != n) {
        throw new RuntimeException("FastUtil Object2LongOpenHashMap: Invalid value in map");
      }
    }
    elapsed = System.currentTimeMillis() - start;
    System.out.printf("FastUtil Object2LongOpenHashMap: Get time %d, Avg rate %s / sec\n",
        elapsed, Util.formatNum((long) (max / (elapsed / 1000f))));

    System.gc();
    System.out.println("FastUtil Object2LongOpenHashMap: used memory "
        + Util.formatSize((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())));
    return map.size();
  }

  /**
   * Benchmarks banana's VarKeyHashMap with variable-length (string) keys.
   *
   * @param keys shuffled key set; value stored for keys[i] is i
   * @param lf load factor parameter — NOTE(review): not actually used below,
   *           the map is constructed with a hard-coded 1.0; confirm intended
   * @param warmup when true, all progress/summary prints are suppressed
   * @return the final map size
   */
  public static int bananaRateString2Long(int keys[], float lf, boolean warmup) {
    if (warmup) {
      System.out.println("Banana VarKeyHashMap warmup round");
    }
    int max = keys.length;
    long start = System.currentTimeMillis();
    NullInitializer nullInitializer = new NullInitializer();
    IMemAllocator values = new TreeAllocator(100, VarKeyHashMap.RESERVED_SIZE + 2, 1.2);
    IMemAllocator keysMem = new MultiSizeAllocator(100, new int[] { 2, 3, 5, 6 }, 1.2);
    values.setInitializer(nullInitializer);
    keysMem.setInitializer(nullInitializer);
    IVarKeyHashMap map = new VarKeyHashMap(values, keysMem, max, 1.0);
    if (!warmup)
      System.out.println("Banana VarKeyHashMap init : " + (System.currentTimeMillis() - start));

    // SET — the reusable Buffer is filled with the key's decimal chars, used
    // for the insert, then reset for the next iteration.
    int PRINT_BLOCK = max / 10;
    start = System.currentTimeMillis();
    long last_print = -1;
    IBuffer key = new Buffer(10);
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          if (!warmup)
            System.out.println(String.format(
                "Banana VarKeyHashMap: Inserted %s items in %d ms, rate %s/sec ",
                Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      key.appendChars(String.valueOf(keys[i]).toCharArray());
      int n = map.createRecord(key, 2);
      map.setLong(n, 0, i);
      key.reset();
    }
    long elapsed = System.currentTimeMillis() - start;
    if (!warmup)
      System.out.printf("Banana VarKeyHashMap: Insert time %d, Avg rate %s / sec\n", elapsed,
          Util.formatNum((long) (max / (elapsed / 1000f))));

    // GET — every key's record must hold its insertion index.
    last_print = -1;
    start = System.currentTimeMillis();
    for (int i = 0; i < max; i++) {
      if (i % PRINT_BLOCK == 0) {
        if (last_print != -1) {
          long e = System.currentTimeMillis() - last_print;
          double rate = PRINT_BLOCK / (e / 1000f);
          if (!warmup)
            System.out.println(String.format(
                "Banana VarKeyHashMap: Got %s items in %d ms, rate %s/sec ",
                Util.formatNum(i), e, Util.formatNum(rate)));
        }
        last_print = System.currentTimeMillis();
      }
      key.appendChars(String.valueOf(keys[i]).toCharArray());
      int n = map.findRecord(key );
      if (i != map.getLong(n, 0)) {
        throw new RuntimeException("Invalid value in map");
      }
      key.reset();
    }
    elapsed = System.currentTimeMillis() - start;
    if (!warmup)
      System.out.printf("Banana VarKeyHashMap: Get time %d, Avg rate %s / sec\n", elapsed,
          Util.formatNum((long) (max / (elapsed / 1000f))));

    System.gc();
    if (!warmup) {
      System.out.println("Banana VarKeyHashMap: used memory "
          + Util.formatSize((Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory())));
      System.out.println("Banana VarKeyHashMap: reported memory usage "
          + Util.formatSize(map.computeMemoryUsage()));
    } else {
      System.out.println("Banana VarKeyHashMap warm-up round done");
    }
    return map.size();
  }
}
package com.home911.httpchat.client.gui;

import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwt.user.client.ui.Composite;
import com.home911.httpchat.client.model.ProfileResult;
import com.home911.httpchat.client.model.StatusResult;
import com.home911.httpchat.shared.model.Contact;
import com.smartgwt.client.data.DataSource;
import com.smartgwt.client.data.events.ErrorEvent;
import com.smartgwt.client.data.events.HandleErrorHandler;
import com.smartgwt.client.data.fields.DataSourceTextField;
import com.smartgwt.client.util.BooleanCallback;
import com.smartgwt.client.util.SC;
import com.smartgwt.client.widgets.Window;
import com.smartgwt.client.widgets.grid.ListGrid;
import com.smartgwt.client.widgets.grid.ListGridField;
import com.smartgwt.client.widgets.grid.ListGridRecord;
import com.smartgwt.client.widgets.grid.events.RecordDoubleClickEvent;
import com.smartgwt.client.widgets.grid.events.RecordDoubleClickHandler;
import com.smartgwt.client.widgets.grid.events.RowContextClickEvent;
import com.smartgwt.client.widgets.grid.events.RowContextClickHandler;
import com.smartgwt.client.widgets.menu.Menu;
import com.smartgwt.client.widgets.menu.MenuItem;
import com.smartgwt.client.widgets.menu.events.ClickHandler;
import com.smartgwt.client.widgets.menu.events.MenuItemClickEvent;

import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Floating SmartGWT window showing the user's contact list. Double-clicking a
 * row opens a conversation; right-clicking opens a context menu with
 * Profile / Remove / Message actions that call back into {@link MainView}.
 */
public class ContactListView extends Composite {
  private static final Logger LOGGER = Logger.getLogger(ContactListView.class.getName());

  private final Window contactWnd;
  private final ListGrid contactsGrid;
  private final ContactDataSource contactDs;
  private final MainView mainView;
  private final String token; // session token passed through to backend calls

  /**
   * Client-only datasource backing the contacts grid. "id" is the hidden
   * primary key; "name" and "presence" are required text fields.
   */
  private static class ContactDataSource extends DataSource {
    private ContactDataSource(String id) {
      setID(id);
      DataSourceTextField pkDsField = new DataSourceTextField("id");
      pkDsField.setHidden(true);
      pkDsField.setPrimaryKey(true);
      DataSourceTextField nameDsfield = new DataSourceTextField("name", "Name");
      nameDsfield.setRequired(true);
      DataSourceTextField presenceDsField = new DataSourceTextField("presence", "Presence");
      presenceDsField.setRequired(true);
      setFields(pkDsField, nameDsfield, presenceDsField);
      setClientOnly(true);
      addHandleErrorHandler(new HandleErrorHandler() {
        @Override
        public void onHandleError(ErrorEvent errorEvent) {
          // Typo fixed: "occured" -> "occurred".
          LOGGER.log(Level.SEVERE, "An error has occurred:" + errorEvent.getAssociatedType());
        }
      });
    }
  }

  public ContactListView(final MainView mainView, final String token) {
    this.mainView = mainView;
    this.token = token;

    contactWnd = new Window();
    contactWnd.setTitle("HttpChat Contacts");
    contactWnd.setAutoSize(true);
    contactWnd.setLeft(165);
    contactWnd.setAnimateMinimize(true);
    contactWnd.setCanDragResize(false);
    contactWnd.setCanDragReposition(true);
    contactWnd.setShowCloseButton(false);
    initWidget(contactWnd);

    contactDs = new ContactDataSource("contactsDS");

    contactsGrid = new ListGrid();
    contactsGrid.setWidth(310);
    contactsGrid.setHeight(250);
    contactsGrid.setAlternateRecordStyles(true);
    contactsGrid.setShowAllRecords(true);
    contactsGrid.setDataSource(contactDs);
    contactsGrid.setAutoFetchData(true);

    contactsGrid.addRecordDoubleClickHandler(new RecordDoubleClickHandler() {
      @Override
      public void onRecordDoubleClick(RecordDoubleClickEvent event) {
        // Guarded for consistency with the rest of the class; also fixes the
        // "Doubled clicked" typo.
        if (LOGGER.isLoggable(Level.INFO)) {
          LOGGER.log(Level.INFO, "Double clicked will open conversation for contact["
              + event.getRecord().getAttributeAsLong("id") + "]");
        }
        mainView.showConversation(token, event.getRecord().getAttributeAsLong("id"),
            event.getRecord().getAttribute("name"));
      }
    });
    contactsGrid.addRowContextClickHandler(new RowContextClickHandler() {
      public void onRowContextClick(RowContextClickEvent event) {
        Menu contactPopup = createContactPopup(event.getRecord().getAttributeAsLong("id"),
            event.getRecord().getAttribute("name"));
        // Show the popup
        contactPopup.showContextMenu();
        // Suppress the browser's native context menu.
        event.cancel();
      }
    });

    ListGridField idField = new ListGridField("id", "Id", 0);
    ListGridField nameField = new ListGridField("name", "Name", 200);
    ListGridField presenceField = new ListGridField("presence", "Presence", 100);
    contactsGrid.setFields(idField, nameField, presenceField);
    contactsGrid.setCanResizeFields(true);
    contactsGrid.hideFields(idField);

    contactWnd.addItem(contactsGrid);
  }

  /** Destroys the contact window and its children. */
  public void hide() {
    contactWnd.destroy();
  }

  /**
   * Replaces the entire grid content with the given contacts. A null list is
   * ignored; an empty list clears the grid.
   */
  public void populateContactList(List<Contact> contacts) {
    if (LOGGER.isLoggable(Level.INFO)) {
      LOGGER.log(Level.INFO, "Introducing new DS pattern... !!!");
    }
    if (contacts != null) {
      int idx = 0;
      ListGridRecord[] records = new ListGridRecord[contacts.size()];
      for (Contact contact : contacts) {
        ListGridRecord record = new ListGridRecord();
        record.setAttribute("id", String.valueOf(contact.getId()));
        record.setAttribute("name", contact.getName());
        record.setAttribute("presence", contact.getPresence().name());
        // Guarded: avoids per-record string building when INFO is disabled.
        if (LOGGER.isLoggable(Level.INFO)) {
          LOGGER.log(Level.INFO, "Adding record:" + record.toString());
        }
        records[idx++] = record;
      }
      contactDs.setTestData(records);
    }
  }

  /** Adds a single contact row to the datasource (null contact ignored). */
  public void addContactToList(Contact contact) {
    if (contact != null) {
      ListGridRecord contactRec = new ListGridRecord();
      contactRec.setAttribute("id", String.valueOf(contact.getId()));
      contactRec.setAttribute("name", contact.getName());
      contactRec.setAttribute("presence", contact.getPresence().name());
      contactDs.addData(contactRec);
    }
  }

  /** Removes the row whose primary key matches the given contact id. */
  public void removeContactFromList(Long id) {
    ListGridRecord contactRec = new ListGridRecord();
    contactRec.setAttribute("id", String.valueOf(id));
    contactDs.removeData(contactRec);
  }

  /**
   * Updates an existing contact row (matched by id) and flashes the window to
   * draw the user's attention to the change.
   */
  public void updateContactInList(Contact contact) {
    if (contact != null) {
      if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.log(Level.INFO, "Updating contact:" + contact.toString());
      }
      ListGridRecord contactRec = new ListGridRecord();
      contactRec.setAttribute("id", String.valueOf(contact.getId()));
      contactRec.setAttribute("name", contact.getName());
      contactRec.setAttribute("presence", contact.getPresence().name());
      contactDs.updateData(contactRec);
      if (LOGGER.isLoggable(Level.INFO)) {
        LOGGER.log(Level.INFO, "Contact updated..");
      }
      contactWnd.flash();
    }
  }

  /**
   * Builds the right-click context menu for one contact.
   *
   * @param id the contact's id (captured by the menu item handlers)
   * @param name the contact's display name
   * @return a menu with Profile / Remove / Message items wired to backend calls
   */
  private Menu createContactPopup(final Long id, final String name) {
    if (LOGGER.isLoggable(Level.INFO)) {
      LOGGER.log(Level.INFO, "Creating contact popup menu for id[" + id + "]");
    }
    final Menu menu = new Menu();
    menu.setShowShadow(true);
    menu.setShadowDepth(10);

    MenuItem profileItem = new MenuItem("Profile");
    MenuItem removeItem = new MenuItem("Remove");
    MenuItem messageItem = new MenuItem("Message");
    menu.setItems(profileItem, removeItem, messageItem);

    profileItem.addClickHandler(new ClickHandler() {
      @Override
      public void onClick(MenuItemClickEvent menuItemClickEvent) {
        if (LOGGER.isLoggable(Level.INFO)) {
          LOGGER.log(Level.INFO, "Profile clicked...");
        }
        mainView.getBackendService().getProfile(token, id, new AsyncCallback<ProfileResult>() {
          @Override
          public void onFailure(Throwable throwable) {
            // Typo fixed: "occured" -> "occurred".
            LOGGER.log(Level.SEVERE, "An unexpected error has occurred.", throwable);
            mainView.getMenuView().writeStatus("Unable to get contact profile.");
          }

          @Override
          public void onSuccess(ProfileResult profileResult) {
            if (LOGGER.isLoggable(Level.INFO)) {
              LOGGER.log(Level.INFO, "Received result:" + profileResult.toString());
            }
            if (profileResult.getStatus().getCode() == 200) {
              ProfileView profileView = new ProfileView(mainView, profileResult.getProfile());
              profileView.display();
            } else {
              mainView.getMenuView().writeStatus(profileResult.getStatus().getDescription());
            }
          }
        });
      }
    });

    removeItem.addClickHandler(new ClickHandler() {
      @Override
      public void onClick(MenuItemClickEvent menuItemClickEvent) {
        SC.confirm("Do you really want to delete this contact?", new BooleanCallback() {
          public void execute(Boolean value) {
            if (value != null && value) {
              mainView.getBackendService().removeContact(token, id,
                  new AsyncCallback<StatusResult>() {
                    @Override
                    public void onFailure(Throwable throwable) {
                      // Typo fixed: "occured" -> "occurred".
                      LOGGER.log(Level.SEVERE, "An unexpected error has occurred.", throwable);
                      mainView.getMenuView().writeStatus("Unable to remove contact.");
                    }

                    @Override
                    public void onSuccess(StatusResult result) {
                      if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.log(Level.INFO, "Received result:" + result.toString());
                      }
                      mainView.getMenuView().writeStatus(result.getStatus().getDescription());
                      if (result.getStatus().getCode() == 200) {
                        removeContactFromList(id);
                      }
                    }
                  });
            }
          }
        });
      }
    });

    messageItem.addClickHandler(new ClickHandler() {
      @Override
      public void onClick(MenuItemClickEvent menuItemClickEvent) {
        if (LOGGER.isLoggable(Level.INFO)) {
          LOGGER.log(Level.INFO, "Conversation clicked for contact[" + id + "]");
        }
        mainView.showConversation(token, id, name);
      }
    });

    return menu;
  }
}
/* * Copyright Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ehcache.core; import org.ehcache.Cache; import org.ehcache.PersistentCacheManager; import org.ehcache.Status; import org.ehcache.config.Builder; import org.ehcache.config.CacheConfiguration; import org.ehcache.config.Configuration; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourceType; import org.ehcache.core.config.BaseCacheConfiguration; import org.ehcache.core.config.DefaultConfiguration; import org.ehcache.core.config.store.StoreEventSourceConfiguration; import org.ehcache.core.internal.store.StoreConfigurationImpl; import org.ehcache.core.events.CacheEventDispatcher; import org.ehcache.core.events.CacheEventDispatcherFactory; import org.ehcache.core.events.CacheManagerListener; import org.ehcache.core.spi.LifeCycledAdapter; import org.ehcache.core.internal.service.ServiceLocator; import org.ehcache.core.spi.store.InternalCacheManager; import org.ehcache.core.spi.store.Store; import org.ehcache.core.internal.store.StoreSupport; import org.ehcache.core.spi.service.CacheManagerProviderService; import org.ehcache.core.internal.util.ClassLoading; import org.ehcache.event.CacheEventListener; import org.ehcache.core.events.CacheEventListenerConfiguration; import org.ehcache.core.events.CacheEventListenerProvider; import org.ehcache.CachePersistenceException; import org.ehcache.core.spi.LifeCycled; import org.ehcache.spi.service.ServiceProvider; import 
org.ehcache.spi.loaderwriter.CacheLoaderWriter; import org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider; import org.ehcache.spi.loaderwriter.WriteBehindConfiguration; import org.ehcache.spi.loaderwriter.WriteBehindProvider; import org.ehcache.spi.serialization.SerializationProvider; import org.ehcache.spi.serialization.Serializer; import org.ehcache.spi.serialization.UnsupportedTypeException; import org.ehcache.spi.service.MaintainableService; import org.ehcache.spi.persistence.PersistableResourceService; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.spi.service.ServiceCreationConfiguration; import org.ehcache.spi.service.ServiceDependencies; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Deque; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CopyOnWriteArrayList; /** * Implementation class for the {@link org.ehcache.CacheManager} and {@link PersistentCacheManager} * <P> * {@code Ehcache} users should not have to depend on this type but rely exclusively on the api types in package * {@code org.ehcache}. 
 * </P>
 */
public class EhcacheManager implements PersistentCacheManager, InternalCacheManager {

  // Placeholder type whose @ServiceDependencies annotation declares the
  // services every cache manager needs; consumed via loadDependenciesOf().
  @ServiceDependencies({ Store.Provider.class, CacheLoaderWriterProvider.class,
      WriteBehindProvider.class, CacheEventDispatcherFactory.class,
      CacheEventListenerProvider.class })
  private static class ServiceDeps {
    private ServiceDeps() {
      throw new UnsupportedOperationException("This is an annotation placeholder, not to be instantiated");
    }
  }

  private static final Logger LOGGER = LoggerFactory.getLogger(EhcacheManager.class);

  private final DefaultConfiguration configuration;
  private final ClassLoader cacheManagerClassLoader;

  private final boolean useLoaderInAtomics;
  // Alias -> holder; a holder may be registered before its cache is fully set.
  private final ConcurrentMap<String, CacheHolder> caches = new ConcurrentHashMap<String, CacheHolder>();
  private final CopyOnWriteArrayList<CacheManagerListener> listeners = new CopyOnWriteArrayList<CacheManagerListener>();

  private final StatusTransitioner statusTransitioner = new StatusTransitioner(LOGGER);
  private final String simpleName; // used in log messages to identify this manager
  protected final ServiceLocator serviceLocator;

  public EhcacheManager(Configuration config) {
    this(config, Collections.<Service>emptyList(), true);
  }

  public EhcacheManager(Configuration config, Collection<Service> services) {
    this(config, services, true);
  }

  public EhcacheManager(Configuration config, Collection<Service> services, boolean useLoaderInAtomics) {
    final String simpleName = this.getClass().getSimpleName();
    // Anonymous subclasses have an empty simple name; fall back to the FQCN.
    this.simpleName = (simpleName.isEmpty() ? this.getClass().getName() : simpleName);
    this.configuration = new DefaultConfiguration(config);
    this.cacheManagerClassLoader = config.getClassLoader() != null ? config.getClassLoader() : ClassLoading.getDefaultClassLoader();
    this.serviceLocator = new ServiceLocator(services.toArray(new Service[services.size()]));
    this.useLoaderInAtomics = useLoaderInAtomics;
    validateServicesConfigs();
    resolveServices();
  }

  // Rejects duplicate service-creation configurations for the same service type.
  private void validateServicesConfigs() {
    HashSet<Class> classes = new HashSet<Class>();
    for (ServiceCreationConfiguration<?> service : configuration.getServiceCreationConfigurations()) {
      if (!classes.add(service.getServiceType())) {
        throw new IllegalStateException("Duplicate creation configuration for service " + service.getServiceType());
      }
    }
  }

  // Registers this manager as a service, instantiates every configured
  // service, then pulls in the mandatory dependencies from ServiceDeps.
  private void resolveServices() {
    if (serviceLocator.getService(CacheManagerProviderService.class) == null) {
      this.serviceLocator.addService(new DefaultCacheManagerProviderService(this));
    }
    for (ServiceCreationConfiguration<? extends Service> serviceConfig : configuration.getServiceCreationConfigurations()) {
      Service service = serviceLocator.getOrCreateServiceFor(serviceConfig);
      if (service == null) {
        throw new IllegalArgumentException("Couldn't resolve Service " + serviceConfig.getServiceType().getName());
      }
    }
    serviceLocator.loadDependenciesOf(ServiceDeps.class);
  }

  /**
   * {@inheritDoc}
   *
   * Returns null for an unknown alias; throws IllegalArgumentException when
   * the alias exists but was registered with different key/value types.
   */
  @Override
  public <K, V> Cache<K, V> getCache(String alias, Class<K> keyType, Class<V> valueType) {
    statusTransitioner.checkAvailable();
    final CacheHolder cacheHolder = caches.get(alias);
    if(cacheHolder == null) {
      return null;
    } else {
      try {
        return cacheHolder.retrieve(keyType, valueType);
      } catch (IllegalArgumentException e) {
        // Re-throw with the actual vs. requested type pair for diagnosis.
        throw new IllegalArgumentException("Cache '" + alias + "' type is <" + cacheHolder.keyType.getName() + ", "
                                           + cacheHolder.valueType.getName() + ">, but you retrieved it with <"
                                           + keyType.getName() + ", " + valueType.getName() +">");
      }
    }
  }

  @Override
  public void removeCache(final String alias) {
    if (alias == null) {
      throw new NullPointerException("Alias cannot be null");
    }
    // Public removal always drops the cache from the stored configuration too.
    removeCache(alias, true);
  }

  /**
   * Closes and removes a cache, by alias, from this cache manager.
   *
   * @param alias the alias of the cache to remove
   * @param removeFromConfig if {@code true}, the cache configuration is altered to remove the cache
   */
  private void removeCache(final String alias, final boolean removeFromConfig) {
    statusTransitioner.checkAvailable();
    final CacheHolder cacheHolder = caches.remove(alias);
    if(cacheHolder != null) {
      final InternalCache<?, ?> ehcache = cacheHolder.retrieve(cacheHolder.keyType, cacheHolder.valueType);
      if (ehcache != null) {
        // Listeners are only notified for user-initiated removals, not for
        // removals that happen as part of a lifecycle transition.
        if (!statusTransitioner.isTransitioning()) {
          for (CacheManagerListener listener : listeners) {
            listener.cacheRemoved(alias, ehcache);
          }
        }
        ehcache.close();
        closeEhcache(alias, ehcache);
        if (removeFromConfig) {
          configuration.removeCacheConfiguration(alias);
        }
      }
      LOGGER.info("Cache '{}' removed from {}.", alias, simpleName);
    }
  }

  /**
   * Perform cache closure actions specific to a cache manager implementation.
   * This method is called <i>after</i> the {@code InternalCache} instance is closed.
   *
   * @param alias the cache alias
   * @param ehcache the {@code InternalCache} instance for the cache to close
   */
  protected void closeEhcache(final String alias, final InternalCache<?, ?> ehcache) {
    for (ResourceType<?> resourceType : ehcache.getRuntimeConfiguration().getResourcePools().getResourceTypeSet()) {
      if (resourceType.isPersistable()) {
        ResourcePool resourcePool = ehcache.getRuntimeConfiguration()
            .getResourcePools()
            .getPoolForResource(resourceType);
        // Non-persistent pools on persistable resources get wiped on close;
        // destroy failures are logged, not propagated.
        if (!resourcePool.isPersistent()) {
          PersistableResourceService persistableResourceService = getPersistableResourceService(resourceType);
          try {
            persistableResourceService.destroy(alias);
          } catch (CachePersistenceException e) {
            LOGGER.warn("Unable to clear persistence space for cache {}", alias, e);
          }
        }
      }
    }
  }

  @Override
  public <K, V> Cache<K, V> createCache(String alias, Builder<?
extends CacheConfiguration<K, V>> configBuilder) { return createCache(alias, configBuilder.build()); } @Override public <K, V> Cache<K, V> createCache(final String alias, CacheConfiguration<K, V> config) throws IllegalArgumentException { return createCache(alias, config, true); } private <K, V> Cache<K, V> createCache(final String alias, CacheConfiguration<K, V> originalConfig, boolean addToConfig) throws IllegalArgumentException { statusTransitioner.checkAvailable(); LOGGER.debug("Creating Cache '{}' in {}.", alias, simpleName); CacheConfiguration<K, V> config = adjustConfigurationWithCacheManagerDefaults(originalConfig); Class<K> keyType = config.getKeyType(); Class<V> valueType = config.getValueType(); final CacheHolder value = new CacheHolder(keyType, valueType, null); if (caches.putIfAbsent(alias, value) != null) { throw new IllegalArgumentException("Cache '" + alias +"' already exists"); } InternalCache<K, V> cache = null; boolean success = false; RuntimeException failure = null; try { cache = createNewEhcache(alias, config, keyType, valueType); cache.init(); if (addToConfig) { configuration.addCacheConfiguration(alias, cache.getRuntimeConfiguration()); } else { configuration.replaceCacheConfiguration(alias, originalConfig, cache.getRuntimeConfiguration()); } success = true; } catch (RuntimeException e) { failure = e; } finally { if (!success) { caches.remove(alias); value.setCache(null); } } if(failure == null) { try { if(!statusTransitioner.isTransitioning()) { for (CacheManagerListener listener : listeners) { listener.cacheAdded(alias, cache); } } } finally { value.setCache(cache); } } else { throw new IllegalStateException("Cache '"+alias+"' creation in " + simpleName + " failed.", failure); } LOGGER.info("Cache '{}' created in {}.", alias, simpleName); return cache; } <K, V> InternalCache<K, V> createNewEhcache(final String alias, final CacheConfiguration<K, V> config, final Class<K> keyType, final Class<V> valueType) { 
Collection<ServiceConfiguration<?>> adjustedServiceConfigs = new ArrayList<ServiceConfiguration<?>>(config.getServiceConfigurations()); List<ServiceConfiguration> unknownServiceConfigs = new ArrayList<ServiceConfiguration>(); for (ServiceConfiguration serviceConfig : adjustedServiceConfigs) { if (!serviceLocator.knowsServiceFor(serviceConfig)) { unknownServiceConfigs.add(serviceConfig); } } if (!unknownServiceConfigs.isEmpty()) { throw new IllegalStateException("Cannot find service(s) that can handle following configuration(s) : " + unknownServiceConfigs); } List<LifeCycled> lifeCycledList = new ArrayList<LifeCycled>(); final Store<K, V> store = getStore(alias, config, keyType, valueType, adjustedServiceConfigs, lifeCycledList); final CacheLoaderWriterProvider cacheLoaderWriterProvider = serviceLocator.getService(CacheLoaderWriterProvider.class); final CacheLoaderWriter<? super K, V> decorator ; if(cacheLoaderWriterProvider != null) { final CacheLoaderWriter<? super K, V> loaderWriter; loaderWriter = cacheLoaderWriterProvider.createCacheLoaderWriter(alias, config); WriteBehindConfiguration writeBehindConfiguration = ServiceLocator.findSingletonAmongst(WriteBehindConfiguration.class, config.getServiceConfigurations().toArray()); if(writeBehindConfiguration == null) { decorator = loaderWriter; } else { final WriteBehindProvider factory = serviceLocator.getService(WriteBehindProvider.class); decorator = factory.createWriteBehindLoaderWriter(loaderWriter, writeBehindConfiguration); if(decorator != null) { lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() { factory.releaseWriteBehindLoaderWriter(decorator); } }); } } if (loaderWriter != null) { lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() throws Exception { cacheLoaderWriterProvider.releaseCacheLoaderWriter(loaderWriter); } }); } } else { decorator = null; } final CacheEventDispatcherFactory cenlProvider = 
serviceLocator.getService(CacheEventDispatcherFactory.class); final CacheEventDispatcher<K, V> evtService = cenlProvider.createCacheEventDispatcher(store, adjustedServiceConfigs.toArray(new ServiceConfiguration[adjustedServiceConfigs.size()])); lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() { cenlProvider.releaseCacheEventDispatcher(evtService); } }); evtService.setStoreEventSource(store.getStoreEventSource()); final InternalCache<K, V> cache; if (decorator == null) { cache = new Ehcache<K, V>(config, store, evtService, LoggerFactory.getLogger(Ehcache.class + "-" + alias)); } else { cache = new EhcacheWithLoaderWriter<K, V>(config, store, decorator, evtService, useLoaderInAtomics, LoggerFactory.getLogger(EhcacheWithLoaderWriter.class + "-" + alias)); } final CacheEventListenerProvider evntLsnrFactory = serviceLocator.getService(CacheEventListenerProvider.class); if (evntLsnrFactory != null) { Collection<CacheEventListenerConfiguration> evtLsnrConfigs = ServiceLocator.findAmongst(CacheEventListenerConfiguration.class, config.getServiceConfigurations()); for (CacheEventListenerConfiguration lsnrConfig: evtLsnrConfigs) { final CacheEventListener<K, V> lsnr = evntLsnrFactory.createEventListener(alias, lsnrConfig); if (lsnr != null) { cache.getRuntimeConfiguration().registerCacheEventListener(lsnr, lsnrConfig.orderingMode(), lsnrConfig.firingMode(), lsnrConfig.fireOn()); lifeCycledList.add(new LifeCycled() { @Override public void init() throws Exception { // no-op for now } @Override public void close() throws Exception { evntLsnrFactory.releaseEventListener(lsnr); } }); } } evtService.setListenerSource(cache); } for (LifeCycled lifeCycled : lifeCycledList) { cache.addHook(lifeCycled); } return cache; } /** * Instantiates a {@code Store} used for the cache data. 
* * @param alias the alias assigned to the cache * @param config the configuration used for the cache * @param keyType the cache key type * @param valueType the cache value type * @param serviceConfigs the {@code List} of {@code ServiceConfiguration} instances available to the cache; * this list may be augmented by the implementation of this method * @param lifeCycledList the {@code List} of {@code LifeCycled} instances used to manage components of the * cache; this list may be augmented by the implementation of this method * @param <K> the cache key type * @param <V> the cache value type * * @return the {@code Store} instance used to create the cache */ protected <K, V> Store<K,V> getStore(final String alias, final CacheConfiguration<K, V> config, final Class<K> keyType, final Class<V> valueType, final Collection<ServiceConfiguration<?>> serviceConfigs, final List<LifeCycled> lifeCycledList) { final Set<ResourceType<?>> resourceTypes = config.getResourcePools().getResourceTypeSet(); for (ResourceType<?> resourceType : resourceTypes) { if (resourceType.isPersistable()) { final PersistableResourceService persistableResourceService = getPersistableResourceService(resourceType); try { final PersistableResourceService.PersistenceSpaceIdentifier<?> spaceIdentifier = persistableResourceService .getPersistenceSpaceIdentifier(alias, config); serviceConfigs.add(spaceIdentifier); lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() throws Exception { persistableResourceService.releasePersistenceSpaceIdentifier(spaceIdentifier); } }); } catch (CachePersistenceException e) { throw new RuntimeException("Unable to handle persistence", e); } } } final Store.Provider storeProvider = StoreSupport.selectStoreProvider(serviceLocator, resourceTypes, serviceConfigs); Serializer<K> keySerializer = null; Serializer<V> valueSerializer = null; final SerializationProvider serialization = serviceLocator.getService(SerializationProvider.class); ServiceConfiguration<?>[] 
serviceConfigArray = serviceConfigs.toArray(new ServiceConfiguration[serviceConfigs.size()]); if (serialization != null) { try { final Serializer<K> keySer = serialization.createKeySerializer(keyType, config.getClassLoader(), serviceConfigArray); lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() throws Exception { serialization.releaseSerializer(keySer); } }); keySerializer = keySer; } catch (UnsupportedTypeException e) { for (ResourceType<?> resource : resourceTypes) { if (resource.requiresSerialization()) { throw new RuntimeException(e); } } LOGGER.debug("Could not create serializers for {}", alias, e); } try { final Serializer<V> valueSer = serialization.createValueSerializer(valueType, config.getClassLoader(), serviceConfigArray); lifeCycledList.add(new LifeCycledAdapter() { @Override public void close() throws Exception { serialization.releaseSerializer(valueSer); } }); valueSerializer = valueSer; } catch (UnsupportedTypeException e) { for (ResourceType<?> resource : resourceTypes) { if (resource.requiresSerialization()) { throw new RuntimeException(e); } } LOGGER.debug("Could not create serializers for {}", alias, e); } } int dispatcherConcurrency; StoreEventSourceConfiguration eventSourceConfiguration = ServiceLocator.findSingletonAmongst(StoreEventSourceConfiguration.class, config .getServiceConfigurations() .toArray()); if (eventSourceConfiguration != null) { dispatcherConcurrency = eventSourceConfiguration.getDispatcherConcurrency(); } else { dispatcherConcurrency = StoreEventSourceConfiguration.DEFAULT_DISPATCHER_CONCURRENCY; } Store.Configuration<K, V> storeConfiguration = new StoreConfigurationImpl<K, V>(config, dispatcherConcurrency, keySerializer, valueSerializer); final Store<K, V> store = storeProvider.createStore(storeConfiguration, serviceConfigArray); lifeCycledList.add(new LifeCycled() { @Override public void init() throws Exception { storeProvider.initStore(store); } @Override public void close() { 
storeProvider.releaseStore(store); } }); return store; } private PersistableResourceService getPersistableResourceService(ResourceType<?> resourceType) { Collection<PersistableResourceService> services = serviceLocator.getServicesOfType(PersistableResourceService.class); for (PersistableResourceService service : services) { if (service.handlesResourceType(resourceType)) { return service; } } throw new IllegalStateException("No service found for persistable resource: " + resourceType); } /** * adjusts the config to reflect new classloader & serialization provider */ private <K, V> CacheConfiguration<K, V> adjustConfigurationWithCacheManagerDefaults(CacheConfiguration<K, V> config) { ClassLoader cacheClassLoader = config.getClassLoader(); if (cacheClassLoader == null) { cacheClassLoader = cacheManagerClassLoader; } if (cacheClassLoader != config.getClassLoader() ) { config = new BaseCacheConfiguration<K, V>(config.getKeyType(), config.getValueType(), config.getEvictionAdvisor(), cacheClassLoader, config.getExpiry(), config.getResourcePools(), config.getServiceConfigurations().toArray( new ServiceConfiguration<?>[config.getServiceConfigurations().size()])); } return config; } @Override public void registerListener(CacheManagerListener listener) { if(!listeners.contains(listener)) { listeners.add(listener); statusTransitioner.registerListener(listener); } } @Override public void deregisterListener(CacheManagerListener listener) { if(listeners.remove(listener)) { statusTransitioner.deregisterListener(listener); } } /** * {@inheritDoc} */ @Override public void init() { final StatusTransitioner.Transition st = statusTransitioner.init(); try { serviceLocator.startAllServices(); Deque<String> initiatedCaches = new ArrayDeque<String>(); try { for (Map.Entry<String, CacheConfiguration<?, ?>> cacheConfigurationEntry : configuration.getCacheConfigurations() .entrySet()) { final String alias = cacheConfigurationEntry.getKey(); createCache(alias, 
cacheConfigurationEntry.getValue(), false); initiatedCaches.push(alias); } } catch (RuntimeException e) { while (!initiatedCaches.isEmpty()) { String toBeClosed = initiatedCaches.pop(); try { removeCache(toBeClosed, false); } catch (Exception exceptionClosingCache) { LOGGER.error("Cache '{}' could not be removed after initialization failure due to ", toBeClosed, exceptionClosingCache); } } try { serviceLocator.stopAllServices(); } catch (Exception exceptionStoppingServices) { LOGGER.error("Stopping services after initialization failure failed due to ", exceptionStoppingServices); } throw e; } st.succeeded(); } catch (Exception e) { throw st.failed(e); } finally { st.failed(null); } } @Override public Status getStatus() { return statusTransitioner.currentStatus(); } @Override public void close() { final StatusTransitioner.Transition st = statusTransitioner.close(); Exception firstException = null; try { for (String alias : caches.keySet()) { try { removeCache(alias, false); } catch (Exception e) { if(firstException == null) { firstException = e; } else { LOGGER.error("Cache '{}' could not be removed due to ", alias, e); } } } serviceLocator.stopAllServices(); if (firstException == null) { st.succeeded(); } } catch (Exception e) { if(firstException == null) { firstException = e; } } finally { if(firstException != null) { throw st.failed(firstException); } st.failed(null); } } @Override public Configuration getRuntimeConfiguration() { return configuration; } /** * Removes and closes a cache without performing {@link CacheManagerListener#cacheRemoved(String, Cache)} * notifications. 
* * @param alias the alias of the cache to remove */ protected void removeAndCloseWithoutNotice(final String alias) { final CacheHolder cacheHolder = caches.remove(alias); if(cacheHolder != null) { final InternalCache<?, ?> ehcache = cacheHolder.retrieve(cacheHolder.keyType, cacheHolder.valueType); if(ehcache.getStatus() == Status.AVAILABLE) { ehcache.close(); } } configuration.removeCacheConfiguration(alias); } @Override public void destroyCache(final String alias) throws CachePersistenceException { if (alias == null) { throw new NullPointerException("Alias cannot be null"); } LOGGER.debug("Destroying Cache '{}' in {}.", alias, simpleName); removeAndCloseWithoutNotice(alias); destroyPersistenceSpace(alias); LOGGER.info("Cache '{}' successfully destroyed in {}.", alias, simpleName); } private void destroyPersistenceSpace(String alias) throws CachePersistenceException { Collection<PersistableResourceService> services = serviceLocator.getServicesOfType(PersistableResourceService.class); for (PersistableResourceService service : services) { service.destroy(alias); } } @Override public void destroy() throws CachePersistenceException { StatusTransitioner.Transition st = statusTransitioner.maintenance(); try { startMaintainableServices(); st.succeeded(); } catch (Throwable t) { throw st.failed(t); } destroyInternal(); st = statusTransitioner.exitMaintenance(); try { stopMaintainableServices(); st.succeeded(); } catch (Throwable t) { throw st.failed(t); } LOGGER.info("All persistent data destroyed for {}", simpleName); } private void startMaintainableServices() { ServiceProvider<MaintainableService> provider = getMaintainableServiceProvider(); Collection<MaintainableService> services = serviceLocator.getServicesOfType(MaintainableService.class); for (MaintainableService service : services) { service.startForMaintenance(provider); } } private ServiceProvider<MaintainableService> getMaintainableServiceProvider() { return new ServiceProvider<MaintainableService>() { 
@Override public <U extends MaintainableService> U getService(Class<U> serviceType) { return serviceLocator.getService(serviceType); } @Override public <U extends MaintainableService> Collection<U> getServicesOfType(final Class<U> serviceType) { return serviceLocator.getServicesOfType(serviceType); } }; } private void stopMaintainableServices() { Collection<MaintainableService> services = serviceLocator.getServicesOfType(MaintainableService.class); for (MaintainableService service : services) { service.stop(); } } // for tests at the moment ClassLoader getClassLoader() { return cacheManagerClassLoader; } void destroyInternal() throws CachePersistenceException { statusTransitioner.checkMaintenance(); Collection<PersistableResourceService> services = serviceLocator.getServicesOfType(PersistableResourceService.class); for (PersistableResourceService service : services) { service.destroyAll(); } } private static final class CacheHolder { private final Class<?> keyType; private final Class<?> valueType; private volatile InternalCache<?, ?> cache; private volatile boolean isValueSet = false; CacheHolder(Class<?> keyType, Class<?> valueType, InternalCache<?, ?> cache) { this.keyType = keyType; this.valueType = valueType; this.cache = cache; } <K, V> InternalCache<K, V> retrieve(Class<K> refKeyType, Class<V> refValueType) { if (!isValueSet) { synchronized (this) { boolean interrupted = false; try { while(!isValueSet) { try { wait(); } catch (InterruptedException e) { interrupted = true; } } } finally { if(interrupted) { Thread.currentThread().interrupt(); } } } } if (keyType == refKeyType && valueType == refValueType) { return cast(cache); } else { throw new IllegalArgumentException(); } } @SuppressWarnings("unchecked") private static <K, V> InternalCache<K, V> cast(InternalCache<?, ?> cache) { return (InternalCache<K, V>)cache; } public synchronized void setCache(final InternalCache<?, ?> cache) { this.cache = cache; this.isValueSet = true; notifyAll(); } } }
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.server.integrationtests.jbpm; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; import org.kie.api.KieServices; import org.kie.api.task.model.Status; import org.kie.internal.KieInternalServices; import org.kie.internal.process.CorrelationKey; import org.kie.internal.process.CorrelationKeyFactory; import org.kie.internal.task.api.model.TaskEvent; import org.kie.server.api.model.ReleaseId; import org.kie.server.api.model.definition.ProcessDefinition; import org.kie.server.api.model.instance.NodeInstance; import org.kie.server.api.model.instance.ProcessInstance; import org.kie.server.api.model.instance.TaskEventInstance; import org.kie.server.api.model.instance.TaskInstance; import org.kie.server.api.model.instance.TaskSummary; import org.kie.server.api.model.instance.VariableInstance; import org.kie.server.api.model.instance.WorkItemInstance; import org.kie.server.client.KieServicesException; import org.kie.server.client.QueryServicesClient; import org.kie.server.integrationtests.category.Smoke; import org.kie.server.integrationtests.config.TestConfig; import static org.junit.Assert.*; import org.kie.server.integrationtests.shared.KieServerAssert; import org.kie.server.integrationtests.shared.KieServerDeployer; public 
class RuntimeDataServiceIntegrationTest extends JbpmKieServerBaseIntegrationTest { private static ReleaseId releaseId = new ReleaseId("org.kie.server.testing", "definition-project", "1.0.0.Final"); protected static final String SORT_BY_PROCESS_ID = "ProcessId"; protected static final String SORT_BY_INSTANCE_PROCESS_ID = "Id"; protected static final String SORT_BY_TASK_STATUS = "Status"; protected static final String SORT_BY_TASK_EVENTS_TYPE = "Type"; @BeforeClass public static void buildAndDeployArtifacts() { KieServerDeployer.buildAndDeployCommonMavenParent(); KieServerDeployer.buildAndDeployMavenProject(ClassLoader.class.getResource("/kjars-sources/definition-project").getFile()); kieContainer = KieServices.Factory.get().newKieContainer(releaseId); createContainer(CONTAINER_ID, releaseId); } @Override protected void addExtraCustomClasses(Map<String, Class<?>> extraClasses) throws Exception { extraClasses.put(PERSON_CLASS_NAME, Class.forName(PERSON_CLASS_NAME, true, kieContainer.getClassLoader())); } @Test public void testGetProcessDefinitions() throws Exception { List<ProcessDefinition> definitions = queryClient.findProcesses(0, 20); assertNotNull(definitions); assertEquals(11, definitions.size()); List<String> processIds = collectDefinitions(definitions); checkProcessDefinitions(processIds); // test paging of the result definitions = queryClient.findProcesses(0, 3); assertNotNull(definitions); assertEquals(3, definitions.size()); processIds = collectDefinitions(definitions); assertTrue(processIds.contains(PROCESS_ID_ASYNC_SCRIPT)); assertTrue(processIds.contains(PROCESS_ID_SIGNAL_START)); assertTrue(processIds.contains(PROCESS_ID_TIMER)); definitions = queryClient.findProcesses(1, 3); assertNotNull(definitions); assertEquals(3, definitions.size()); processIds = collectDefinitions(definitions); assertTrue(processIds.contains(PROCESS_ID_EVALUATION)); assertTrue(processIds.contains(PROCESS_ID_CUSTOM_TASK)); 
assertTrue(processIds.contains(PROCESS_ID_CALL_EVALUATION));
    }

    // Queries all process definitions with explicit sorting by name, in both
    // directions, and verifies paging of the sorted result.
    @Test
    public void testGetProcessDefinitionsSorted() throws Exception {
        List<ProcessDefinition> definitions = queryClient.findProcesses(0, 20, QueryServicesClient.SORT_BY_NAME, false);
        assertNotNull(definitions);
        // 11 definitions are deployed by the test fixture into the container.
        assertEquals(11, definitions.size());

        List<String> processIds = collectDefinitions(definitions);
        checkProcessDefinitions(processIds);

        // test paging of the result
        definitions = queryClient.findProcesses(0, 3, QueryServicesClient.SORT_BY_NAME, true);
        assertNotNull(definitions);
        assertEquals(3, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_ASYNC_SCRIPT));
        assertTrue(processIds.contains(PROCESS_ID_SIGNAL_START));
        assertTrue(processIds.contains(PROCESS_ID_TIMER));

        definitions = queryClient.findProcesses(0, 3, QueryServicesClient.SORT_BY_NAME, false);
        assertNotNull(definitions);
        assertEquals(3, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_XYZ_TRANSLATIONS));
        assertTrue(processIds.contains(PROCESS_ID_USERTASK));
        assertTrue(processIds.contains(PROCESS_ID_SIGNAL_PROCESS));
    }

    // Queries process definitions filtered by the id fragment "evaluation"
    // and verifies paging of the filtered result.
    @Test
    public void testGetProcessDefinitionsWithFilter() throws Exception {
        List<ProcessDefinition> definitions = queryClient.findProcesses("evaluation", 0, 20);
        assertNotNull(definitions);
        assertEquals(2, definitions.size());

        List<String> processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_CALL_EVALUATION));
        assertTrue(processIds.contains(PROCESS_ID_EVALUATION));

        // test paging of the result
        definitions = queryClient.findProcesses("evaluation", 0, 1);
        assertNotNull(definitions);
        assertEquals(1, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_CALL_EVALUATION));

        definitions = queryClient.findProcesses("evaluation", 1, 1);
        assertNotNull(definitions);
        assertEquals(1, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_EVALUATION));
    }

    // Combines the "evaluation" filter with name sorting and checks the
    // element order for both sort directions.
    @Test
    public void testGetProcessDefinitionsWithFilterSorted() throws Exception {
        List<ProcessDefinition> definitions = queryClient.findProcesses("evaluation", 0, 20, QueryServicesClient.SORT_BY_NAME, true);
        assertNotNull(definitions);
        assertEquals(2, definitions.size());

        // Ascending: "call-evaluation" sorts before "evaluation".
        List<String> processIds = collectDefinitions(definitions);
        assertTrue(processIds.get(0).equals(PROCESS_ID_CALL_EVALUATION));
        assertTrue(processIds.get(1).equals(PROCESS_ID_EVALUATION));

        // Same query with descending sort order reverses the element order.
        definitions = queryClient.findProcesses("evaluation", 0, 20, QueryServicesClient.SORT_BY_NAME, false);
        assertNotNull(definitions);
        assertEquals(2, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.get(0).equals(PROCESS_ID_EVALUATION));
        assertTrue(processIds.get(1).equals(PROCESS_ID_CALL_EVALUATION));
    }

    // Queries process definitions scoped to a container, verifies paging, and
    // checks that a non-existing container yields an empty result.
    @Test
    public void testGetProcessDefinitionsByContainer() throws Exception {
        List<ProcessDefinition> definitions = queryClient.findProcessesByContainerId(CONTAINER_ID, 0, 20);
        assertNotNull(definitions);
        assertEquals(11, definitions.size());

        List<String> processIds = collectDefinitions(definitions);
        checkProcessDefinitions(processIds);

        // test paging of the result
        definitions = queryClient.findProcessesByContainerId(CONTAINER_ID, 0, 3);
        assertNotNull(definitions);
        assertEquals(3, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_ASYNC_SCRIPT));
        assertTrue(processIds.contains(PROCESS_ID_SIGNAL_START));
        assertTrue(processIds.contains(PROCESS_ID_TIMER));

        definitions = queryClient.findProcessesByContainerId(CONTAINER_ID, 1, 3);
        assertNotNull(definitions);
        assertEquals(3, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_EVALUATION));
        assertTrue(processIds.contains(PROCESS_ID_CUSTOM_TASK));
        assertTrue(processIds.contains(PROCESS_ID_CALL_EVALUATION));

        // last check if there are process def for not existing project
        definitions = queryClient.findProcessesByContainerId("not-existing-project", 0, 10);
        assertNotNull(definitions);
        assertEquals(0, definitions.size());
    }

    // Queries container-scoped process definitions with name sorting in both
    // directions, including paging.
    @Test
    public void testGetProcessDefinitionsByContainerSorted() throws Exception {
        List<ProcessDefinition> definitions = queryClient.findProcessesByContainerId(CONTAINER_ID, 0, 20, QueryServicesClient.SORT_BY_NAME, true);
        assertNotNull(definitions);
        assertEquals(11, definitions.size());

        List<String> processIds = collectDefinitions(definitions);
        checkProcessDefinitions(processIds);

        // test paging of the result
        definitions = queryClient.findProcessesByContainerId(CONTAINER_ID, 0, 3, QueryServicesClient.SORT_BY_NAME, true);
        assertNotNull(definitions);
        assertEquals(3, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_ASYNC_SCRIPT));
        assertTrue(processIds.contains(PROCESS_ID_SIGNAL_START));
        assertTrue(processIds.contains(PROCESS_ID_TIMER));

        definitions = queryClient.findProcessesByContainerId(CONTAINER_ID, 0, 3, QueryServicesClient.SORT_BY_NAME, false);
        assertNotNull(definitions);
        assertEquals(3, definitions.size());

        processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_XYZ_TRANSLATIONS));
        assertTrue(processIds.contains(PROCESS_ID_USERTASK));
        assertTrue(processIds.contains(PROCESS_ID_SIGNAL_PROCESS));
    }

    // Looks up process definitions by exact process id; an unknown id must
    // yield an empty list rather than an error.
    @Test
    public void testGetProcessDefinitionsById() throws Exception {
        List<ProcessDefinition> definitions = queryClient.findProcessesById(PROCESS_ID_USERTASK);
        assertNotNull(definitions);
        assertEquals(1, definitions.size());

        List<String> processIds = collectDefinitions(definitions);
        assertTrue(processIds.contains(PROCESS_ID_USERTASK));

        // last check if there are process def for not existing project
        definitions = queryClient.findProcessesById("not-existing-project");
        assertNotNull(definitions);
        assertEquals(0, definitions.size());
    }

    @Test
    public void
testGetProcessDefinitionByContainerAndId() throws Exception {
        // Single-definition lookup by container id + process id; verifies all
        // descriptive fields of the returned definition.
        ProcessDefinition definition = queryClient.findProcessByContainerIdProcessId(CONTAINER_ID, PROCESS_ID_USERTASK);
        assertNotNull(definition);
        assertEquals(PROCESS_ID_USERTASK, definition.getId());
        assertEquals("usertask", definition.getName());
        assertEquals("1.0", definition.getVersion());
        assertEquals("org.jbpm", definition.getPackageName());
        assertEquals(CONTAINER_ID, definition.getContainerId());
    }

    // A lookup for an unknown process id must fail with a KieServicesException
    // whose message names the missing definition and the container.
    @Test
    public void testGetProcessDefinitionByContainerAndNonExistingId() throws Exception {
        try {
            queryClient.findProcessByContainerIdProcessId(CONTAINER_ID, "non-existing");
            fail("KieServicesException should be thrown complaining about process definition not found.");
        } catch (KieServicesException e) {
            KieServerAssert.assertResultContainsString(e.getMessage(), "Could not find process definition \"non-existing\" in container \"definition-project\"");
        }
    }

    // Starts a batch of instances (createProcessInstances starts 5 — 3 signal
    // + 2 usertask, judging by the assertions below), then verifies unfiltered
    // instance queries and paging. Instances are always aborted in finally.
    @Test
    public void testGetProcessInstances() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstances(0, 10);
            assertNotNull(instances);
            assertEquals(5, instances.size());

            List<Long> found = collectInstances(instances);
            assertEquals(processInstanceIds, found);

            instances = queryClient.findProcessInstances(0, 3);
            assertNotNull(instances);
            assertEquals(3, instances.size());

            instances = queryClient.findProcessInstances(1, 3);
            assertNotNull(instances);
            assertEquals(2, instances.size());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Verifies instance queries sorted by process id: ascending pages group
    // signal-process instances first, descending puts usertask instances first.
    @Test
    public void testGetProcessInstancesSortedByName() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstances(0, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(3, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_SIGNAL_PROCESS, instance.getProcessId());
            }

            instances = queryClient.findProcessInstances(1, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_USERTASK, instance.getProcessId());
            }

            instances = queryClient.findProcessInstances(0, 10, SORT_BY_PROCESS_ID, false);
            assertNotNull(instances);
            assertEquals(5, instances.size());
            for (int i = 0; i < instances.size(); i++) {
                if (i < 2) {
                    assertEquals(PROCESS_ID_USERTASK, instances.get(i).getProcessId());
                } else {
                    assertEquals(PROCESS_ID_SIGNAL_PROCESS, instances.get(i).getProcessId());
                }
            }
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Verifies container-scoped instance queries with paging and a status
    // filter (state 2 = completed). `offset` captures instances completed by
    // earlier tests so the completed-only assertion is independent of history.
    @Test
    public void testGetProcessInstancesByContainer() throws Exception {
        int offset = queryClient.findProcessInstancesByContainerId(CONTAINER_ID, Collections.singletonList(2), 0, 10).size();

        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByContainerId(CONTAINER_ID, null, 0, 10);
            assertNotNull(instances);
            assertEquals(5, instances.size());

            List<Long> found = collectInstances(instances);
            assertEquals(processInstanceIds, found);

            instances = queryClient.findProcessInstancesByContainerId(CONTAINER_ID, null, 0, 3);
            assertNotNull(instances);
            assertEquals(3, instances.size());

            instances = queryClient.findProcessInstancesByContainerId(CONTAINER_ID, null, 1, 3);
            assertNotNull(instances);
            assertEquals(2, instances.size());

            // search for completed only
            instances = queryClient.findProcessInstancesByContainerId(CONTAINER_ID, Collections.singletonList(2), 0, 10);
            assertNotNull(instances);
            assertEquals(0 + offset, instances.size());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Container-scoped instance queries sorted by process id, both directions.
    @Test
    public void testGetProcessInstancesByContainerSortedByName() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByContainerId(CONTAINER_ID, null, 0, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(3, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_SIGNAL_PROCESS, instance.getProcessId());
            }

            instances = queryClient.findProcessInstancesByContainerId(CONTAINER_ID, null, 1, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_USERTASK, instance.getProcessId());
            }

            instances = queryClient.findProcessInstancesByContainerId(CONTAINER_ID, null, 0, 10, SORT_BY_PROCESS_ID, false);
            assertNotNull(instances);
            assertEquals(5, instances.size());
            for (int i = 0; i < instances.size(); i++) {
                if (i < 2) {
                    assertEquals(PROCESS_ID_USERTASK, instances.get(i).getProcessId());
                } else {
                    assertEquals(PROCESS_ID_SIGNAL_PROCESS, instances.get(i).getProcessId());
                }
            }
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    @Test
    public void testGetProcessInstancesByProcessId() throws Exception {
Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            // Only the 2 usertask instances of the batch match this process id.
            List<ProcessInstance> instances = queryClient.findProcessInstancesByProcessId(PROCESS_ID_USERTASK, null, 0, 10);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            assertEquals(PROCESS_ID_USERTASK, instances.get(0).getProcessId());
            assertEquals(PROCESS_ID_USERTASK, instances.get(1).getProcessId());

            instances = queryClient.findProcessInstancesByProcessId(PROCESS_ID_USERTASK, null, 0, 1);
            assertNotNull(instances);
            assertEquals(1, instances.size());
            assertEquals(PROCESS_ID_USERTASK, instances.get(0).getProcessId());

            instances = queryClient.findProcessInstancesByProcessId(PROCESS_ID_USERTASK, null, 1, 1);
            assertNotNull(instances);
            assertEquals(1, instances.size());
            assertEquals(PROCESS_ID_USERTASK, instances.get(0).getProcessId());

            // search for completed only
            instances = queryClient.findProcessInstancesByProcessId(PROCESS_ID_USERTASK, Collections.singletonList(2), 0, 10);
            assertNotNull(instances);
            assertEquals(0, instances.size());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Instances of one process id sorted descending by instance id — the
    // later-started instance must come first.
    @Test
    public void testGetProcessInstancesByProcessIdSortedByInstanceId() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByProcessId(PROCESS_ID_USERTASK, null, 0, 10, SORT_BY_INSTANCE_PROCESS_ID, false);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            assertEquals(PROCESS_ID_USERTASK, instances.get(0).getProcessId());
            assertEquals(PROCESS_ID_USERTASK, instances.get(1).getProcessId());
            // Descending sort: first element has the higher instance id.
            assertTrue(instances.get(0).getId() > instances.get(1).getId());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Same queries as testGetProcessInstancesByProcessId, but filtering on the
    // human-readable process name instead of the process id.
    @Test
    public void testGetProcessInstancesByProcessName() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByProcessName("usertask", null, 0, 10);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            assertEquals(PROCESS_ID_USERTASK, instances.get(0).getProcessId());
            assertEquals(PROCESS_ID_USERTASK, instances.get(1).getProcessId());

            instances = queryClient.findProcessInstancesByProcessName("usertask", null, 0, 1);
            assertNotNull(instances);
            assertEquals(1, instances.size());
            assertEquals(PROCESS_ID_USERTASK, instances.get(0).getProcessId());

            instances = queryClient.findProcessInstancesByProcessName("usertask", null, 1, 1);
            assertNotNull(instances);
            assertEquals(1, instances.size());
            assertEquals(PROCESS_ID_USERTASK, instances.get(0).getProcessId());

            // search for completed only
            instances = queryClient.findProcessInstancesByProcessName("usertask", Collections.singletonList(2), 0, 10);
            assertNotNull(instances);
            assertEquals(0, instances.size());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Name-filtered query sorted descending by instance id.
    @Test
    public void testGetProcessInstancesByProcessNameSortedByInstanceId() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByProcessName("usertask", null, 0, 10, SORT_BY_INSTANCE_PROCESS_ID, false);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            assertEquals(PROCESS_ID_USERTASK, instances.get(0).getProcessId());
            assertEquals(PROCESS_ID_USERTASK, instances.get(1).getProcessId());
            assertTrue(instances.get(0).getId() > instances.get(1).getId());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Queries instances by status (state 1 = active) with paging, then checks
    // that no completed instances exist for the usertask process.
    @Test
    public void testGetProcessInstancesByStatus() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByStatus(Collections.singletonList(1), 0, 10);
            assertNotNull(instances);
            assertEquals(5, instances.size());

            instances = queryClient.findProcessInstancesByStatus(Collections.singletonList(1), 0, 3);
            assertNotNull(instances);
            assertEquals(3, instances.size());

            instances = queryClient.findProcessInstancesByStatus(Collections.singletonList(1), 1, 3);
            assertNotNull(instances);
            assertEquals(2, instances.size());

            // search for completed only
            // NOTE(review): this queries by process id, not by status, unlike
            // the rest of this test — looks like a copy-paste from
            // testGetProcessInstancesByProcessId; confirm intent.
            instances = queryClient.findProcessInstancesByProcessId(PROCESS_ID_USERTASK, Collections.singletonList(2), 0, 10);
            assertNotNull(instances);
            assertEquals(0, instances.size());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Status-filtered query combined with sorting by process id.
    @Test
    public void testGetProcessInstancesByStatusSortedByName() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByStatus(Collections.singletonList(1), 0, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(3, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_SIGNAL_PROCESS, instance.getProcessId());
            }
instances = queryClient.findProcessInstancesByStatus(Collections.singletonList(1), 1, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_USERTASK, instance.getProcessId());
            }

            // Descending sort puts the 2 usertask instances first.
            instances = queryClient.findProcessInstancesByStatus(Collections.singletonList(1), 0, 10, SORT_BY_PROCESS_ID, false);
            assertNotNull(instances);
            assertEquals(5, instances.size());
            for (int i = 0; i < instances.size(); i++) {
                if (i < 2) {
                    assertEquals(PROCESS_ID_USERTASK, instances.get(i).getProcessId());
                } else {
                    assertEquals(PROCESS_ID_SIGNAL_PROCESS, instances.get(i).getProcessId());
                }
            }
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Queries instances by initiating user with paging and a completed-only
    // status filter; `offset` compensates for completions left by prior tests.
    @Test
    public void testGetProcessInstancesByInitiator() throws Exception {
        int offset = queryClient.findProcessInstancesByInitiator(USER_YODA, Collections.singletonList(2), 0, 10).size();

        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByInitiator(USER_YODA, null, 0, 10);
            assertNotNull(instances);
            assertEquals(5, instances.size());

            instances = queryClient.findProcessInstancesByInitiator(USER_YODA, null, 0, 3);
            assertNotNull(instances);
            assertEquals(3, instances.size());

            instances = queryClient.findProcessInstancesByInitiator(USER_YODA, null, 1, 3);
            assertNotNull(instances);
            assertEquals(2, instances.size());

            // search for completed only
            instances = queryClient.findProcessInstancesByInitiator(USER_YODA, Collections.singletonList(2), 0, 10);
            assertNotNull(instances);
            assertEquals(0 + offset, instances.size());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Initiator-filtered query combined with sorting by process id.
    @Test
    public void testGetProcessInstancesByInitiatorSortedByName() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByInitiator(USER_YODA, null, 0, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(3, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_SIGNAL_PROCESS, instance.getProcessId());
            }

            instances = queryClient.findProcessInstancesByInitiator(USER_YODA, null, 1, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_USERTASK, instance.getProcessId());
            }

            instances = queryClient.findProcessInstancesByInitiator(USER_YODA, null, 0, 10, SORT_BY_PROCESS_ID, false);
            assertNotNull(instances);
            assertEquals(5, instances.size());
            for (int i = 0; i < instances.size(); i++) {
                if (i < 2) {
                    assertEquals(PROCESS_ID_USERTASK, instances.get(i).getProcessId());
                } else {
                    assertEquals(PROCESS_ID_SIGNAL_PROCESS, instances.get(i).getProcessId());
                }
            }
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Fetches a single instance by id (without variables) and verifies all
    // descriptive fields. Part of the smoke suite.
    @Test
    @Category(Smoke.class)
    public void testGetProcessInstanceById() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_EVALUATION, parameters);
        try {
            ProcessInstance instance = queryClient.findProcessInstanceById(processInstanceId);
            assertNotNull(instance);
            assertEquals(processInstanceId, instance.getId());
            assertEquals(PROCESS_ID_EVALUATION, instance.getProcessId());
            assertEquals("evaluation", instance.getProcessName());
            assertEquals("1.0", instance.getProcessVersion());
            assertEquals(USER_YODA, instance.getInitiator());
            assertEquals(CONTAINER_ID, instance.getContainerId());
            KieServerAssert.assertNullOrEmpty(instance.getCorrelationKey());
            assertEquals("evaluation", instance.getProcessInstanceDescription());
            // -1 parent id marks a top-level (non-subprocess) instance.
            assertEquals(-1, instance.getParentId().longValue());
        } finally {
            processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
        }
    }

    // Fetches an instance by id WITH variables and verifies both the instance
    // metadata and the variable map contents.
    @Test
    public void testGetProcessInstanceWithVariables() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        Object person = createPersonInstance(USER_JOHN);
        parameters.put("personData", person);

        Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_SIGNAL_PROCESS, parameters);
        assertNotNull(processInstanceId);
        assertTrue(processInstanceId.longValue() > 0);
        try {
            // `true` requests the variable map along with the instance.
            ProcessInstance processInstance = queryClient.findProcessInstanceById(processInstanceId, true);
            assertNotNull(processInstance);
            assertEquals(processInstanceId, processInstance.getId());
            assertEquals(org.kie.api.runtime.process.ProcessInstance.STATE_ACTIVE, processInstance.getState().intValue());
            assertEquals(PROCESS_ID_SIGNAL_PROCESS, processInstance.getProcessId());
            assertEquals("signalprocess", processInstance.getProcessName());
            assertEquals("1.0", processInstance.getProcessVersion());
            assertEquals(CONTAINER_ID, processInstance.getContainerId());
            assertEquals("signalprocess", processInstance.getProcessInstanceDescription());
            assertEquals(TestConfig.getUsername(), processInstance.getInitiator());
            assertEquals(-1l, processInstance.getParentId().longValue());
            assertNotNull(processInstance.getCorrelationKey());
            assertNotNull(processInstance.getDate());

            // The two start parameters plus the implicit "initiator" variable.
            Map<String, Object> variables = processInstance.getVariables();
            assertNotNull(variables);
            assertEquals(3, variables.size());

            assertTrue(variables.containsKey("stringData"));
            assertTrue(variables.containsKey("personData"));
assertTrue(variables.containsKey("initiator"));

            String stringVar = (String) variables.get("stringData");
            Object personVar = variables.get("personData");
            String initiator = (String) variables.get("initiator");

            // Custom objects come back in their string form, so compare
            // against person.toString().
            assertNotNull(personVar);
            assertEquals(person.toString(), personVar);

            assertNotNull(stringVar);
            assertEquals("waiting for signal", stringVar);

            assertNotNull(initiator);
            assertEquals(TestConfig.getUsername(), initiator);
        } finally {
            processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
        }
    }

    // A lookup with an id that cannot exist must raise KieServicesException.
    @Test
    public void testGetProcessInstanceByNonExistingId() throws Exception {
        try {
            queryClient.findProcessInstanceById(-9999l);
            fail("KieServicesException should be thrown complaining about process instance not found.");
        } catch (KieServicesException e) {
            KieServerAssert.assertResultContainsString(e.getMessage(), "Could not find process instance with id");
        }
    }

    // Starts an instance with a business (correlation) key and verifies that
    // lookup by id, by correlation key, and by correlation-key search all
    // return the same, fully populated instance.
    @Test
    public void testGetProcessInstanceByCorrelationKey() throws Exception {
        CorrelationKeyFactory correlationKeyFactory = KieInternalServices.Factory.get().newCorrelationKeyFactory();
        String businessKey = "simple-key";
        CorrelationKey key = correlationKeyFactory.newCorrelationKey(businessKey);

        Map<String, Object> parameters = new HashMap<String, Object>();
        Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_EVALUATION, key, parameters);
        try {
            List<ProcessInstance> returnedProcessInstances = new ArrayList<ProcessInstance>();

            ProcessInstance instance = queryClient.findProcessInstanceById(processInstanceId);
            returnedProcessInstances.add(instance);

            instance = queryClient.findProcessInstanceByCorrelationKey(key);
            returnedProcessInstances.add(instance);

            List<ProcessInstance> processInstances = queryClient.findProcessInstancesByCorrelationKey(key, 0, 100);
            assertNotNull(processInstances);
            // Separate active instances as response contains also instances already closed or aborted.
            List<ProcessInstance> activeInstances = new ArrayList<ProcessInstance>();
            for (ProcessInstance processInstance : processInstances) {
                if (org.kie.api.runtime.process.ProcessInstance.STATE_ACTIVE == processInstance.getState().intValue()) {
                    activeInstances.add(processInstance);
                }
            }
            assertEquals(1, activeInstances.size());
            returnedProcessInstances.addAll(activeInstances);

            // All returned instances should contain all values
            for (ProcessInstance returnedProcessInstance : returnedProcessInstances) {
                assertNotNull(returnedProcessInstance);
                assertEquals(processInstanceId, returnedProcessInstance.getId());
                assertEquals(PROCESS_ID_EVALUATION, returnedProcessInstance.getProcessId());
                assertEquals("evaluation", returnedProcessInstance.getProcessName());
                assertEquals("1.0", returnedProcessInstance.getProcessVersion());
                assertEquals(USER_YODA, returnedProcessInstance.getInitiator());
                assertEquals(CONTAINER_ID, returnedProcessInstance.getContainerId());
                assertEquals(businessKey, returnedProcessInstance.getCorrelationKey());
                assertEquals("evaluation", returnedProcessInstance.getProcessInstanceDescription());
                assertEquals(-1, returnedProcessInstance.getParentId().longValue());
                assertEquals(org.kie.api.runtime.process.ProcessInstance.STATE_ACTIVE, returnedProcessInstance.getState().intValue());
            }
        } finally {
            processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
        }
    }

    // Searches by a wildcard correlation key ("my-simple%") matching two
    // instances and verifies descending sort by instance id.
    @Test
    public void testGetProcessInstancesByCorrelationKeySortedById() throws Exception {
        CorrelationKeyFactory correlationKeyFactory = KieInternalServices.Factory.get().newCorrelationKeyFactory();
        String firstBusinessKey = "my-simple-key-first";
        String secondBusinessKey = "my-simple-key-second";
        CorrelationKey firstKey = correlationKeyFactory.newCorrelationKey(firstBusinessKey);
        CorrelationKey secondKey = correlationKeyFactory.newCorrelationKey(secondBusinessKey);
        // '%' acts as a wildcard matching both keys above.
        CorrelationKey partKey = correlationKeyFactory.newCorrelationKey("my-simple%");

        Long processInstanceEvalutionId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_EVALUATION, firstKey);
        Long processInstanceSignalId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_SIGNAL_PROCESS, secondKey);
        try {
            List<ProcessInstance> returnedProcessInstances = queryClient.findProcessInstancesByCorrelationKey(partKey, 0, 10, SORT_BY_INSTANCE_PROCESS_ID, false);
            assertNotNull(returnedProcessInstances);

            // Descending by instance id: the signal instance started second,
            // so it comes first.
            assertEquals(PROCESS_ID_SIGNAL_PROCESS, returnedProcessInstances.get(0).getProcessId());
            assertEquals(processInstanceSignalId, returnedProcessInstances.get(0).getId());
            assertEquals(secondBusinessKey, returnedProcessInstances.get(0).getCorrelationKey());

            assertEquals(PROCESS_ID_EVALUATION, returnedProcessInstances.get(1).getProcessId());
            assertEquals(processInstanceEvalutionId, returnedProcessInstances.get(1).getId());
            assertEquals(firstBusinessKey, returnedProcessInstances.get(1).getCorrelationKey());
        } finally {
            processClient.abortProcessInstance(CONTAINER_ID, processInstanceEvalutionId);
            processClient.abortProcessInstance(CONTAINER_ID, processInstanceSignalId);
        }
    }

    // Verifies paging of correlation-key search results: two pages of size 1
    // must return two distinct instances.
    @Test
    public void testGetProcessInstanceByCorrelationKeyPaging() throws Exception {
        CorrelationKeyFactory correlationKeyFactory = KieInternalServices.Factory.get().newCorrelationKeyFactory();
        String businessKey = "simple-key";
        CorrelationKey key = correlationKeyFactory.newCorrelationKey(businessKey);

        // Start and abort 2 processes to be sure that there are processes to be returned.
Map<String, Object> parameters = new HashMap<String, Object>();
        Long processInstanceId1 = processClient.startProcess(CONTAINER_ID, PROCESS_ID_EVALUATION, key, parameters);
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId1);
        Long processInstanceId2 = processClient.startProcess(CONTAINER_ID, PROCESS_ID_EVALUATION, key, parameters);
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId2);

        List<ProcessInstance> processInstancesPage0 = queryClient.findProcessInstancesByCorrelationKey(key, 0, 1);
        List<ProcessInstance> processInstancesPage1 = queryClient.findProcessInstancesByCorrelationKey(key, 1, 1);
        assertEquals(1, processInstancesPage0.size());
        assertEquals(1, processInstancesPage1.size());
        // Pages of size 1 must not return the same instance twice.
        assertNotEquals("Process instances are same! Paging doesn't work.", processInstancesPage0.get(0).getId(), processInstancesPage1.get(0).getId());
    }

    // Queries instances that define a variable named "stringData", with paging.
    @Test
    public void testGetProcessInstancesByVariableName() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByVariable("stringData", null, 0, 10);
            assertNotNull(instances);
            assertEquals(5, instances.size());

            List<Long> found = collectInstances(instances);
            assertEquals(processInstanceIds, found);

            instances = queryClient.findProcessInstancesByVariable("stringData", null, 0, 3);
            assertNotNull(instances);
            assertEquals(3, instances.size());

            instances = queryClient.findProcessInstancesByVariable("stringData", null, 1, 3);
            assertNotNull(instances);
            assertEquals(2, instances.size());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Variable-name query combined with sorting by process id.
    @Test
    public void testGetProcessInstancesByVariableNameSortedByName() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("stringData", "waiting for signal");
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByVariable("stringData", null, 0, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(3, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_SIGNAL_PROCESS, instance.getProcessId());
            }

            instances = queryClient.findProcessInstancesByVariable("stringData", null, 1, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_USERTASK, instance.getProcessId());
            }

            instances = queryClient.findProcessInstancesByVariable("stringData", null, 0, 10, SORT_BY_PROCESS_ID, false);
            assertNotNull(instances);
            assertEquals(5, instances.size());
            for (int i = 0; i < instances.size(); i++) {
                if (i < 2) {
                    assertEquals(PROCESS_ID_USERTASK, instances.get(i).getProcessId());
                } else {
                    assertEquals(PROCESS_ID_SIGNAL_PROCESS, instances.get(i).getProcessId());
                }
            }
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Queries instances by variable name AND value ('%' wildcard supported),
    // then updates one instance's variable and verifies the result sets shift.
    @Test
    public void testGetProcessInstancesByVariableNameAndValue() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);
        // Variable is set after start so every instance carries the same value.
        for (Long processInstanceId : processInstanceIds) {
            processClient.setProcessVariable(CONTAINER_ID, processInstanceId, "stringData", "waiting for signal");
        }

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByVariableAndValue("stringData", "waiting%", null, 0, 10);
            assertNotNull(instances);
            assertEquals(5, instances.size());

            List<Long> found = collectInstances(instances);
            assertEquals(processInstanceIds, found);

            instances = queryClient.findProcessInstancesByVariableAndValue("stringData", "waiting%", null, 0, 3);
            assertNotNull(instances);
            assertEquals(3, instances.size());

            instances = queryClient.findProcessInstancesByVariableAndValue("stringData", "waiting%", null, 1, 3);
            assertNotNull(instances);
            assertEquals(2, instances.size());

            // Changing one instance's value moves it between the two result sets.
            processClient.setProcessVariable(CONTAINER_ID, processInstanceIds.get(0), "stringData", "updated value");

            instances = queryClient.findProcessInstancesByVariableAndValue("stringData", "waiting%", null, 0, 10);
            assertNotNull(instances);
            assertEquals(4, instances.size());

            instances = queryClient.findProcessInstancesByVariableAndValue("stringData", "updated value", null, 0, 10);
            assertNotNull(instances);
            assertEquals(1, instances.size());
        } finally {
            abortProcessInstances(processInstanceIds);
        }
    }

    // Variable name+value query combined with sorting by process id.
    @Test
    public void testGetProcessInstancesByVariableNameAndValueSortedByName() throws Exception {
        Map<String, Object> parameters = new HashMap<String, Object>();
        parameters.put("personData", createPersonInstance(USER_JOHN));

        List<Long> processInstanceIds = createProcessInstances(parameters);
        for (Long processInstanceId : processInstanceIds) {
            processClient.setProcessVariable(CONTAINER_ID, processInstanceId, "stringData", "waiting for signal");
        }

        try {
            List<ProcessInstance> instances = queryClient.findProcessInstancesByVariableAndValue("stringData", "waiting%", null, 0, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(3, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_SIGNAL_PROCESS, instance.getProcessId());
            }

            instances = queryClient.findProcessInstancesByVariableAndValue("stringData", "waiting%", null, 1, 3, SORT_BY_PROCESS_ID, true);
            assertNotNull(instances);
            assertEquals(2, instances.size());
            for (ProcessInstance instance : instances) {
                assertTrue(processInstanceIds.contains(instance.getId()));
                assertEquals(PROCESS_ID_USERTASK,
instance.getProcessId()); } instances = queryClient.findProcessInstancesByVariableAndValue("stringData", "waiting%", null, 0, 10, SORT_BY_PROCESS_ID, false); assertNotNull(instances); assertEquals(5, instances.size()); for (int i = 0; i < instances.size(); i++) { if (i < 2) { assertEquals(PROCESS_ID_USERTASK, instances.get(i).getProcessId()); } else { assertEquals(PROCESS_ID_SIGNAL_PROCESS, instances.get(i).getProcessId()); } } } finally { abortProcessInstances(processInstanceIds); } } @Test public void testGetNodeInstances() throws Exception { Map<String, Object> parameters = new HashMap<String, Object>(); parameters.put("stringData", "waiting for signal"); parameters.put("personData", createPersonInstance(USER_JOHN)); Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters); try { List<NodeInstance> instances = queryClient.findActiveNodeInstances(processInstanceId, 0, 10); assertNotNull(instances); assertEquals(1, instances.size()); NodeInstance expectedFirstTask = NodeInstance .builder() .name("First task") .containerId(CONTAINER_ID) .nodeType("HumanTaskNode") .completed(false) .processInstanceId(processInstanceId) .build(); NodeInstance nodeInstance = instances.get(0); assertNodeInstance(expectedFirstTask, nodeInstance); assertNotNull(nodeInstance.getWorkItemId()); assertNotNull(nodeInstance.getDate()); nodeInstance = queryClient.findNodeInstanceByWorkItemId(processInstanceId, nodeInstance.getWorkItemId()); assertNodeInstance(expectedFirstTask, nodeInstance); assertNotNull(nodeInstance.getWorkItemId()); assertNotNull(nodeInstance.getDate()); instances = queryClient.findCompletedNodeInstances(processInstanceId, 0, 10); assertNotNull(instances); assertEquals(1, instances.size()); NodeInstance expectedStart = NodeInstance .builder() .name("start") .containerId(CONTAINER_ID) .nodeType("StartNode") .completed(true) .processInstanceId(processInstanceId) .build(); nodeInstance = instances.get(0); 
assertNodeInstance(expectedStart, nodeInstance); assertNull(nodeInstance.getWorkItemId()); assertNotNull(nodeInstance.getDate()); instances = queryClient.findNodeInstances(processInstanceId, 0, 10); assertNotNull(instances); assertEquals(3, instances.size()); nodeInstance = instances.get(0); assertNodeInstance(expectedStart, nodeInstance); assertNull(nodeInstance.getWorkItemId()); assertNotNull(nodeInstance.getDate()); nodeInstance = instances.get(1); assertNodeInstance(expectedFirstTask, nodeInstance); assertNotNull(nodeInstance.getWorkItemId()); assertNotNull(nodeInstance.getDate()); nodeInstance = instances.get(2); expectedStart.setCompleted(false); assertNodeInstance(expectedStart, nodeInstance); assertNull(nodeInstance.getWorkItemId()); assertNotNull(nodeInstance.getDate()); } finally { processClient.abortProcessInstance(CONTAINER_ID, processInstanceId); } } @Test public void testGetVariableInstance() throws Exception { Map<String, Object> parameters = new HashMap<String, Object>(); parameters.put("stringData", "waiting for signal"); parameters.put("personData", createPersonInstance(USER_JOHN)); Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters); try { List<VariableInstance> currentState = queryClient.findVariablesCurrentState(processInstanceId); assertNotNull(currentState); assertEquals(3, currentState.size()); for (VariableInstance variableInstance : currentState) { if ("personData".equals(variableInstance.getVariableName())) { assertNotNull(variableInstance); assertEquals(processInstanceId, variableInstance.getProcessInstanceId()); KieServerAssert.assertNullOrEmpty(variableInstance.getOldValue()); assertEquals("Person{name='john'}", variableInstance.getValue()); assertEquals("personData", variableInstance.getVariableName()); } else if ("stringData".equals(variableInstance.getVariableName())) { assertNotNull(variableInstance); assertEquals(processInstanceId, variableInstance.getProcessInstanceId()); 
                // (continuation of testGetVariableInstance)
                KieServerAssert.assertNullOrEmpty(variableInstance.getOldValue());
                assertEquals("waiting for signal", variableInstance.getValue());
                assertEquals("stringData", variableInstance.getVariableName());
            } else if("initiator".equals(variableInstance.getVariableName())){
                assertNotNull(variableInstance);
                assertEquals(processInstanceId, variableInstance.getProcessInstanceId());
                assertEquals(TestConfig.getUsername(), variableInstance.getValue());
                KieServerAssert.assertNullOrEmpty(variableInstance.getOldValue());
            } else {
                fail("Got unexpected variable " + variableInstance.getVariableName());
            }
        }

        // History of "stringData" contains just its initial value so far.
        List<VariableInstance> varHistory = queryClient.findVariableHistory(processInstanceId, "stringData", 0, 10);
        assertNotNull(varHistory);
        assertEquals(1, varHistory.size());

        VariableInstance variableInstance = varHistory.get(0);
        assertNotNull(variableInstance);
        assertEquals(processInstanceId, variableInstance.getProcessInstanceId());
        KieServerAssert.assertNullOrEmpty(variableInstance.getOldValue());
        assertEquals("waiting for signal", variableInstance.getValue());
        assertEquals("stringData", variableInstance.getVariableName());

        // Update the variable; the current state should then expose the old value.
        processClient.setProcessVariable(CONTAINER_ID, processInstanceId, "stringData", "updated value");

        currentState = queryClient.findVariablesCurrentState(processInstanceId);
        assertNotNull(currentState);
        assertEquals(3, currentState.size());
        for (VariableInstance variable : currentState) {
            if ("personData".equals(variable.getVariableName())) {
                assertNotNull(variable);
                assertEquals(processInstanceId, variable.getProcessInstanceId());
                KieServerAssert.assertNullOrEmpty(variable.getOldValue());
                assertEquals("Person{name='john'}", variable.getValue());
                assertEquals("personData", variable.getVariableName());
            } else if ("stringData".equals(variable.getVariableName())) {
                assertNotNull(variable);
                assertEquals(processInstanceId, variable.getProcessInstanceId());
                assertEquals("waiting for signal", variable.getOldValue());
                assertEquals("updated value", variable.getValue());
                assertEquals("stringData", variable.getVariableName());
            } else if("initiator".equals(variable.getVariableName())){
                assertNotNull(variable);
                assertEquals(processInstanceId, variable.getProcessInstanceId());
                assertEquals(TestConfig.getUsername(), variable.getValue());
                KieServerAssert.assertNullOrEmpty(variable.getOldValue());
            } else {
                fail("Got unexpected variable " + variable.getVariableName());
            }
        }

        // History now has two entries, newest first.
        varHistory = queryClient.findVariableHistory(processInstanceId, "stringData", 0, 10);
        assertNotNull(varHistory);
        assertEquals(2, varHistory.size());

        variableInstance = varHistory.get(0);
        assertNotNull(variableInstance);
        assertEquals(processInstanceId, variableInstance.getProcessInstanceId());
        assertEquals("waiting for signal", variableInstance.getOldValue());
        assertEquals("updated value", variableInstance.getValue());
        assertEquals("stringData", variableInstance.getVariableName());

        variableInstance = varHistory.get(1);
        assertNotNull(variableInstance);
        assertEquals(processInstanceId, variableInstance.getProcessInstanceId());
        KieServerAssert.assertNullOrEmpty(variableInstance.getOldValue());
        assertEquals("waiting for signal", variableInstance.getValue());
        assertEquals("stringData", variableInstance.getVariableName());
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
    }
}

/**
 * Verifies task lookup by user, by task id and by work item id.
 */
@Test
public void testFindTasks() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<TaskSummary> tasks = taskClient.findTasks(USER_YODA, 0, 50);
        assertNotNull(tasks);

        // Pick the task belonging to the instance started by this test.
        TaskSummary taskSummary = null;
        for (TaskSummary t : tasks) {
            if (t.getProcessInstanceId().equals(processInstanceId)) {
                taskSummary = t;
                break;
            }
        }

        TaskSummary expectedTaskSummary = createDefaultTaskSummary(processInstanceId);
        assertTaskSummary(expectedTaskSummary, taskSummary);

        TaskInstance expecteTaskInstace = TaskInstance
                .builder()
                .name("First task")
                .status(Status.Reserved.toString())
                .priority(0)
                .actualOwner(USER_YODA)
                .createdBy(USER_YODA)
                .processId(PROCESS_ID_USERTASK)
                .containerId(CONTAINER_ID)
                .processInstanceId(processInstanceId)
                .build();

        TaskInstance taskById = taskClient.findTaskById(taskSummary.getId());
        assertTaskInstace(expecteTaskInstace, taskById);

        List<WorkItemInstance> workItems = processClient.getWorkItemByProcessInstance(CONTAINER_ID, processInstanceId);
        assertNotNull(workItems);
        assertEquals(1, workItems.size());

        taskById = taskClient.findTaskByWorkItemId(workItems.get(0).getId());
        assertTaskInstace(expecteTaskInstace, taskById);
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
    }
}

/**
 * Verifies descending sort by process instance id when finding tasks.
 */
@Test
public void testFindTasksSortedByProcessInstanceId() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    Long processInstanceId2 = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<TaskSummary> tasks = taskClient.findTasks(USER_YODA, 0, 50, "processInstanceId", false);
        assertNotNull(tasks);

        //latest task is from second process
        TaskSummary task = tasks.get(0);
        TaskSummary expectedTaskSummary = createDefaultTaskSummary(processInstanceId2);
        assertTaskSummary(expectedTaskSummary, task);

        task = tasks.get(1);
        expectedTaskSummary = createDefaultTaskSummary(processInstanceId);
        assertTaskSummary(expectedTaskSummary, task);
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId2);
    }
}

/**
 * Verifies the task events emitted as a task is added, started and stopped.
 */
@Test
public void testFindTaskEvents() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<TaskSummary> tasks = taskClient.findTasksByStatusByProcessInstanceId(processInstanceId, null, 0, 10);
        assertNotNull(tasks);
        assertEquals(1, tasks.size());

        TaskSummary taskInstance = tasks.get(0);

        List<TaskEventInstance> events = taskClient.findTaskEvents(taskInstance.getId(), 0, 10);
        assertNotNull(events);
        assertEquals(1, events.size());

        TaskEventInstance expectedTaskEventInstance = TaskEventInstance
                .builder()
                .type(TaskEvent.TaskEventType.ADDED.toString())
                .processInstanceId(processInstanceId)
                .taskId(taskInstance.getId())
                .user(PROCESS_ID_USERTASK) // is this really correct to set process id as user for added task
                .build();

        TaskEventInstance event = events.get(0);
        assertTaskEventInstance(expectedTaskEventInstance, event);
        //assertEquals(PROCESS_ID_USERTASK, event.getUserId()); // is this really correct to set process id as user for added task

        // now let's start it
        taskClient.startTask(CONTAINER_ID, taskInstance.getId(), USER_YODA);
        events = taskClient.findTaskEvents(taskInstance.getId(), 0, 10);
        assertNotNull(events);
        assertEquals(2, events.size());

        event = getTaskEventInstanceFromListByType(events, TaskEvent.TaskEventType.ADDED.toString());
        assertTaskEventInstance(expectedTaskEventInstance, event);

        event = getTaskEventInstanceFromListByType(events, TaskEvent.TaskEventType.STARTED.toString());
        expectedTaskEventInstance.setType(TaskEvent.TaskEventType.STARTED.toString());
        expectedTaskEventInstance.setUserId(USER_YODA);
        assertTaskEventInstance(expectedTaskEventInstance, event);

        // now let's stop it
        taskClient.stopTask(CONTAINER_ID, taskInstance.getId(), USER_YODA);
        events = taskClient.findTaskEvents(taskInstance.getId(), 0, 10);
        assertNotNull(events);
        assertEquals(3, events.size());

        event = getTaskEventInstanceFromListByType(events, TaskEvent.TaskEventType.ADDED.toString());
        expectedTaskEventInstance.setType(TaskEvent.TaskEventType.ADDED.toString());
        expectedTaskEventInstance.setUserId(PROCESS_ID_USERTASK); // is this really correct to set process id as user for added task
        assertTaskEventInstance(expectedTaskEventInstance, event);

        event = getTaskEventInstanceFromListByType(events, TaskEvent.TaskEventType.STARTED.toString());
        expectedTaskEventInstance.setType(TaskEvent.TaskEventType.STARTED.toString());
        expectedTaskEventInstance.setUserId(USER_YODA);
        assertTaskEventInstance(expectedTaskEventInstance, event);

        event = getTaskEventInstanceFromListByType(events, TaskEvent.TaskEventType.STOPPED.toString());
        expectedTaskEventInstance.setType(TaskEvent.TaskEventType.STOPPED.toString());
        assertTaskEventInstance(expectedTaskEventInstance, event);
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
    }
}

/** Returns the first event of the given type from the list, or null if absent. */
private TaskEventInstance getTaskEventInstanceFromListByType(List<TaskEventInstance> events, String type) {
    for (TaskEventInstance t : events) {
        if (t.getType().equals(type)) {
            return t;
        }
    }
    return null;
}

/**
 * Verifies sorting of task events by type, ascending and descending.
 */
@Test
public void testFindTaskEventsSortedByType() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<TaskSummary> tasks = taskClient.findTasksByStatusByProcessInstanceId(processInstanceId, null, 0, 10);
        assertNotNull(tasks);
        assertEquals(1, tasks.size());

        TaskSummary taskInstance = tasks.get(0);
        // now let's start it
        taskClient.startTask(CONTAINER_ID, taskInstance.getId(), USER_YODA);
        // now let's stop it
        taskClient.stopTask(CONTAINER_ID, taskInstance.getId(), USER_YODA);

        List<TaskEventInstance> events = taskClient.findTaskEvents(taskInstance.getId(), 0, 10,
                SORT_BY_TASK_EVENTS_TYPE, true);
        assertNotNull(events);
        assertEquals(3, events.size());

        // Ascending by type: ADDED, STARTED, STOPPED.
        TaskEventInstance event = events.get(0);
        assertNotNull(event);
        assertEquals(taskInstance.getId(), event.getTaskId());
        assertEquals(TaskEvent.TaskEventType.ADDED.toString(), event.getType());

        event = events.get(1);
        assertNotNull(event);
        assertEquals(taskInstance.getId(), event.getTaskId());
        assertEquals(TaskEvent.TaskEventType.STARTED.toString(), event.getType());

        event = events.get(2);
        assertNotNull(event);
        assertEquals(taskInstance.getId(), event.getTaskId());
        assertEquals(TaskEvent.TaskEventType.STOPPED.toString(), event.getType());

        // Descending by type reverses the order.
        events = taskClient.findTaskEvents(taskInstance.getId(), 0, 10, SORT_BY_TASK_EVENTS_TYPE, false);
        assertNotNull(events);
        assertEquals(3, events.size());

        event = events.get(0);
        assertNotNull(event);
        assertEquals(taskInstance.getId(), event.getTaskId());
        assertEquals(TaskEvent.TaskEventType.STOPPED.toString(), event.getType());

        event = events.get(1);
        assertNotNull(event);
        assertEquals(taskInstance.getId(), event.getTaskId());
        assertEquals(TaskEvent.TaskEventType.STARTED.toString(), event.getType());
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
    }
}

/**
 * Verifies finding owned tasks, optionally filtered by status.
 */
@Test
public void testFindTasksOwned() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<TaskSummary> tasks = taskClient.findTasksOwned(USER_YODA, 0, 10);
        assertNotNull(tasks);
        assertEquals(1, tasks.size());

        TaskSummary expectedTaskSummary = createDefaultTaskSummary(processInstanceId);
        TaskSummary taskInstance = tasks.get(0);
        assertTaskSummary(expectedTaskSummary, taskInstance);

        // No task is InProgress yet, so the filtered query is empty.
        List<String> status = new ArrayList<String>();
        status.add(Status.InProgress.toString());
        tasks = taskClient.findTasksOwned(USER_YODA, status, 0, 10);
        assertNotNull(tasks);
        assertEquals(0, tasks.size());

        // After starting, the task shows up under the InProgress filter.
        taskClient.startTask(CONTAINER_ID, taskInstance.getId(), USER_YODA);
        tasks = taskClient.findTasksOwned(USER_YODA, status, 0, 10);
        assertNotNull(tasks);
        assertEquals(1, tasks.size());

        taskInstance = tasks.get(0);
        expectedTaskSummary.setStatus(Status.InProgress.toString());
        assertTaskSummary(expectedTaskSummary, taskInstance);
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
    }
}

/**
 * Verifies sorting of owned tasks by status.
 */
@Test
public void testFindTasksOwnedSortedByStatus() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    Long processInstanceId2 = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<TaskSummary> tasks = taskClient.findTasksOwned(USER_YODA, 0, 10, SORT_BY_TASK_STATUS, true);
        assertNotNull(tasks);
        assertEquals(2, tasks.size());

        // Start one task so the two tasks have different statuses.
        Long someTaskId = tasks.get(0).getId();
        taskClient.startTask(CONTAINER_ID, someTaskId, USER_YODA);

        tasks = taskClient.findTasksOwned(USER_YODA, 0, 10, SORT_BY_TASK_STATUS, true);
        assertNotNull(tasks);
        assertEquals(2, tasks.size());
        assertEquals(Status.InProgress.toString(), tasks.get(0).getStatus());
        assertEquals(Status.Reserved.toString(), tasks.get(1).getStatus());

        tasks = taskClient.findTasksOwned(USER_YODA, 0, 10, SORT_BY_TASK_STATUS, false);
        assertNotNull(tasks);
        assertEquals(2, tasks.size());
        assertEquals(Status.Reserved.toString(), tasks.get(0).getStatus());
        assertEquals(Status.InProgress.toString(), tasks.get(1).getStatus());
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId2);
    }
}

/**
 * Verifies finding tasks assigned as potential owner, optionally by status.
 */
@Test
public void testFindTasksAssignedAsPotentialOwner() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<TaskSummary> tasks = taskClient.findTasksAssignedAsPotentialOwner(USER_YODA, 0, 10);
        assertNotNull(tasks);
        assertEquals(1, tasks.size());

        TaskSummary expectedTaskSummary = createDefaultTaskSummary(processInstanceId);
        TaskSummary taskInstance = tasks.get(0);
        assertTaskSummary(expectedTaskSummary, taskInstance);

        List<String> status = new ArrayList<String>();
        status.add(Status.InProgress.toString());
        tasks = taskClient.findTasksAssignedAsPotentialOwner(USER_YODA, status, 0, 10);
        assertNotNull(tasks);
        assertEquals(0, tasks.size());

        taskClient.startTask(CONTAINER_ID, taskInstance.getId(), USER_YODA);
        tasks = taskClient.findTasksAssignedAsPotentialOwner(USER_YODA, status, 0, 10);
        assertNotNull(tasks);
        assertEquals(1, tasks.size());

        taskInstance = tasks.get(0);
        expectedTaskSummary.setStatus(Status.InProgress.toString());
        assertTaskSummary(expectedTaskSummary, taskInstance);
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
    }
}

/**
 * Verifies sorting of potential-owner tasks by status.
 */
@Test
public void testFindTasksAssignedAsPotentialOwnerSortedByStatus() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    Long processInstanceId2 = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<TaskSummary> tasks = taskClient.findTasksAssignedAsPotentialOwner(USER_YODA, 0, 10, SORT_BY_TASK_STATUS, true);
        assertNotNull(tasks);
        assertEquals(2, tasks.size());

        // Start one task so the two tasks have different statuses.
        Long someTaskId = tasks.get(0).getId();
        taskClient.startTask(CONTAINER_ID, someTaskId, USER_YODA);

        tasks = taskClient.findTasksAssignedAsPotentialOwner(USER_YODA, 0, 10, SORT_BY_TASK_STATUS, true);
        assertNotNull(tasks);
        assertEquals(2, tasks.size());
        assertEquals(Status.InProgress.toString(), tasks.get(0).getStatus());
        assertEquals(Status.Reserved.toString(), tasks.get(1).getStatus());

        tasks = taskClient.findTasksAssignedAsPotentialOwner(USER_YODA, 0, 10, SORT_BY_TASK_STATUS, false);
        assertNotNull(tasks);
        assertEquals(2, tasks.size());
        assertEquals(Status.Reserved.toString(), tasks.get(0).getStatus());
        assertEquals(Status.InProgress.toString(), tasks.get(1).getStatus());
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId2);
    }
}

/**
 * Verifies finding tasks by status for a given process instance
 * (method body continues below).
 */
@Test
public void testFindTasksByStatusByProcessInstanceId() throws Exception {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("stringData", "waiting for signal");
    parameters.put("personData", createPersonInstance(USER_JOHN));

    Long processInstanceId = processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters);
    try {
        List<String> status = new ArrayList<String>();
        status.add(Status.Reserved.toString());
        status.add(Status.InProgress.toString());

        List<TaskSummary> tasks = taskClient.findTasksByStatusByProcessInstanceId(processInstanceId, status, 0, 10);
        assertNotNull(tasks);
        assertEquals(1, tasks.size());

        TaskSummary expectedTaskSummary = createDefaultTaskSummary(processInstanceId);
        TaskSummary taskInstance = tasks.get(0);
        assertTaskSummary(expectedTaskSummary, taskInstance);

        status = new ArrayList<String>();
        status.add(Status.InProgress.toString());
        tasks = taskClient.findTasksByStatusByProcessInstanceId(processInstanceId, status, 0, 10);
        assertNotNull(tasks);
        assertEquals(0, tasks.size());

        taskClient.startTask(CONTAINER_ID, taskInstance.getId(), USER_YODA);
        tasks = taskClient.findTasksByStatusByProcessInstanceId(processInstanceId, status, 0, 10);
        assertNotNull(tasks);
        assertEquals(1, tasks.size());
        // (continuation of testFindTasksByStatusByProcessInstanceId)
        taskInstance = tasks.get(0);
        expectedTaskSummary.setStatus(Status.InProgress.toString());
        assertTaskSummary(expectedTaskSummary, taskInstance);
    } finally {
        processClient.abortProcessInstance(CONTAINER_ID, processInstanceId);
    }
}

/** Asserts that all expected sample process definitions are present. */
private void checkProcessDefinitions(List<String> processIds) {
    assertTrue(processIds.contains(PROCESS_ID_CALL_EVALUATION));
    assertTrue(processIds.contains(PROCESS_ID_EVALUATION));
    assertTrue(processIds.contains(PROCESS_ID_GROUPTASK));
    assertTrue(processIds.contains(PROCESS_ID_SIGNAL_PROCESS));
    assertTrue(processIds.contains(PROCESS_ID_USERTASK));
    assertTrue(processIds.contains(PROCESS_ID_CUSTOM_TASK));
    assertTrue(processIds.contains(PROCESS_ID_SIGNAL_START));
    assertTrue(processIds.contains(PROCESS_ID_ASYNC_SCRIPT));
    assertTrue(processIds.contains(PROCESS_ID_TIMER));
}

/** Compares the relevant fields of two node instances. */
private void assertNodeInstance(NodeInstance expected, NodeInstance actual) {
    assertNotNull(actual);
    assertEquals(expected.getName(), actual.getName());
    assertEquals(expected.getContainerId(), actual.getContainerId());
    assertEquals(expected.getNodeType(), actual.getNodeType());
    assertEquals(expected.getCompleted(), actual.getCompleted());
    assertEquals(expected.getProcessInstanceId(), actual.getProcessInstanceId());
}

/** Compares the relevant fields of two task summaries. */
private void assertTaskSummary(TaskSummary expected, TaskSummary actual) {
    assertNotNull(actual);
    assertEquals(expected.getName(), actual.getName());
    assertEquals(expected.getProcessId(), actual.getProcessId());
    KieServerAssert.assertNullOrEmpty(actual.getDescription());
    assertEquals(expected.getStatus(), actual.getStatus());
    assertEquals(expected.getPriority(), actual.getPriority());
    assertEquals(expected.getActualOwner(), actual.getActualOwner());
    assertEquals(expected.getCreatedBy(), actual.getCreatedBy());
    assertEquals(expected.getContainerId(), actual.getContainerId());
    assertEquals(expected.getParentId(), actual.getParentId());
    assertEquals(expected.getProcessInstanceId(), actual.getProcessInstanceId());
}

/** Compares the relevant fields of two task instances. */
private void assertTaskInstace(TaskInstance expected, TaskInstance actual) {
    assertNotNull(actual);
    assertEquals(expected.getName(), actual.getName());
    KieServerAssert.assertNullOrEmpty(actual.getDescription());
    assertEquals(expected.getStatus(), actual.getStatus());
    assertEquals(expected.getPriority(), actual.getPriority());
    assertEquals(expected.getActualOwner(), actual.getActualOwner());
    assertEquals(expected.getCreatedBy(), actual.getCreatedBy());
    assertEquals(expected.getProcessId(), actual.getProcessId());
    assertEquals(expected.getContainerId(), actual.getContainerId());
    assertEquals(expected.getProcessInstanceId(), actual.getProcessInstanceId());
}

/** Compares the relevant fields of two task events. */
private void assertTaskEventInstance(TaskEventInstance expected, TaskEventInstance actual) {
    assertNotNull(actual);
    assertEquals(expected.getType(), actual.getType());
    assertEquals(expected.getProcessInstanceId(), actual.getProcessInstanceId());
    assertEquals(expected.getTaskId(), actual.getTaskId());
    assertEquals(expected.getUserId(), actual.getUserId());
}

/** Builds the summary expected for the first task of a freshly started user-task process. */
private TaskSummary createDefaultTaskSummary(long processInstanceId) {
    return TaskSummary
            .builder()
            .name("First task")
            .status(Status.Reserved.toString())
            .priority(0)
            .actualOwner(USER_YODA)
            .createdBy(USER_YODA)
            .processId(PROCESS_ID_USERTASK)
            .containerId(CONTAINER_ID)
            .taskParentId(-1l)
            .processInstanceId(processInstanceId)
            .build();
}

/** Starts 3 signal-process and 2 user-task instances; returns their ids sorted ascending. */
protected List<Long> createProcessInstances(Map<String, Object> parameters) {
    List<Long> processInstanceIds = new ArrayList<Long>();
    processInstanceIds.add(processClient.startProcess(CONTAINER_ID, PROCESS_ID_SIGNAL_PROCESS, parameters));
    processInstanceIds.add(processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters));
    processInstanceIds.add(processClient.startProcess(CONTAINER_ID, PROCESS_ID_SIGNAL_PROCESS, parameters));
    processInstanceIds.add(processClient.startProcess(CONTAINER_ID, PROCESS_ID_USERTASK, parameters));
    processInstanceIds.add(processClient.startProcess(CONTAINER_ID, PROCESS_ID_SIGNAL_PROCESS, parameters));
    Collections.sort(processInstanceIds);
    return processInstanceIds;
}

/** Aborts every process instance in the list. */
protected void abortProcessInstances(List<Long> processInstanceIds) {
    for (Long piId : processInstanceIds) {
        processClient.abortProcessInstance(CONTAINER_ID, piId);
    }
}

/** Extracts the ids of the given process definitions. */
protected List<String> collectDefinitions(List<ProcessDefinition> definitions) {
    List<String> ids = new ArrayList<String>();
    for (ProcessDefinition definition : definitions) {
        ids.add(definition.getId());
    }
    return ids;
}

/** Extracts the ids of the given process instances. */
protected List<Long> collectInstances(List<ProcessInstance> instances) {
    List<Long> ids = new ArrayList<Long>();
    for (ProcessInstance instance : instances) {
        ids.add(instance.getId());
    }
    return ids;
}
}
package lingscope.algorithms; import generalutils.Statistics; import java.util.List; import lingscope.io.AnnotatedSentencesIO; import lingscope.structures.AnnotatedSentence; /** * Compares two annotations or two annotation files * @author shashank */ public class AnnotationComparer { private double tp; private double fp; private double fn; private double tn; private double perfectMatches; private int totalSentences; private int numFolds; private double[] tpFolds; private double[] fpFolds; private double[] fnFolds; private double[] tnFolds; private double[] perfectMatchesFolds; private double[] totalSentencesFolds; public AnnotationComparer(int numFolds) { this.numFolds = numFolds; reset(); } /** * Resets the values for the comparer */ public final void reset() { tp = 0; fp = 0; fn = 0; tn = 0; perfectMatches = 0; totalSentences = 0; tpFolds = new double[numFolds]; resetFold(tpFolds, numFolds); fpFolds = new double[numFolds]; resetFold(fpFolds, numFolds); fnFolds = new double[numFolds]; resetFold(fnFolds, numFolds); tnFolds = new double[numFolds]; resetFold(tnFolds, numFolds); perfectMatchesFolds = new double[numFolds]; resetFold(perfectMatchesFolds, numFolds); totalSentencesFolds = new double[numFolds]; resetFold(totalSentencesFolds, numFolds); } private void resetFold(double[] folds, int numFolds) { for (int i = 0; i < numFolds; ++i) { folds[i] = 0; } } public void compareAnnotationFiles(String goldFile, String testFile) { List<AnnotatedSentence> goldSentences = AnnotatedSentencesIO.read(goldFile); List<AnnotatedSentence> testSentences = AnnotatedSentencesIO.read(testFile); if (goldSentences.size() != testSentences.size()) { throw new RuntimeException("Number of sentences in gold and test file are not same"); } for (int i = 0; i < goldSentences.size(); ++i) { AnnotatedSentence goldSentence = goldSentences.get(i); AnnotatedSentence testSentence = testSentences.get(i); compareAnnotations(goldSentence, testSentence); } } public void 
compareAnnotations(AnnotatedSentence goldSentence, AnnotatedSentence testSentence) { int localTp = 0; int localFp = 0; int localFn = 0; int localTn = 0; if (goldSentence.getIsAnnotatedTags().size() != testSentence.getIsAnnotatedTags().size()) { System.err.println("Size mismatch GOLD: " + goldSentence.getRawText()); System.err.println("Size mismatch TEST: " + testSentence.getRawText()); } int numTags = Math.min(goldSentence.getIsAnnotatedTags().size(), testSentence.getIsAnnotatedTags().size()); for (int i = 0; i < numTags; ++i) { boolean goldTag = goldSentence.getIsAnnotatedTags().get(i); boolean testTag = testSentence.getIsAnnotatedTags().get(i); if (goldTag && testTag) { ++localTp; } else if (goldTag && (!testTag)) { ++localFn; } else if ((!goldTag) && testTag) { ++localFp; } else if ((!goldTag) && (!testTag)) { ++localTn; } } int foldNum = totalSentences % numFolds; tp += localTp; fp += localFp; fn += localFn; tn += localTn; tpFolds[foldNum] += localTp; fpFolds[foldNum] += localFp; fnFolds[foldNum] += localFn; tnFolds[foldNum] += localTn; if (localFp != 0) { System.out.println("FP Gold: " + goldSentence.getRawText()); System.out.println("FP Test: " + testSentence.getRawText()); } if (localFn != 0) { System.out.println("FN Gold: " + goldSentence.getRawText()); System.out.println("FN Test: " + testSentence.getRawText()); } if (localFp == 0 && localFn == 0) { ++perfectMatches; ++perfectMatchesFolds[foldNum]; } ++totalSentences; ++totalSentencesFolds[foldNum]; } public void printStats() { System.out.println("TP: " + tp); System.out.println("FP: " + fp); System.out.println("FN: " + fn); System.out.println("TN: " + tn); double precision = getPrecision(tp, fp); double recall = getRecall(tp, fn); System.out.println("Overall precision: " + precision); System.out.println("Overall recall: " + recall); System.out.println("Overall F1-score: " + getFScore(recall, precision)); System.out.println("Overall Accuracy: " + getAccuracy(tp, fp, fn, tn)); System.out.println("Overall 
Perfect : " + getPerfectAccuracy(perfectMatches, totalSentences)); double[] recalls = getRecalls(); double[] precisions = getPrecisions(); double[] fScores = getFScores(recalls, precisions); double[] accuracies = getAccuracies(); double[] perfectAccuracies = getPerfectAccuracies(); System.out.println("Average precision: " + Statistics.mean(precisions) + " +/- " + Statistics.stdDev(precisions)); System.out.println("Average recall: " + Statistics.mean(recalls) + " +/- " + Statistics.stdDev(recalls)); System.out.println("Average F1-score: " + Statistics.mean(fScores) + " +/- " + Statistics.stdDev(fScores)); System.out.println("Average Accuracy: " + Statistics.mean(accuracies) + " +/- " + Statistics.stdDev(accuracies)); System.out.println("Average Perfect: " + Statistics.mean(perfectAccuracies) + " +/- " + Statistics.stdDev(perfectAccuracies)); } public double[] getRecalls() { double[] recalls = new double[numFolds]; for (int i = 0; i < numFolds; ++i) { recalls[i] = getRecall(tpFolds[i], fnFolds[i]); } return recalls; } public double[] getPrecisions() { double[] precisions = new double[numFolds]; for (int i = 0; i < numFolds; ++i) { precisions[i] = getPrecision(tpFolds[i], fpFolds[i]); } return precisions; } public double[] getFScores(double[] recalls, double[] precisions) { double[] fScores = new double[numFolds]; for (int i = 0; i < numFolds; ++i) { fScores[i] = getFScore(recalls[i], precisions[i]); } return fScores; } public double[] getAccuracies() { double[] accuracies = new double[numFolds]; for (int i = 0; i < numFolds; ++i) { accuracies[i] = getAccuracy(tpFolds[i], fpFolds[i], fnFolds[i], tnFolds[i]); } return accuracies; } public double[] getPerfectAccuracies() { double[] pcss = new double[numFolds]; for (int i = 0; i < numFolds; ++i) { pcss[i] = getPerfectAccuracy(perfectMatchesFolds[i], totalSentencesFolds[i]); } return pcss; } public double getPerfectAccuracy(double trues, double total) { return trues / total; } public double getAccuracy(double tp, double 
fp, double fn, double tn) { return (tp + tn) / (tp + fp + fn + tn); } public double getRecall(double tp, double fn) { return tp / (tp + fn); } public double getPrecision(double tp, double fp) { return tp / (tp + fp); } public double getFScore(double recall, double precision) { return 2 * precision * recall / (recall + precision); } }
package org.antlr.intellij.plugin; import com.intellij.execution.filters.TextConsoleBuilder; import com.intellij.execution.filters.TextConsoleBuilderFactory; import com.intellij.execution.ui.ConsoleView; import com.intellij.ide.plugins.IdeaPluginDescriptor; import com.intellij.ide.plugins.PluginManager; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.ProjectComponent; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.event.EditorFactoryAdapter; import com.intellij.openapi.editor.event.EditorFactoryEvent; import com.intellij.openapi.editor.event.EditorMouseAdapter; import com.intellij.openapi.editor.event.EditorMouseEvent; import com.intellij.openapi.extensions.PluginId; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.fileEditor.FileEditorManagerAdapter; import com.intellij.openapi.fileEditor.FileEditorManagerEvent; import com.intellij.openapi.fileEditor.FileEditorManagerListener; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.Task; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileAdapter; import com.intellij.openapi.vfs.VirtualFileEvent; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.openapi.wm.ToolWindow; import com.intellij.openapi.wm.ToolWindowAnchor; import com.intellij.openapi.wm.ToolWindowManager; import com.intellij.ui.content.Content; import com.intellij.ui.content.ContentFactory; import com.intellij.util.messages.MessageBusConnection; import org.antlr.intellij.adaptor.parser.SyntaxErrorListener; import 
org.antlr.intellij.plugin.parsing.ParsingResult; import org.antlr.intellij.plugin.parsing.ParsingUtils; import org.antlr.intellij.plugin.parsing.RunANTLROnGrammarFile; import org.antlr.intellij.plugin.preview.PreviewPanel; import org.antlr.intellij.plugin.preview.PreviewState; import org.antlr.intellij.plugin.profiler.ProfilerPanel; import org.antlr.v4.parse.ANTLRParser; import org.antlr.v4.tool.Grammar; import org.antlr.v4.tool.LexerGrammar; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.io.File; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; /** This object is the controller for the ANTLR plug-in. It receives * events and can send them on to its contained components. For example, * saving the grammar editor or flipping to a new grammar sends an event * to this object, which forwards on update events to the preview tool window. * * The main components are related to the console tool window forever output and * the main panel of the preview tool window. * * This controller also manages the cache of grammar/editor combinations * needed for the preview window. Updates must be made atomically so that * the grammars and editors are consistently associated with the same window. 
*/ public class ANTLRv4PluginController implements ProjectComponent { public static final String PLUGIN_ID = "org.antlr.intellij.plugin"; public static final Key<GrammarEditorMouseAdapter> EDITOR_MOUSE_LISTENER_KEY = Key.create("EDITOR_MOUSE_LISTENER_KEY"); public static final Logger LOG = Logger.getInstance("ANTLRv4PluginController"); public static final String PREVIEW_WINDOW_ID = "ANTLR Preview"; public static final String CONSOLE_WINDOW_ID = "Tool Output"; public boolean projectIsClosed = false; public Project project; public ConsoleView console; public ToolWindow consoleWindow; public Map<String, PreviewState> grammarToPreviewState = Collections.synchronizedMap(new HashMap<String, PreviewState>()); public ToolWindow previewWindow; // same for all grammar editor public PreviewPanel previewPanel; // same for all grammar editor public MyVirtualFileAdapter myVirtualFileAdapter = new MyVirtualFileAdapter(); public MyFileEditorManagerAdapter myFileEditorManagerAdapter = new MyFileEditorManagerAdapter(); public ANTLRv4PluginController(Project project) { this.project = project; } public static ANTLRv4PluginController getInstance(Project project) { if ( project==null ) { LOG.error("getInstance: project is null"); return null; } ANTLRv4PluginController pc = project.getComponent(ANTLRv4PluginController.class); if ( pc==null ) { LOG.error("getInstance: getComponent() for "+project.getName()+" returns null"); } return pc; } @Override public void initComponent() { } @Override public void projectOpened() { IdeaPluginDescriptor plugin = PluginManager.getPlugin(PluginId.getId(PLUGIN_ID)); String version = "unknown"; if ( plugin!=null ) { version = plugin.getVersion(); } LOG.info("ANTLR 4 Plugin version "+version+", Java version "+ SystemInfo.JAVA_VERSION); // make sure the tool windows are created early createToolWindows(); installListeners(); } public void createToolWindows() { LOG.info("createToolWindows "+project.getName()); ToolWindowManager toolWindowManager = 
ToolWindowManager.getInstance(project); previewPanel = new PreviewPanel(project); ContentFactory contentFactory = ContentFactory.SERVICE.getInstance(); Content content = contentFactory.createContent(previewPanel, "", false); previewWindow = toolWindowManager.registerToolWindow(PREVIEW_WINDOW_ID, true, ToolWindowAnchor.BOTTOM); previewWindow.getContentManager().addContent(content); previewWindow.setIcon(Icons.FILE); TextConsoleBuilderFactory factory = TextConsoleBuilderFactory.getInstance(); TextConsoleBuilder consoleBuilder = factory.createBuilder(project); this.console = consoleBuilder.getConsole(); JComponent consoleComponent = console.getComponent(); content = contentFactory.createContent(consoleComponent, "", false); consoleWindow = toolWindowManager.registerToolWindow(CONSOLE_WINDOW_ID, true, ToolWindowAnchor.BOTTOM); consoleWindow.getContentManager().addContent(content); consoleWindow.setIcon(Icons.FILE); } @Override public void projectClosed() { LOG.info("projectClosed " + project.getName()); //synchronized ( shutdownLock ) { // They should be called from EDT only so no lock projectIsClosed = true; uninstallListeners(); console.dispose(); for (PreviewState it : grammarToPreviewState.values()) { previewPanel.inputPanel.releaseEditor(it); } previewPanel = null; previewWindow = null; consoleWindow = null; project = null; grammarToPreviewState = null; } // seems that intellij can kill and reload a project w/o user knowing. // a ptr was left around that pointed at a disposed project. led to // problem in switchGrammar. Probably was a listener still attached and trigger // editor listeners released in editorReleased() events. 
public void uninstallListeners() { VirtualFileManager.getInstance().removeVirtualFileListener(myVirtualFileAdapter); MessageBusConnection msgBus = project.getMessageBus().connect(project); msgBus.disconnect(); } @Override public void disposeComponent() { } @NotNull @Override public String getComponentName() { return "antlr.ProjectComponent"; } // ------------------------------ public void installListeners() { LOG.info("installListeners "+project.getName()); // Listen for .g4 file saves VirtualFileManager.getInstance().addVirtualFileListener(myVirtualFileAdapter); // Listen for editor window changes MessageBusConnection msgBus = project.getMessageBus().connect(project); msgBus.subscribe( FileEditorManagerListener.FILE_EDITOR_MANAGER, myFileEditorManagerAdapter ); EditorFactory factory = EditorFactory.getInstance(); factory.addEditorFactoryListener( new EditorFactoryAdapter() { @Override public void editorCreated(@NotNull EditorFactoryEvent event) { final Editor editor = event.getEditor(); final Document doc = editor.getDocument(); VirtualFile vfile = FileDocumentManager.getInstance().getFile(doc); if ( vfile!=null && vfile.getName().endsWith(".g4") ) { GrammarEditorMouseAdapter listener = new GrammarEditorMouseAdapter(); editor.putUserData(EDITOR_MOUSE_LISTENER_KEY, listener); editor.addEditorMouseListener(listener); } } @Override public void editorReleased(@NotNull EditorFactoryEvent event) { Editor editor = event.getEditor(); if (editor.getProject() != null && editor.getProject() != project) { return; } GrammarEditorMouseAdapter listener = editor.getUserData(EDITOR_MOUSE_LISTENER_KEY); if (listener != null) { editor.removeEditorMouseListener(listener); editor.putUserData(EDITOR_MOUSE_LISTENER_KEY, null); } } } ); } /** The test ANTLR rule action triggers this event. This can occur * only occur when the current editor the showing a grammar, because * that is the only time that the action is enabled. 
We will see * a file changed event when the project loads the first grammar file. */ public void setStartRuleNameEvent(VirtualFile grammarFile, String startRuleName) { LOG.info("setStartRuleNameEvent " + startRuleName+" "+project.getName()); PreviewState previewState = getPreviewState(grammarFile); previewState.startRuleName = startRuleName; if ( previewPanel!=null ) { previewPanel.getInputPanel().setStartRuleName(grammarFile, startRuleName); // notify the view previewPanel.updateParseTreeFromDoc(grammarFile); } else { LOG.error("setStartRuleNameEvent called before preview panel created"); } } public void grammarFileSavedEvent(VirtualFile grammarFile) { LOG.info("grammarFileSavedEvent "+grammarFile.getPath()+" "+project.getName()); updateGrammarObjectsFromFile(grammarFile); // force reload if ( previewPanel!=null ) { previewPanel.grammarFileSaved(grammarFile); } else { LOG.error("grammarFileSavedEvent called before preview panel created"); } runANTLRTool(grammarFile); } public void currentEditorFileChangedEvent(VirtualFile oldFile, VirtualFile newFile) { LOG.info("currentEditorFileChangedEvent "+(oldFile!=null?oldFile.getPath():"none")+ " -> "+(newFile!=null?newFile.getPath():"none")+" "+project.getName()); if ( newFile==null ) { // all files must be closed I guess return; } if ( newFile.getName().endsWith(".g") ) { LOG.info("currentEditorFileChangedEvent ANTLR 4 cannot handle .g files, only .g4"); previewWindow.hide(null); return; } if ( !newFile.getName().endsWith(".g4") ) { previewWindow.hide(null); return; } PreviewState previewState = getPreviewState(newFile); if ( previewState.g==null && previewState.lg==null ) { // only load grammars if none is there updateGrammarObjectsFromFile(newFile); } if ( previewPanel!=null ) { previewPanel.grammarFileChanged(oldFile, newFile); } } public void mouseEnteredGrammarEditorEvent(VirtualFile vfile, EditorMouseEvent e) { if ( previewPanel!=null ) { ProfilerPanel profilerPanel = previewPanel.getProfilerPanel(); if ( 
profilerPanel!=null ) { profilerPanel.mouseEnteredGrammarEditorEvent(vfile, e); } } } public void editorFileClosedEvent(VirtualFile vfile) { // hopefully called only from swing EDT String grammarFileName = vfile.getPath(); LOG.info("editorFileClosedEvent "+ grammarFileName+" "+project.getName()); if ( !vfile.getName().endsWith(".g4") ) { previewWindow.hide(null); return; } // Dispose of state, editor, and such for this file PreviewState previewState = grammarToPreviewState.get(grammarFileName); if ( previewState==null ) { // project closing must have done already return; } previewState.g = null; // wack old ref to the Grammar for text in editor previewState.lg = null; previewPanel.closeGrammar(vfile); grammarToPreviewState.remove(grammarFileName); // close tool window previewWindow.hide(null); } /** Make sure to run after updating grammars in previewState */ public void runANTLRTool(final VirtualFile grammarFile) { String title = "ANTLR Code Generation"; boolean canBeCancelled = true; boolean forceGeneration = false; Task gen = new RunANTLROnGrammarFile(grammarFile, project, title, canBeCancelled, forceGeneration); ProgressManager.getInstance().run(gen); } /** Look for state information concerning this grammar file and update * the Grammar objects. This does not necessarily update the grammar file * in the current editor window. Either we are already looking at * this grammar or we will have seen a grammar file changed event. * (I hope!) */ public void updateGrammarObjectsFromFile(VirtualFile grammarFile) { updateGrammarObjectsFromFile_(grammarFile); // if grammarFileName is a separate lexer, we need to look for // its matching parser, if any, that is loaded in an editor // (don't go looking on disk). PreviewState s = getAssociatedParserIfLexer(grammarFile.getPath()); if ( s!=null ) { // try to load lexer again and associate with this parser grammar. 
// must update parser too as tokens have changed updateGrammarObjectsFromFile_(s.grammarFile); } } public String updateGrammarObjectsFromFile_(VirtualFile grammarFile) { String grammarFileName = grammarFile.getPath(); PreviewState previewState = getPreviewState(grammarFile); Grammar[] grammars = ParsingUtils.loadGrammars(grammarFileName, project); if (grammars != null) { synchronized (previewState) { // build atomically previewState.lg = (LexerGrammar)grammars[0]; previewState.g = grammars[1]; } } return grammarFileName; } public PreviewState getAssociatedParserIfLexer(String grammarFileName) { for (PreviewState s : grammarToPreviewState.values()) { if ( s!=null && s.lg!=null && (grammarFileName.equals(s.lg.fileName)||s.lg==ParsingUtils.BAD_LEXER_GRAMMAR) ) { // s has a lexer with same filename, see if there is a parser grammar // (not a combined grammar) if ( s.g!=null && s.g.getType()==ANTLRParser.PARSER ) { // System.out.println(s.lg.fileName+" vs "+grammarFileName+", g="+s.g.name+", type="+s.g.getTypeString()); return s; } } } return null; } public ParsingResult parseText(final VirtualFile grammarFile, String inputText) throws IOException { String grammarFileName = grammarFile.getPath(); if (!new File(grammarFileName).exists()) { LOG.error("parseText grammar doesn't exit " + grammarFileName); return null; } // Wipes out the console and also any error annotations previewPanel.inputPanel.clearParseErrors(); final PreviewState previewState = getPreviewState(grammarFile); long start = System.nanoTime(); previewState.parsingResult = ParsingUtils.parseText(previewState.g, previewState.lg, previewState.startRuleName, grammarFile, inputText); if ( previewState.parsingResult==null ) { return null; } long stop = System.nanoTime(); previewPanel.profilerPanel.setProfilerData(previewState, stop-start); SyntaxErrorListener syntaxErrorListener = previewState.parsingResult.syntaxErrorListener; previewPanel.inputPanel.showParseErrors(syntaxErrorListener.getSyntaxErrors()); 
return previewState.parsingResult; } public PreviewPanel getPreviewPanel() { return previewPanel; } public ConsoleView getConsole() { return console; } public ToolWindow getConsoleWindow() { return consoleWindow; } public static void showConsoleWindow(final Project project) { ApplicationManager.getApplication().invokeLater( new Runnable() { @Override public void run() { ANTLRv4PluginController.getInstance(project).getConsoleWindow().show(null); } } ); } public ToolWindow getPreviewWindow() { return previewWindow; } public @NotNull PreviewState getPreviewState(VirtualFile grammarFile) { // make sure only one thread tries to add a preview state object for a given file String grammarFileName = grammarFile.getPath(); // Have we seen this grammar before? PreviewState stateForCurrentGrammar = grammarToPreviewState.get(grammarFileName); if ( stateForCurrentGrammar!=null ) { return stateForCurrentGrammar; // seen this before } // not seen, must create state stateForCurrentGrammar = new PreviewState(project, grammarFile); grammarToPreviewState.put(grammarFileName, stateForCurrentGrammar); return stateForCurrentGrammar; } public Editor getEditor(VirtualFile vfile) { final FileDocumentManager fdm = FileDocumentManager.getInstance(); final Document doc = fdm.getDocument(vfile); if (doc == null) return null; EditorFactory factory = EditorFactory.getInstance(); final Editor[] editors = factory.getEditors(doc, previewPanel.project); if ( editors.length==0 ) { // no editor found for this file. likely an out-of-sequence issue // where Intellij is opening a project and doesn't fire events // in order we'd expect. return null; } return editors[0]; // hope just one } /** Get the state information associated with the grammar in the current * editor window. If there is no grammar in the editor window, return null. * If there is a grammar, return any existing preview state else * create a new one in store in the map. * * Too dangerous; turning off but might be useful later. 
public @org.jetbrains.annotations.Nullable PreviewState getPreviewState() { VirtualFile currentGrammarFile = getCurrentGrammarFile(); if ( currentGrammarFile==null ) { return null; } String currentGrammarFileName = currentGrammarFile.getPath(); if ( currentGrammarFileName==null ) { return null; // we are not looking at a grammar file } return getPreviewState(currentGrammarFile); } */ // These "get current editor file" routines should only be used // when you are sure the user is in control and is viewing the // right file (i.e., don't use these during project loading etc...) public static VirtualFile getCurrentEditorFile(Project project) { FileEditorManager fmgr = FileEditorManager.getInstance(project); // "If more than one file is selected (split), the file with most recent focused editor is returned first." from IDE doc on method VirtualFile files[] = fmgr.getSelectedFiles(); if ( files.length == 0 ) { return null; } return files[0]; } // public Editor getCurrentGrammarEditor() { // FileEditorManager edMgr = FileEditorManager.getInstance(project); // return edMgr.getSelectedTextEditor(); // } public VirtualFile getCurrentGrammarFile() { return getCurrentGrammarFile(project); } public static VirtualFile getCurrentGrammarFile(Project project) { VirtualFile f = getCurrentEditorFile(project); if ( f==null ) { return null; } if ( f.getName().endsWith(".g4") ) return f; return null; } private class GrammarEditorMouseAdapter extends EditorMouseAdapter { @Override public void mouseClicked(EditorMouseEvent e) { Document doc = e.getEditor().getDocument(); VirtualFile vfile = FileDocumentManager.getInstance().getFile(doc); if ( vfile!=null && vfile.getName().endsWith(".g4") ) { mouseEnteredGrammarEditorEvent(vfile, e); } } } private class MyVirtualFileAdapter extends VirtualFileAdapter { @Override public void contentsChanged(VirtualFileEvent event) { final VirtualFile vfile = event.getFile(); if ( !vfile.getName().endsWith(".g4") ) return; if ( !projectIsClosed ) 
grammarFileSavedEvent(vfile); } } private class MyFileEditorManagerAdapter extends FileEditorManagerAdapter { @Override public void selectionChanged(FileEditorManagerEvent event) { if ( !projectIsClosed ) currentEditorFileChangedEvent(event.getOldFile(), event.getNewFile()); } @Override public void fileClosed(FileEditorManager source, VirtualFile file) { if ( !projectIsClosed ) editorFileClosedEvent(file); } } }
/*
 * Copyright 1999-2012 Alibaba Group.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package fm.liu.timo.parser;

import org.junit.Assert;
import org.junit.Test;
import fm.liu.timo.server.parser.ServerParse;
import fm.liu.timo.server.parser.ServerParseSelect;
import fm.liu.timo.server.parser.ServerParseSet;
import fm.liu.timo.server.parser.ServerParseShow;
import fm.liu.timo.server.parser.ServerParseStart;

/**
 * Unit tests for the lightweight server-side SQL statement classifiers.
 *
 * Each classifier returns an int code identifying the statement kind; the
 * {@code 0xff &} masking in some tests keeps only the low byte (the type code) —
 * the high bits presumably carry extra information such as an offset; verify
 * against the ServerParse implementation. The int arguments passed to the
 * sub-parsers (e.g. {@code 4}, {@code " SET".length()}) are the offset of the
 * first character after the leading keyword.
 *
 * @author xianmao.hexm
 */
public class ServerParserTest {

    // ---- top-level statement keywords, checked case-insensitively ----

    @Test
    public void testIsBegin() {
        Assert.assertEquals(ServerParse.BEGIN, ServerParse.parse("begin"));
        Assert.assertEquals(ServerParse.BEGIN, ServerParse.parse("BEGIN"));
        Assert.assertEquals(ServerParse.BEGIN, ServerParse.parse("BegIn"));
    }

    @Test
    public void testIsCommit() {
        Assert.assertEquals(ServerParse.COMMIT, ServerParse.parse("commit"));
        Assert.assertEquals(ServerParse.COMMIT, ServerParse.parse("COMMIT"));
        Assert.assertEquals(ServerParse.COMMIT, ServerParse.parse("cOmmiT "));
    }

    @Test
    public void testIsDelete() {
        Assert.assertEquals(ServerParse.DELETE, ServerParse.parse("delete ..."));
        Assert.assertEquals(ServerParse.DELETE, ServerParse.parse("DELETE ..."));
        Assert.assertEquals(ServerParse.DELETE, ServerParse.parse("DeletE ..."));
    }

    @Test
    public void testIsInsert() {
        Assert.assertEquals(ServerParse.INSERT, ServerParse.parse("insert ..."));
        Assert.assertEquals(ServerParse.INSERT, ServerParse.parse("INSERT ..."));
        Assert.assertEquals(ServerParse.INSERT, ServerParse.parse("InserT ..."));
    }

    @Test
    public void testIsReplace() {
        Assert.assertEquals(ServerParse.REPLACE, ServerParse.parse("replace ..."));
        Assert.assertEquals(ServerParse.REPLACE, ServerParse.parse("REPLACE ..."));
        Assert.assertEquals(ServerParse.REPLACE, ServerParse.parse("rEPLACe ..."));
    }

    @Test
    public void testIsRollback() {
        Assert.assertEquals(ServerParse.ROLLBACK, ServerParse.parse("rollback"));
        Assert.assertEquals(ServerParse.ROLLBACK, ServerParse.parse("ROLLBACK"));
        Assert.assertEquals(ServerParse.ROLLBACK, ServerParse.parse("rolLBACK "));
    }

    // For these, the parse() result packs more than the type code; mask to the low byte.

    @Test
    public void testIsSelect() {
        Assert.assertEquals(ServerParse.SELECT, 0xff & ServerParse.parse("select ..."));
        Assert.assertEquals(ServerParse.SELECT, 0xff & ServerParse.parse("SELECT ..."));
        Assert.assertEquals(ServerParse.SELECT, 0xff & ServerParse.parse("sELECt ..."));
    }

    @Test
    public void testIsSet() {
        Assert.assertEquals(ServerParse.SET, 0xff & ServerParse.parse("set ..."));
        Assert.assertEquals(ServerParse.SET, 0xff & ServerParse.parse("SET ..."));
        Assert.assertEquals(ServerParse.SET, 0xff & ServerParse.parse("sEt ..."));
    }

    @Test
    public void testIsShow() {
        Assert.assertEquals(ServerParse.SHOW, 0xff & ServerParse.parse("show ..."));
        Assert.assertEquals(ServerParse.SHOW, 0xff & ServerParse.parse("SHOW ..."));
        Assert.assertEquals(ServerParse.SHOW, 0xff & ServerParse.parse("sHOw ..."));
    }

    @Test
    public void testIsStart() {
        Assert.assertEquals(ServerParse.START, 0xff & ServerParse.parse("start ..."));
        Assert.assertEquals(ServerParse.START, 0xff & ServerParse.parse("START ..."));
        Assert.assertEquals(ServerParse.START, 0xff & ServerParse.parse("stART ..."));
    }

    @Test
    public void testIsUpdate() {
        Assert.assertEquals(ServerParse.UPDATE, ServerParse.parse("update ..."));
        Assert.assertEquals(ServerParse.UPDATE, ServerParse.parse("UPDATE ..."));
        Assert.assertEquals(ServerParse.UPDATE, ServerParse.parse("UPDate ..."));
    }

    // ---- SHOW sub-parser; the int is the offset just past the SHOW keyword ----

    @Test
    public void testIsShowDatabases() {
        Assert.assertEquals(ServerParseShow.DATABASES, ServerParseShow.parse("show databases", 4));
        Assert.assertEquals(ServerParseShow.DATABASES, ServerParseShow.parse("SHOW DATABASES", 4));
        Assert.assertEquals(ServerParseShow.DATABASES, ServerParseShow.parse("SHOW databases ", 4));
    }

    @Test
    public void testIsShowDataSources() {
        Assert.assertEquals(ServerParseShow.DATASOURCES, ServerParseShow.parse("show datasources", 4));
        Assert.assertEquals(ServerParseShow.DATASOURCES, ServerParseShow.parse("SHOW DATASOURCES", 4));
        Assert.assertEquals(ServerParseShow.DATASOURCES, ServerParseShow.parse(" SHOW DATASOURCES ", 6));
    }

    @Test
    public void testShowTimoStatus() {
        Assert.assertEquals(ServerParseShow.TIMO_STATUS, ServerParseShow.parse("show timo_status", 4));
        Assert.assertEquals(ServerParseShow.TIMO_STATUS, ServerParseShow.parse("show timo_status ", 4));
        Assert.assertEquals(ServerParseShow.TIMO_STATUS,
                ServerParseShow.parse(" SHOW Timo_STATUS", " SHOW".length()));
        // near-miss spellings must fall through to OTHER
        Assert.assertEquals(ServerParseShow.OTHER,
                ServerParseShow.parse(" show timo_statu", " SHOW".length()));
        Assert.assertEquals(ServerParseShow.OTHER,
                ServerParseShow.parse(" show timo_status2", " SHOW".length()));
        Assert.assertEquals(ServerParseShow.OTHER,
                ServerParseShow.parse("Show timo_status2 ", "SHOW".length()));
    }

    @Test
    public void testShowTimoCluster() {
        Assert.assertEquals(ServerParseShow.TIMO_CLUSTER, ServerParseShow.parse("show timo_cluster", 4));
        Assert.assertEquals(ServerParseShow.TIMO_CLUSTER, ServerParseShow.parse("Show timo_CLUSTER ", 4));
        Assert.assertEquals(ServerParseShow.TIMO_CLUSTER, ServerParseShow.parse(" show Timo_cluster", 5));
        // near-miss spellings must fall through to OTHER
        Assert.assertEquals(ServerParseShow.OTHER, ServerParseShow.parse(" show timo_clust", 5));
        Assert.assertEquals(ServerParseShow.OTHER, ServerParseShow.parse(" show timo_cluster2", 5));
        Assert.assertEquals(ServerParseShow.OTHER, ServerParseShow.parse("Show Timo_cluster9 ", 4));
    }

    @Test
    public void testIsShowOther() {
        Assert.assertEquals(ServerParseShow.OTHER, ServerParseShow.parse("show ...", 4));
        Assert.assertEquals(ServerParseShow.OTHER, ServerParseShow.parse("SHOW ...", 4));
        Assert.assertEquals(ServerParseShow.OTHER, ServerParseShow.parse("SHOW ... ", 4));
    }

    // ---- SET sub-parser ----

    @Test
    public void testIsSetAutocommitOn() {
        Assert.assertEquals(ServerParseSet.AUTOCOMMIT_ON, ServerParseSet.parse("set autocommit=1", 3));
        Assert.assertEquals(ServerParseSet.AUTOCOMMIT_ON, ServerParseSet.parse("set autoCOMMIT = 1", 3));
        Assert.assertEquals(ServerParseSet.AUTOCOMMIT_ON, ServerParseSet.parse("SET AUTOCOMMIT=on", 3));
        Assert.assertEquals(ServerParseSet.AUTOCOMMIT_ON, ServerParseSet.parse("set autoCOMMIT = ON", 3));
    }

    @Test
    public void testIsSetAutocommitOff() {
        Assert.assertEquals(ServerParseSet.AUTOCOMMIT_OFF, ServerParseSet.parse("set autocommit=0", 3));
        Assert.assertEquals(ServerParseSet.AUTOCOMMIT_OFF, ServerParseSet.parse("SET AUTOCOMMIT= 0", 3));
        Assert.assertEquals(ServerParseSet.AUTOCOMMIT_OFF, ServerParseSet.parse("set autoCOMMIT =OFF", 3));
        Assert.assertEquals(ServerParseSet.AUTOCOMMIT_OFF, ServerParseSet.parse("set autoCOMMIT = off", 3));
    }

    @Test
    public void testIsSetNames() {
        Assert.assertEquals(ServerParseSet.NAMES, 0xff & ServerParseSet.parse("set names utf8", 3));
        Assert.assertEquals(ServerParseSet.NAMES, 0xff & ServerParseSet.parse("SET NAMES UTF8", 3));
        Assert.assertEquals(ServerParseSet.NAMES, 0xff & ServerParseSet.parse("set NAMES utf8", 3));
    }

    @Test
    public void testIsCharacterSetResults() {
        Assert.assertEquals(ServerParseSet.CHARACTER_SET_RESULTS,
                0xff & ServerParseSet.parse("SET character_set_results = NULL", 3));
        Assert.assertEquals(ServerParseSet.CHARACTER_SET_RESULTS,
                0xff & ServerParseSet.parse("SET CHARACTER_SET_RESULTS= NULL", 3));
        Assert.assertEquals(ServerParseSet.CHARACTER_SET_RESULTS,
                0xff & ServerParseSet.parse("Set chARActer_SET_RESults = NULL", 3));
        Assert.assertEquals(ServerParseSet.CHARACTER_SET_CONNECTION,
                0xff & ServerParseSet.parse("Set chARActer_SET_Connection = NULL", 3));
        Assert.assertEquals(ServerParseSet.CHARACTER_SET_CLIENT,
                0xff & ServerParseSet.parse("Set chARActer_SET_client = NULL", 3));
    }

    @Test
    public void testIsSetOther() {
        Assert.assertEquals(ServerParseSet.OTHER, ServerParseSet.parse("set ...", 3));
        Assert.assertEquals(ServerParseSet.OTHER, ServerParseSet.parse("SET ...", 3));
        Assert.assertEquals(ServerParseSet.OTHER, ServerParseSet.parse("sEt ...", 3));
    }

    // ---- KILL / SAVEPOINT / USE / START ----

    @Test
    public void testIsKill() {
        Assert.assertEquals(ServerParse.KILL, 0xff & ServerParse.parse(" kill ..."));
        Assert.assertEquals(ServerParse.KILL, 0xff & ServerParse.parse("kill 111111 ..."));
        Assert.assertEquals(ServerParse.KILL, 0xff & ServerParse.parse("KILL 1335505632"));
    }

    @Test
    public void testIsKillQuery() {
        Assert.assertEquals(ServerParse.KILL_QUERY, 0xff & ServerParse.parse(" kill query ..."));
        Assert.assertEquals(ServerParse.KILL_QUERY, 0xff & ServerParse.parse("kill query 111111 ..."));
        Assert.assertEquals(ServerParse.KILL_QUERY, 0xff & ServerParse.parse("KILL QUERY 1335505632"));
    }

    @Test
    public void testIsSavepoint() {
        Assert.assertEquals(ServerParse.SAVEPOINT, ServerParse.parse(" savepoint ..."));
        Assert.assertEquals(ServerParse.SAVEPOINT, ServerParse.parse("SAVEPOINT "));
        Assert.assertEquals(ServerParse.SAVEPOINT, ServerParse.parse(" SAVEpoint a"));
    }

    @Test
    public void testIsUse() {
        Assert.assertEquals(ServerParse.USE, 0xff & ServerParse.parse(" use ..."));
        Assert.assertEquals(ServerParse.USE, 0xff & ServerParse.parse("USE "));
        Assert.assertEquals(ServerParse.USE, 0xff & ServerParse.parse(" Use a"));
    }

    @Test
    public void testIsStartTransaction() {
        Assert.assertEquals(ServerParseStart.TRANSACTION, ServerParseStart.parse(" start transaction ...", 6));
        Assert.assertEquals(ServerParseStart.TRANSACTION, ServerParseStart.parse("START TRANSACTION", 5));
        Assert.assertEquals(ServerParseStart.TRANSACTION, ServerParseStart.parse(" staRT TRANSaction ", 6));
    }

    // ---- SELECT of server variables/functions ----

    @Test
    public void testIsSelectVersionComment() {
        Assert.assertEquals(ServerParseSelect.VERSION_COMMENT,
                ServerParseSelect.parse(" select @@version_comment ", 7));
        Assert.assertEquals(ServerParseSelect.VERSION_COMMENT,
                ServerParseSelect.parse("SELECT @@VERSION_COMMENT", 6));
        Assert.assertEquals(ServerParseSelect.VERSION_COMMENT,
                ServerParseSelect.parse(" selECT @@VERSION_comment ", 7));
    }

    @Test
    public void testIsSelectVersion() {
        Assert.assertEquals(ServerParseSelect.VERSION, ServerParseSelect.parse(" select version () ", 7));
        Assert.assertEquals(ServerParseSelect.VERSION, ServerParseSelect.parse("SELECT VERSION( )", 6));
        Assert.assertEquals(ServerParseSelect.VERSION, ServerParseSelect.parse(" selECT VERSION() ", 7));
    }

    @Test
    public void testIsSelectDatabase() {
        Assert.assertEquals(ServerParseSelect.DATABASE, ServerParseSelect.parse(" select database() ", 7));
        Assert.assertEquals(ServerParseSelect.DATABASE, ServerParseSelect.parse("SELECT DATABASE()", 6));
        Assert.assertEquals(ServerParseSelect.DATABASE, ServerParseSelect.parse(" selECT DATABASE() ", 7));
    }

    @Test
    public void testIsSelectUser() {
        Assert.assertEquals(ServerParseSelect.USER, ServerParseSelect.parse(" select user() ", 7));
        Assert.assertEquals(ServerParseSelect.USER, ServerParseSelect.parse("SELECT USER()", 6));
        Assert.assertEquals(ServerParseSelect.USER, ServerParseSelect.parse(" selECT USER() ", 7));
    }

    // ---- transaction isolation levels via SET SESSION ----

    @Test
    public void testTxReadUncommitted() {
        Assert.assertEquals(ServerParseSet.TX_READ_UNCOMMITTED, ServerParseSet.parse(
                " SET SESSION TRANSACTION ISOLATION LEVEL READ UNCOMMITTED ", " SET".length()));
        Assert.assertEquals(ServerParseSet.TX_READ_UNCOMMITTED, ServerParseSet.parse(
                " set session transaction isolation level read uncommitted ", " SET".length()));
        Assert.assertEquals(ServerParseSet.TX_READ_UNCOMMITTED, ServerParseSet.parse(
                " set session transaCTION ISOLATION LEvel read uncommitteD ", " SET".length()));
    }

    @Test
    public void testTxReadCommitted() {
        Assert.assertEquals(ServerParseSet.TX_READ_COMMITTED, ServerParseSet.parse(
                " SET SESSION TRANSACTION ISOLATION LEVEL READ COMMITTED ", " SET".length()));
        Assert.assertEquals(ServerParseSet.TX_READ_COMMITTED, ServerParseSet.parse(
                " set session transaction isolation level read committed ", " SET".length()));
        Assert.assertEquals(ServerParseSet.TX_READ_COMMITTED, ServerParseSet.parse(
                " set session transaCTION ISOLATION LEVel read committed ", " SET".length()));
    }

    @Test
    public void testTxRepeatedRead() {
        Assert.assertEquals(ServerParseSet.TX_REPEATED_READ, ServerParseSet.parse(
                " SET SESSION TRANSACTION ISOLATION LEVEL REPEATABLE READ ", " SET".length()));
        Assert.assertEquals(ServerParseSet.TX_REPEATED_READ, ServerParseSet.parse(
                " set session transaction isolation level repeatable read ", " SET".length()));
        Assert.assertEquals(ServerParseSet.TX_REPEATED_READ, ServerParseSet.parse(
                " set session transaction isOLATION LEVEL REPEatable read ", " SET".length()));
    }

    @Test
    public void testTxSerializable() {
        Assert.assertEquals(ServerParseSet.TX_SERIALIZABLE, ServerParseSet.parse(
                " SET SESSION TRANSACTION ISOLATION LEVEL SERIALIZABLE ", " SET".length()));
        Assert.assertEquals(ServerParseSet.TX_SERIALIZABLE, ServerParseSet.parse(
                " set session transaction isolation level serializable ", " SET".length()));
        Assert.assertEquals(ServerParseSet.TX_SERIALIZABLE, ServerParseSet.parse(
                " set session transaction isOLATION LEVEL SERIAlizable ", " SET".length()));
    }

    // SELECT @@identity recognition, with optional aliases and inline comments.
    @Test
    public void testIdentity() {
        String stmt = "select @@identity";
        int indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterIdentity(stmt, stmt.indexOf('i'));
        Assert.assertEquals(stmt.length(), indexAfterLastInsertIdFunc);
        Assert.assertEquals(ServerParseSelect.IDENTITY, ServerParseSelect.parse(stmt, 6));
        stmt = "select @@identity as id";
        Assert.assertEquals(ServerParseSelect.IDENTITY, ServerParseSelect.parse(stmt, 6));
        stmt = "select @@identitY id";
        Assert.assertEquals(ServerParseSelect.IDENTITY, ServerParseSelect.parse(stmt, 6));
        stmt = "select /*foo*/@@identitY id";
        Assert.assertEquals(ServerParseSelect.IDENTITY, ServerParseSelect.parse(stmt, 6));
        stmt = "select/*foo*/ @@identitY id";
        Assert.assertEquals(ServerParseSelect.IDENTITY, ServerParseSelect.parse(stmt, 6));
        stmt = "select/*foo*/ @@identitY As
id"; Assert.assertEquals(ServerParseSelect.IDENTITY, ServerParseSelect.parse(stmt, 6)); stmt = "select @@identity ,"; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); stmt = "select @@identity as, "; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); stmt = "select @@identity as id , "; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); stmt = "select @@identity ass id "; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); } @Test public void testLastInsertId() { String stmt = " last_insert_iD()"; int indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.length(), indexAfterLastInsertIdFunc); stmt = " last_insert_iD ()"; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.length(), indexAfterLastInsertIdFunc); stmt = " last_insert_iD ( /**/ )"; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.length(), indexAfterLastInsertIdFunc); stmt = " last_insert_iD ( ) "; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.lastIndexOf(')') + 1, indexAfterLastInsertIdFunc); stmt = " last_insert_id( )"; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.lastIndexOf(')') + 1, indexAfterLastInsertIdFunc); stmt = "last_iNsert_id( ) "; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.lastIndexOf(')') + 1, indexAfterLastInsertIdFunc); stmt = " last_insert_iD"; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(-1, indexAfterLastInsertIdFunc); stmt = " last_insert_i 
"; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(-1, indexAfterLastInsertIdFunc); stmt = " last_insert_i d "; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(-1, indexAfterLastInsertIdFunc); stmt = " last_insert_id ( "; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(-1, indexAfterLastInsertIdFunc); stmt = " last_insert_id( d) "; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(-1, indexAfterLastInsertIdFunc); stmt = " last_insert_id( ) d "; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.lastIndexOf(')') + 1, indexAfterLastInsertIdFunc); stmt = " last_insert_id(d)"; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(-1, indexAfterLastInsertIdFunc); stmt = " last_insert_id(#\r\nd) "; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(-1, indexAfterLastInsertIdFunc); stmt = " last_insert_id(#\n\r) "; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.lastIndexOf(')') + 1, indexAfterLastInsertIdFunc); stmt = " last_insert_id (#\n\r)"; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.lastIndexOf(')') + 1, indexAfterLastInsertIdFunc); stmt = " last_insert_id(#\n\r)"; indexAfterLastInsertIdFunc = ServerParseSelect.indexAfterLastInsertIdFunc(stmt, stmt.indexOf('l')); Assert.assertEquals(stmt.lastIndexOf(')') + 1, indexAfterLastInsertIdFunc); stmt = "select last_insert_id(#\n\r)"; 
Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) as id"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) as `id`"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) as 'id'"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) id"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) `id`"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) 'id'"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) a"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); // NOTE: this should be invalid, ignore this bug stmt = "select last_insert_id(#\n\r) as"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) asd"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); // NOTE: this should be invalid, ignore this bug stmt = "select last_insert_id(#\n\r) as 777"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); // NOTE: this should be invalid, ignore this bug stmt = "select last_insert_id(#\n\r) 777"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r)as `77``7`"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r)ass"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select 
last_insert_id(#\n\r)as 'a'"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r)as 'a\\''"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r)as 'a'''"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r)as 'a\"'"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 6)); stmt = " select last_insert_id(#\n\r) As 'a\"'"; Assert.assertEquals(ServerParseSelect.LAST_INSERT_ID, ServerParseSelect.parse(stmt, 9)); stmt = "select last_insert_id(#\n\r)as 'a\"\\'"; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r)as `77``7` ,"; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r)as `77`7`"; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) as,"; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) ass a"; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); stmt = "select last_insert_id(#\n\r) as 'a"; Assert.assertEquals(ServerParseSelect.OTHER, ServerParseSelect.parse(stmt, 6)); } }
/******************************************************************************* * Copyright (c) 2013, Regents of the University of California * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided * that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions * and the following disclaimer in the documentation and/or other materials provided with the * distribution. * * None of the name of the Regents of the University of California, or the names of its * contributors may be used to endorse or promote products derived from this software without specific * prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
******************************************************************************/ package calico.plugins.iip.graph.layout; import java.awt.Dimension; import java.awt.Point; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import calico.components.CCanvas; import calico.controllers.CCanvasController; import calico.plugins.iip.CCanvasLink; import calico.plugins.iip.CCanvasLinkAnchor; import calico.plugins.iip.CIntentionCell; import calico.plugins.iip.controllers.CCanvasLinkController; import calico.plugins.iip.controllers.CIntentionCellController; class CIntentionCluster { static double getUnitSpan() { return new CIntentionCluster(-1L).getOccupiedSpan(); } //git commit test private static final SliceSorter SLICE_SORTER = new SliceSorter(); static final int RING_SEPARATION = 20 + CIntentionLayout.INTENTION_CELL_DIAMETER; static final Dimension CLUSTER_UNIT_SIZE = new Dimension(CIntentionLayout.INTENTION_CELL_SIZE.width /*+ 20*/, CIntentionLayout.INTENTION_CELL_SIZE.height /*+ 20*/); private final List<CIntentionRing> rings = new ArrayList<CIntentionRing>(); private final List<Double> ringRadii = new ArrayList<Double>(); private final Map<Long, CIntentionSlice> slicesByRootCanvasId = new LinkedHashMap<Long, CIntentionSlice>(); private final long rootCanvasId; // transitory values per layout execution private final Point location = new Point(); private final Dimension layoutSize = new Dimension(); private boolean populated = false; public CIntentionCluster(long rootCanvasId) { this.rootCanvasId = rootCanvasId; } private void initializeRings() { rings.clear(); ringRadii.clear(); } long getRootCanvasId() { return rootCanvasId; } double getOccupiedSpan() { if (!populated) populateCluster(); double clusterRadius; getRingRadii(); if (ringRadii.isEmpty()) { clusterRadius = CIntentionLayout.INTENTION_CELL_DIAMETER; } else { clusterRadius = 
ringRadii.get(ringRadii.size() - 1) + (CIntentionLayout.INTENTION_CELL_DIAMETER / 2.0); } return 2 * clusterRadius; } Point getLocation() { return location; } void reset() { populated = false; } void describeMaxProjectedSpans(StringBuilder buffer) { buffer.append("["); for (CIntentionRing ring : rings) { int maxProjectedSpan = 0; for (CIntentionSlice slice : slicesByRootCanvasId.values()) { if (slice.getMaxProjectedSpan(ring.getIndex()) > maxProjectedSpan) { maxProjectedSpan = slice.getMaxProjectedSpan(ring.getIndex()); } } buffer.append(ring.getIndex()); buffer.append(": "); buffer.append(maxProjectedSpan); buffer.append("; "); } buffer.append("]"); } void populateCluster() { if (populated) return; initializeRings(); int totalInOrbit = 0; List<CIntentionSlice> slices = new ArrayList<CIntentionSlice>(); for (long anchorId : CCanvasLinkController.getInstance().getAnchorIdsForCanvasId(rootCanvasId)) { long linkedCanvasId = CCanvasLinkController.getInstance().getOpposite(anchorId).getCanvasId(); if (CIntentionCellController.getInstance().getCellByCanvasId(linkedCanvasId).isPinned()) continue; if (linkedCanvasId < 0L) { continue; } CIntentionSlice slice = new CIntentionSlice(linkedCanvasId); slices.add(slice); traverseAndPopulate(-1L, linkedCanvasId, 0, slice); totalInOrbit += slice.size(); } slicesByRootCanvasId.clear(); // Collections.sort(slices, SLICE_SORTER); for (CIntentionSlice slice : slices) { slicesByRootCanvasId.put(slice.getRootCanvasId(), slice); } weighSlices(totalInOrbit); populated = true; } List<Double> getRingRadii() { if (ringRadii.size() < rings.size()) { ringRadii.clear(); double lastRingRadius = 0.0; for (CIntentionRing ring : rings) { int ringSpan = 0; for (CIntentionSlice slice : slicesByRootCanvasId.values()) { if (slice.getMaxProjectedSpan(ring.getIndex()) > ringSpan) { ringSpan = slice.getMaxProjectedSpan(ring.getIndex()); } } double ringRadius = ringSpan / (2 * Math.PI); if (ringRadius < (lastRingRadius + RING_SEPARATION)) { ringRadius = 
(lastRingRadius + RING_SEPARATION); ringSpan = (int) (2 * Math.PI * ringRadius); } ringRadii.add(ringRadius); lastRingRadius = ringRadius; } } return ringRadii; } Dimension getLayoutSize() { return layoutSize; } void setLocation(Point newLocation) { location.setLocation(newLocation); } CIntentionClusterLayout layoutClusterAsCircles(Point clusterCenter) { if (!populated) populateCluster(); CIntentionClusterLayout layout = new CIntentionClusterLayout(this); layout.addCanvas(rootCanvasId, CIntentionLayout.centerCanvasAt(clusterCenter.x, clusterCenter.y)); getRingRadii(); // make sure they match the rings for (int i = 0; i < ringRadii.size(); i++) { double ringRadius = ringRadii.get(i); int ringSpan = (int) (2 * Math.PI * ringRadius); int sliceStart = 0; CIntentionArcTransformer arcTransformer = null; for (CIntentionSlice slice : slicesByRootCanvasId.values()) { if (arcTransformer == null) { arcTransformer = new CIntentionArcTransformer(clusterCenter, ringRadius, ringSpan, slice.calculateLayoutSpan(ringSpan)); } slice.layoutArc(arcTransformer, i, ringSpan, sliceStart, layout, (i == 0) ? 
null : ringRadii.get(i - 1)); sliceStart += slice.getLayoutSpan(); } } if (ringRadii.isEmpty()) { layoutSize.setSize(CIntentionLayout.INTENTION_CELL_DIAMETER, CIntentionLayout.INTENTION_CELL_DIAMETER); } else { layoutSize.setSize((int) (ringRadii.get(ringRadii.size() - 1) * 2), (int) (ringRadii.get(ringRadii.size() - 1) * 2)); } return layout; } public List<CIntentionCell> getAllCanvasesInCluster() { List<CIntentionCell> set = new ArrayList<CIntentionCell>(); for (long anchorId : CCanvasLinkController.getInstance().getAnchorIdsForCanvasId(rootCanvasId)) { long linkedCanvasId = CCanvasLinkController.getInstance().getOpposite(anchorId).getCanvasId(); if (linkedCanvasId < 0L) { continue; } set.add(CIntentionCellController.getInstance().getCellByCanvasId(linkedCanvasId)); getAllCanvasesInCluster(linkedCanvasId, set); } return set; } private void getAllCanvasesInCluster(long canvasId, List<CIntentionCell> set) { set.add(CIntentionCellController.getInstance().getCellByCanvasId(canvasId)); for (long anchorId : CCanvasLinkController.getInstance().getAnchorIdsForCanvasId(canvasId)) { CCanvasLinkAnchor anchor = CCanvasLinkController.getInstance().getAnchor(anchorId); CCanvasLink link = CCanvasLinkController.getInstance().getLink(anchor.getLinkId()); if (link.getAnchorB().getId() == anchorId) { continue; } long linkedCanvasId = CCanvasLinkController.getInstance().getOpposite(anchorId).getCanvasId(); if (linkedCanvasId < 0L) { continue; // this is not a canvas, nothing is here } getAllCanvasesInCluster(linkedCanvasId, set); } } private void traverseAndPopulate(long parentCanvasId, long canvasId, int ringIndex, CIntentionSlice slice) { CIntentionRing ring = getRing(ringIndex); ring.addCanvas(canvasId); slice.addCanvas(parentCanvasId, canvasId, ringIndex); for (long anchorId : CCanvasLinkController.getInstance().getAnchorIdsForCanvasId(canvasId)) { CCanvasLinkAnchor anchor = CCanvasLinkController.getInstance().getAnchor(anchorId); CCanvasLink link = 
CCanvasLinkController.getInstance().getLink(anchor.getLinkId()); if (link.getAnchorB().getId() == anchorId) { continue; } long linkedCanvasId = CCanvasLinkController.getInstance().getOpposite(anchorId).getCanvasId(); if (CIntentionCellController.getInstance().getCellByCanvasId(linkedCanvasId).isPinned()) continue; if (linkedCanvasId < 0L) { continue; // this is not a canvas, nothing is here } traverseAndPopulate(canvasId, linkedCanvasId, ringIndex + 1, slice); } } private void weighSlices(int totalInOrbit) { if (totalInOrbit == 0) { return; } for (CIntentionSlice slice : slicesByRootCanvasId.values()) { slice.setPopulationWeight(totalInOrbit); } double minimumRingRadius = 0.0; double equalSliceWeight = 1.0 / (double) slicesByRootCanvasId.size(); for (CIntentionRing ring : rings) { minimumRingRadius += RING_SEPARATION; double minimumRingSpan = 2 * Math.PI * minimumRingRadius; int maxCellsInMinRingSpan = (int) (minimumRingSpan / CIntentionLayout.INTENTION_CELL_DIAMETER); boolean ringCrowded = ring.size() > maxCellsInMinRingSpan; int maxCellsInEqualSliceSpan = (maxCellsInMinRingSpan / slicesByRootCanvasId.size()); boolean equalSlicesCrowded = false; for (CIntentionSlice slice : slicesByRootCanvasId.values()) { if (slice.arcSize(ring.getIndex()) > maxCellsInEqualSliceSpan) { equalSlicesCrowded = true; break; } } for (CIntentionSlice slice : slicesByRootCanvasId.values()) { double arcWeight; if (ringCrowded) { arcWeight = slice.arcSize(ring.getIndex()) / (double) ring.size(); } else if (equalSlicesCrowded) { arcWeight = (slice.arcSize(ring.getIndex()) * CIntentionLayout.INTENTION_CELL_DIAMETER) / minimumRingSpan; } else { arcWeight = equalSliceWeight; } slice.setArcWeight(ring.getIndex(), arcWeight); } } double sumOfMaxWeights = 0.0; for (CIntentionSlice slice : slicesByRootCanvasId.values()) { slice.calculateMaxArcWeight(); sumOfMaxWeights += slice.getMaxArcWeight(); } double reductionRatio = 1.0 / Math.max(1.0, sumOfMaxWeights); for (CIntentionSlice slice : 
slicesByRootCanvasId.values()) { slice.setWeight(slice.getMaxArcWeight() * reductionRatio); } // what percentage of the minimum ring span is occupied by slice a? If it is less than the weighted percentage, // then it only needs that much. // Distributions: // 1. weighted // 2. equal // 3. by occupancy at minimum ring size // the idea is to choose a distribution per ring, normalize each one, and then balance maximi per slice } private CIntentionRing getRing(int ringIndex) { for (int i = rings.size(); i <= ringIndex; i++) { rings.add(new CIntentionRing(i)); } return rings.get(ringIndex); } private static class SliceSorter implements Comparator<CIntentionSlice> { public int compare(CIntentionSlice first, CIntentionSlice second) { CCanvas firstCanvas = CCanvasController.canvases.get(first.getRootCanvasId()); CCanvas secondCanvas = CCanvasController.canvases.get(second.getRootCanvasId()); return firstCanvas.getIndex() - secondCanvas.getIndex(); } } }
/*
 * RADOS Java - Java bindings for librados
 *
 * Copyright (C) 2013 Wido den Hollander <wido@42on.com>
 * Copyright (C) 2014 1&1 - Behar Veliqi <behar.veliqi@1und1.de>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */

package com.ceph.rados;

import com.ceph.rados.exceptions.RadosException;
import com.ceph.rados.jna.RadosClusterInfo;
import com.sun.jna.ptr.IntByReference;
import com.sun.jna.ptr.PointerByReference;
import com.sun.jna.Pointer;
import com.sun.jna.Memory;
import com.sun.jna.Native;

import java.io.File;
import java.util.concurrent.Callable;

import static com.ceph.rados.Library.rados;

/**
 * Handle to a RADOS cluster, wrapping the native {@code rados_t} pointer.
 *
 * <p>Lifecycle: construct, configure ({@link #confReadFile}/{@link #confSet}),
 * {@link #connect()}, use, then {@link #shutDown()}. Methods that require (or
 * forbid) an established connection enforce it via {@link #verifyConnected}.
 */
public class Rados extends RadosBase {

    /** Native rados_t handle; null after {@link #shutDown()}. */
    protected Pointer clusterPtr;
    private boolean connected;

    /**
     * Construct a RADOS Object which invokes rados_create.
     *
     * NOTE(review): the return code of rados_create is not checked here
     * (preserved from the original); a failure surfaces later on first use.
     *
     * @param id
     *            the cephx id to authenticate with
     */
    public Rados(String id) {
        PointerByReference clusterPtr = new PointerByReference();
        rados.rados_create(clusterPtr, id);
        this.clusterPtr = clusterPtr.getValue();
    }

    /**
     * Construct a RADOS Object which invokes rados_create2.
     *
     * @param clustername The name of the cluster (usually "ceph").
     * @param name The name of the user (e.g., client.admin, client.user)
     * @param flags Flag options (future use).
     */
    public Rados(String clustername, String name, long flags) {
        PointerByReference clusterPtr = new PointerByReference();
        rados.rados_create2(clusterPtr, clustername, name, flags);
        this.clusterPtr = clusterPtr.getValue();
    }

    /** Construct a RADOS Object which invokes rados_create with a null id. */
    public Rados() {
        this(null);
    }

    /**
     * Guard for methods that must run only in a connected (or only in a
     * disconnected) state.
     *
     * @param required true if a connection must exist, false if it must not
     * @throws RadosException when the current state does not match
     */
    private void verifyConnected(boolean required) throws RadosException {
        if (required && !this.connected) {
            throw new RadosException("This method should not be called in a disconnected state.");
        }
        if (!required && this.connected) {
            throw new RadosException("This method should not be called in a connected state.");
        }
    }

    /**
     * Read a Ceph configuration file. Only valid before {@link #connect()}.
     *
     * @param file
     *            A file object with the path to a ceph.conf
     * @throws RadosException
     */
    public void confReadFile(final File file) throws RadosException {
        this.verifyConnected(false);
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_conf_read_file(clusterPtr, file.getAbsolutePath());
            }
        }, "Failed reading configuration file %s", file.getAbsolutePath());
    }

    /**
     * Set a RADOS configuration option. Only valid before {@link #connect()}.
     *
     * @param option
     *            the name of the option
     * @param value
     *            the value configuration value
     * @throws RadosException
     */
    public void confSet(final String option, final String value) throws RadosException {
        this.verifyConnected(false);
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_conf_set(clusterPtr, option, value);
            }
        }, "Could not set configuration option %s", option);
    }

    /**
     * Retrieve a RADOS configuration option's value.
     *
     * @param option
     *            the name of the option
     * @return the value of the option (truncated to 256 bytes)
     * @throws RadosException
     */
    public String confGet(final String option) throws RadosException {
        final byte[] buf = new byte[256];
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_conf_get(clusterPtr, option, buf, buf.length);
            }
        }, "Unable to retrieve the value of configuration option %s", option);
        return Native.toString(buf);
    }

    /**
     * Connect to the Ceph cluster.
     *
     * @throws RadosException
     */
    public void connect() throws RadosException {
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_connect(clusterPtr);
            }
        }, "Failed to connect to the Ceph cluster");
        this.connected = true;
    }

    /**
     * Get the cluster's fsid.
     *
     * @return A string containing the cluster's fsid
     * @throws RadosException
     */
    public String clusterFsid() throws RadosException {
        this.verifyConnected(true);
        final byte[] buf = new byte[256];
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_cluster_fsid(clusterPtr, buf, buf.length);
            }
        }, "Failed to retrieve the cluster's fsid");
        return Native.toString(buf);
    }

    /**
     * Get the cluster stats.
     *
     * @return RadosClusterInfo
     * @throws RadosException
     */
    public RadosClusterInfo clusterStat() throws RadosException {
        this.verifyConnected(true);
        final RadosClusterInfo result = new RadosClusterInfo();
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_cluster_stat(clusterPtr, result);
            }
        }, "Failed to retrieve cluster's status");
        return result;
    }

    /**
     * Create a RADOS pool.
     *
     * @param name
     *            the name of the pool to be created
     * @throws RadosException
     */
    public void poolCreate(final String name) throws RadosException {
        this.verifyConnected(true);
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_pool_create(clusterPtr, name);
            }
        }, "Failed to create pool %s", name);
    }

    /**
     * Create a RADOS pool and set a auid.
     *
     * @param name
     *            the name of the pool to be created
     * @param auid
     *            the owner ID for the new pool
     * @throws RadosException
     */
    public void poolCreate(final String name, final long auid) throws RadosException {
        this.verifyConnected(true);
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_pool_create_with_auid(clusterPtr, name, auid);
            }
        }, "Failed to create pool %s with auid %s", name, auid);
    }

    /**
     * Create a RADOS pool and set a auid and crushrule.
     *
     * @param name
     *            the name of the pool to be created
     * @param auid
     *            the owner ID for the new pool
     * @param crushrule
     *            the crushrule for this pool
     * @throws RadosException
     */
    public void poolCreate(final String name, final long auid, final long crushrule) throws RadosException {
        this.verifyConnected(true);
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_pool_create_with_all(clusterPtr, name, auid, crushrule);
            }
        }, "Failed to create pool %s with auid %s and crushrule %s", name, auid, crushrule);
    }

    /**
     * Delete a RADOS pool.
     *
     * @param name
     *            the name of the pool to be deleted
     * @throws RadosException
     */
    public void poolDelete(final String name) throws RadosException {
        this.verifyConnected(true);
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_pool_delete(clusterPtr, name);
            }
        }, "Failed to delete pool %s", name);
    }

    /**
     * Finalize the Rados connection.
     *
     * Fix: delegates to {@link #shutDown()} so that the native handle is
     * released at most once. The original implementation called
     * rados_shutdown unconditionally, handing the native library a null
     * pointer when {@link #shutDown()} had already been invoked.
     */
    public void finalize() throws Throwable {
        try {
            this.shutDown();
        } finally {
            super.finalize();
        }
    }

    /**
     * List all the RADOS pools.
     *
     * @return String[] list of pools
     * @throws RadosException
     */
    public String[] poolList() throws RadosException {
        this.verifyConnected(true);
        // First call with an empty buffer yields the required buffer length.
        byte[] temp_buf = new byte[0];
        int len = rados.rados_pool_list(this.clusterPtr, temp_buf, temp_buf.length);
        final byte[] buf = getPoolList(len);
        // Pool names are NUL-separated in the native buffer.
        return new String(buf).split("\0");
    }

    /** Second-phase fetch of the pool listing into a buffer of the reported length. */
    private byte[] getPoolList(int len) throws RadosException {
        final byte[] buf = new byte[len];
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_pool_list(clusterPtr, buf, buf.length);
            }
        }, "Failed to retrieve list of pools");
        return buf;
    }

    /**
     * Get the ID of a RADOS pool.
     *
     * @param name
     *            The name of the pool
     * @return long
     * @throws RadosException
     */
    public long poolLookup(final String name) throws RadosException {
        return handleReturnCode(new Callable<Long>() {
            @Override
            public Long call() throws Exception {
                return rados.rados_pool_lookup(Rados.this.clusterPtr, name);
            }
        }, "Failed to retrieve id of the pool");
    }

    /**
     * Get the name of a RADOS pool.
     *
     * @param id
     *            The id of the pool
     * @return String
     * @throws RadosException
     */
    public String poolReverseLookup(final long id) throws RadosException {
        final byte[] buf = new byte[512];
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_pool_reverse_lookup(clusterPtr, id, buf, buf.length);
            }
        }, "Failed to fetch name of the pool");
        return new String(buf).trim();
    }

    /**
     * Create a IoCTX.
     *
     * @param pool
     *            The name of the RADOS pool
     * @return IoCTX
     * @throws RadosException
     */
    public IoCTX ioCtxCreate(final String pool) throws RadosException {
        final Pointer p = new Memory(Pointer.SIZE);
        handleReturnCode(new Callable<Integer>() {
            @Override
            public Integer call() throws Exception {
                return rados.rados_ioctx_create(clusterPtr, pool, p);
            }
        }, "Failed to create the IoCTX for pool %s", pool);
        return new IoCTX(p);
    }

    /**
     * Destroy a IoCTX.
     *
     * @param io
     *            A IoCTX object
     */
    public void ioCtxDestroy(IoCTX io) {
        rados.rados_ioctx_destroy(io.getPointer());
    }

    /**
     * Get the global unique ID of the current connection.
     *
     * @return long
     */
    public long getInstanceId() throws RadosException {
        this.verifyConnected(true);
        return rados.rados_get_instance_id(this.clusterPtr);
    }

    /**
     * Get the librados version.
     *
     * @return a int array with the minor, major and extra version
     */
    public static int[] getVersion() {
        // NOTE(review): argument order (minor, major, extra) mirrors the
        // original binding call — verify against the JNA declaration before
        // reordering.
        IntByReference minor = new IntByReference();
        IntByReference major = new IntByReference();
        IntByReference extra = new IntByReference();
        rados.rados_version(minor, major, extra);
        return new int[]{minor.getValue(), major.getValue(), extra.getValue()};
    }

    /**
     * Shuts rados down. Safe to call more than once; the handle is nulled so a
     * later {@link #finalize()} does not shut down twice.
     */
    public void shutDown() {
        if (this.clusterPtr != null) {
            rados.rados_shutdown(this.clusterPtr);
            this.clusterPtr = null;
        }
    }
}
/*
Copyright 2015 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.Database.PostgreSQL;

import com.google.common.base.Preconditions;
import com.google.security.zynamics.binnavi.Database.CConnection;
import com.google.security.zynamics.binnavi.Database.CTableNames;
import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntLoadDataException;
import com.google.security.zynamics.binnavi.Database.Interfaces.SQLProvider;
import com.google.security.zynamics.binnavi.Gui.Users.CUserManager;

import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * This class provides the SQL queries that are necessary to convert data from the exporter tables
 * into BinNavi data.
 *
 * All queries interpolate only integer module IDs (validated non-negative where checked) and
 * table-name constants, never free-form user strings.
 */
public final class PostgreSQLDataImporter {
  /**
   * Do not instantiate this class.
   */
  private PostgreSQLDataImporter() {
    // You are not supposed to instantiate this class
  }

  /**
   * Determines the architecture string of a raw module.
   *
   * The raw module ID must refer to a raw module that is stored in the database connected to by the
   * connection argument.
   *
   * @param connection The connection to the database.
   * @param rawModuleId The ID of the raw module whose architecture is determined.
   *
   * @return The architecture string of the given module.
   *
   * @throws SQLException Thrown if the architecture string could not be read, or if no module with
   *         the given ID exists.
   */
  private static String getArchitecture(final CConnection connection, final int rawModuleId)
      throws SQLException {
    // Fixed: message previously said "provider argument" while checking the connection.
    Preconditions.checkNotNull(connection, "IE00207: connection argument can not be null");
    try (ResultSet resultSet = connection.executeQuery(
        "SELECT architecture FROM modules WHERE id = " + rawModuleId, true)) {
      // At most one row is expected; return the first architecture found.
      while (resultSet.next()) {
        return PostgreSQLHelpers.readString(resultSet, "architecture");
      }
      // Empty result set: the raw module does not exist.
      throw new SQLException("Error: Could not determine architecture of new module");
    }
  }

  /**
   * Connects expression trees with their expression tree nodes.
   *
   * @param connection Connection to the SQL database.
   * @param moduleId ID of the BinNavi module the mapping is created for.
   * @param rawModuleId ID of the raw module whose tree/node pairs are copied.
   * @throws SQLException Thrown if the mapping could not be written.
   */
  public static void connectExpressionTrees(final CConnection connection, final int moduleId,
      final int rawModuleId) throws SQLException {
    Preconditions.checkNotNull(connection, "IE00208: connection argument can not be null");
    final String query = "INSERT INTO " + CTableNames.EXPRESSION_TREE_MAPPING_TABLE + " (SELECT "
        + moduleId + ", expression_tree_id, expression_node_id " + " FROM ex_" + rawModuleId
        + "_expression_tree_nodes)";
    connection.executeUpdate(query, true);
  }

  /**
   * Imports the address references table.
   *
   * @param connection Connection to the SQL database.
   * @param rawModuleId ID of the raw module from which to import the data.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importAddressReferences(final CConnection connection, final int rawModuleId,
      final int moduleId) throws SQLException {
    Preconditions.checkNotNull(connection, "IE00266: connection argument can not be null");
    Preconditions.checkArgument(rawModuleId >= 0,
        "Error: raw module id can only be a positive number.");
    Preconditions.checkArgument(moduleId >= 0, "Error: module id can only be a positive number");
    // ENUM_RANGE maps the exporter's 0-based integer type onto the 1-based Postgres enum.
    final String query = "INSERT INTO " + CTableNames.ADDRESS_REFERENCES_TABLE
        + " (module_id, address, position, expression_id, type, target) " + " SELECT " + moduleId
        + ", address, position, expression_node_id, "
        + " (ENUM_RANGE(NULL::address_reference_type))[type + 1], destination " + " FROM ex_"
        + rawModuleId
        + "_address_references WHERE position IS NOT NULL AND expression_node_id IS NOT NULL;";
    connection.executeUpdate(query, true);
  }

  /**
   * Imports the base types.
   *
   * @param connection Connection to the SQL database.
   * @param rawModuleId ID of the raw module from which to import the data.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importBaseTypes(final CConnection connection, final int rawModuleId,
      final int moduleId) throws SQLException {
    Preconditions.checkNotNull(connection, "Error: connection argument can not be null");
    final String query = "INSERT INTO " + CTableNames.BASE_TYPES_TABLE + " SELECT " + moduleId
        + ", id, name, size, pointer, signed " + "FROM "
        + String.format(CTableNames.RAW_BASE_TYPES, rawModuleId);
    connection.executeUpdate(query, true);
    // Bump the id sequence past the imported rows so later inserts do not collide.
    // NOTE(review): executeQuery presumably returns a ResultSet that is never closed here —
    // verify CConnection's contract and close it if required.
    final String updateSequence = String.format("SELECT setval('bn_base_types_id_seq', "
        + "COALESCE((SELECT MAX(id) + 1 FROM %s), 1), false) from %s",
        CTableNames.BASE_TYPES_TABLE, CTableNames.BASE_TYPES_TABLE);
    connection.executeQuery(updateSequence, true);
  }

  /**
   * Imports the expression substitutions table.
   *
   * @param connection Connection to the SQL database.
   * @param rawModuleId ID of the raw module from which to import the data.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importExpressionSubstitutions(final CConnection connection,
      final int rawModuleId, final int moduleId) throws SQLException {
    Preconditions.checkNotNull(connection, "IE00433: connection argument can not be null");
    final String query = "INSERT INTO " + CTableNames.EXPRESSION_SUBSTITUTIONS_TABLE
        + " (module_id, address, position, expression_id, replacement) " + " SELECT " + moduleId
        + " , address, position, expression_node_id, replacement " + " FROM ex_" + rawModuleId
        + "_expression_substitutions";
    connection.executeUpdate(query, true);
  }

  /**
   * Imports the expression tree nodes table.
   *
   * @param connection Connection to the SQL database.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   * @param rawModuleId ID of the raw module from which to import the data.
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importExpressionTree(final CConnection connection, final int moduleId,
      final int rawModuleId) throws SQLException {
    Preconditions.checkNotNull(connection, "IE00434: connection argument can not be null");
    final String query = "INSERT INTO " + CTableNames.EXPRESSION_TREE_TABLE
        + "(module_id, id, type, symbol, immediate, position, parent_id)" + " SELECT " + moduleId
        + ", id, type, symbol, immediate, position, parent_id " + " FROM ex_" + rawModuleId
        + "_expression_nodes";
    connection.executeUpdate(query, true);
  }

  /**
   * Imports the expression tree IDs.
   *
   * @param connection Connection to the SQL database.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   * @param rawModuleId ID of the raw module from which to import the data.
   *
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importExpressionTrees(final CConnection connection, final int moduleId,
      final int rawModuleId) throws SQLException {
    Preconditions.checkNotNull(connection, "Error: connection argument can not be null");
    final String query = "INSERT INTO " + CTableNames.EXPRESSION_TREE_IDS_TABLE + " SELECT "
        + moduleId + ", id " + " FROM ex_" + rawModuleId + "_expression_trees";
    connection.executeUpdate(query, true);
  }

  /**
   * Imports the expression type substitutions.
   *
   * @param connection Connection to the SQL database.
   * @param rawModuleId ID of the raw module from which to import the data.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importExpressionTypes(final CConnection connection, final int rawModuleId,
      final int moduleId) throws SQLException {
    Preconditions.checkNotNull(connection, "Error: connection argument can not be null");
    // "position" and "offset" are quoted because they are reserved words in SQL.
    final String query = "INSERT INTO " + CTableNames.EXPRESSION_TYPES_TABLE + " SELECT "
        + moduleId + ", address, \"position\", \"offset\", expression_id, type" + " FROM "
        + String.format(CTableNames.RAW_EXPRESSION_TYPES_TABLE, rawModuleId);
    connection.executeUpdate(query, true);
  }

  /**
   * Imports the functions table.
   *
   * @param connection Connection to the SQL database.
   * @param rawModuleId ID of the raw module from which to import the data.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   *
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importFunctions(final CConnection connection, final int rawModuleId,
      final int moduleId) throws SQLException {
    Preconditions.checkNotNull(connection, "IE00435: connection argument can not be null");
    Preconditions.checkArgument(rawModuleId >= 0,
        "Error: raw module id can only be a positive number.");
    Preconditions.checkArgument(moduleId >= 0, "Error: module id can only be a positive number");
    // ENUM_RANGE maps the exporter's 0-based integer type onto the 1-based Postgres enum.
    final String query = "INSERT INTO " + CTableNames.FUNCTIONS_TABLE
        + " (module_id, address, name, original_name, type, description, "
        + "parent_module_name, parent_module_id, parent_module_function, comment_id) "
        + " SELECT " + moduleId
        + ", address, demangled_name, name, (ENUM_RANGE(NULL::function_type))[type + 1], "
        + "'', module_name, null, null, null " + " FROM ex_" + rawModuleId + "_functions";
    connection.executeUpdate(query, true);
  }

  /**
   * Imports the instructions table, converting exporter address comments into BinNavi comments
   * attached to the imported instructions in a single CTE statement.
   *
   * @param provider The instance that provides access to a database.
   * @param rawModuleId ID of the raw module from which to import the data.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   *
   * @throws SQLException Thrown if the data could not be imported.
   * @throws CouldntLoadDataException if the user manager could not be loaded.
   */
  public static void importInstructions(final SQLProvider provider, final int rawModuleId,
      final int moduleId) throws SQLException, CouldntLoadDataException {
    Preconditions.checkNotNull(provider, "Error: provider argument can not be null");
    final String architecture = getArchitecture(provider.getConnection(), rawModuleId);
    // Imported comments are attributed to the currently active user.
    final int userId = CUserManager.get(provider).getCurrentActiveUser().getUserId();
    final String query = "WITH comments_to_id(id, address, comment) AS " + " ( "
        + " SELECT nextval('bn_comments_id_seq'::regclass), address, comment " + " FROM ex_"
        + rawModuleId + "_address_comments " + " ), comments_table AS ( " + " INSERT INTO "
        + CTableNames.COMMENTS_TABLE + " (id, parent_id, user_id, comment_text) "
        + " SELECT id, null, " + userId + ", comment " + " FROM comments_to_id " + " ) "
        + " INSERT INTO " + CTableNames.INSTRUCTIONS_TABLE
        + " (module_id, address, mnemonic, data, native, architecture, comment_id) " + " SELECT "
        + moduleId + ", isn.address, mnemonic, data, true, '" + architecture + "', com.id "
        + " FROM ex_" + rawModuleId + "_instructions AS isn "
        + " LEFT JOIN comments_to_id AS com ON com.address = isn.address; ";
    provider.getConnection().executeUpdate(query, true);
  }

  /**
   * Imports the operands table.
   *
   * @param connection Connection to the SQL database.
   * @param rawModuleId ID of the raw module from which to import the data.
   * @param moduleId ID of the BinNavi module where the data is imported to.
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importOperands(final CConnection connection, final int rawModuleId,
      final int moduleId) throws SQLException {
    Preconditions.checkNotNull(connection, "Error: connection argument can not be null");
    final String query = "INSERT INTO " + CTableNames.OPERANDS_TABLE + " SELECT " + moduleId
        + ", address, expression_tree_id, position " + " FROM ex_" + rawModuleId + "_operands";
    connection.executeUpdate(query, true);
  }

  /**
   * Imports the type members.
   *
   * @param connection The connection to the SQL database.
   * @param rawModuleId The Id of the raw module from which to import the data.
   * @param moduleId The Id of the BinNavi module where data is imported to.
   * @throws SQLException Thrown if the data could not be imported.
   */
  public static void importTypes(final CConnection connection, final int rawModuleId,
      final int moduleId) throws SQLException {
    Preconditions.checkNotNull(connection, "Error: connection argument can not be null");
    final String query = "INSERT INTO " + CTableNames.TYPE_MEMBERS_TABLE + " SELECT " + moduleId
        + ", id, name, base_type, parent_id, position, argument, number_of_elements" + " FROM "
        + String.format(CTableNames.RAW_TYPES, rawModuleId);
    connection.executeUpdate(query, true);
    // Bump the id sequence past the imported rows so later inserts do not collide.
    // NOTE(review): executeQuery presumably returns a ResultSet that is never closed here —
    // verify CConnection's contract and close it if required.
    final String updateSequence = String.format(
        "SELECT setval('bn_types_id_seq', "
            + "COALESCE((SELECT MAX(id) + 1 FROM %s), 1), false) from %s",
        CTableNames.TYPE_MEMBERS_TABLE, CTableNames.TYPE_MEMBERS_TABLE);
    connection.executeQuery(updateSequence, true);
  }
}
package org.jenkinsci.plugins.ghprb;

import java.io.IOException;
import java.io.PrintStream;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.lang.reflect.Field;

import hudson.model.AbstractBuild;
import hudson.model.FreeStyleBuild;
import hudson.model.ItemGroup;
import hudson.model.StreamBuildListener;
import hudson.model.FreeStyleProject;
import hudson.model.Run;
import hudson.model.Result;

import org.jenkinsci.plugins.ghprb.GhprbTrigger.DescriptorImpl;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.jvnet.hudson.test.JenkinsRule;
import org.kohsuke.github.GHPullRequest;
import org.kohsuke.github.GHPullRequestCommitDetail;
import org.kohsuke.github.GHPullRequestCommitDetail.Commit;
import org.kohsuke.github.GHUser;
import org.kohsuke.github.GitUser;
import org.kohsuke.github.PagedIterable;
import org.kohsuke.github.PagedIterator;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.runners.MockitoJUnitRunner;

import com.coravy.hudson.plugins.github.GithubProjectProperty;

import static org.fest.assertions.Assertions.assertThat;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.doNothing;

/**
 * Tests GhprbPullRequestMerge.perform() under every combination of the three merge
 * restrictions (onlyTriggerPhrase, onlyAdminsMerge, disallowOwnCode): who triggered the
 * merge, who authored the commits, and what comment text was posted.
 */
@RunWith(MockitoJUnitRunner.class)
public class GhprbPullRequestMergeTest {

    @Rule
    public JenkinsRule jenkinsRule = new JenkinsRule();

    // Mocked by hand (not @Mock) because they are wired to each other via reflection below.
    private FreeStyleProject project = mock(FreeStyleProject.class);
    private AbstractBuild<?, ?> build = mock(FreeStyleBuild.class);

    @Mock
    private GhprbPullRequest pullRequest;
    @Mock
    private GHPullRequest pr;
    @Mock
    private GitUser committer;
    @Mock
    private GHUser triggerSender;
    @Mock
    private GhprbCause cause;
    @Mock
    private Ghprb helper;
    @Mock
    private GhprbRepository repo;
    @Mock
    private StreamBuildListener listener;
    @Mock
    private ItemGroup<?> parent;

    // Fixture identities: adminLogin is on the admin list, nonAdminLogin is not;
    // committerName matches the PR's commit author, nonCommitterName does not.
    private final String triggerPhrase = "ok to merge";
    private final String nonTriggerPhrase = "This phrase is not the trigger phrase";
    private final String adminList = "admin";
    private final String adminLogin = "admin";
    private final String nonAdminLogin = "nonadmin";
    private final String committerName = "committer";
    private final String nonCommitterName = "noncommitter";
    private final String mergeComment = "merge";
    private final Integer pullId = 1;

    private Map<String, Object> triggerValues;

    /**
     * Builds the mock Jenkins job/build/trigger graph shared by all tests. Private fields of
     * Run and the trigger descriptor are set via reflection because neither exposes a setter.
     */
    @Before
    public void beforeTest() throws Exception {
        triggerValues = new HashMap<String, Object>(10);
        triggerValues.put("adminlist", adminList);
        triggerValues.put("triggerPhrase", triggerPhrase);
        GhprbTrigger trigger = spy(GhprbTestUtil.getTrigger(triggerValues));
        ConcurrentMap<Integer, GhprbPullRequest> pulls =
                new ConcurrentHashMap<Integer, GhprbPullRequest>(1);
        pulls.put(pullId, pullRequest);
        Map<String, ConcurrentMap<Integer, GhprbPullRequest>> jobs =
                new HashMap<String, ConcurrentMap<Integer, GhprbPullRequest>>(1);
        jobs.put("project", pulls);
        GithubProjectProperty projectProperty =
                new GithubProjectProperty("https://github.com/jenkinsci/ghprb-plugin");
        DescriptorImpl descriptor = trigger.getDescriptor();
        PrintStream logger = mock(PrintStream.class);
        given(parent.getFullName()).willReturn("");
        given(project.getParent()).willReturn(parent);
        given(project.getTrigger(GhprbTrigger.class)).willReturn(trigger);
        given(project.getName()).willReturn("project");
        given(project.getProperty(GithubProjectProperty.class)).willReturn(projectProperty);
        given(project.isDisabled()).willReturn(false);
        given(build.getCause(GhprbCause.class)).willReturn(cause);
        given(build.getResult()).willReturn(Result.SUCCESS);
        // Real getParent() so it reads the "project" field we inject via reflection below.
        given(build.getParent()).willCallRealMethod();
        given(pullRequest.getPullRequest()).willReturn(pr);
        given(cause.getPullID()).willReturn(pullId);
        given(cause.isMerged()).willReturn(true);
        given(cause.getTriggerSender()).willReturn(triggerSender);
        given(cause.getCommitAuthor()).willReturn(committer);
        given(listener.getLogger()).willReturn(logger);
        doNothing().when(repo).addComment(anyInt(), anyString());
        doNothing().when(logger).println();
        // Inject the mocked project as the build's owning job.
        Field parentField = Run.class.getDeclaredField("project");
        parentField.setAccessible(true);
        parentField.set(build, project);
        // Inject our pull-request map into the trigger descriptor's private "jobs" registry.
        Field jobsField = descriptor.getClass().getDeclaredField("jobs");
        jobsField.setAccessible(true);
        jobsField.set(descriptor, jobs);
        // Replaces the @Mock helper with a spied real instance wired to the mock graph.
        helper = spy(new Ghprb(project, trigger, pulls));
        trigger.setHelper(helper);
        given(helper.getRepository()).willReturn(repo);
        given(helper.isBotUser(any(GHUser.class))).willReturn(false);
    }

    @After
    public void afterClass() {
        // Intentionally empty: JenkinsRule handles teardown.
    }

    /**
     * Stubs who triggered the merge, what name they report, and the PR's single-commit
     * history. The PR's commit author is always this.committerName; the triggering user's
     * display name is the committerName parameter, so passing nonCommitterName simulates a
     * trigger by someone other than the code author.
     */
    private void setupConditions(String triggerLogin, String committerName, String comment)
            throws IOException {
        given(triggerSender.getLogin()).willReturn(triggerLogin);
        given(triggerSender.getName()).willReturn(committerName);
        given(committer.getName()).willReturn(this.committerName);
        // Fake a one-commit paged iteration over pr.listCommits().
        PagedIterator<GHPullRequestCommitDetail> itr = Mockito.mock(PagedIterator.class);
        PagedIterable pagedItr = Mockito.mock(PagedIterable.class);
        Commit commit = mock(Commit.class);
        GHPullRequestCommitDetail commitDetail = mock(GHPullRequestCommitDetail.class);
        given(pr.listCommits()).willReturn(pagedItr);
        given(pagedItr.iterator()).willReturn(itr);
        given(itr.hasNext()).willReturn(true, false);
        given(itr.next()).willReturn(commitDetail);
        given(commitDetail.getCommit()).willReturn(commit);
        given(commit.getCommitter()).willReturn(committer);
        given(cause.getCommentBody()).willReturn(comment);
    }

    /** Creates the merger under test with the given restriction flags. */
    private GhprbPullRequestMerge setupMerger(boolean onlyTriggerPhrase, boolean onlyAdminsMerge,
            boolean disallowOwnCode) {
        GhprbPullRequestMerge merger = spy(new GhprbPullRequestMerge(mergeComment,
                onlyTriggerPhrase, onlyAdminsMerge, disallowOwnCode));
        merger.setHelper(helper);
        return merger;
    }

    /** With no restrictions enabled, every combination of trigger/author/comment merges. */
    @Test
    public void testApproveMerge() throws Exception {
        boolean onlyTriggerPhrase = false;
        boolean onlyAdminsMerge = false;
        boolean disallowOwnCode = false;
        GhprbPullRequestMerge merger =
                setupMerger(onlyTriggerPhrase, onlyAdminsMerge, disallowOwnCode);
        setupConditions(nonAdminLogin, committerName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(adminLogin, nonCommitterName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(adminLogin, committerName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(nonAdminLogin, nonCommitterName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(nonAdminLogin, nonCommitterName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(adminLogin, nonCommitterName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(nonAdminLogin, nonCommitterName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(adminLogin, committerName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
    }

    /** onlyAdminsMerge: only users on the admin list may merge. */
    @Test
    public void testAdminMerge() throws Exception {
        boolean onlyTriggerPhrase = false;
        boolean onlyAdminsMerge = true;
        boolean disallowOwnCode = false;
        GhprbPullRequestMerge merger =
                setupMerger(onlyTriggerPhrase, onlyAdminsMerge, disallowOwnCode);
        setupConditions(adminLogin, committerName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(nonAdminLogin, committerName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
    }

    /** onlyTriggerPhrase: the comment must contain the configured trigger phrase. */
    @Test
    public void testTriggerMerge() throws Exception {
        boolean onlyTriggerPhrase = true;
        boolean onlyAdminsMerge = false;
        boolean disallowOwnCode = false;
        GhprbPullRequestMerge merger =
                setupMerger(onlyTriggerPhrase, onlyAdminsMerge, disallowOwnCode);
        setupConditions(adminLogin, committerName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(adminLogin, committerName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
    }

    /** disallowOwnCode: the triggering user must not be the author of the PR's commits. */
    @Test
    public void testOwnCodeMerge() throws Exception {
        boolean onlyTriggerPhrase = false;
        boolean onlyAdminsMerge = false;
        boolean disallowOwnCode = true;
        GhprbPullRequestMerge merger =
                setupMerger(onlyTriggerPhrase, onlyAdminsMerge, disallowOwnCode);
        setupConditions(adminLogin, nonCommitterName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
        setupConditions(adminLogin, committerName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
    }

    /** All three restrictions enabled: only admin + non-author + trigger phrase merges. */
    @Test
    public void testDenyMerge() throws Exception {
        boolean onlyTriggerPhrase = true;
        boolean onlyAdminsMerge = true;
        boolean disallowOwnCode = true;
        GhprbPullRequestMerge merger =
                setupMerger(onlyTriggerPhrase, onlyAdminsMerge, disallowOwnCode);
        setupConditions(nonAdminLogin, nonCommitterName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
        setupConditions(adminLogin, committerName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
        setupConditions(adminLogin, nonCommitterName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
        setupConditions(nonAdminLogin, nonCommitterName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
        setupConditions(nonAdminLogin, nonCommitterName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
        setupConditions(adminLogin, committerName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
        setupConditions(nonAdminLogin, committerName, nonTriggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(false);
        // The only permitted combination under full restrictions.
        setupConditions(adminLogin, nonCommitterName, triggerPhrase);
        assertThat(merger.perform(build, null, listener)).isEqualTo(true);
    }
}
/**
 * Copyright (c) <year> Keifer Miller
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 * * Neither the name of Ink Bar nor the
 * names of its contributors may be used to endorse or promote products
 * derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 **/
package com.keifermiller.inkbar.activities;

import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.PixelFormat;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.support.v4.app.FragmentActivity;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.view.ViewStub;
import android.widget.RelativeLayout;

import com.keifermiller.inkbar.R;

/**
 * @author keifer
 *
 *         Base activity for Ink Bar. Takes care of displaying license/usage dialogs on
 *         first start, wrapping every content view in the shared ib_activity_layout
 *         container, and applying the DAY/NIGHT theme preference (re-applying it when the
 *         "theme" preference changes).
 */
public abstract class IBActivity extends FragmentActivity implements
        SharedPreferences.OnSharedPreferenceChangeListener {

    /*
     * (non-Javadoc)
     *
     * @see android.app.Activity#onAttachedToWindow()
     */
    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
        // Request a 32-bit window surface for smoother gradients/alpha rendering.
        this.getWindow().setFormat(PixelFormat.RGBA_8888);
    }

    /*
     * (non-Javadoc)
     *
     * Wraps the subclass layout in the shared container by inflating it into a ViewStub.
     *
     * @see android.support.v4.app.FragmentActivity#setContentView(int)
     */
    @Override
    public void setContentView(int layoutResId) {
        RelativeLayout iBContent = (RelativeLayout) View.inflate(this,
                R.layout.ib_activity_layout, null);
        ViewStub stub = (ViewStub) iBContent.findViewById(R.id.view_stub);
        stub.setLayoutResource(layoutResId);
        stub.inflate();
        super.setContentView(iBContent, new RelativeLayout.LayoutParams(
                RelativeLayout.LayoutParams.FILL_PARENT,
                RelativeLayout.LayoutParams.FILL_PARENT));
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * android.support.v4.app.FragmentActivity#setContentView(android.view.View,
     * android.view.ViewGroup.LayoutParams)
     */
    @Override
    public void setContentView(View view, LayoutParams params) {
        RelativeLayout iBContent = (RelativeLayout) View.inflate(this,
                R.layout.ib_activity_layout, null);
        iBContent.addView(view, params);
        super.setContentView(iBContent);
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * android.support.v4.app.FragmentActivity#setContentView(android.view.View)
     */
    @Override
    public void setContentView(View view) {
        RelativeLayout iBContent = (RelativeLayout) View.inflate(this,
                R.layout.ib_activity_layout, null);
        iBContent.addView(view);
        super.setContentView(iBContent);
    }

    // Posted-to handler created in onCreate; access is synchronized via get/setHandler.
    private Handler mHandler;
    // Name of the prefs file holding one-time flags (license_agreed, usage_shown).
    // NOTE(review): "PREFES" is a long-standing typo kept for prefs-file compatibility.
    public final static String HIDDEN_PREFES_NAME = "hidden_preferences";
    // Currently applied theme, "DAY" or "NIGHT"; compared against the preference on start.
    private String mTheme;
    private SharedPreferences mPrefs;

    /*
     * (non-Javadoc)
     *
     * Applies the theme BEFORE super.onCreate() (themes must be set before view inflation),
     * then shows the usage/license dialogs if they have not been acknowledged yet.
     *
     * @see android.support.v4.app.FragmentActivity#onCreate(android.os.Bundle)
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        mPrefs = PreferenceManager.getDefaultSharedPreferences(this);
        if (mTheme == null) {
            mTheme = "DAY";
        }
        if (mPrefs.getString("theme", "DAY").equals("NIGHT")) {
            setTheme(R.style.Dark);
            mTheme = "NIGHT";
        } else {
            setTheme(R.style.Light);
            mTheme = "DAY";
        }
        super.onCreate(savedInstanceState);
        setHandler(new Handler());
        SharedPreferences hiddenPrefes = getSharedPreferences(
                HIDDEN_PREFES_NAME, 0);
        boolean licenseAgreed = hiddenPrefes
                .getBoolean("license_agreed", false);
        boolean usageShown = hiddenPrefes.getBoolean("usage_shown", false);
        if (!usageShown) {
            showDialog(R.id.usage_dialog);
        }
        if (!licenseAgreed) {
            showDialog(R.id.license_dialog);
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see android.support.v4.app.FragmentActivity#onStart()
     */
    @Override
    protected void onStart() {
        super.onStart();
        // Re-check the theme on every start in case it changed while this activity was stopped.
        mPrefs.registerOnSharedPreferenceChangeListener(this);
        updateTheme();
    }

    /*
     * (non-Javadoc)
     *
     * @see android.support.v4.app.FragmentActivity#onStop()
     */
    @Override
    protected void onStop() {
        mPrefs.unregisterOnSharedPreferenceChangeListener(this);
        super.onStop();
    }

    /*
     * (non-Javadoc)
     *
     * Builds the non-cancelable license and usage dialogs. Declining the license (or
     * viewing the source) exits the app via System.exit(0); accepting persists the
     * corresponding one-time flag in the hidden prefs file.
     *
     * @see android.app.Activity#onCreateDialog(int)
     */
    @Override
    protected Dialog onCreateDialog(int id) {
        AlertDialog dialog;
        switch (id) {
        case R.id.license_dialog:
            dialog = new AlertDialog.Builder(this)
                    .setCancelable(false)
                    .setTitle("License")
                    .setMessage(R.string.license_dialog_message)
                    .setNegativeButton(R.string.license_dialog_decline,
                            new OnClickListener() {
                                @Override
                                public void onClick(DialogInterface arg0,
                                        int arg1) {
                                    System.exit(0);
                                }
                            })
                    .setNeutralButton(R.string.license_dialog_viewsource,
                            new OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog,
                                        int which) {
                                    startActivity(new Intent(
                                            Intent.ACTION_VIEW,
                                            Uri.parse("https://github.com/KeiferMiller/InkBar")));
                                    System.exit(0);
                                }
                            })
                    .setPositiveButton(R.string.license_dialog_accept,
                            new OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog,
                                        int which) {
                                    getSharedPreferences(HIDDEN_PREFES_NAME, 0)
                                            .edit()
                                            .putBoolean("license_agreed", true)
                                            .commit();
                                }
                            }).create();
            break;
        case R.id.usage_dialog:
            dialog = new AlertDialog.Builder(this)
                    .setCancelable(false)
                    .setTitle("Usage")
                    .setMessage(R.string.usage_dialog_message)
                    .setPositiveButton(R.string.usage_dialog_accept,
                            new OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog,
                                        int which) {
                                    getSharedPreferences(HIDDEN_PREFES_NAME, 0)
                                            .edit()
                                            .putBoolean("usage_shown", true)
                                            .commit();
                                }
                            }).create();
            break;
        default:
            // Unknown dialog id: let the framework handle (or ignore) it.
            dialog = null;
        }
        return dialog;
    }

    /*
     * (non-Javadoc)
     *
     * @see android.content.SharedPreferences.OnSharedPreferenceChangeListener#
     * onSharedPreferenceChanged(android.content.SharedPreferences,
     * java.lang.String)
     */
    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences,
            String key) {
        if (key.equals("theme")) {
            updateTheme();
        }
    }

    /**
     * Fetches the user selected theme from mPrefs ("DAY" if none is selected).
     *
     * If a new theme is selected, sets the new theme, updates mTheme to reflect
     * this, and forces a redraw by restarting the activity.
     */
    private void updateTheme() {
        String value = mPrefs.getString("theme", "DAY");
        if (mTheme == null || !mTheme.equals(value)) {
            if (value.equals("NIGHT")) {
                setTheme(R.style.Dark);
                mTheme = "NIGHT";
            } else {
                setTheme(R.style.Light);
                mTheme = "DAY";
            }
            // recreate(); -- not used: finish+startActivity restarts the activity instead,
            // presumably for pre-API-11 compatibility (recreate() requires API 11) — confirm.
            finish();
            startActivity(getIntent());
        }
    }

    /**
     * @return the mHandler
     */
    synchronized public Handler getHandler() {
        return mHandler;
    }

    /**
     * @param mHandler
     *            the mHandler to set
     */
    synchronized public void setHandler(Handler mHandler) {
        this.mHandler = mHandler;
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.glue.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request for starting a task run that imports labels into a machine learning transform.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/StartImportLabelsTaskRun" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StartImportLabelsTaskRunRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The unique identifier of the machine learning transform. */
    private String transformId;

    /** The Amazon Simple Storage Service (Amazon S3) path from where you import the labels. */
    private String inputS3Path;

    /** Indicates whether to overwrite your existing labels. */
    private Boolean replaceAllLabels;

    /**
     * Sets the unique identifier of the machine learning transform.
     *
     * @param transformId
     *        The unique identifier of the machine learning transform.
     */
    public void setTransformId(String transformId) {
        this.transformId = transformId;
    }

    /**
     * Returns the unique identifier of the machine learning transform.
     *
     * @return The unique identifier of the machine learning transform.
     */
    public String getTransformId() {
        return this.transformId;
    }

    /**
     * Sets the unique identifier of the machine learning transform.
     *
     * @param transformId
     *        The unique identifier of the machine learning transform.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StartImportLabelsTaskRunRequest withTransformId(String transformId) {
        setTransformId(transformId);
        return this;
    }

    /**
     * Sets the Amazon Simple Storage Service (Amazon S3) path from where you import the labels.
     *
     * @param inputS3Path
     *        The Amazon Simple Storage Service (Amazon S3) path from where you import the labels.
     */
    public void setInputS3Path(String inputS3Path) {
        this.inputS3Path = inputS3Path;
    }

    /**
     * Returns the Amazon Simple Storage Service (Amazon S3) path from where you import the labels.
     *
     * @return The Amazon Simple Storage Service (Amazon S3) path from where you import the labels.
     */
    public String getInputS3Path() {
        return this.inputS3Path;
    }

    /**
     * Sets the Amazon Simple Storage Service (Amazon S3) path from where you import the labels.
     *
     * @param inputS3Path
     *        The Amazon Simple Storage Service (Amazon S3) path from where you import the labels.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StartImportLabelsTaskRunRequest withInputS3Path(String inputS3Path) {
        setInputS3Path(inputS3Path);
        return this;
    }

    /**
     * Sets whether to overwrite your existing labels.
     *
     * @param replaceAllLabels
     *        Indicates whether to overwrite your existing labels.
     */
    public void setReplaceAllLabels(Boolean replaceAllLabels) {
        this.replaceAllLabels = replaceAllLabels;
    }

    /**
     * Returns whether to overwrite your existing labels.
     *
     * @return Indicates whether to overwrite your existing labels.
     */
    public Boolean getReplaceAllLabels() {
        return this.replaceAllLabels;
    }

    /**
     * Sets whether to overwrite your existing labels.
     *
     * @param replaceAllLabels
     *        Indicates whether to overwrite your existing labels.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StartImportLabelsTaskRunRequest withReplaceAllLabels(Boolean replaceAllLabels) {
        setReplaceAllLabels(replaceAllLabels);
        return this;
    }

    /**
     * Returns whether to overwrite your existing labels.
     *
     * @return Indicates whether to overwrite your existing labels.
     */
    public Boolean isReplaceAllLabels() {
        return this.replaceAllLabels;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Each non-null field is rendered as "Name: value"; non-terminal entries
        // carry a trailing comma (matches the SDK's generated format exactly).
        StringBuilder buf = new StringBuilder("{");
        if (getTransformId() != null) {
            buf.append("TransformId: ").append(getTransformId()).append(",");
        }
        if (getInputS3Path() != null) {
            buf.append("InputS3Path: ").append(getInputS3Path()).append(",");
        }
        if (getReplaceAllLabels() != null) {
            buf.append("ReplaceAllLabels: ").append(getReplaceAllLabels());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null, so no separate null check is needed.
        if (!(obj instanceof StartImportLabelsTaskRunRequest)) {
            return false;
        }
        StartImportLabelsTaskRunRequest that = (StartImportLabelsTaskRunRequest) obj;
        return fieldEquals(getTransformId(), that.getTransformId())
                && fieldEquals(getInputS3Path(), that.getInputS3Path())
                && fieldEquals(getReplaceAllLabels(), that.getReplaceAllLabels());
    }

    /** Null-safe equality for a single field pair. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        // Accumulates 31 * h + fieldHash over the fields in declaration order,
        // yielding the same value as the generated implementation.
        int result = 1;
        for (Object field : new Object[] { getTransformId(), getInputS3Path(), getReplaceAllLabels() }) {
            result = 31 * result + ((field == null) ? 0 : field.hashCode());
        }
        return result;
    }

    @Override
    public StartImportLabelsTaskRunRequest clone() {
        return (StartImportLabelsTaskRunRequest) super.clone();
    }

}
package org.hibernate.brmeyer.demo.entity.eager; import java.util.ArrayList; import java.util.List; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.ManyToMany; import javax.persistence.OneToMany; @Entity public class User { @Id @GeneratedValue private int id; private String address1; private String address2; private String city; private String email; private String firstName; private String lastName; private String phone; private String state; private String zip; @ManyToMany(fetch = FetchType.EAGER) private List<Community> communityMemberships = new ArrayList<Community>(); @OneToMany(mappedBy = "creator", fetch = FetchType.EAGER) private List<Community> communitiesCreated = new ArrayList<Community>(); @ManyToMany(fetch = FetchType.EAGER) private List<Skill> skills = new ArrayList<Skill>(); @ManyToMany(fetch = FetchType.EAGER) private List<Tool> tools = new ArrayList<Tool>(); @OneToMany(mappedBy = "submitter", fetch = FetchType.EAGER) private List<Project> projectsSubmitted = new ArrayList<Project>(); @OneToMany(mappedBy = "organizer", fetch = FetchType.EAGER) private List<Project> projectsOrganized = new ArrayList<Project>(); @ManyToMany(mappedBy = "volunteers", fetch = FetchType.EAGER) private List<Project> projectsVolunteered = new ArrayList<Project>(); @OneToMany(mappedBy = "submitter", fetch = FetchType.EAGER) private List<Comment> comments = new ArrayList<Comment>(); @OneToMany(mappedBy = "user", fetch = FetchType.EAGER) private List<Donation> donations = new ArrayList<Donation>(); @OneToMany(mappedBy = "organizer", fetch = FetchType.EAGER) private List<ServiceEvent> serviceEventsOrganized = new ArrayList<ServiceEvent>(); public int getId() { return id; } public void setId(int id) { this.id = id; } public String getAddress1() { return address1; } public void setAddress1(String address1) { this.address1 = address1; } public String getAddress2() { 
return address2; } public void setAddress2(String address2) { this.address2 = address2; } public String getCity() { return city; } public void setCity(String city) { this.city = city; } public String getEmail() { return email; } public void setEmail(String email) { this.email = email; } public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getLastName() { return lastName; } public void setLastName(String lastName) { this.lastName = lastName; } public String getPhone() { return phone; } public void setPhone(String phone) { this.phone = phone; } public String getState() { return state; } public void setState(String state) { this.state = state; } public String getZip() { return zip; } public void setZip(String zip) { this.zip = zip; } public List<Community> getCommunityMemberships() { return communityMemberships; } public void setCommunityMemberships(List<Community> communityMemberships) { this.communityMemberships = communityMemberships; } public List<Community> getCommunitiesCreated() { return communitiesCreated; } public void setCommunitiesCreated(List<Community> communitiesCreated) { this.communitiesCreated = communitiesCreated; } public List<Skill> getSkills() { return skills; } public void setSkills(List<Skill> skills) { this.skills = skills; } public List<Tool> getTools() { return tools; } public void setTools(List<Tool> tools) { this.tools = tools; } public List<Project> getProjectsSubmitted() { return projectsSubmitted; } public void setProjectsSubmitted(List<Project> projectsSubmitted) { this.projectsSubmitted = projectsSubmitted; } public List<Project> getProjectsOrganized() { return projectsOrganized; } public void setProjectsOrganized(List<Project> projectsOrganized) { this.projectsOrganized = projectsOrganized; } public List<Project> getProjectsVolunteered() { return projectsVolunteered; } public void setProjectsVolunteered(List<Project> projectsVolunteered) { 
this.projectsVolunteered = projectsVolunteered; } public List<Comment> getComments() { return comments; } public void setComments(List<Comment> comments) { this.comments = comments; } public List<Donation> getDonations() { return donations; } public void setDonations(List<Donation> donations) { this.donations = donations; } public List<ServiceEvent> getServiceEventsOrganized() { return serviceEventsOrganized; } public void setServiceEventsOrganized(List<ServiceEvent> serviceEventsOrganized) { this.serviceEventsOrganized = serviceEventsOrganized; } }
package ws.zettabyte.weirdscience.tileentity;

import net.minecraft.block.Block;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.network.packet.Packet;
import net.minecraft.network.packet.Packet132TileEntityData;
import net.minecraftforge.common.Configuration;
import net.minecraftforge.common.ForgeDirection;
import net.minecraftforge.fluids.Fluid;
import net.minecraftforge.fluids.FluidEvent;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.FluidTankInfo;
import net.minecraftforge.fluids.IFluidHandler;
import net.minecraftforge.fluids.IFluidTank;
import ws.zettabyte.weirdscience.block.BlockMetaTank;
import ws.zettabyte.zettalib.ContentRegistry;
import ws.zettabyte.zettalib.baseclasses.TileEntityBase;
import ws.zettabyte.zettalib.interfaces.IConfiggable;
import ws.zettabyte.zettalib.interfaces.IRegistrable;

/**
 * Tile entity for the "Blood Donation Station": a single-fluid tank that only
 * accepts the configured blood fluid via {@link #fillFromBlock} and pushes its
 * contents into adjacent Forge fluid handlers each tick.
 *
 * NOTE(review): capacity and outputSpeed are static, so all stations share one
 * configured value; outputSpeed is read from config but never used in this
 * visible code (updateEntity drains without a per-tick cap) — confirm intent.
 */
public class TileEntityBloodDonation extends TileEntityBase implements IFluidHandler, IFluidTank, IConfiggable, IRegistrable {
    // Current tank contents; null means the tank is empty.
    protected FluidStack fluidTank;
    // Shared (static) tank capacity in mB, set from config via doConfig/setStorageCap.
    protected static int capacity = 0;
    // Shared (static) output rate from config; see NOTE above — unused here.
    protected static int outputSpeed = 0;

    public TileEntityBloodDonation() {
        super();
        outputSpeed = 1000;
    }

    /** Restores the tank from NBT; the "Empty" marker key means no fluid was saved. */
    @Override
    public void readFromNBT(NBTTagCompound tag) {
        super.readFromNBT(tag);
        if (!tag.hasKey("Empty")) {
            FluidStack fluid = FluidStack.loadFluidStackFromNBT(tag);
            if (fluid != null) {
                fluidTank = fluid;
            }
        }
    }

    /** Persists the tank to NBT, writing the "Empty" marker when the tank is null. */
    @Override
    public void writeToNBT(NBTTagCompound tag) {
        super.writeToNBT(tag);
        if (fluidTank != null) {
            fluidTank.writeToNBT(tag);
        } else {
            tag.setString("Empty", "");
        }
    }

    /** Syncs the full NBT state to clients via a vanilla tile-entity data packet. */
    @Override
    public Packet getDescriptionPacket() {
        NBTTagCompound nbt = new NBTTagCompound();
        writeToNBT(nbt);
        return new Packet132TileEntityData(xCoord, yCoord, zCoord, 1, nbt);
    }

    // The one fluid this station accepts; injected once at mod init via setBloodFluid.
    private static Fluid bloodFluid;

    public static void setBloodFluid(Fluid newfluid) {
        bloodFluid = newfluid;
    }

    public static void setStorageCap(int setMax) {
        capacity = setMax;
    }

    // NOTE(review): delegates to fill(FluidStack, boolean), which always returns 0 —
    // so filling through pipes is effectively a no-op; only fillFromBlock adds fluid.
    @Override
    public int fill(ForgeDirection from, FluidStack resource, boolean doFill) {
        return fill(resource, doFill);
    }

    @Override
    public FluidStack drain(ForgeDirection from, FluidStack resource, boolean doDrain) {
        return drain(from, resource.amount, doDrain);
    }

    @Override
    public FluidStack drain(ForgeDirection from, int maxDrain, boolean doDrain) {
        return drain(maxDrain, doDrain);
    }

    @Override
    public boolean canFill(ForgeDirection from, Fluid fluid) {
        // TODO Auto-generated method stub
        return true;
    }

    @Override
    public boolean canDrain(ForgeDirection from, Fluid fluid) {
        // TODO Auto-generated method stub
        return true;
    }

    @Override
    public FluidTankInfo[] getTankInfo(ForgeDirection from) {
        // Single-tank device: expose the one tank for every side.
        return new FluidTankInfo[] { getInfo() };
    }

    @Override
    public FluidStack getFluid() {
        // May be null when the tank is empty.
        return fluidTank;
    }

    @Override
    public int getFluidAmount() {
        if (fluidTank == null) {
            return 0;
        }
        return fluidTank.amount;
    }

    @Override
    public int getCapacity() {
        return capacity;
    }

    @Override
    public FluidTankInfo getInfo() {
        return new FluidTankInfo(this);
    }

    /**
     * Fills the tank directly (used by the block, not by pipes). Only accepts the
     * configured blood fluid; when doFill is false only simulates and returns how
     * much would fit. Returns the amount accepted in mB.
     */
    public int fillFromBlock(FluidStack resource, boolean doFill) {
        //Is our blood fluid set?
        if (bloodFluid != null) {
            //Is our resource set?
            if (resource == null) {
                return 0;
            }
            //Make sure our resource is blood.
            if(bloodFluid.getName().contentEquals(resource.getFluid().getName())) {
                //Get simulation values.
                if (!doFill) {
                    if (fluidTank == null) {
                        return Math.min(capacity, resource.amount);
                    }
                    return Math.min(capacity - fluidTank.amount, resource.amount);
                }
                //Create the fluid tank if it's empty.
                if (fluidTank == null) {
                    fluidTank = new FluidStack(resource, Math.min(capacity, resource.amount));
                    //Some network thing.
                    FluidEvent.fireEvent(new FluidEvent.FluidFillingEvent(fluidTank, this.worldObj, this.xCoord, this.yCoord, this.zCoord, this));
                    return fluidTank.amount;
                }
                int filled;
                if ((fluidTank.amount + resource.amount) < capacity) {
                    //Will we still be under capacity with this new influx of resources?
                    fluidTank.amount += resource.amount;
                    filled = resource.amount;
                } else { //Over capacity?
                    //Get the difference between current and capacity, that's what we're filling.
                    filled = capacity - fluidTank.amount;
                    fluidTank.amount = capacity;
                }
                if (fluidTank != null) {
                    //Some network thing.
                    FluidEvent.fireEvent(new FluidEvent.FluidFillingEvent(fluidTank, this.worldObj, this.xCoord, this.yCoord, this.zCoord, this));
                }
                updateTank();
                return filled;
            } else {
                return 0;
            }
        } else {
            return 0;
        }
    }

    // Intentionally inert: external IFluidTank fills are rejected (see NOTE above).
    @Override
    public int fill(FluidStack resource, boolean doFill) {
        return 0;
    }

    //If doDrain is false, the drain is only simulated
    //to get the FluidStack that would be returned.
    @Override
    public FluidStack drain(int maxDrain, boolean doDrain) {
        if ((fluidTank == null) || (maxDrain == 0)) {
            return null;
        }
        int drained = maxDrain;
        if (fluidTank.amount <= drained) {
            drained = fluidTank.amount;
        }
        FluidStack stack = new FluidStack(fluidTank, drained);
        if (doDrain) {
            fluidTank.amount -= drained;
            if (fluidTank.amount <= 0) {
                fluidTank = null;
            }
            updateTank();
            // NOTE(review): if the tank just emptied, this event is fired with a
            // null fluid stack — confirm the Forge event tolerates that.
            FluidEvent.fireEvent(new FluidEvent.FluidDrainingEvent(fluidTank, this.worldObj, this.xCoord, this.yCoord, this.zCoord, this));
        }
        return stack;
    }

    /**
     * Server-side tick: offers the whole tank to each adjacent fluid handler in
     * turn, draining whatever each one accepted, until the tank runs dry.
     */
    @Override
    public void updateEntity() {
        super.updateEntity();
        //Clientside is for suckers.
        if(!worldObj.isRemote) {
            //Do we have blood to dispense?
            if(fluidTank != null) {
                //Attempt to dump tank into surrounding Forge fluid handlers.
                if(fluidTank != null) {
                    ForgeDirection dir;
                    IFluidHandler adjFluidHandler;
                    for(int i = 0; i < 6; ++i) {
                        dir = ForgeDirection.VALID_DIRECTIONS[i];
                        // adjFluidHandlers is maintained by the TileEntityBase superclass.
                        adjFluidHandler = this.adjFluidHandlers[i];
                        if(adjFluidHandler != null) {
                            FluidStack toDrain = new FluidStack(fluidTank.getFluid(), fluidTank.amount);
                            // Neighbor reports how much it took; drain exactly that.
                            drain(adjFluidHandler.fill(dir.getOpposite(), toDrain, true), true);
                            updateTank();
                            if(fluidTank == null) {
                                break;
                            }
                        }
                    }
                }
            }
        }
    }

    /** Reads capacity and output rate from the mod configuration. */
    @Override
    public void doConfig(Configuration config, ContentRegistry cr) {
        capacity = config.get("Blood", "Blood Donation Station internal capacity", 1000).getInt();
        outputSpeed = config.get("Blood", "Blood Donation Station output rate per tick", 500).getInt();
    }

    @Override
    public String getEnglishName() {
        // TODO Auto-generated method stub
        return "Blood Donation Station";
    }

    @Override
    public String getGameRegistryName() {
        // TODO Auto-generated method stub
        return "bloodDonation";
    }

    @Override
    public boolean isEnabled() {
        return true;
    }

    /**
     * Pushes the current fill level (0-100%) into the host block's metadata so
     * the block can render its fill state. Server-side only.
     */
    public void updateTank() {
        if(!worldObj.isRemote) {
            if(Block.blocksList[worldObj.getBlockId(xCoord, yCoord, zCoord)] instanceof BlockMetaTank) {
                BlockMetaTank bmt = (BlockMetaTank)(Block.blocksList[worldObj.getBlockId(xCoord, yCoord, zCoord)]);
                if(fluidTank == null) {
                    bmt.setMetaByFillPercent(worldObj, xCoord, yCoord, zCoord, 0);
                } else {
                    bmt.setMetaByFillPercent(worldObj, xCoord, yCoord, zCoord, (fluidTank.amount*100)/capacity);
                }
            }
        }
    }
}
/* * Copyright 2013 Bazaarvoice, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.bazaarvoice.jolt; import com.bazaarvoice.jolt.common.Optional; import com.bazaarvoice.jolt.common.SpecStringParser; import com.bazaarvoice.jolt.exception.SpecException; import com.bazaarvoice.jolt.modifier.function.Function; import com.google.common.collect.Lists; import org.testng.annotations.BeforeClass; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import java.io.IOException; import java.lang.reflect.Field; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @SuppressWarnings( "deprecated" ) public class ModifierTest { enum TemplatrTestCase { OVERWRITR { @Override Modifier getTemplatr( final Object spec ) { return new Modifier.Overwritr( spec ); } }, DEFAULTR { @Override Modifier getTemplatr( final Object spec ) { return new Modifier.Defaultr(spec); } }, DEFINR { @Override Modifier getTemplatr( final Object spec ) { return new Modifier.Definr( spec ); } }; abstract Modifier getTemplatr(Object spec); } @BeforeClass @SuppressWarnings( "unchecked" ) public void setup() throws Exception { // accessing built ins such that we can test a custom impl of function // this is a special test case, and not a recommended approach of using function Field f = Modifier.class.getDeclaredField("STOCK_FUNCTIONS"); f.setAccessible( true ); Map<String, Function> BUILT_INS = (Map<String, Function>) 
f.get( null ); BUILT_INS.put( "minLabelComputation", new MinLabelComputation() ); BUILT_INS.put( "maxLabelComputation", new MaxLabelComputation() ); } @DataProvider public Iterator<Object[]> getTestCases() { List<Object[]> testCases = Lists.newLinkedList(); testCases.add( new Object[]{"/json/modifier/mapLiteral.json"} ); testCases.add( new Object[]{"/json/modifier/mapLiteralWithNullInput.json"} ); testCases.add( new Object[]{"/json/modifier/mapLiteralWithMissingInput.json"} ); testCases.add( new Object[]{"/json/modifier/mapLiteralWithEmptyInput.json"} ); testCases.add( new Object[]{"/json/modifier/arrayElementAt.json"} ); testCases.add( new Object[]{"/json/modifier/arrayLiteral.json"} ); testCases.add( new Object[]{"/json/modifier/arrayLiteralWithNullInput.json"} ); testCases.add( new Object[]{"/json/modifier/arrayLiteralWithEmptyInput.json"} ); testCases.add( new Object[]{"/json/modifier/arrayLiteralWithMissingInput.json"} ); testCases.add( new Object[]{"/json/modifier/simple.json"} ); testCases.add( new Object[]{"/json/modifier/simpleArray.json"} ); testCases.add( new Object[]{"/json/modifier/arrayObject.json"} ); testCases.add( new Object[]{"/json/modifier/simpleMapNullToArray.json"} ); testCases.add( new Object[]{"/json/modifier/simpleMapRuntimeNull.json"} ); testCases.add( new Object[]{"/json/modifier/simpleLookup.json"} ); testCases.add( new Object[]{"/json/modifier/complexLookup.json"} ); testCases.add( new Object[]{"/json/modifier/simpleArrayLookup.json"} ); testCases.add( new Object[]{"/json/modifier/complexArrayLookup.json"} ); testCases.add( new Object[]{"/json/modifier/valueCheckSimpleArray.json"} ); testCases.add( new Object[]{"/json/modifier/valueCheckSimpleArrayNullInput.json"} ); testCases.add( new Object[]{"/json/modifier/valueCheckSimpleArrayEmptyInput.json"} ); testCases.add( new Object[]{"/json/modifier/valueCheckSimpleMap.json"} ); testCases.add( new Object[]{"/json/modifier/valueCheckSimpleMapNullInput.json"} ); testCases.add( new 
Object[]{"/json/modifier/valueCheckSimpleMapEmptyInput.json"} ); testCases.add( new Object[]{"/json/modifier/simpleMapOpOverride.json"} ); testCases.add( new Object[]{"/json/modifier/simpleArrayOpOverride.json"} ); testCases.add( new Object[]{"/json/modifier/testListOfFunction.json"} ); return testCases.iterator(); } @Test (dataProvider = "getTestCases") public void testOverwritrTransform(String testFile) throws Exception { doTest( testFile, TemplatrTestCase.OVERWRITR ); } @Test (dataProvider = "getTestCases") public void testDefaultrTransform(String testFile) throws Exception { doTest( testFile, TemplatrTestCase.DEFAULTR ); } @Test (dataProvider = "getTestCases") public void testDefinrTransform(String testFile) throws Exception { doTest( testFile, TemplatrTestCase.DEFINR ); } public void doTest(String testFile, TemplatrTestCase testCase) throws Exception { Map<String, Object> testUnit = JsonUtils.classpathToMap( testFile ); Object input = testUnit.get( "input" ); Object spec = testUnit.get( "spec" ); Object context = testUnit.get( "context" ); Object expected = testUnit.get( testCase.name() ); if(expected != null) { Modifier modifier = testCase.getTemplatr( spec ); Object actual = modifier.transform( input, (Map<String, Object>) context ); JoltTestUtil.runArrayOrderObliviousDiffy( testCase.name() + " failed case " + testFile, expected, actual ); } } @DataProvider public Iterator<Object[]> getSpecValidationTestCases() { List<Object[]> testCases = Lists.newLinkedList(); List<Object> testObjects = JsonUtils.classpathToList( "/json/modifier/validation/specThatShouldFail.json" ); for(TemplatrTestCase testCase: TemplatrTestCase.values()) { for(Object specObj: testObjects) { testCases.add( new Object[] {testCase, specObj} ); } } return testCases.iterator(); } @Test(expectedExceptions = SpecException.class, dataProvider = "getSpecValidationTestCases") public void testInvalidSpecs(TemplatrTestCase testCase, Object spec) { testCase.getTemplatr( spec ); } @DataProvider 
public Iterator<Object[]> getFunctionTests() { List<Object[]> testCases = Lists.newLinkedList(); testCases.add( new Object[]{"/json/modifier/functions/stringsSplitTest.json", TemplatrTestCase.OVERWRITR}); testCases.add( new Object[]{"/json/modifier/functions/padStringsTest.json", TemplatrTestCase.OVERWRITR}); testCases.add( new Object[]{"/json/modifier/functions/stringsTests.json", TemplatrTestCase.OVERWRITR}); testCases.add( new Object[]{"/json/modifier/functions/mathTests.json", TemplatrTestCase.OVERWRITR} ); testCases.add( new Object[]{"/json/modifier/functions/arrayTests.json", TemplatrTestCase.OVERWRITR} ); testCases.add( new Object[]{"/json/modifier/functions/sizeTests.json", TemplatrTestCase.OVERWRITR} ); testCases.add( new Object[]{"/json/modifier/functions/labelsLookupTest.json", TemplatrTestCase.DEFAULTR} ); testCases.add( new Object[]{"/json/modifier/functions/valueTests.json", TemplatrTestCase.OVERWRITR } ); testCases.add( new Object[]{"/json/modifier/functions/squashNullsTests.json", TemplatrTestCase.OVERWRITR } ); return testCases.iterator(); } @Test (dataProvider = "getFunctionTests") public void testFunctions(String testFile, TemplatrTestCase testCase) throws Exception { doTest( testFile, testCase); } @DataProvider public Iterator<Object[]> fnArgParseTestCases(){ List<Object[]> testCases = Lists.newLinkedList(); testCases.add( new Object[] {"fn(abc,efg,pqr)", new String[] {"fn", "abc", "efg", "pqr"} } ); testCases.add( new Object[] {"fn(abc,@(1,2),pqr)", new String[] {"fn", "abc", "@(1,2)", "pqr"} } ); testCases.add( new Object[] {"fn(abc,efg,pqr,)", new String[] {"fn", "abc", "efg", "pqr", ""} } ); testCases.add( new Object[] {"fn(abc,,@(1,,2),,pqr,,)", new String[] {"fn", "abc", "","@(1,,2)","", "pqr", "", ""} } ); testCases.add( new Object[] {"fn(abc,'e,f,g',pqr)", new String[] {"fn", "abc", "'e,f,g'", "pqr"} } ); testCases.add( new Object[] {"fn(abc,'e(,f,)g',pqr)", new String[] {"fn", "abc", "'e(,f,)g'", "pqr"} } ); return testCases.iterator(); 
} @Test( dataProvider = "fnArgParseTestCases") public void testFunctionArgParse(String argString, String[] expected) throws Exception { List<String> actual = SpecStringParser.parseFunctionArgs( argString ); JoltTestUtil.runArrayOrderObliviousDiffy(" failed case " + argString, expected, actual ); } @Test public void testModifierFirstElementArray() throws IOException { Map<String, Object> input = new HashMap<String, Object>() {{ put("input", new Integer[]{5, 4}); }}; Map<String, Object> spec = new HashMap<String, Object>() {{ put("first", "=firstElement(@(1,input))"); }}; Map<String, Object> expected = new HashMap<String, Object>() {{ put("input", new Integer[]{5, 4}); put("first", 5); }}; Modifier modifier = new Modifier.Overwritr( spec ); Object actual = modifier.transform( input, null ); JoltTestUtil.runArrayOrderObliviousDiffy( "failed modifierFirstElementArray", expected, actual ); } @SuppressWarnings( "unused" ) public static final class MinLabelComputation implements Function { @Override @SuppressWarnings( "unchecked" ) public Optional<Object> apply( final Object... args ) { Map<String, String> valueLabels = (Map<String, String>) args[0]; Integer min = Integer.MAX_VALUE; Set<String> valueLabelKeys = valueLabels.keySet(); for (String labelKey: valueLabelKeys ) { Integer val = null; try { val = Integer.parseInt( labelKey ); } catch(Exception ignored) {} if(val != null) { min = Math.min( val, min ); } } return Optional.<Object>of( valueLabels.get( min.toString() ) ); } } @SuppressWarnings( "unused" ) public static final class MaxLabelComputation implements Function { @Override @SuppressWarnings( "unchecked" ) public Optional<Object> apply( final Object... 
args ) { Map<String, String> valueLabels = (Map<String, String>) args[0]; Integer max = Integer.MIN_VALUE; Set<String> valueLabelKeys = valueLabels.keySet(); for (String labelKey: valueLabelKeys ) { Integer val = null; try { val = Integer.parseInt( labelKey ); } catch(Exception ignored) {} if(val != null) { max = Math.max( val, max ); } } return Optional.<Object>of( valueLabels.get( max.toString() ) ); } } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.securityinsights.implementation; import com.azure.core.annotation.BodyParam; import com.azure.core.annotation.Delete; import com.azure.core.annotation.ExpectedResponses; import com.azure.core.annotation.Get; import com.azure.core.annotation.HeaderParam; import com.azure.core.annotation.Headers; import com.azure.core.annotation.Host; import com.azure.core.annotation.HostParam; import com.azure.core.annotation.PathParam; import com.azure.core.annotation.Put; import com.azure.core.annotation.QueryParam; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceInterface; import com.azure.core.annotation.ServiceMethod; import com.azure.core.annotation.UnexpectedResponseExceptionType; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.RestProxy; import com.azure.core.management.exception.ManagementException; import com.azure.core.util.Context; import com.azure.core.util.FluxUtil; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.securityinsights.fluent.WatchlistsClient; import com.azure.resourcemanager.securityinsights.fluent.models.WatchlistInner; import com.azure.resourcemanager.securityinsights.models.WatchlistList; import reactor.core.publisher.Mono; /** An instance of this class provides access to all the operations defined in WatchlistsClient. */ public final class WatchlistsClientImpl implements WatchlistsClient { private final ClientLogger logger = new ClientLogger(WatchlistsClientImpl.class); /** The proxy service used to perform REST calls. 
*/ private final WatchlistsService service; /** The service client containing this operation class. */ private final SecurityInsightsImpl client; /** * Initializes an instance of WatchlistsClientImpl. * * @param client the instance of the service client containing this operation class. */ WatchlistsClientImpl(SecurityInsightsImpl client) { this.service = RestProxy.create(WatchlistsService.class, client.getHttpPipeline(), client.getSerializerAdapter()); this.client = client; } /** * The interface defining all the services for SecurityInsightsWatchlists to be used by the proxy service to perform * REST calls. */ @Host("{$host}") @ServiceInterface(name = "SecurityInsightsWatc") private interface WatchlistsService { @Headers({"Content-Type: application/json"}) @Get( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights" + "/workspaces/{workspaceName}/providers/Microsoft.SecurityInsights/watchlists") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<WatchlistList>> list( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Get( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights" + "/workspaces/{workspaceName}/providers/Microsoft.SecurityInsights/watchlists/{watchlistAlias}") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<WatchlistInner>> get( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String 
workspaceName, @PathParam("watchlistAlias") String watchlistAlias, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Delete( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights" + "/workspaces/{workspaceName}/providers/Microsoft.SecurityInsights/watchlists/{watchlistAlias}") @ExpectedResponses({200, 204}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<Void>> delete( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @PathParam("watchlistAlias") String watchlistAlias, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Put( "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.OperationalInsights" + "/workspaces/{workspaceName}/providers/Microsoft.SecurityInsights/watchlists/{watchlistAlias}") @ExpectedResponses({200, 201}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<WatchlistInner>> createOrUpdate( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @PathParam("resourceGroupName") String resourceGroupName, @PathParam("workspaceName") String workspaceName, @PathParam("watchlistAlias") String watchlistAlias, @BodyParam("application/json") WatchlistInner watchlist, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Get("{nextLink}") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<WatchlistList>> listNext( @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint, @HeaderParam("Accept") String accept, Context 
context); } /** * Gets all watchlists, without watchlist items. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return all watchlists, without watchlist items along with {@link PagedResponse} on successful completion of * {@link Mono}. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<WatchlistInner>> listSinglePageAsync(String resourceGroupName, String workspaceName) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } if (resourceGroupName == null) { return Mono .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.")); } if (workspaceName == null) { return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null.")); } final String accept = "application/json"; return FluxUtil .withContext( context -> service .list( this.client.getEndpoint(), this.client.getApiVersion(), this.client.getSubscriptionId(), resourceGroupName, workspaceName, accept, context)) .<PagedResponse<WatchlistInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Gets all watchlists, without watchlist items. 
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all watchlists, without watchlist items along with {@link PagedResponse} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<WatchlistInner>> listSinglePageAsync(
        String resourceGroupName, String workspaceName, Context context) {
        // Same validation as the overload without Context.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        final String accept = "application/json";
        // Caller-supplied Context is merged with the client defaults, so no
        // FluxUtil.withContext capture is needed in this overload.
        context = this.client.mergeContext(context);
        return service
            .list(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                workspaceName,
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Gets all watchlists, without watchlist items.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return all watchlists, without watchlist items. */ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<WatchlistInner> listAsync(String resourceGroupName, String workspaceName) { return new PagedFlux<>( () -> listSinglePageAsync(resourceGroupName, workspaceName), nextLink -> listNextSinglePageAsync(nextLink)); } /** * Gets all watchlists, without watchlist items. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return all watchlists, without watchlist items. */ @ServiceMethod(returns = ReturnType.COLLECTION) private PagedFlux<WatchlistInner> listAsync(String resourceGroupName, String workspaceName, Context context) { return new PagedFlux<>( () -> listSinglePageAsync(resourceGroupName, workspaceName, context), nextLink -> listNextSinglePageAsync(nextLink, context)); } /** * Gets all watchlists, without watchlist items. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return all watchlists, without watchlist items. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<WatchlistInner> list(String resourceGroupName, String workspaceName) { return new PagedIterable<>(listAsync(resourceGroupName, workspaceName)); } /** * Gets all watchlists, without watchlist items. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return all watchlists, without watchlist items. */ @ServiceMethod(returns = ReturnType.COLLECTION) public PagedIterable<WatchlistInner> list(String resourceGroupName, String workspaceName, Context context) { return new PagedIterable<>(listAsync(resourceGroupName, workspaceName, context)); } /** * Gets a watchlist, without its watchlist items. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param watchlistAlias Watchlist Alias. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a watchlist, without its watchlist items along with {@link Response} on successful completion of {@link * Mono}. 
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<WatchlistInner>> getWithResponseAsync(
        String resourceGroupName, String workspaceName, String watchlistAlias) {
        // Validate client configuration and parameters; failures surface via
        // Mono.error rather than synchronous exceptions.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (watchlistAlias == null) {
            return Mono.error(new IllegalArgumentException("Parameter watchlistAlias is required and cannot be null."));
        }
        final String accept = "application/json";
        // The raw Response is returned unchanged so status code and headers
        // stay visible to the caller.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .get(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            workspaceName,
                            watchlistAlias,
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Gets a watchlist, without its watchlist items.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param watchlistAlias Watchlist Alias.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a watchlist, without its watchlist items along with {@link Response} on successful completion of {@link
     *     Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<WatchlistInner>> getWithResponseAsync(
        String resourceGroupName, String workspaceName, String watchlistAlias, Context context) {
        // Same validation as the overload without Context.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (watchlistAlias == null) {
            return Mono.error(new IllegalArgumentException("Parameter watchlistAlias is required and cannot be null."));
        }
        final String accept = "application/json";
        // Merge caller context with the client defaults before the direct call.
        context = this.client.mergeContext(context);
        return service
            .get(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                workspaceName,
                watchlistAlias,
                accept,
                context);
    }

    /**
     * Gets a watchlist, without its watchlist items.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param watchlistAlias Watchlist Alias.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a watchlist, without its watchlist items on successful completion of {@link Mono}.
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<WatchlistInner> getAsync(String resourceGroupName, String workspaceName, String watchlistAlias) { return getWithResponseAsync(resourceGroupName, workspaceName, watchlistAlias) .flatMap( (Response<WatchlistInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Gets a watchlist, without its watchlist items. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param watchlistAlias Watchlist Alias. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a watchlist, without its watchlist items. */ @ServiceMethod(returns = ReturnType.SINGLE) public WatchlistInner get(String resourceGroupName, String workspaceName, String watchlistAlias) { return getAsync(resourceGroupName, workspaceName, watchlistAlias).block(); } /** * Gets a watchlist, without its watchlist items. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param watchlistAlias Watchlist Alias. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a watchlist, without its watchlist items along with {@link Response}. 
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<WatchlistInner> getWithResponse(
        String resourceGroupName, String workspaceName, String watchlistAlias, Context context) {
        // Blocking variant that also exposes status code and headers.
        return getWithResponseAsync(resourceGroupName, workspaceName, watchlistAlias, context).block();
    }

    /**
     * Delete a watchlist.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param watchlistAlias Watchlist Alias.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> deleteWithResponseAsync(
        String resourceGroupName, String workspaceName, String watchlistAlias) {
        // Validate client configuration and parameters; failures surface via
        // Mono.error rather than synchronous exceptions.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (watchlistAlias == null) {
            return Mono.error(new IllegalArgumentException("Parameter watchlistAlias is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .delete(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            workspaceName,
                            watchlistAlias,
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Delete a watchlist.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param watchlistAlias Watchlist Alias.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> deleteWithResponseAsync(
        String resourceGroupName, String workspaceName, String watchlistAlias, Context context) {
        // Same validation as the overload without Context.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (watchlistAlias == null) {
            return Mono.error(new IllegalArgumentException("Parameter watchlistAlias is required and cannot be null."));
        }
        final String accept = "application/json";
        // Merge caller context with the client defaults before the direct call.
        context = this.client.mergeContext(context);
        return service
            .delete(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                workspaceName,
                watchlistAlias,
                accept,
                context);
    }

    /**
     * Delete a watchlist.
* * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param watchlistAlias Watchlist Alias. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return A {@link Mono} that completes when a successful response is received. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Void> deleteAsync(String resourceGroupName, String workspaceName, String watchlistAlias) { return deleteWithResponseAsync(resourceGroupName, workspaceName, watchlistAlias) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Delete a watchlist. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param watchlistAlias Watchlist Alias. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) public void delete(String resourceGroupName, String workspaceName, String watchlistAlias) { deleteAsync(resourceGroupName, workspaceName, watchlistAlias).block(); } /** * Delete a watchlist. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param watchlistAlias Watchlist Alias. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
     * @return the {@link Response}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> deleteWithResponse(
        String resourceGroupName, String workspaceName, String watchlistAlias, Context context) {
        // Blocking variant that also exposes status code and headers.
        return deleteWithResponseAsync(resourceGroupName, workspaceName, watchlistAlias, context).block();
    }

    /**
     * Creates or updates a watchlist and its watchlist items (bulk creation, e.g. through text/csv content type). To
     * create a Watchlist and its items, we should call this endpoint with rawContent and contentType properties.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param watchlistAlias Watchlist Alias.
     * @param watchlist The watchlist.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a Watchlist in Azure Security Insights along with {@link Response} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<WatchlistInner>> createOrUpdateWithResponseAsync(
        String resourceGroupName, String workspaceName, String watchlistAlias, WatchlistInner watchlist) {
        // Validate client configuration and parameters; failures surface via
        // Mono.error rather than synchronous exceptions.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (watchlistAlias == null) {
            return Mono.error(new IllegalArgumentException("Parameter watchlistAlias is required and cannot be null."));
        }
        if (watchlist == null) {
            return Mono.error(new IllegalArgumentException("Parameter watchlist is required and cannot be null."));
        } else {
            // Body is validated client-side before serialization.
            watchlist.validate();
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .createOrUpdate(
                            this.client.getEndpoint(),
                            this.client.getApiVersion(),
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            workspaceName,
                            watchlistAlias,
                            watchlist,
                            accept,
                            context))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Creates or updates a watchlist and its watchlist items (bulk creation, e.g. through text/csv content type). To
     * create a Watchlist and its items, we should call this endpoint with rawContent and contentType properties.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param watchlistAlias Watchlist Alias.
     * @param watchlist The watchlist.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a Watchlist in Azure Security Insights along with {@link Response} on successful completion of
     *     {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<WatchlistInner>> createOrUpdateWithResponseAsync(
        String resourceGroupName,
        String workspaceName,
        String watchlistAlias,
        WatchlistInner watchlist,
        Context context) {
        // Same validation as the overload without Context.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (workspaceName == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceName is required and cannot be null."));
        }
        if (watchlistAlias == null) {
            return Mono.error(new IllegalArgumentException("Parameter watchlistAlias is required and cannot be null."));
        }
        if (watchlist == null) {
            return Mono.error(new IllegalArgumentException("Parameter watchlist is required and cannot be null."));
        } else {
            // Body is validated client-side before serialization.
            watchlist.validate();
        }
        final String accept = "application/json";
        // Merge caller context with the client defaults before the direct call.
        context = this.client.mergeContext(context);
        return service
            .createOrUpdate(
                this.client.getEndpoint(),
                this.client.getApiVersion(),
                this.client.getSubscriptionId(),
                resourceGroupName,
                workspaceName,
                watchlistAlias,
                watchlist,
                accept,
                context);
    }

    /**
     * Creates or updates a watchlist and its watchlist items (bulk creation, e.g. through text/csv content type). To
     * create a Watchlist and its items, we should call this endpoint with rawContent and contentType properties.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param watchlistAlias Watchlist Alias.
     * @param watchlist The watchlist.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a Watchlist in Azure Security Insights on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<WatchlistInner> createOrUpdateAsync(
        String resourceGroupName, String workspaceName, String watchlistAlias, WatchlistInner watchlist) {
        // Unwrap the REST response, completing empty when the body was absent.
        return createOrUpdateWithResponseAsync(resourceGroupName, workspaceName, watchlistAlias, watchlist)
            .flatMap(
                (Response<WatchlistInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Creates or updates a watchlist and its watchlist items (bulk creation, e.g. through text/csv content type). To
     * create a Watchlist and its items, we should call this endpoint with rawContent and contentType properties.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param workspaceName The name of the workspace.
     * @param watchlistAlias Watchlist Alias.
     * @param watchlist The watchlist.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return represents a Watchlist in Azure Security Insights.
*/ @ServiceMethod(returns = ReturnType.SINGLE) public WatchlistInner createOrUpdate( String resourceGroupName, String workspaceName, String watchlistAlias, WatchlistInner watchlist) { return createOrUpdateAsync(resourceGroupName, workspaceName, watchlistAlias, watchlist).block(); } /** * Creates or updates a watchlist and its watchlist items (bulk creation, e.g. through text/csv content type). To * create a Watchlist and its items, we should call this endpoint with rawContent and contentType properties. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param workspaceName The name of the workspace. * @param watchlistAlias Watchlist Alias. * @param watchlist The watchlist. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return represents a Watchlist in Azure Security Insights along with {@link Response}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<WatchlistInner> createOrUpdateWithResponse( String resourceGroupName, String workspaceName, String watchlistAlias, WatchlistInner watchlist, Context context) { return createOrUpdateWithResponseAsync(resourceGroupName, workspaceName, watchlistAlias, watchlist, context) .block(); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return list all the watchlists along with {@link PagedResponse} on successful completion of {@link Mono}. 
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<WatchlistInner>> listNextSinglePageAsync(String nextLink) {
        // nextLink comes from a previous page's response and is already encoded.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        // The raw follow-up page is adapted into a PagedResponseBase.
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<WatchlistInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return list all the watchlists along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<WatchlistInner>> listNextSinglePageAsync(String nextLink, Context context) {
        // nextLink comes from a previous page's response and is already encoded.
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        // Merge caller context with the client defaults before the direct call.
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
/* * //****************************************************************** * // * // Copyright 2016 Samsung Electronics All Rights Reserved. * // * //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= * // * // Licensed under the Apache License, Version 2.0 (the "License"); * // you may not use this file except in compliance with the License. * // You may obtain a copy of the License at * // * // http://www.apache.org/licenses/LICENSE-2.0 * // * // Unless required by applicable law or agreed to in writing, software * // distributed under the License is distributed on an "AS IS" BASIS, * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * // See the License for the specific language governing permissions and * // limitations under the License. * // * //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= */ package org.iotivity.cloud.ciserver.resources; import static java.util.concurrent.TimeUnit.SECONDS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; import java.util.ArrayList; import java.util.HashMap; import java.util.concurrent.CountDownLatch; import org.iotivity.cloud.base.device.CoapDevice; import org.iotivity.cloud.base.device.IRequestChannel; import org.iotivity.cloud.base.protocols.IRequest; import org.iotivity.cloud.base.protocols.IResponse; import org.iotivity.cloud.base.protocols.MessageBuilder; import org.iotivity.cloud.base.protocols.coap.CoapRequest; import org.iotivity.cloud.base.protocols.coap.CoapResponse; import org.iotivity.cloud.base.protocols.enums.ContentFormat; import org.iotivity.cloud.base.protocols.enums.RequestMethod; import org.iotivity.cloud.base.protocols.enums.ResponseStatus; import org.iotivity.cloud.ciserver.Constants; import org.iotivity.cloud.ciserver.DeviceServerSystem; import org.iotivity.cloud.ciserver.DeviceServerSystem.CoapDevicePool; import 
org.iotivity.cloud.ciserver.resources.RouteResource.AccountReceiveHandler;
import org.iotivity.cloud.ciserver.resources.RouteResource.DefaultResponseHandler;
import org.iotivity.cloud.ciserver.resources.RouteResource.LinkInterfaceHandler;
import org.iotivity.cloud.util.Cbor;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Unit tests for {@link RouteResource}. The tests drive requests through a
 * mocked source/target device pair and capture what the handler forwards to
 * the mocked request channels.
 */
public class RouteResourceTest {
    private static final String            RELAY_URI          = "/di";
    private static final String            RESOURCE_URI       = "/a/light/0";
    private static final String            VERIFY_URI         = "/oic/acl/verify";
    // Device id used as the routing target in request URIs.
    private String                         mDiServer          = "resourceServerId";
    private CoapDevice                     mSourceDevice      = mock(
            CoapDevice.class);
    private CoapDevice                     mTargetDevice      = mock(
            CoapDevice.class);
    // Captured by the doAnswer callbacks installed in setUp():
    // mRes = last response sent back to the source device,
    // mReq = last request forwarded to a mocked channel.
    private IResponse                      mRes               = null;
    private IRequest                       mReq               = null;
    private DeviceServerSystem             mDeviceServerSystem = new DeviceServerSystem();
    private final CountDownLatch           mLatch             = new CountDownLatch(
            1);
    private Cbor<HashMap<String, Object>>  mCbor              = new Cbor<>();
    private IRequestChannel                mTargetChannel     = mock(
            IRequestChannel.class);

    // Channel injected into RouteResource under the field name "mASServer".
    @Mock(name = "mASServer")
    IRequestChannel                        mRequestChannelASServer;

    @Mock
    CoapDevicePool                         coapDevicePool;

    @Mock
    IRequestChannel                        requestChannel;

    // Class under test; mocks above are injected by MockitoAnnotations.
    @InjectMocks
    RouteResource                          diHandler          = new RouteResource(
            coapDevicePool);

    IRequest                               requestDefault       = makePutRequest();
    IRequest                               requestLinkInterface = makePutLinkInterfaceRequest();

    @InjectMocks
    LinkInterfaceHandler                   linkInterfaceHandler = diHandler.new LinkInterfaceHandler(
            "targetDeviceId", mSourceDevice, requestLinkInterface);

    @InjectMocks
    DefaultResponseHandler                 defaultResponseHandler = diHandler.new DefaultResponseHandler(
            "targetDeviceId", mSourceDevice);

    @InjectMocks
    AccountReceiveHandler                  accountDefaultReceiveHandler = diHandler.new AccountReceiveHandler(
            mSourceDevice, requestDefault);

    @InjectMocks
AccountReceiveHandler accountLinkInterfaceReceiveHandler = diHandler.new AccountReceiveHandler( mSourceDevice, requestLinkInterface); @Before public void setUp() throws Exception { mRes = null; mReq = null; Mockito.doReturn("sourceDeviceId").when(mSourceDevice).getDeviceId(); Mockito.doReturn("targetDeviceId").when(mTargetDevice).getDeviceId(); MockitoAnnotations.initMocks(this); mDeviceServerSystem.addResource(diHandler); Mockito.doReturn(mTargetDevice).when(coapDevicePool) .queryDevice(Mockito.anyString()); Mockito.doReturn(mTargetChannel).when(mTargetDevice) .getRequestChannel(); // callback mock Mockito.doAnswer(new Answer<Object>() { @Override public CoapResponse answer(InvocationOnMock invocation) throws Throwable { Object[] args = invocation.getArguments(); CoapResponse resp = (CoapResponse) args[0]; mRes = resp; mLatch.countDown(); return null; } }).when(mSourceDevice).sendResponse(Mockito.anyObject()); Mockito.doAnswer(new Answer<Object>() { @Override public CoapRequest answer(InvocationOnMock invocation) throws Throwable { Object[] args = invocation.getArguments(); CoapRequest request = (CoapRequest) args[0]; System.out.println( "\t----------payload : " + request.getPayloadString()); System.out.println( "\t----------uripath : " + request.getUriPath()); System.out.println( "\t---------uriquery : " + request.getUriQuery()); mReq = request; mLatch.countDown(); return null; } }).when(mTargetChannel).sendRequest(Mockito.any(IRequest.class), Mockito.any(CoapDevice.class)); Mockito.doAnswer(new Answer<Object>() { @Override public CoapRequest answer(InvocationOnMock invocation) throws Throwable { Object[] args = invocation.getArguments(); CoapRequest request = (CoapRequest) args[0]; System.out.println( "\t----------payload : " + request.getPayloadString()); System.out.println( "\t----------uripath : " + request.getUriPath()); System.out.println( "\t---------uriquery : " + request.getUriQuery()); mReq = request; mLatch.countDown(); return null; } 
}).when(mRequestChannelASServer).sendRequest( Mockito.any(IRequest.class), Mockito.any(CoapDevice.class)); } @Test public void testOnDefaultRequestReceived() throws InterruptedException { IRequest request = makePutRequest(); diHandler.onDefaultRequestReceived(mSourceDevice, request); assertTrue(mReq.getMethod().equals(RequestMethod.GET)); assertTrue(mReq.getUriPath().equals(VERIFY_URI)); assertTrue(mLatch.await(1L, SECONDS)); } @Test public void testOnDefaultResponseHandleronResponseReceived() throws InterruptedException { IResponse response = makeContentResponse(); defaultResponseHandler.onResponseReceived(response); assertEquals(mRes, response); assertTrue(mLatch.await(1L, SECONDS)); } @Test public void testOnAccountReceiveHandlerDeniedonResponseReceived() throws InterruptedException { IResponse response = makeVerifyDeniedContentResponse(); accountDefaultReceiveHandler.onResponseReceived(response); } @Test public void testOnAccountReceiveHandlerDefaultonResponseReceived() throws InterruptedException { IResponse response = makeVerifyAllowedContentResponse(); accountDefaultReceiveHandler.onResponseReceived(response); assertEquals(mReq, requestDefault); assertTrue(mLatch.await(1L, SECONDS)); } @Test public void testOnLinkInterfaceResponseHandleronResponseReceived() throws InterruptedException { IResponse response = makeVerifyAllowedContentResponse(); accountLinkInterfaceReceiveHandler.onResponseReceived(response); assertEquals(mReq, requestLinkInterface); assertTrue(mLatch.await(1L, SECONDS)); } @Test public void testOnAccountReceiveHandlerLinkInterfaceonResponseReceived() throws InterruptedException { IResponse response = makeContentLinkResponse(); linkInterfaceHandler.onResponseReceived(response); assertEquals(mRes, response); assertTrue(mLatch.await(1L, SECONDS)); } private IRequest makePutRequest() { HashMap<String, Object> payloadData = new HashMap<>(); payloadData.put("state", true); payloadData.put("power", 6); IRequest request = 
MessageBuilder.createRequest(RequestMethod.PUT, RELAY_URI + "/" + mDiServer + RESOURCE_URI, null, ContentFormat.APPLICATION_CBOR, mCbor.encodingPayloadToCbor(payloadData)); return request; } private IRequest makePutLinkInterfaceRequest() { HashMap<String, Object> payloadData = new HashMap<>(); payloadData.put("state", true); payloadData.put("power", 6); IRequest request = MessageBuilder.createRequest(RequestMethod.PUT, RELAY_URI + "/" + mDiServer + RESOURCE_URI, Constants.RS_INTERFACE + "=" + Constants.LINK_INTERFACE, ContentFormat.APPLICATION_CBOR, mCbor.encodingPayloadToCbor(payloadData)); return request; } private IResponse makeContentResponse() { HashMap<String, Object> payloadData = new HashMap<>(); payloadData.put("state", true); payloadData.put("power", 6); IResponse response = MessageBuilder.createResponse(makeGetRequest(), ResponseStatus.CONTENT, ContentFormat.APPLICATION_CBOR, mCbor.encodingPayloadToCbor(payloadData)); return response; } private IResponse makeVerifyAllowedContentResponse() { HashMap<String, Object> payloadData = new HashMap<>(); payloadData.put("gp", "Allowed"); IResponse response = MessageBuilder.createResponse(makeGetRequest(), ResponseStatus.CONTENT, ContentFormat.APPLICATION_CBOR, mCbor.encodingPayloadToCbor(payloadData)); return response; } private IResponse makeVerifyDeniedContentResponse() { HashMap<String, Object> payloadData = new HashMap<>(); payloadData.put("gp", "Denied"); IResponse response = MessageBuilder.createResponse(makeGetRequest(), ResponseStatus.CONTENT, ContentFormat.APPLICATION_CBOR, mCbor.encodingPayloadToCbor(payloadData)); return response; } private IResponse makeContentLinkResponse() { HashMap<String, Object> payloadData = new HashMap<>(); ArrayList<HashMap<String, Object>> linkPayload = new ArrayList<>(); payloadData.put("href", "hrefsample1"); linkPayload.add(payloadData); linkPayload.add(payloadData); linkPayload.add(payloadData); IResponse response = MessageBuilder.createResponse(makeGetRequest(), 
ResponseStatus.CONTENT, ContentFormat.APPLICATION_CBOR, mCbor.encodingPayloadToCbor(linkPayload)); return response; } private IRequest makeGetRequest() { IRequest request = MessageBuilder.createRequest(RequestMethod.PUT, RELAY_URI + "/" + mDiServer + RESOURCE_URI, null); return request; } }
/* * Copyright 2000-2016 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.server.widgetsetutils; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import com.google.gwt.core.client.GWT; import com.google.gwt.core.client.RunAsyncCallback; import com.google.gwt.core.ext.Generator; import com.google.gwt.core.ext.GeneratorContext; import com.google.gwt.core.ext.TreeLogger; import com.google.gwt.core.ext.TreeLogger.Type; import com.google.gwt.core.ext.UnableToCompleteException; import com.google.gwt.core.ext.typeinfo.JClassType; import com.google.gwt.core.ext.typeinfo.JMethod; import com.google.gwt.core.ext.typeinfo.JParameterizedType; import com.google.gwt.core.ext.typeinfo.JPrimitiveType; import com.google.gwt.core.ext.typeinfo.JType; import com.google.gwt.core.ext.typeinfo.NotFoundException; import com.google.gwt.core.ext.typeinfo.TypeOracle; import com.google.gwt.user.rebind.ClassSourceFileComposerFactory; import com.google.gwt.user.rebind.SourceWriter; import com.vaadin.client.JsArrayObject; import com.vaadin.client.ServerConnector; import com.vaadin.client.annotations.OnStateChange; import com.vaadin.client.communication.JsonDecoder; import 
com.vaadin.client.metadata.ConnectorBundleLoader; import com.vaadin.client.metadata.ConnectorBundleLoader.CValUiInfo; import com.vaadin.client.metadata.InvokationHandler; import com.vaadin.client.metadata.OnStateChangeMethod; import com.vaadin.client.metadata.ProxyHandler; import com.vaadin.client.metadata.TypeData; import com.vaadin.client.metadata.TypeDataStore; import com.vaadin.client.metadata.TypeDataStore.MethodAttribute; import com.vaadin.client.ui.UnknownComponentConnector; import com.vaadin.client.ui.UnknownExtensionConnector; import com.vaadin.server.widgetsetutils.metadata.ClientRpcVisitor; import com.vaadin.server.widgetsetutils.metadata.ConnectorBundle; import com.vaadin.server.widgetsetutils.metadata.ConnectorInitVisitor; import com.vaadin.server.widgetsetutils.metadata.GeneratedSerializer; import com.vaadin.server.widgetsetutils.metadata.OnStateChangeVisitor; import com.vaadin.server.widgetsetutils.metadata.Property; import com.vaadin.server.widgetsetutils.metadata.RendererVisitor; import com.vaadin.server.widgetsetutils.metadata.ServerRpcVisitor; import com.vaadin.server.widgetsetutils.metadata.StateInitVisitor; import com.vaadin.server.widgetsetutils.metadata.TypeVisitor; import com.vaadin.server.widgetsetutils.metadata.WidgetInitVisitor; import com.vaadin.shared.annotations.DelegateToWidget; import com.vaadin.shared.annotations.NoLayout; import com.vaadin.shared.communication.ClientRpc; import com.vaadin.shared.communication.ServerRpc; import com.vaadin.shared.ui.Connect; import com.vaadin.shared.ui.Connect.LoadStyle; import com.vaadin.tools.CvalAddonsChecker; import com.vaadin.tools.CvalChecker; import com.vaadin.tools.CvalChecker.InvalidCvalException; import com.vaadin.tools.ReportUsage; public class ConnectorBundleLoaderFactory extends Generator { /** * Special SourceWriter that approximates the number of written bytes to * support splitting long methods into shorter chunks to avoid hitting the * 65535 byte limit. 
*/ private class SplittingSourceWriter implements SourceWriter { private final SourceWriter target; private final String baseName; private final int splitSize; private final List<String> methodNames; // Seems to be undercounted by about 15% private int approximateChars = 0; private int wrapCount = 0; public SplittingSourceWriter(SourceWriter target, String baseName, int splitSize) { this.target = target; this.baseName = baseName; this.splitSize = splitSize; methodNames = new ArrayList<>(); methodNames.add(baseName); } @Override public void beginJavaDocComment() { target.beginJavaDocComment(); addChars(10); } private void addChars(int i) { approximateChars += i; } private void addChars(String s) { addChars(s.length()); } private void addChars(String s, Object[] args) { addChars(String.format(s, args)); } @Override public void commit(TreeLogger logger) { target.commit(logger); } @Override public void endJavaDocComment() { target.endJavaDocComment(); addChars(10); } @Override public void indent() { target.indent(); addChars(10); } @Override public void indentln(String s) { target.indentln(s); addChars(s); } @Override public void indentln(String s, Object... args) { target.indentln(s, args); addChars(s, args); } @Override public void outdent() { target.outdent(); } @Override public void print(String s) { target.print(s); addChars(s); } @Override public void print(String s, Object... args) { target.print(s, args); addChars(s, args); } @Override public void println() { target.println(); addChars(5); } @Override public void println(String s) { target.println(s); addChars(s); } @Override public void println(String s, Object... args) { target.println(s, args); addChars(s, args); } public void splitIfNeeded() { splitIfNeeded(false, null); } public void splitIfNeeded(boolean isNative, String params) { if (approximateChars > splitSize) { String newMethod = baseName + wrapCount++; String args = params == null ? 
"" : params; if (isNative) { outdent(); println("}-*/;"); // To support fields of type long (#13692) println("@com.google.gwt.core.client.UnsafeNativeLong"); println("private native void %s(%s) /*-{", newMethod, args); } else { println("%s();", newMethod); outdent(); println("}"); println("private void %s(%s) {", newMethod, args); } methodNames.add(newMethod); indent(); approximateChars = 0; } } public List<String> getMethodNames() { return Collections.unmodifiableList(methodNames); } } static { ReportUsage.checkForUpdatesInBackgroundThread(); } private CvalAddonsChecker cvalChecker = new CvalAddonsChecker(); @Override public String generate(TreeLogger logger, GeneratorContext context, String typeName) throws UnableToCompleteException { TypeOracle typeOracle = context.getTypeOracle(); try { JClassType classType = typeOracle.getType(typeName); String packageName = classType.getPackage().getName(); String className = classType.getSimpleSourceName() + "Impl"; generateClass(logger, context, packageName, className, typeName); return packageName + "." 
+ className; } catch (UnableToCompleteException e) { // Just rethrow throw e; } catch (Exception e) { logger.log(Type.ERROR, getClass() + " failed", e); throw new UnableToCompleteException(); } } private void generateClass(TreeLogger logger, GeneratorContext context, String packageName, String className, String requestedType) throws Exception { PrintWriter printWriter = context.tryCreate(logger, packageName, className); if (printWriter == null) { return; } List<CValUiInfo> cvalInfos = null; try { if (cvalChecker != null) { cvalInfos = cvalChecker.run(); // Don't run twice cvalChecker = null; } } catch (InvalidCvalException e) { System.err.println("\n\n\n\n" + CvalChecker.LINE); for (String line : e.getMessage().split("\n")) { System.err.println(line); } System.err.println(CvalChecker.LINE + "\n\n\n\n"); System.exit(1); throw new UnableToCompleteException(); } List<ConnectorBundle> bundles = buildBundles(logger, context.getTypeOracle()); ClassSourceFileComposerFactory composer = new ClassSourceFileComposerFactory( packageName, className); composer.setSuperclass(requestedType); SourceWriter w = composer.createSourceWriter(context, printWriter); w.println("public void init() {"); w.indent(); for (ConnectorBundle bundle : bundles) { detectBadProperties(bundle, logger); String name = bundle.getName(); boolean isEager = name .equals(ConnectorBundleLoader.EAGER_BUNDLE_NAME); w.print("addAsyncBlockLoader(new AsyncBundleLoader(\""); w.print(escape(name)); w.print("\", "); w.print("new String[] {"); for (Entry<JClassType, Set<String>> entry : bundle.getIdentifiers() .entrySet()) { Set<String> identifiers = entry.getValue(); for (String id : identifiers) { w.print("\""); w.print(escape(id)); w.print("\","); } } w.println("}) {"); w.indent(); w.print("protected void load(final "); w.print(TypeDataStore.class.getName()); w.println(" store) {"); w.indent(); if (!isEager) { w.print(GWT.class.getName()); w.print(".runAsync("); } w.println("new %s() {", 
RunAsyncCallback.class.getName()); w.indent(); w.println("public void onSuccess() {"); w.indent(); w.println("load();"); w.println("%s.get().setLoaded(getName());", ConnectorBundleLoader.class.getName()); // Close onSuccess method w.outdent(); w.println("}"); w.println("private void load() {"); w.indent(); String loadNativeJsBundle = "loadJsBundle"; printBundleData(logger, w, bundle, loadNativeJsBundle); // Close load method w.outdent(); w.println("}"); // Separate method for loading native JS stuff (e.g. callbacks) String loadNativeJsMethodName = "loadNativeJs"; // To support fields of type long (#13692) w.println("@com.google.gwt.core.client.UnsafeNativeLong"); w.println("private native void %s(%s store) /*-{", loadNativeJsMethodName, TypeDataStore.class.getName()); w.indent(); List<String> jsMethodNames = printJsBundleData(logger, w, bundle, loadNativeJsMethodName); w.outdent(); w.println("}-*/;"); // Call all generated native method inside one Java method to avoid // refercences inside native methods to each other w.println("private void %s(%s store) {", loadNativeJsBundle, TypeDataStore.class.getName()); w.indent(); printLoadJsBundleData(w, loadNativeJsBundle, jsMethodNames); w.outdent(); w.println("}"); // onFailure method declaration starts w.println("public void onFailure(Throwable reason) {"); w.indent(); w.println("%s.get().setLoadFailure(getName(), reason);", ConnectorBundleLoader.class.getName()); w.outdent(); w.println("}"); // Close new RunAsyncCallback() {} w.outdent(); w.print("}"); if (isEager) { w.println(".onSuccess();"); } else { w.println(");"); } // Close load method w.outdent(); w.println("}"); // Close add(new ... 
w.outdent(); w.println("});"); } if (cvalInfos != null && !cvalInfos.isEmpty()) { w.println("{"); for (CValUiInfo c : cvalInfos) { if ("evaluation".equals(c.type)) { w.println("cvals.add(new CValUiInfo(\"" + c.product + "\", \"" + c.version + "\", \"" + c.widgetset + "\", null));"); } } w.println("}"); } w.outdent(); w.println("}"); w.commit(logger); } private void printLoadJsBundleData(SourceWriter w, String methodName, List<String> methods) { SplittingSourceWriter writer = new SplittingSourceWriter(w, methodName, 30000); for (String method : methods) { writer.println("%s(store);", method); writer.splitIfNeeded(); } } private void detectBadProperties(ConnectorBundle bundle, TreeLogger logger) throws UnableToCompleteException { Map<JClassType, Set<String>> definedProperties = new HashMap<>(); for (Property property : bundle.getNeedsProperty()) { JClassType beanType = property.getBeanType(); Set<String> usedPropertyNames = definedProperties.get(beanType); if (usedPropertyNames == null) { usedPropertyNames = new HashSet<>(); definedProperties.put(beanType, usedPropertyNames); } String name = property.getName(); if (!usedPropertyNames.add(name)) { logger.log(Type.ERROR, beanType.getQualifiedSourceName() + " has multiple properties with the name " + name + ". 
This can happen if there are multiple " + "setters with identical names ignoring case."); throw new UnableToCompleteException(); } if (!property.hasAccessorMethods()) { logger.log(Type.ERROR, beanType.getQualifiedSourceName() + " has the property '" + name + "' without getter defined."); throw new UnableToCompleteException(); } } } private List<String> printJsBundleData(TreeLogger logger, SourceWriter w, ConnectorBundle bundle, String methodName) { SplittingSourceWriter writer = new SplittingSourceWriter(w, methodName, 30000); Set<Property> needsProperty = bundle.getNeedsProperty(); for (Property property : needsProperty) { writer.println("var data = {"); writer.indent(); if (property.getAnnotation(NoLayout.class) != null) { writer.println("noLayout: 1, "); } writer.println("setter: function(bean, value) {"); writer.indent(); property.writeSetterBody(logger, writer, "bean", "value"); writer.outdent(); writer.println("},"); writer.println("getter: function(bean) {"); writer.indent(); property.writeGetterBody(logger, writer, "bean"); writer.outdent(); writer.println("}"); writer.outdent(); writer.println("};"); // Method declaration writer.print( "store.@%s::setPropertyData(Ljava/lang/Class;Ljava/lang/String;Lcom/google/gwt/core/client/JavaScriptObject;)", TypeDataStore.class.getName()); writer.println("(@%s::class, '%s', data);", property.getBeanType().getQualifiedSourceName(), property.getName()); writer.println(); writer.splitIfNeeded(true, String.format("%s store", TypeDataStore.class.getName())); } return writer.getMethodNames(); } private void printBundleData(TreeLogger logger, SourceWriter sourceWriter, ConnectorBundle bundle, String loadNativeJsMethodName) throws UnableToCompleteException { // Split into new load method when reaching approximately 30000 bytes SplittingSourceWriter w = new SplittingSourceWriter(sourceWriter, "load", 30000); writeSuperClasses(w, bundle); writeIdentifiers(w, bundle); writeGwtConstructors(w, bundle); writeReturnTypes(w, bundle); 
writeInvokers(logger, w, bundle); writeParamTypes(w, bundle); writeProxys(w, bundle); writeMethodAttributes(logger, w, bundle); w.println("%s(store);", loadNativeJsMethodName); // Must use Java code to generate Type data (because of Type[]), doing // this after the JS property data has been initialized writePropertyTypes(logger, w, bundle); writeSerializers(logger, w, bundle); writePresentationTypes(w, bundle); writeDelegateToWidget(logger, w, bundle); writeOnStateChangeHandlers(logger, w, bundle); } private void writeOnStateChangeHandlers(TreeLogger logger, SplittingSourceWriter w, ConnectorBundle bundle) throws UnableToCompleteException { Map<JClassType, Set<JMethod>> needsOnStateChangeHandler = bundle .getNeedsOnStateChangeHandler(); for (Entry<JClassType, Set<JMethod>> entry : needsOnStateChangeHandler .entrySet()) { JClassType connector = entry.getKey(); TreeLogger typeLogger = logger.branch(Type.DEBUG, "Generating @OnStateChange support for " + connector.getName()); // Build map to speed up error checking Map<String, Property> stateProperties = new HashMap<>(); JClassType stateType = ConnectorBundle .findInheritedMethod(connector, "getState").getReturnType() .isClassOrInterface(); for (Property property : bundle.getProperties(stateType)) { stateProperties.put(property.getName(), property); } for (JMethod method : entry.getValue()) { TreeLogger methodLogger = typeLogger.branch(Type.DEBUG, "Processing method " + method.getName()); if (method.isPublic() || method.isProtected()) { methodLogger.log(Type.ERROR, "@OnStateChange is only supported for methods with private or default visibility."); throw new UnableToCompleteException(); } OnStateChange onStateChange = method .getAnnotation(OnStateChange.class); String[] properties = onStateChange.value(); if (properties.length == 0) { methodLogger.log(Type.ERROR, "There are no properties to listen to"); throw new UnableToCompleteException(); } // Verify that all properties do exist for (String propertyName : 
properties) { if (!stateProperties.containsKey(propertyName)) { methodLogger.log(Type.ERROR, "State class has no property named " + propertyName); throw new UnableToCompleteException(); } } if (method.getParameters().length != 0) { methodLogger.log(Type.ERROR, "Method should accept zero parameters"); throw new UnableToCompleteException(); } // new OnStateChangeMethod(Class declaringClass, String // methodName, String[], properties) w.print("store.addOnStateChangeMethod(%s, new %s(", getClassLiteralString(connector), OnStateChangeMethod.class.getName()); if (!connector.equals(method.getEnclosingType())) { w.print("%s, ", getClassLiteralString(method.getEnclosingType())); } w.print("\"%s\", ", method.getName()); w.print("new String[] {"); for (String propertyName : properties) { w.print("\"%s\", ", propertyName); } w.print("}"); w.println("));"); w.splitIfNeeded(); } } } private void writeSuperClasses(SplittingSourceWriter w, ConnectorBundle bundle) { List<JClassType> needsSuperclass = new ArrayList<>( bundle.getNeedsSuperclass()); // Emit in hierarchy order to ensure superclass is defined when // referenced Collections.sort(needsSuperclass, new Comparator<JClassType>() { @Override public int compare(JClassType type1, JClassType type2) { int depthDiff = getDepth(type1) - getDepth(type2); if (depthDiff != 0) { return depthDiff; } else { // Just something to get a stable compare return type1.getName().compareTo(type2.getName()); } } private int getDepth(JClassType type) { int depth = 0; while (type != null) { depth++; type = type.getSuperclass(); } return depth; } }); for (JClassType jClassType : needsSuperclass) { JClassType superclass = jClassType.getSuperclass(); while (superclass != null && !superclass.isPublic()) { superclass = superclass.getSuperclass(); } String classLiteralString; if (superclass == null) { classLiteralString = "null"; } else { classLiteralString = getClassLiteralString(superclass); } w.println("store.setSuperClass(%s, %s);", 
getClassLiteralString(jClassType), classLiteralString); } } private void writeDelegateToWidget(TreeLogger logger, SplittingSourceWriter w, ConnectorBundle bundle) { Map<JClassType, Set<Property>> needsDelegateToWidget = bundle .getNeedsDelegateToWidget(); for (Entry<JClassType, Set<Property>> entry : needsDelegateToWidget .entrySet()) { JClassType beanType = entry.getKey(); for (Property property : entry.getValue()) { w.println("store.setDelegateToWidget(%s, \"%s\", \"%s\");", getClassLiteralString(beanType), // property.getBeanType()), property.getName(), property.getAnnotation(DelegateToWidget.class).value()); } w.splitIfNeeded(); } } private void writeSerializers(TreeLogger logger, SplittingSourceWriter w, ConnectorBundle bundle) throws UnableToCompleteException { Map<JType, GeneratedSerializer> serializers = bundle.getSerializers(); for (Entry<JType, GeneratedSerializer> entry : serializers.entrySet()) { JType type = entry.getKey(); GeneratedSerializer serializer = entry.getValue(); w.print("store.setSerializerFactory("); writeClassLiteral(w, type); w.print(", "); w.println("new Invoker() {"); w.indent(); w.println("public Object invoke(Object target, Object[] params) {"); w.indent(); serializer.writeSerializerInstantiator(logger, w); w.outdent(); w.println("}"); w.outdent(); w.print("}"); w.println(");"); w.splitIfNeeded(); } } private void writePresentationTypes(SplittingSourceWriter w, ConnectorBundle bundle) { Map<JClassType, JType> presentationTypes = bundle .getPresentationTypes(); for (Entry<JClassType, JType> entry : presentationTypes.entrySet()) { w.print("store.setPresentationType("); writeClassLiteral(w, entry.getKey()); w.print(", "); writeClassLiteral(w, entry.getValue()); w.println(");"); w.splitIfNeeded(); } } private void writePropertyTypes(TreeLogger logger, SplittingSourceWriter w, ConnectorBundle bundle) { Set<Property> properties = bundle.getNeedsProperty(); for (Property property : properties) { w.print("store.setPropertyType("); 
writeClassLiteral(w, property.getBeanType()); w.print(", \""); w.print(escape(property.getName())); w.print("\", "); writeTypeCreator(w, property.getPropertyType()); w.println(");"); w.splitIfNeeded(); } } private void writeMethodAttributes(TreeLogger logger, SplittingSourceWriter w, ConnectorBundle bundle) { for (Entry<JClassType, Map<JMethod, Set<MethodAttribute>>> typeEntry : bundle .getMethodAttributes().entrySet()) { JClassType type = typeEntry.getKey(); for (Entry<JMethod, Set<MethodAttribute>> methodEntry : typeEntry .getValue().entrySet()) { JMethod method = methodEntry.getKey(); Set<MethodAttribute> attributes = methodEntry.getValue(); for (MethodAttribute attribute : attributes) { w.println("store.setMethodAttribute(%s, \"%s\", %s.%s);", getClassLiteralString(type), method.getName(), MethodAttribute.class.getCanonicalName(), attribute.name()); w.splitIfNeeded(); } } } } private void writeProxys(SplittingSourceWriter w, ConnectorBundle bundle) { Set<JClassType> needsProxySupport = bundle.getNeedsProxySupport(); for (JClassType type : needsProxySupport) { w.print("store.setProxyHandler("); writeClassLiteral(w, type); w.print(", new "); w.print(ProxyHandler.class.getCanonicalName()); w.println("() {"); w.indent(); w.println("public Object createProxy(final " + InvokationHandler.class.getName() + " handler) {"); w.indent(); w.print("return new "); w.print(type.getQualifiedSourceName()); w.println("() {"); w.indent(); JMethod[] methods = type.getOverridableMethods(); for (JMethod method : methods) { if (method.isAbstract()) { w.print("public "); w.print(method.getReturnType().getQualifiedSourceName()); w.print(" "); w.print(method.getName()); w.print("("); JType[] types = method.getParameterTypes(); for (int i = 0; i < types.length; i++) { if (i != 0) { w.print(", "); } w.print(types[i].getQualifiedSourceName()); w.print(" p"); w.print(Integer.toString(i)); } w.println(") {"); w.indent(); if (!method.getReturnType().getQualifiedSourceName() .equals("void")) { 
w.print("return "); } w.print("handler.invoke(this, "); w.print(TypeData.class.getCanonicalName()); w.print(".getType("); writeClassLiteral(w, type); w.print(").getMethod(\""); w.print(escape(method.getName())); w.print("\"), new Object [] {"); for (int i = 0; i < types.length; i++) { w.print("p" + i + ", "); } w.println("});"); w.outdent(); w.println("}"); } } w.outdent(); w.println("};"); w.outdent(); w.println("}"); w.outdent(); w.println("});"); w.splitIfNeeded(); } } private void writeParamTypes(SplittingSourceWriter w, ConnectorBundle bundle) { Map<JClassType, Set<JMethod>> needsParamTypes = bundle .getNeedsParamTypes(); for (Entry<JClassType, Set<JMethod>> entry : needsParamTypes .entrySet()) { JClassType type = entry.getKey(); Set<JMethod> methods = entry.getValue(); for (JMethod method : methods) { w.print("store.setParamTypes("); writeClassLiteral(w, type); w.print(", \""); w.print(escape(method.getName())); w.print("\", new Type[] {"); for (JType parameter : method.getParameterTypes()) { ConnectorBundleLoaderFactory.writeTypeCreator(w, parameter); w.print(", "); } w.println("});"); w.splitIfNeeded(); } } } private void writeInvokers(TreeLogger logger, SplittingSourceWriter w, ConnectorBundle bundle) throws UnableToCompleteException { Map<JClassType, Set<JMethod>> needsInvoker = bundle.getNeedsInvoker(); for (Entry<JClassType, Set<JMethod>> entry : needsInvoker.entrySet()) { JClassType type = entry.getKey(); TreeLogger typeLogger = logger.branch(Type.DEBUG, "Creating invokers for " + type); Set<JMethod> methods = entry.getValue(); for (JMethod method : methods) { w.print("store.setInvoker("); writeClassLiteral(w, type); w.print(", \""); w.print(escape(method.getName())); w.print("\","); if (method.isPublic()) { typeLogger.log(Type.DEBUG, "Invoking " + method.getName() + " using java"); writeJavaInvoker(w, type, method); } else { TreeLogger methodLogger = typeLogger.branch(Type.DEBUG, "Invoking " + method.getName() + " using jsni"); // Must use JSNI to 
access non-public methods writeJsniInvoker(methodLogger, w, type, method); } w.println(");"); w.splitIfNeeded(); } } } private void writeJsniInvoker(TreeLogger logger, SplittingSourceWriter w, JClassType type, JMethod method) throws UnableToCompleteException { w.println("new JsniInvoker() {"); w.indent(); w.println( "protected native Object jsniInvoke(Object target, %s<Object> params) /*-{ ", JsArrayObject.class.getName()); w.indent(); JType returnType = method.getReturnType(); boolean hasReturnType = !"void" .equals(returnType.getQualifiedSourceName()); // Note that void is also a primitive type boolean hasPrimitiveReturnType = hasReturnType && returnType.isPrimitive() != null; if (hasReturnType) { w.print("return "); if (hasPrimitiveReturnType) { // Integer.valueOf(expression); w.print("@%s::valueOf(%s)(", returnType.isPrimitive().getQualifiedBoxedSourceName(), returnType.getJNISignature()); // Implementation tested briefly, but I don't dare leave it // enabled since we are not using it in the framework and we // have not tests for it. logger.log(Type.ERROR, "JSNI invocation is not yet supported for methods with " + "primitive return types. 
Change your method " + "to public to be able to use conventional" + " Java invoking instead."); throw new UnableToCompleteException(); } } JType[] parameterTypes = method.getParameterTypes(); w.print("target.@%s::" + method.getName() + "(*)(", method.getEnclosingType().getQualifiedSourceName()); for (int i = 0; i < parameterTypes.length; i++) { if (i != 0) { w.print(", "); } w.print("params[" + i + "]"); JPrimitiveType primitive = parameterTypes[i].isPrimitive(); if (primitive != null) { // param.intValue(); w.print(".@%s::%sValue()()", primitive.getQualifiedBoxedSourceName(), primitive.getQualifiedSourceName()); } } if (hasPrimitiveReturnType) { assert hasReturnType; w.print(")"); } w.println(");"); if (!hasReturnType) { w.println("return null;"); } w.outdent(); w.println("}-*/;"); w.outdent(); w.print("}"); } private void writeJavaInvoker(SplittingSourceWriter w, JClassType type, JMethod method) { w.println("new Invoker() {"); w.indent(); w.println("public Object invoke(Object target, Object[] params) {"); w.indent(); JType returnType = method.getReturnType(); boolean hasReturnType = !"void" .equals(returnType.getQualifiedSourceName()); if (hasReturnType) { w.print("return "); } JType[] parameterTypes = method.getParameterTypes(); w.print("((" + type.getQualifiedSourceName() + ") target)." 
+ method.getName() + "("); for (int i = 0; i < parameterTypes.length; i++) { JType parameterType = parameterTypes[i]; if (i != 0) { w.print(", "); } String parameterTypeName = getBoxedTypeName(parameterType); if (parameterTypeName.startsWith("elemental.json.Json")) { // Need to pass through native method to allow casting Object to // JSO if the value is a string w.print("%s.<%s>obj2jso(params[%d])", JsonDecoder.class.getCanonicalName(), parameterTypeName, i); } else { w.print("(" + parameterTypeName + ") params[" + i + "]"); } } w.println(");"); if (!hasReturnType) { w.println("return null;"); } w.outdent(); w.println("}"); w.outdent(); w.print("}"); } private void writeReturnTypes(SplittingSourceWriter w, ConnectorBundle bundle) { Map<JClassType, Set<JMethod>> methodReturnTypes = bundle .getMethodReturnTypes(); for (Entry<JClassType, Set<JMethod>> entry : methodReturnTypes .entrySet()) { JClassType type = entry.getKey(); Set<JMethod> methods = entry.getValue(); for (JMethod method : methods) { // setReturnType(Class<?> type, String methodName, Type // returnType) w.print("store.setReturnType("); writeClassLiteral(w, type); w.print(", \""); w.print(escape(method.getName())); w.print("\", "); writeTypeCreator(w, method.getReturnType()); w.println(");"); w.splitIfNeeded(); } } } private void writeGwtConstructors(SplittingSourceWriter w, ConnectorBundle bundle) { Set<JClassType> constructors = bundle.getGwtConstructors(); for (JClassType type : constructors) { w.print("store.setConstructor("); writeClassLiteral(w, type); w.println(", new Invoker() {"); w.indent(); w.println("public Object invoke(Object target, Object[] params) {"); w.indent(); w.print("return "); w.print(GWT.class.getName()); w.print(".create("); writeClassLiteral(w, type); w.println(");"); w.outdent(); w.println("}"); w.outdent(); w.println("});"); w.splitIfNeeded(); } } public static void writeClassLiteral(SourceWriter w, JType type) { w.print(getClassLiteralString(type)); } public static String 
getClassLiteralString(JType type) { return type.getQualifiedSourceName() + ".class"; } private void writeIdentifiers(SplittingSourceWriter w, ConnectorBundle bundle) { Map<JClassType, Set<String>> identifiers = bundle.getIdentifiers(); for (Entry<JClassType, Set<String>> entry : identifiers.entrySet()) { Set<String> ids = entry.getValue(); JClassType type = entry.getKey(); for (String id : ids) { w.print("store.setClass(\""); w.print(escape(id)); w.print("\", "); writeClassLiteral(w, type); w.println(");"); w.splitIfNeeded(); } } } private List<ConnectorBundle> buildBundles(TreeLogger logger, TypeOracle typeOracle) throws NotFoundException, UnableToCompleteException { Map<LoadStyle, Collection<JClassType>> connectorsByLoadStyle = new HashMap<>(); for (LoadStyle loadStyle : LoadStyle.values()) { connectorsByLoadStyle.put(loadStyle, new ArrayList<JClassType>()); } // Find all types with a valid mapping Collection<JClassType> selectedTypes = getConnectorsForWidgetset(logger, typeOracle); // Group by load style for (JClassType connectorSubtype : selectedTypes) { LoadStyle loadStyle = getLoadStyle(connectorSubtype); if (loadStyle != null) { connectorsByLoadStyle.get(loadStyle).add(connectorSubtype); } } List<ConnectorBundle> bundles = new ArrayList<>(); Collection<TypeVisitor> visitors = getVisitors(typeOracle); ConnectorBundle eagerBundle = new ConnectorBundle( ConnectorBundleLoader.EAGER_BUNDLE_NAME, visitors, typeOracle); TreeLogger eagerLogger = logger.branch(Type.TRACE, "Populating eager bundle"); // Eager connectors and all RPC interfaces are loaded by default eagerBundle.processTypes(eagerLogger, connectorsByLoadStyle.get(LoadStyle.EAGER)); eagerBundle.processType(eagerLogger, typeOracle .findType(UnknownComponentConnector.class.getCanonicalName())); eagerBundle.processType(eagerLogger, typeOracle .findType(UnknownExtensionConnector.class.getCanonicalName())); eagerBundle.processSubTypes(eagerLogger, typeOracle.getType(ClientRpc.class.getName())); 
eagerBundle.processSubTypes(eagerLogger, typeOracle.getType(ServerRpc.class.getName())); bundles.add(eagerBundle); ConnectorBundle deferredBundle = new ConnectorBundle( ConnectorBundleLoader.DEFERRED_BUNDLE_NAME, eagerBundle); TreeLogger deferredLogger = logger.branch(Type.TRACE, "Populating deferred bundle"); deferredBundle.processTypes(deferredLogger, connectorsByLoadStyle.get(LoadStyle.DEFERRED)); bundles.add(deferredBundle); Collection<JClassType> lazy = connectorsByLoadStyle.get(LoadStyle.LAZY); for (JClassType type : lazy) { ConnectorBundle bundle = new ConnectorBundle( type.getQualifiedSourceName(), eagerBundle); TreeLogger subLogger = logger.branch(Type.TRACE, "Populating " + type.getName() + " bundle"); bundle.processType(subLogger, type); bundles.add(bundle); } Collection<JClassType> none = connectorsByLoadStyle.get(LoadStyle.NONE); for (JClassType type : none) { logger.log(Type.TRACE, "Ignoring " + type.getName() + " with LoadStyle.NONE"); } return bundles; } /** * Returns the connector types that should be included in the widgetset. * This method can be overridden to create a widgetset only containing * selected connectors. * <p> * The default implementation finds all type implementing * {@link ServerConnector} that have a @{@link Connect} annotation. It also * checks that multiple connectors aren't connected to the same server-side * class. 
* * @param logger * the logger to which information can be logged * @param typeOracle * the type oracle that can be used for finding types * @return a collection of all the connector types that should be included * in the widgetset * @throws UnableToCompleteException * if the operation fails */ protected Collection<JClassType> getConnectorsForWidgetset( TreeLogger logger, TypeOracle typeOracle) throws UnableToCompleteException { JClassType serverConnectorType; try { serverConnectorType = typeOracle .getType(ServerConnector.class.getName()); } catch (NotFoundException e) { logger.log(Type.ERROR, "Can't find " + ServerConnector.class.getName()); throw new UnableToCompleteException(); } JClassType[] types = serverConnectorType.getSubtypes(); Map<String, JClassType> mappings = new TreeMap<>(); // Keep track of what has happened to avoid logging intermediate state Map<JClassType, List<JClassType>> replaced = new TreeMap<>( ConnectorBundle.jClassComparator); for (JClassType type : types) { Connect connectAnnotation = type.getAnnotation(Connect.class); if (connectAnnotation == null) { continue; } String identifier = connectAnnotation.value().getCanonicalName(); JClassType previousMapping = mappings.put(identifier, type); if (previousMapping != null) { // There are multiple mappings, pick the subclass JClassType subclass; JClassType superclass; if (previousMapping.isAssignableFrom(type)) { subclass = type; superclass = previousMapping; } else if (type.isAssignableFrom(previousMapping)) { subclass = previousMapping; superclass = type; } else { // Neither inherits from the other - this is a conflict logger.log(Type.ERROR, "Conflicting @Connect mappings detected for " + identifier + ": " + type.getQualifiedSourceName() + " and " + previousMapping.getQualifiedSourceName() + ". 
There can only be multiple @Connect mappings for the same server-side type if one is the subclass of the other."); throw new UnableToCompleteException(); } mappings.put(identifier, subclass); // Inherit any previous replacements List<JClassType> previousReplacements = replaced .remove(superclass); if (previousReplacements == null) { previousReplacements = new ArrayList<>(); } previousReplacements.add(superclass); replaced.put(subclass, previousReplacements); } } // Log the final set of replacements for (Entry<JClassType, List<JClassType>> entry : replaced.entrySet()) { String msg = entry.getKey().getQualifiedSourceName() + " replaces "; List<JClassType> list = entry.getValue(); for (int i = 0; i < list.size(); i++) { if (i != 0) { msg += ", "; } msg += list.get(i).getQualifiedSourceName(); } logger.log(Type.INFO, msg); } // Return the types of the final mapping return mappings.values(); } private Collection<TypeVisitor> getVisitors(TypeOracle oracle) throws NotFoundException { List<TypeVisitor> visitors = Arrays.<TypeVisitor> asList( new ConnectorInitVisitor(), new StateInitVisitor(), new WidgetInitVisitor(), new RendererVisitor(), new ClientRpcVisitor(), new ServerRpcVisitor(), new OnStateChangeVisitor()); for (TypeVisitor typeVisitor : visitors) { typeVisitor.init(oracle); } return visitors; } protected LoadStyle getLoadStyle(JClassType connectorType) { Connect annotation = connectorType.getAnnotation(Connect.class); return annotation.loadStyle(); } public static String getBoxedTypeName(JType type) { if (type.isPrimitive() != null) { // Used boxed types for primitives return type.isPrimitive().getQualifiedBoxedSourceName(); } else { return type.getErasedType().getQualifiedSourceName(); } } public static void writeTypeCreator(SourceWriter sourceWriter, JType type) { String typeName = ConnectorBundleLoaderFactory.getBoxedTypeName(type); JParameterizedType parameterized = type.isParameterized(); if (parameterized != null) { sourceWriter.print("new Type(\"" + 
typeName + "\", "); sourceWriter.print("new Type[] {"); JClassType[] typeArgs = parameterized.getTypeArgs(); for (JClassType jClassType : typeArgs) { writeTypeCreator(sourceWriter, jClassType); sourceWriter.print(", "); } sourceWriter.print("}"); } else { sourceWriter.print("new Type(" + typeName + ".class"); } sourceWriter.print(")"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.bookkeeper.client; import static org.apache.bookkeeper.common.concurrent.FutureUtils.result; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import java.util.concurrent.CompletableFuture; import org.apache.bookkeeper.client.api.WriteAdvHandle; import org.apache.bookkeeper.client.api.WriteFlag; import org.apache.bookkeeper.client.api.WriteHandle; import org.apache.bookkeeper.net.BookieId; import org.junit.Test; /** * Client side tests on deferred sync write flag. 
*/ public class DeferredSyncTest extends MockBookKeeperTestCase { static final byte[] PASSWORD = "password".getBytes(); static final ByteBuf DATA = Unpooled.wrappedBuffer("foobar".getBytes()); static final int NUM_ENTRIES = 100; @Test public void testAddEntryLastAddConfirmedDoesNotAdvance() throws Exception { try (WriteHandle wh = result(newCreateLedgerOp() .withEnsembleSize(3) .withWriteQuorumSize(3) .withAckQuorumSize(2) .withPassword(PASSWORD) .withWriteFlags(WriteFlag.DEFERRED_SYNC) .execute())) { for (int i = 0; i < NUM_ENTRIES - 1; i++) { result(wh.appendAsync(DATA.retainedDuplicate())); } long lastEntryID = result(wh.appendAsync(DATA.retainedDuplicate())); assertEquals(NUM_ENTRIES - 1, lastEntryID); assertEquals(NUM_ENTRIES - 1, wh.getLastAddPushed()); assertEquals(-1, wh.getLastAddConfirmed()); } } @Test public void testAddEntryLastAddConfirmedAdvanceWithForce() throws Exception { try (WriteHandle wh = result(newCreateLedgerOp() .withEnsembleSize(3) .withWriteQuorumSize(3) .withAckQuorumSize(2) .withPassword(PASSWORD) .withWriteFlags(WriteFlag.DEFERRED_SYNC) .execute())) { for (int i = 0; i < NUM_ENTRIES - 1; i++) { result(wh.appendAsync(DATA.retainedDuplicate())); } long lastEntryID = result(wh.appendAsync(DATA.retainedDuplicate())); assertEquals(NUM_ENTRIES - 1, lastEntryID); assertEquals(NUM_ENTRIES - 1, wh.getLastAddPushed()); assertEquals(-1, wh.getLastAddConfirmed()); result(wh.force()); assertEquals(NUM_ENTRIES - 1, wh.getLastAddConfirmed()); } } @Test public void testForceOnWriteAdvHandle() throws Exception { try (WriteAdvHandle wh = result(newCreateLedgerOp() .withEnsembleSize(3) .withWriteQuorumSize(3) .withAckQuorumSize(2) .withPassword(PASSWORD) .withWriteFlags(WriteFlag.DEFERRED_SYNC) .makeAdv() .execute())) { CompletableFuture<Long> w0 = wh.writeAsync(0, DATA.retainedDuplicate()); CompletableFuture<Long> w2 = wh.writeAsync(2, DATA.retainedDuplicate()); CompletableFuture<Long> w3 = wh.writeAsync(3, DATA.retainedDuplicate()); result(w0); 
result(wh.force()); assertEquals(0, wh.getLastAddConfirmed()); CompletableFuture<Long> w1 = wh.writeAsync(1, DATA.retainedDuplicate()); result(w3); assertTrue(w1.isDone()); assertTrue(w2.isDone()); CompletableFuture<Long> w5 = wh.writeAsync(5, DATA.retainedDuplicate()); result(wh.force()); assertEquals(3, wh.getLastAddConfirmed()); wh.writeAsync(4, DATA.retainedDuplicate()); result(w5); result(wh.force()); assertEquals(5, wh.getLastAddConfirmed()); } } @Test public void testForceRequiresFullEnsemble() throws Exception { try (WriteHandle wh = result(newCreateLedgerOp() .withEnsembleSize(3) .withWriteQuorumSize(2) .withAckQuorumSize(2) .withPassword(PASSWORD) .withWriteFlags(WriteFlag.DEFERRED_SYNC) .execute())) { for (int i = 0; i < NUM_ENTRIES - 1; i++) { result(wh.appendAsync(DATA.retainedDuplicate())); } long lastEntryID = result(wh.appendAsync(DATA.retainedDuplicate())); assertEquals(NUM_ENTRIES - 1, lastEntryID); assertEquals(NUM_ENTRIES - 1, wh.getLastAddPushed()); assertEquals(-1, wh.getLastAddConfirmed()); BookieId bookieAddress = wh.getLedgerMetadata().getEnsembleAt(wh.getLastAddPushed()).get(0); killBookie(bookieAddress); // write should succeed (we still have 2 bookies out of 3) result(wh.appendAsync(DATA.retainedDuplicate())); // force cannot go, it must be acknowledged by all of the bookies in the ensamble try { result(wh.force()); } catch (BKException.BKBookieException failed) { } // bookie comes up again, force must succeed startKilledBookie(bookieAddress); result(wh.force()); } } @Test public void testForceWillAdvanceLacOnlyUpToLastAcknoledgedWrite() throws Exception { try (WriteHandle wh = result(newCreateLedgerOp() .withEnsembleSize(3) .withWriteQuorumSize(3) .withAckQuorumSize(3) .withPassword(PASSWORD) .withWriteFlags(WriteFlag.DEFERRED_SYNC) .execute())) { for (int i = 0; i < NUM_ENTRIES - 1; i++) { result(wh.appendAsync(DATA.retainedDuplicate())); } long lastEntryIdBeforeSuspend = result(wh.appendAsync(DATA.retainedDuplicate())); 
assertEquals(NUM_ENTRIES - 1, lastEntryIdBeforeSuspend); assertEquals(-1, wh.getLastAddConfirmed()); // one bookie will stop sending acks for forceLedger BookieId bookieAddress = wh.getLedgerMetadata().getEnsembleAt(wh.getLastAddPushed()).get(0); suspendBookieForceLedgerAcks(bookieAddress); // start and complete a force, lastAddConfirmed cannot be "lastAddPushedAfterSuspendedWrite" // because the write has not yet been acknowledged by AckQuorumSize Bookies CompletableFuture<?> forceResult = wh.force(); assertEquals(-1, wh.getLastAddConfirmed()); // send an entry and receive ack long lastEntry = wh.append(DATA.retainedDuplicate()); // receive the ack for forceLedger resumeBookieWriteAcks(bookieAddress); result(forceResult); // now LastAddConfirmed will be equals to the last confirmed entry // before force() started assertEquals(lastEntryIdBeforeSuspend, wh.getLastAddConfirmed()); result(wh.force()); assertEquals(lastEntry, wh.getLastAddConfirmed()); } } @Test public void testForbiddenEnsembleChange() throws Exception { try (WriteHandle wh = result(newCreateLedgerOp() .withEnsembleSize(1) .withWriteQuorumSize(1) .withAckQuorumSize(1) .withPassword(PASSWORD) .withWriteFlags(WriteFlag.DEFERRED_SYNC) .execute())) { for (int i = 0; i < NUM_ENTRIES - 1; i++) { wh.append(DATA.retainedDuplicate()); } assertEquals(1, availableBookies.size()); // kill the only bookie in the ensamble killBookie(wh.getLedgerMetadata().getEnsembleAt(wh.getLastAddPushed()).get(0)); assertEquals(0, availableBookies.size()); startNewBookie(); assertEquals(1, availableBookies.size()); try { // we cannot switch to the new bookie with DEFERRED_SYNC wh.append(DATA.retainedDuplicate()); fail("since ensemble change is disable we cannot be able to write any more"); } catch (BKException.BKWriteException ex) { // expected } LedgerHandle lh = (LedgerHandle) wh; assertFalse(lh.hasDelayedWriteFailedBookies()); } } @Test(expected = BKException.BKLedgerClosedException.class) public void 
testCannotIssueForceOnClosedLedgerHandle() throws Exception { WriteHandle wh = result(newCreateLedgerOp() .withEnsembleSize(1) .withWriteQuorumSize(1) .withAckQuorumSize(1) .withPassword(PASSWORD) .withWriteFlags(WriteFlag.DEFERRED_SYNC) .execute()); wh.close(); result(wh.force()); } }
/* * Copyright (c) 2016 Nike, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.nike.cerberus.dao; import com.google.common.collect.Lists; import com.nike.cerberus.mapper.AwsIamRoleMapper; import com.nike.cerberus.record.AwsIamRoleKmsKeyRecord; import com.nike.cerberus.record.AwsIamRolePermissionRecord; import com.nike.cerberus.record.AwsIamRoleRecord; import org.junit.Before; import org.junit.Test; import java.time.OffsetDateTime; import java.time.ZoneId; import java.util.List; import java.util.Optional; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class AwsIamRoleDaoTest { private final String awsIamRoleArn = "IAM_ROLE_ARN"; private final String awsRegion = "us-west-2"; private final String safeDepositBoxId = "SDB_ID"; private final String roleId = "ROLE_ID"; private final String iamRoleId = "IAM_ROLE_ID"; private final String iamRolePermissionId = "IAM_ROLE_PERMISSION_ID"; private final String iamRoleKmsKeyId = "IAM_ROLE_KMS_KEY_ID"; private final String awsKmsKeyId = "arn:aws:kms:us-west-2:ACCOUNT_ID:key/GUID"; private final String createdBy = "system"; private final String lastUpdatedBy = "system"; private final OffsetDateTime createdTs = OffsetDateTime.now(ZoneId.of("UTC")); private final OffsetDateTime lastUpdatedTs = OffsetDateTime.now(ZoneId.of("UTC")); private final AwsIamRoleRecord awsIamRoleRecord = new AwsIamRoleRecord() .setId(iamRoleId) 
.setAwsIamRoleArn(awsIamRoleArn) .setCreatedBy(createdBy) .setLastUpdatedBy(lastUpdatedBy) .setCreatedTs(createdTs) .setLastUpdatedTs(lastUpdatedTs); private final AwsIamRolePermissionRecord awsIamRolePermissionRecord = new AwsIamRolePermissionRecord() .setId(iamRolePermissionId) .setAwsIamRoleId(iamRoleId) .setSdboxId(safeDepositBoxId) .setRoleId(roleId) .setCreatedBy(createdBy) .setLastUpdatedBy(lastUpdatedBy) .setCreatedTs(createdTs) .setLastUpdatedTs(lastUpdatedTs); private final List<AwsIamRolePermissionRecord> awsIamRolePermissionRecordList = Lists.newArrayList(awsIamRolePermissionRecord); private final AwsIamRoleKmsKeyRecord awsIamRoleKmsKeyRecord = new AwsIamRoleKmsKeyRecord() .setId(iamRoleKmsKeyId) .setAwsIamRoleId(iamRoleId) .setAwsRegion(awsRegion) .setAwsKmsKeyId(awsKmsKeyId) .setCreatedBy(createdBy) .setLastUpdatedBy(lastUpdatedBy) .setCreatedTs(createdTs) .setLastUpdatedTs(lastUpdatedTs); private AwsIamRoleMapper awsIamRoleMapper; private AwsIamRoleDao subject; @Before public void setUp() throws Exception { awsIamRoleMapper = mock(AwsIamRoleMapper.class); subject = new AwsIamRoleDao(awsIamRoleMapper); } @Test public void getIamRole_by_id_returns_record_when_found() { when(awsIamRoleMapper.getIamRoleById(iamRoleId)).thenReturn(awsIamRoleRecord); final Optional<AwsIamRoleRecord> actual = subject.getIamRoleById(iamRoleId); assertThat(actual.isPresent()).isTrue(); assertThat(actual.get()).isEqualTo(awsIamRoleRecord); } @Test public void getIamRole_by_id_returns_empty_when_record_not_found() { when(awsIamRoleMapper.getIamRoleById(iamRoleId)).thenReturn(null); final Optional<AwsIamRoleRecord> actual = subject.getIamRoleById(iamRoleId); assertThat(actual.isPresent()).isFalse(); } @Test public void getIamRole_returns_record_when_found() { String arn = "arn"; when(awsIamRoleMapper.getIamRole(arn)).thenReturn(awsIamRoleRecord); final Optional<AwsIamRoleRecord> actual = subject.getIamRole(arn); assertThat(actual.isPresent()).isTrue(); 
assertThat(actual.get()).isEqualTo(awsIamRoleRecord); } @Test public void getIamRole_returns_empty_when_record_not_found() { when(awsIamRoleMapper.getIamRole(awsIamRoleArn)).thenReturn(null); final Optional<AwsIamRoleRecord> actual = subject.getIamRole(awsIamRoleArn); assertThat(actual.isPresent()).isFalse(); } @Test public void getIamRole_with_arn_returns_record_when_found() { when(awsIamRoleMapper.getIamRole(awsIamRoleArn)).thenReturn(awsIamRoleRecord); final Optional<AwsIamRoleRecord> actual = subject.getIamRole(awsIamRoleArn); assertThat(actual.isPresent()).isTrue(); assertThat(actual.get()).isEqualTo(awsIamRoleRecord); } @Test public void getIamRole_with_arn_returns_empty_when_record_not_found() { String arn = "arn"; when(awsIamRoleMapper.getIamRole(arn)).thenReturn(null); final Optional<AwsIamRoleRecord> actual = subject.getIamRole(arn); assertThat(actual.isPresent()).isFalse(); } @Test public void createIamRole_returns_record_count() { final int recordCount = 1; when(awsIamRoleMapper.createIamRole(awsIamRoleRecord)).thenReturn(recordCount); final int actualCount = subject.createIamRole(awsIamRoleRecord); assertThat(actualCount).isEqualTo(recordCount); } @Test public void getIamRolePermissions_returns_list_of_records() { when(awsIamRoleMapper.getIamRolePermissions(safeDepositBoxId)).thenReturn(awsIamRolePermissionRecordList); List<AwsIamRolePermissionRecord> actual = subject.getIamRolePermissions(safeDepositBoxId); assertThat(actual).isNotEmpty(); assertThat(actual).hasSameElementsAs(awsIamRolePermissionRecordList); } @Test public void createIamRolePermission_returns_record_count() { final int recordCount = 1; when(awsIamRoleMapper.createIamRolePermission(awsIamRolePermissionRecord)).thenReturn(recordCount); final int actualCount = subject.createIamRolePermission(awsIamRolePermissionRecord); assertThat(actualCount).isEqualTo(recordCount); } @Test public void updateIamRolePermission_returns_record_count() { final int recordCount = 1; 
when(awsIamRoleMapper.updateIamRolePermission(awsIamRolePermissionRecord)).thenReturn(recordCount); final int actualCount = subject.updateIamRolePermission(awsIamRolePermissionRecord); assertThat(actualCount).isEqualTo(recordCount); } @Test public void deleteIamRolePermission_returns_record_count() { final int recordCount = 1; when(awsIamRoleMapper.deleteIamRolePermission(safeDepositBoxId, iamRoleId)).thenReturn(recordCount); final int actualCount = subject.deleteIamRolePermission(safeDepositBoxId, iamRoleId); assertThat(actualCount).isEqualTo(recordCount); } @Test public void deleteIamRolePermissions_returns_record_count() { final int recordCount = 1; when(awsIamRoleMapper.deleteIamRolePermissions(safeDepositBoxId)).thenReturn(recordCount); final int actualCount = subject.deleteIamRolePermissions(safeDepositBoxId); assertThat(actualCount).isEqualTo(recordCount); } @Test public void getKmsKey_returns_record_when_found() { when(awsIamRoleMapper.getKmsKey(iamRoleId, awsRegion)).thenReturn(awsIamRoleKmsKeyRecord); final Optional<AwsIamRoleKmsKeyRecord> actual = subject.getKmsKey(iamRoleId, awsRegion); assertThat(actual.isPresent()).isTrue(); assertThat(actual.get()).isEqualTo(awsIamRoleKmsKeyRecord); } @Test public void getKmsKey_returns_empty_when_record_not_found() { when(awsIamRoleMapper.getKmsKey(iamRoleId, awsRegion)).thenReturn(null); final Optional<AwsIamRoleKmsKeyRecord> actual = subject.getKmsKey(iamRoleId, awsRegion); assertThat(actual.isPresent()).isFalse(); } @Test public void createIamRoleKmsKey_returns_record_count() { final int recordCount = 1; when(awsIamRoleMapper.createIamRoleKmsKey(awsIamRoleKmsKeyRecord)).thenReturn(recordCount); final int actualCount = subject.createIamRoleKmsKey(awsIamRoleKmsKeyRecord); assertThat(actualCount).isEqualTo(recordCount); } @Test public void updateIamRoleKmsKey_returns_record_count() { final int recordCount = 1; when(awsIamRoleMapper.updateIamRoleKmsKey(awsIamRoleKmsKeyRecord)).thenReturn(recordCount); final int 
actualCount = subject.updateIamRoleKmsKey(awsIamRoleKmsKeyRecord); assertThat(actualCount).isEqualTo(recordCount); } @Test public void deleteIamRoleById_returns_record_count() { final int recordCount = 1; when(awsIamRoleMapper.deleteIamRoleById(iamRoleId)).thenReturn(recordCount); final int actualCount = subject.deleteIamRoleById(iamRoleId); assertThat(actualCount).isEqualTo(recordCount); } @Test public void deleteKmsKeyById_returns_record_count() { final int recordCount = 1; when(awsIamRoleMapper.deleteKmsKeyById(iamRoleKmsKeyId)).thenReturn(recordCount); final int actualCount = subject.deleteKmsKeyById(iamRoleKmsKeyId); assertThat(actualCount).isEqualTo(recordCount); } }
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package org.apache.isis.core.runtime.systemusinginstallers;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.annotation.Nullable;
import javax.jdo.annotations.PersistenceCapable;
import javax.xml.bind.annotation.XmlElement;

import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;

import org.reflections.Reflections;
import org.reflections.vfs.Vfs;

import org.apache.isis.applib.AppManifest;
import org.apache.isis.applib.annotation.DomainObject;
import org.apache.isis.applib.annotation.DomainObjectLayout;
import org.apache.isis.applib.annotation.DomainService;
import org.apache.isis.applib.annotation.DomainServiceLayout;
import org.apache.isis.applib.annotation.Mixin;
import org.apache.isis.applib.annotation.Nature;
import org.apache.isis.applib.annotation.Programmatic;
import org.apache.isis.applib.annotation.ViewModel;
import org.apache.isis.applib.annotation.ViewModelLayout;
import org.apache.isis.applib.fixturescripts.DiscoverableFixtureScript;
import org.apache.isis.applib.fixturescripts.FixtureScript;
import org.apache.isis.applib.services.classdiscovery.ClassDiscoveryServiceUsingReflections;
import org.apache.isis.core.commons.config.IsisConfiguration;
import org.apache.isis.core.commons.config.IsisConfigurationDefault;
import org.apache.isis.core.commons.factory.InstanceUtil;
import org.apache.isis.core.commons.lang.ClassUtil;
import org.apache.isis.core.metamodel.facetapi.MetaModelRefiner;
import org.apache.isis.core.metamodel.facets.Annotations;
import org.apache.isis.core.metamodel.layoutmetadata.LayoutMetadataReader;
import org.apache.isis.core.metamodel.progmodel.ProgrammingModel;
import org.apache.isis.core.metamodel.services.ServicesInjector;
import org.apache.isis.core.metamodel.specloader.ReflectorConstants;
import org.apache.isis.core.metamodel.specloader.SpecificationLoader;
import org.apache.isis.core.metamodel.specloader.validator.MetaModelValidator;
import org.apache.isis.core.runtime.authentication.AuthenticationManager;
import org.apache.isis.core.runtime.authorization.AuthorizationManager;
import org.apache.isis.core.runtime.fixtures.FixturesInstallerFromConfiguration;
import org.apache.isis.core.runtime.services.ServicesInstallerFromAnnotation;
import org.apache.isis.core.runtime.services.ServicesInstallerFromConfiguration;
import org.apache.isis.core.runtime.services.ServicesInstallerFromConfigurationAndAnnotation;
import org.apache.isis.core.runtime.system.IsisSystemException;
import org.apache.isis.core.runtime.system.SystemConstants;
import org.apache.isis.objectstore.jdo.service.RegisterEntities;
import org.apache.isis.progmodels.dflt.JavaReflectorHelper;
import org.apache.isis.progmodels.dflt.ProgrammingModelFacetsJava5;

/**
 * Base class that wires up the Isis runtime from an {@link AppManifest}:
 * scans the manifest's module packages for annotated types, registers them
 * in the {@link AppManifest.Registry}, and resolves the configured services
 * plus the authentication and authorization managers.
 */
public abstract class IsisComponentProvider {

    //region > constructor, fields

    // the manifest this provider was built from (never null; see constructor)
    private final AppManifest appManifest;
    private final IsisConfigurationDefault configuration;
    // services resolved from configuration + annotations in the constructor
    protected final List<Object> services;
    protected final AuthenticationManager authenticationManager;
    protected final AuthorizationManager authorizationManager;

    /**
     * Registers the manifest's types and services and stores the supplied
     * managers. Side effects during construction: mutates the global
     * {@link AppManifest.Registry} (via {@code findAndRegisterTypes}) and
     * writes several configuration properties.
     *
     * @param appManifest required; rejected with IllegalArgumentException if null
     * @param configuration cast to {@link IsisConfigurationDefault} (see REVIEW note)
     * @param authenticationManager stored as-is
     * @param authorizationManager stored as-is
     */
    public IsisComponentProvider(
            final AppManifest appManifest,
            final IsisConfiguration configuration,
            final AuthenticationManager authenticationManager,
            final AuthorizationManager authorizationManager){

        if(appManifest == null) {
            throw new IllegalArgumentException("AppManifest is required");
        }

        this.appManifest = appManifest;
        this.configuration = (IsisConfigurationDefault) configuration; // REVIEW: HACKY

        putAppManifestKey(appManifest);
        findAndRegisterTypes(appManifest);
        specifyServicesAndRegisteredEntitiesUsing(appManifest);
        addToConfigurationUsing(appManifest);

        this.services = new ServicesInstallerFromConfigurationAndAnnotation(getConfiguration()).getServices();

        final String fixtureClassNamesCsv = classNamesFrom(getAppManifest().getFixtures());
        putConfigurationProperty(FixturesInstallerFromConfiguration.FIXTURES, fixtureClassNamesCsv);

        this.authenticationManager = authenticationManager;
        this.authorizationManager = authorizationManager;
    }

    public AppManifest getAppManifest() {
        return appManifest;
    }

    public IsisConfigurationDefault getConfiguration() {
        return configuration;
    }

    //endregion

    //region > helpers (appManifest)

    /**
     * Records the manifest's class name under the app-manifest config key.
     */
    private void putAppManifestKey(final AppManifest appManifest) {
        // required to prevent RegisterEntities validation from complaining
        // if it can't find any @PersistenceCapable entities in a module
        // that contains only services.
        putConfigurationProperty(
                SystemConstants.APP_MANIFEST_KEY, appManifest.getClass().getName()
        );
    }

    /**
     * Scans the manifest's module packages (plus the framework-provided
     * service packages) with Reflections and populates the global
     * {@link AppManifest.Registry} with the discovered domain services,
     * persistence-capable entities, fixture scripts, mixins, view models and
     * XML-element types. Results are filtered back to the scanned packages
     * and stripped of anonymous classes (see comment below on the
     * Reflections prefix-matching quirk).
     */
    private void findAndRegisterTypes(final AppManifest appManifest) {
        final Iterable<String> modulePackages = modulePackageNamesFrom(appManifest);
        final AppManifest.Registry registry = AppManifest.Registry.instance();

        final List<String> moduleAndFrameworkPackages = Lists.newArrayList();
        moduleAndFrameworkPackages.addAll(AppManifest.Registry.FRAMEWORK_PROVIDED_SERVICES);
        Iterables.addAll(moduleAndFrameworkPackages, modulePackages);

        Vfs.setDefaultURLTypes(ClassDiscoveryServiceUsingReflections.getUrlTypes());
        final Reflections reflections = new Reflections(moduleAndFrameworkPackages);

        final Set<Class<?>> domainServiceTypes = Sets.newLinkedHashSet();
        domainServiceTypes.addAll(reflections.getTypesAnnotatedWith(DomainService.class));
        domainServiceTypes.addAll(reflections.getTypesAnnotatedWith(DomainServiceLayout.class));

        final Set<Class<?>> persistenceCapableTypes = Sets.newLinkedHashSet();
        persistenceCapableTypes.addAll(reflections.getTypesAnnotatedWith(PersistenceCapable.class));

        // the fixtureScript types are introspected just to provide a drop-down when running fixture scripts
        // in prototyping mode (though they may be introspected lazily if actually run).
        // we therefore try to limit the set of fixture types eagerly introspected at startup
        final Set<Class<? extends FixtureScript>> fixtureScriptTypes = Sets.newLinkedHashSet();
        fixtureScriptTypes.addAll(
                FluentIterable.from(reflections.getSubTypesOf(FixtureScript.class)).
                filter(new Predicate<Class<?>>(){
                    @Override
                    public boolean apply(@Nullable final Class<?> aClass) {
                        // ignore as a fixture script if annotated with @Programmatic
                        // (though directly implementing DiscoverableFixtureScript takes precedence and will NOT ignore)
                        return DiscoverableFixtureScript.class.isAssignableFrom(aClass) ||
                                Annotations.getAnnotation(aClass, Programmatic.class) == null;
                    }
                })
                .toList());

        final Set<Class<?>> domainObjectTypes = Sets.newLinkedHashSet();
        domainObjectTypes.addAll(reflections.getTypesAnnotatedWith(DomainObject.class));
        domainObjectTypes.addAll(reflections.getTypesAnnotatedWith(DomainObjectLayout.class));

        // mixins: both @Mixin-annotated types and @DomainObject(nature = MIXIN)
        final Set<Class<?>> mixinTypes = Sets.newHashSet();
        mixinTypes.addAll(reflections.getTypesAnnotatedWith(Mixin.class));
        mixinTypes.addAll(
                Lists.newArrayList(Iterables.filter(domainObjectTypes, new Predicate<Class<?>>() {
                    @Override
                    public boolean apply(@Nullable final Class<?> input) {
                        if(input == null) { return false; }
                        final DomainObject annotation = input.getAnnotation(DomainObject.class);
                        return annotation != null && annotation.nature() == Nature.MIXIN;
                    }
                }))
        );

        final Set<Class<?>> viewModelTypes = Sets.newLinkedHashSet();
        viewModelTypes.addAll(reflections.getTypesAnnotatedWith(ViewModel.class));
        viewModelTypes.addAll(reflections.getTypesAnnotatedWith(ViewModelLayout.class));

        final Set<Class<?>> xmlElementTypes = Sets.newLinkedHashSet();
        xmlElementTypes.addAll(reflections.getTypesAnnotatedWith(XmlElement.class));

        // add in any explicitly registered services...
        domainServiceTypes.addAll(appManifest.getAdditionalServices());

        // Reflections seems to have a bug whereby it will return some classes outside the
        // set of packages that we want (think this is to do with the fact that it matches based on
        // the prefix and gets it wrong); so we double check and filter out types outside our
        // required set of packages.

        // for a tiny bit of efficiency, we append a '.' to each package name here, outside the loops
        List<String> packagesWithDotSuffix =
        FluentIterable.from(moduleAndFrameworkPackages).transform(new Function<String, String>() {
            @Nullable @Override
            public String apply(@Nullable final String s) {
                return s != null ? s + "." : null;
            }
        }).toList();

        registry.setDomainServiceTypes(withinPackageAndNotAnonymous(packagesWithDotSuffix, domainServiceTypes));
        registry.setPersistenceCapableTypes(withinPackageAndNotAnonymous(packagesWithDotSuffix, persistenceCapableTypes));
        registry.setFixtureScriptTypes(withinPackageAndNotAnonymous(packagesWithDotSuffix, fixtureScriptTypes));
        registry.setMixinTypes(withinPackageAndNotAnonymous(packagesWithDotSuffix, mixinTypes));
        registry.setDomainObjectTypes(withinPackageAndNotAnonymous(packagesWithDotSuffix, domainObjectTypes));
        registry.setViewModelTypes(withinPackageAndNotAnonymous(packagesWithDotSuffix, viewModelTypes));
        registry.setXmlElementTypes(withinPackageAndNotAnonymous(packagesWithDotSuffix, xmlElementTypes));
    }

    /**
     * Returns the subset of {@code classes} whose name starts with one of the
     * given package prefixes and that are not anonymous classes.
     *
     * @param packagesWithDotSuffix package names already suffixed with '.'
     * @param classes candidate classes
     * @return filtered classes, iteration order preserved
     */
    static <T> Set<Class<? extends T>> withinPackageAndNotAnonymous(
            final List<String> packagesWithDotSuffix,
            final Set<Class<? extends T>> classes) {
        Set<Class<? extends T>> classesWithin = Sets.newLinkedHashSet();
        for (Class<? extends T> clz : classes) {
            final String className = clz.getName();
            if(containedWithin(packagesWithDotSuffix, className) && notAnonymous(clz)) {
                classesWithin.add(clz);
            }
        }
        return classesWithin;
    }

    /** Whether the class name starts with any of the given package prefixes. */
    static private boolean containedWithin(final List<String> packagesWithDotSuffix, final String className) {
        for (String packageWithDotSuffix : packagesWithDotSuffix) {
            if(className.startsWith(packageWithDotSuffix)) {
                return true;
            }
        }
        return false;
    }

    private static <T> boolean notAnonymous(final Class<?
extends T> clz) { try { return !clz.isAnonymousClass(); } catch(NoClassDefFoundError error) { return false; // ignore, assume anonymous } } private void specifyServicesAndRegisteredEntitiesUsing(final AppManifest appManifest) { final Iterable<String> packageNames = modulePackageNamesFrom(appManifest); final String packageNamesCsv = Joiner.on(',').join(packageNames); putConfigurationProperty(ServicesInstallerFromAnnotation.PACKAGE_PREFIX_KEY, packageNamesCsv); putConfigurationProperty(RegisterEntities.PACKAGE_PREFIX_KEY, packageNamesCsv); final List<Class<?>> additionalServices = appManifest.getAdditionalServices(); if(additionalServices != null) { final String additionalServicesCsv = classNamesFrom(additionalServices); appendToPropertyCsvValue(ServicesInstallerFromConfiguration.SERVICES_KEY, additionalServicesCsv); } } private void appendToPropertyCsvValue(final String servicesKey, final String additionalServicesCsv) { final String existingServicesCsv = configuration.getString(servicesKey); final String servicesCsv = join(existingServicesCsv, additionalServicesCsv); putConfigurationProperty(servicesKey, servicesCsv); } private static String join(final String csv1, final String csv2) { if (csv1 == null) { return csv2; } if (csv2 == null) { return csv1; } return Joiner.on(",").join(csv1, csv2); } private Iterable<String> modulePackageNamesFrom(final AppManifest appManifest) { List<Class<?>> modules = appManifest.getModules(); if (modules == null || modules.isEmpty()) { throw new IllegalArgumentException( "If an appManifest is provided then it must return a non-empty set of modules"); } return Iterables.transform(modules, ClassUtil.Functions.packageNameOf()); } protected String classNamesFrom(final List<?> objectsOrClasses) { if (objectsOrClasses == null) { return null; } final Iterable<String> fixtureClassNames = Iterables.transform(objectsOrClasses, classNameOf()); return Joiner.on(',').join(fixtureClassNames); } private static Function<Object, String> classNameOf() 
{ return new Function<Object, String>() { @Nullable @Override public String apply(final Object input) { Class<?> aClass = input instanceof Class ? (Class<?>) input : input.getClass(); return aClass.getName(); } }; } private void addToConfigurationUsing(final AppManifest appManifest) { final Map<String, String> configurationProperties = appManifest.getConfigurationProperties(); if (configurationProperties != null) { for (Map.Entry<String, String> configProp : configurationProperties.entrySet()) { addConfigurationProperty(configProp.getKey(), configProp.getValue()); } } } /** * TODO: hacky, {@link IsisConfiguration} is meant to be immutable... */ void putConfigurationProperty(final String key, final String value) { if(value == null) { return; } this.configuration.put(key, value); } /** * TODO: hacky, {@link IsisConfiguration} is meant to be immutable... */ void addConfigurationProperty(final String key, final String value) { if(value == null) { return; } this.configuration.add(key, value); } //endregion //region > provideAuth* public AuthenticationManager provideAuthenticationManager() { return authenticationManager; } public AuthorizationManager provideAuthorizationManager() { return authorizationManager; } //endregion //region > provideServiceInjector public ServicesInjector provideServiceInjector(final IsisConfiguration configuration) { return new ServicesInjector(services, configuration); } //endregion //region > provideSpecificationLoader public SpecificationLoader provideSpecificationLoader( final ServicesInjector servicesInjector, final Collection<MetaModelRefiner> metaModelRefiners) throws IsisSystemException { final ProgrammingModel programmingModel = createProgrammingModel(); final MetaModelValidator mmv = createMetaModelValidator(); final List<LayoutMetadataReader> layoutMetadataReaders = createLayoutMetadataReaders(); return JavaReflectorHelper.createObjectReflector( configuration, programmingModel, metaModelRefiners, layoutMetadataReaders, mmv, 
servicesInjector); } protected MetaModelValidator createMetaModelValidator() { final String metaModelValidatorClassName = configuration.getString( ReflectorConstants.META_MODEL_VALIDATOR_CLASS_NAME, ReflectorConstants.META_MODEL_VALIDATOR_CLASS_NAME_DEFAULT); return InstanceUtil.createInstance(metaModelValidatorClassName, MetaModelValidator.class); } protected ProgrammingModel createProgrammingModel() { final ProgrammingModel programmingModel = new ProgrammingModelFacetsJava5(configuration); ProgrammingModel.Util.includeFacetFactories(configuration, programmingModel); ProgrammingModel.Util.excludeFacetFactories(configuration, programmingModel); return programmingModel; } protected List<LayoutMetadataReader> createLayoutMetadataReaders() { final List<LayoutMetadataReader> layoutMetadataReaders = Lists.newArrayList(); final String[] layoutMetadataReaderClassNames = configuration.getList( ReflectorConstants.LAYOUT_METADATA_READER_LIST, ReflectorConstants.LAYOUT_METADATA_READER_LIST_DEFAULT); if (layoutMetadataReaderClassNames != null) { for (final String layoutMetadataReaderClassName : layoutMetadataReaderClassNames) { final LayoutMetadataReader layoutMetadataReader = InstanceUtil.createInstance(layoutMetadataReaderClassName, LayoutMetadataReader.class); layoutMetadataReaders.add(layoutMetadataReader); } } return layoutMetadataReaders; } //endregion }