gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.sql.planner.iterative.rule; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import io.prestosql.Session; import io.prestosql.execution.Lifespan; import io.prestosql.geospatial.KdbTree; import io.prestosql.geospatial.KdbTreeUtils; import io.prestosql.matching.Capture; import io.prestosql.matching.Captures; import io.prestosql.matching.Pattern; import io.prestosql.metadata.Metadata; import io.prestosql.metadata.QualifiedObjectName; import io.prestosql.metadata.Split; import io.prestosql.metadata.TableHandle; import io.prestosql.metadata.TableLayoutResult; import io.prestosql.spi.Page; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ColumnHandle; import io.prestosql.spi.connector.ConnectorPageSource; import io.prestosql.spi.connector.Constraint; import io.prestosql.spi.type.ArrayType; import io.prestosql.spi.type.TypeSignature; import io.prestosql.split.PageSourceManager; import io.prestosql.split.SplitManager; import io.prestosql.split.SplitSource; import io.prestosql.split.SplitSource.SplitBatch; import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.iterative.Rule; import io.prestosql.sql.planner.iterative.Rule.Context; import 
io.prestosql.sql.planner.iterative.Rule.Result; import io.prestosql.sql.planner.plan.Assignments; import io.prestosql.sql.planner.plan.FilterNode; import io.prestosql.sql.planner.plan.JoinNode; import io.prestosql.sql.planner.plan.PlanNode; import io.prestosql.sql.planner.plan.PlanNodeId; import io.prestosql.sql.planner.plan.ProjectNode; import io.prestosql.sql.planner.plan.SpatialJoinNode; import io.prestosql.sql.planner.plan.UnnestNode; import io.prestosql.sql.tree.Cast; import io.prestosql.sql.tree.ComparisonExpression; import io.prestosql.sql.tree.Expression; import io.prestosql.sql.tree.FunctionCall; import io.prestosql.sql.tree.QualifiedName; import io.prestosql.sql.tree.StringLiteral; import io.prestosql.sql.tree.SymbolReference; import java.io.IOException; import java.io.UncheckedIOException; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static io.airlift.concurrent.MoreFutures.getFutureValue; import static io.prestosql.SystemSessionProperties.getSpatialPartitioningTableName; import static io.prestosql.SystemSessionProperties.isSpatialJoinEnabled; import static io.prestosql.matching.Capture.newCapture; import static io.prestosql.spi.StandardErrorCode.INVALID_SPATIAL_PARTITIONING; import static io.prestosql.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.UNGROUPED_SCHEDULING; import static io.prestosql.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED; import static io.prestosql.spi.type.DoubleType.DOUBLE; import static io.prestosql.spi.type.IntegerType.INTEGER; import static io.prestosql.spi.type.TypeSignature.parseTypeSignature; import static io.prestosql.spi.type.VarcharType.VARCHAR; import static io.prestosql.sql.planner.ExpressionNodeInliner.replaceExpression; import static 
io.prestosql.sql.planner.SymbolsExtractor.extractUnique; import static io.prestosql.sql.planner.plan.JoinNode.Type.INNER; import static io.prestosql.sql.planner.plan.JoinNode.Type.LEFT; import static io.prestosql.sql.planner.plan.Patterns.filter; import static io.prestosql.sql.planner.plan.Patterns.join; import static io.prestosql.sql.planner.plan.Patterns.source; import static io.prestosql.sql.tree.ComparisonExpression.Operator.LESS_THAN; import static io.prestosql.sql.tree.ComparisonExpression.Operator.LESS_THAN_OR_EQUAL; import static io.prestosql.util.SpatialJoinUtils.extractSupportedSpatialComparisons; import static io.prestosql.util.SpatialJoinUtils.extractSupportedSpatialFunctions; import static java.lang.String.format; import static java.util.Objects.requireNonNull; /** * Applies to broadcast spatial joins, inner and left, expressed via ST_Contains, * ST_Intersects and ST_Distance functions. * <p> * For example: * <ul> * <li>SELECT ... FROM a, b WHERE ST_Contains(b.geometry, a.geometry)</li> * <li>SELECT ... FROM a, b WHERE ST_Intersects(b.geometry, a.geometry)</li> * <li>SELECT ... FROM a, b WHERE ST_Distance(b.geometry, a.geometry) <= 300</li> * <li>SELECT ... FROM a, b WHERE 15.5 > ST_Distance(b.geometry, a.geometry)</li> * </ul> * <p> * Joins expressed via ST_Contains and ST_Intersects functions must match all of * the following criteria: * <p> * - arguments of the spatial function are non-scalar expressions; * - one of the arguments uses symbols from left side of the join, the other from right. * <p> * Joins expressed via ST_Distance function must use less than or less than or equals operator * to compare ST_Distance value with a radius and must match all of the following criteria: * <p> * - arguments of the spatial function are non-scalar expressions; * - one of the arguments uses symbols from left side of the join, the other from right; * - radius is either scalar expression or uses symbols only from the right (build) side of the join. 
* <p> * For inner join, replaces cross join node and a qualifying filter on top with a single * spatial join node. * <p> * For both inner and left joins, pushes non-trivial expressions of the spatial function * arguments and radius into projections on top of join child nodes. * <p> * Examples: * <pre> * Point-in-polygon inner join * ST_Contains(ST_GeometryFromText(a.wkt), ST_Point(b.longitude, b.latitude)) * becomes a spatial join * ST_Contains(st_geometryfromtext, st_point) * with st_geometryfromtext -> 'ST_GeometryFromText(a.wkt)' and * st_point -> 'ST_Point(b.longitude, b.latitude)' projections on top of child nodes. * * Distance query * ST_Distance(ST_Point(a.lon, a.lat), ST_Point(b.lon, b.lat)) <= 10 / (111.321 * cos(radians(b.lat))) * becomes a spatial join * ST_Distance(st_point_a, st_point_b) <= radius * with st_point_a -> 'ST_Point(a.lon, a.lat)', st_point_b -> 'ST_Point(b.lon, b.lat)' * and radius -> '10 / (111.321 * cos(radians(b.lat)))' projections on top of child nodes. * </pre> */ public class ExtractSpatialJoins { private static final TypeSignature GEOMETRY_TYPE_SIGNATURE = parseTypeSignature("Geometry"); private static final String KDB_TREE_TYPENAME = "KdbTree"; private final Metadata metadata; private final SplitManager splitManager; private final PageSourceManager pageSourceManager; public ExtractSpatialJoins(Metadata metadata, SplitManager splitManager, PageSourceManager pageSourceManager) { this.metadata = requireNonNull(metadata, "metadata is null"); this.splitManager = requireNonNull(splitManager, "splitManager is null"); this.pageSourceManager = requireNonNull(pageSourceManager, "pageSourceManager is null"); } public Set<Rule<?>> rules() { return ImmutableSet.of( new ExtractSpatialInnerJoin(metadata, splitManager, pageSourceManager), new ExtractSpatialLeftJoin(metadata, splitManager, pageSourceManager)); } @VisibleForTesting public static final class ExtractSpatialInnerJoin implements Rule<FilterNode> { private static final Capture<JoinNode> 
JOIN = newCapture(); private static final Pattern<FilterNode> PATTERN = filter() .with(source().matching(join().capturedAs(JOIN).matching(JoinNode::isCrossJoin))); private final Metadata metadata; private final SplitManager splitManager; private final PageSourceManager pageSourceManager; public ExtractSpatialInnerJoin(Metadata metadata, SplitManager splitManager, PageSourceManager pageSourceManager) { this.metadata = requireNonNull(metadata, "metadata is null"); this.splitManager = requireNonNull(splitManager, "splitManager is null"); this.pageSourceManager = requireNonNull(pageSourceManager, "pageSourceManager is null"); } @Override public boolean isEnabled(Session session) { return isSpatialJoinEnabled(session); } @Override public Pattern<FilterNode> getPattern() { return PATTERN; } @Override public Result apply(FilterNode node, Captures captures, Context context) { JoinNode joinNode = captures.get(JOIN); Expression filter = node.getPredicate(); List<FunctionCall> spatialFunctions = extractSupportedSpatialFunctions(filter); for (FunctionCall spatialFunction : spatialFunctions) { Result result = tryCreateSpatialJoin(context, joinNode, filter, node.getId(), node.getOutputSymbols(), spatialFunction, Optional.empty(), metadata, splitManager, pageSourceManager); if (!result.isEmpty()) { return result; } } List<ComparisonExpression> spatialComparisons = extractSupportedSpatialComparisons(filter); for (ComparisonExpression spatialComparison : spatialComparisons) { Result result = tryCreateSpatialJoin(context, joinNode, filter, node.getId(), node.getOutputSymbols(), spatialComparison, metadata, splitManager, pageSourceManager); if (!result.isEmpty()) { return result; } } return Result.empty(); } } @VisibleForTesting public static final class ExtractSpatialLeftJoin implements Rule<JoinNode> { private static final Pattern<JoinNode> PATTERN = join().matching(node -> node.getCriteria().isEmpty() && node.getFilter().isPresent() && node.getType() == LEFT); private final 
Metadata metadata; private final SplitManager splitManager; private final PageSourceManager pageSourceManager; public ExtractSpatialLeftJoin(Metadata metadata, SplitManager splitManager, PageSourceManager pageSourceManager) { this.metadata = requireNonNull(metadata, "metadata is null"); this.splitManager = requireNonNull(splitManager, "splitManager is null"); this.pageSourceManager = requireNonNull(pageSourceManager, "pageSourceManager is null"); } @Override public boolean isEnabled(Session session) { return isSpatialJoinEnabled(session); } @Override public Pattern<JoinNode> getPattern() { return PATTERN; } @Override public Result apply(JoinNode joinNode, Captures captures, Context context) { Expression filter = joinNode.getFilter().get(); List<FunctionCall> spatialFunctions = extractSupportedSpatialFunctions(filter); for (FunctionCall spatialFunction : spatialFunctions) { Result result = tryCreateSpatialJoin(context, joinNode, filter, joinNode.getId(), joinNode.getOutputSymbols(), spatialFunction, Optional.empty(), metadata, splitManager, pageSourceManager); if (!result.isEmpty()) { return result; } } List<ComparisonExpression> spatialComparisons = extractSupportedSpatialComparisons(filter); for (ComparisonExpression spatialComparison : spatialComparisons) { Result result = tryCreateSpatialJoin(context, joinNode, filter, joinNode.getId(), joinNode.getOutputSymbols(), spatialComparison, metadata, splitManager, pageSourceManager); if (!result.isEmpty()) { return result; } } return Result.empty(); } } private static Result tryCreateSpatialJoin( Context context, JoinNode joinNode, Expression filter, PlanNodeId nodeId, List<Symbol> outputSymbols, ComparisonExpression spatialComparison, Metadata metadata, SplitManager splitManager, PageSourceManager pageSourceManager) { PlanNode leftNode = joinNode.getLeft(); PlanNode rightNode = joinNode.getRight(); List<Symbol> leftSymbols = leftNode.getOutputSymbols(); List<Symbol> rightSymbols = rightNode.getOutputSymbols(); 
Expression radius; Optional<Symbol> newRadiusSymbol; ComparisonExpression newComparison; if (spatialComparison.getOperator() == LESS_THAN || spatialComparison.getOperator() == LESS_THAN_OR_EQUAL) { // ST_Distance(a, b) <= r radius = spatialComparison.getRight(); Set<Symbol> radiusSymbols = extractUnique(radius); if (radiusSymbols.isEmpty() || (rightSymbols.containsAll(radiusSymbols) && containsNone(leftSymbols, radiusSymbols))) { newRadiusSymbol = newRadiusSymbol(context, radius); newComparison = new ComparisonExpression(spatialComparison.getOperator(), spatialComparison.getLeft(), toExpression(newRadiusSymbol, radius)); } else { return Result.empty(); } } else { // r >= ST_Distance(a, b) radius = spatialComparison.getLeft(); Set<Symbol> radiusSymbols = extractUnique(radius); if (radiusSymbols.isEmpty() || (rightSymbols.containsAll(radiusSymbols) && containsNone(leftSymbols, radiusSymbols))) { newRadiusSymbol = newRadiusSymbol(context, radius); newComparison = new ComparisonExpression(spatialComparison.getOperator().flip(), spatialComparison.getRight(), toExpression(newRadiusSymbol, radius)); } else { return Result.empty(); } } Expression newFilter = replaceExpression(filter, ImmutableMap.of(spatialComparison, newComparison)); PlanNode newRightNode = newRadiusSymbol.map(symbol -> addProjection(context, rightNode, symbol, radius)).orElse(rightNode); JoinNode newJoinNode = new JoinNode( joinNode.getId(), joinNode.getType(), leftNode, newRightNode, joinNode.getCriteria(), joinNode.getOutputSymbols(), Optional.of(newFilter), joinNode.getLeftHashSymbol(), joinNode.getRightHashSymbol(), joinNode.getDistributionType()); return tryCreateSpatialJoin(context, newJoinNode, newFilter, nodeId, outputSymbols, (FunctionCall) newComparison.getLeft(), Optional.of(newComparison.getRight()), metadata, splitManager, pageSourceManager); } private static Result tryCreateSpatialJoin( Context context, JoinNode joinNode, Expression filter, PlanNodeId nodeId, List<Symbol> outputSymbols, 
FunctionCall spatialFunction, Optional<Expression> radius, Metadata metadata, SplitManager splitManager, PageSourceManager pageSourceManager) { // TODO Add support for distributed left spatial joins Optional<String> spatialPartitioningTableName = joinNode.getType() == INNER ? getSpatialPartitioningTableName(context.getSession()) : Optional.empty(); Optional<KdbTree> kdbTree = spatialPartitioningTableName.map(tableName -> loadKdbTree(tableName, context.getSession(), metadata, splitManager, pageSourceManager)); List<Expression> arguments = spatialFunction.getArguments(); verify(arguments.size() == 2); Expression firstArgument = arguments.get(0); Expression secondArgument = arguments.get(1); Set<Symbol> firstSymbols = extractUnique(firstArgument); Set<Symbol> secondSymbols = extractUnique(secondArgument); if (firstSymbols.isEmpty() || secondSymbols.isEmpty()) { return Result.empty(); } Optional<Symbol> newFirstSymbol = newGeometrySymbol(context, firstArgument, metadata); Optional<Symbol> newSecondSymbol = newGeometrySymbol(context, secondArgument, metadata); PlanNode leftNode = joinNode.getLeft(); PlanNode rightNode = joinNode.getRight(); PlanNode newLeftNode; PlanNode newRightNode; // Check if the order of arguments of the spatial function matches the order of join sides int alignment = checkAlignment(joinNode, firstSymbols, secondSymbols); if (alignment > 0) { newLeftNode = newFirstSymbol.map(symbol -> addProjection(context, leftNode, symbol, firstArgument)).orElse(leftNode); newRightNode = newSecondSymbol.map(symbol -> addProjection(context, rightNode, symbol, secondArgument)).orElse(rightNode); } else if (alignment < 0) { newLeftNode = newSecondSymbol.map(symbol -> addProjection(context, leftNode, symbol, secondArgument)).orElse(leftNode); newRightNode = newFirstSymbol.map(symbol -> addProjection(context, rightNode, symbol, firstArgument)).orElse(rightNode); } else { return Result.empty(); } Expression newFirstArgument = toExpression(newFirstSymbol, 
firstArgument); Expression newSecondArgument = toExpression(newSecondSymbol, secondArgument); Optional<Symbol> leftPartitionSymbol = Optional.empty(); Optional<Symbol> rightPartitionSymbol = Optional.empty(); if (kdbTree.isPresent()) { leftPartitionSymbol = Optional.of(context.getSymbolAllocator().newSymbol("pid", INTEGER)); rightPartitionSymbol = Optional.of(context.getSymbolAllocator().newSymbol("pid", INTEGER)); if (alignment > 0) { newLeftNode = addPartitioningNodes(context, newLeftNode, leftPartitionSymbol.get(), kdbTree.get(), newFirstArgument, Optional.empty()); newRightNode = addPartitioningNodes(context, newRightNode, rightPartitionSymbol.get(), kdbTree.get(), newSecondArgument, radius); } else { newLeftNode = addPartitioningNodes(context, newLeftNode, leftPartitionSymbol.get(), kdbTree.get(), newSecondArgument, Optional.empty()); newRightNode = addPartitioningNodes(context, newRightNode, rightPartitionSymbol.get(), kdbTree.get(), newFirstArgument, radius); } } Expression newSpatialFunction = new FunctionCall(spatialFunction.getName(), ImmutableList.of(newFirstArgument, newSecondArgument)); Expression newFilter = replaceExpression(filter, ImmutableMap.of(spatialFunction, newSpatialFunction)); return Result.ofPlanNode(new SpatialJoinNode( nodeId, SpatialJoinNode.Type.fromJoinNodeType(joinNode.getType()), newLeftNode, newRightNode, outputSymbols, newFilter, leftPartitionSymbol, rightPartitionSymbol, kdbTree.map(KdbTreeUtils::toJson))); } private static KdbTree loadKdbTree(String tableName, Session session, Metadata metadata, SplitManager splitManager, PageSourceManager pageSourceManager) { QualifiedObjectName name = toQualifiedObjectName(tableName, session.getCatalog().get(), session.getSchema().get()); TableHandle tableHandle = metadata.getTableHandle(session, name) .orElseThrow(() -> new PrestoException(INVALID_SPATIAL_PARTITIONING, format("Table not found: %s", name))); Map<String, ColumnHandle> columnHandles = metadata.getColumnHandles(session, 
tableHandle); List<ColumnHandle> visibleColumnHandles = columnHandles.values().stream() .filter(handle -> !metadata.getColumnMetadata(session, tableHandle, handle).isHidden()) .collect(toImmutableList()); checkSpatialPartitioningTable(visibleColumnHandles.size() == 1, "Expected single column for table %s, but found %s columns", name, columnHandles.size()); ColumnHandle kdbTreeColumn = Iterables.getOnlyElement(visibleColumnHandles); List<TableLayoutResult> layouts = metadata.getLayouts(session, tableHandle, Constraint.alwaysTrue(), Optional.of(ImmutableSet.of(kdbTreeColumn))); checkSpatialPartitioningTable(!layouts.isEmpty(), "Table is empty: %s", name); Optional<KdbTree> kdbTree = Optional.empty(); try (SplitSource splitSource = splitManager.getSplits(session, layouts.get(0).getLayout().getHandle(), UNGROUPED_SCHEDULING)) { while (!Thread.currentThread().isInterrupted()) { SplitBatch splitBatch = getFutureValue(splitSource.getNextBatch(NOT_PARTITIONED, Lifespan.taskWide(), 1000)); List<Split> splits = splitBatch.getSplits(); for (Split split : splits) { try (ConnectorPageSource pageSource = pageSourceManager.createPageSource(session, split, ImmutableList.of(kdbTreeColumn))) { do { getFutureValue(pageSource.isBlocked()); Page page = pageSource.getNextPage(); if (page != null && page.getPositionCount() > 0) { checkSpatialPartitioningTable(!kdbTree.isPresent(), "Expected exactly one row for table %s, but found more", name); checkSpatialPartitioningTable(page.getPositionCount() == 1, "Expected exactly one row for table %s, but found %s rows", name, page.getPositionCount()); String kdbTreeJson = VARCHAR.getSlice(page.getBlock(0), 0).toStringUtf8(); try { kdbTree = Optional.of(KdbTreeUtils.fromJson(kdbTreeJson)); } catch (IllegalArgumentException e) { checkSpatialPartitioningTable(false, "Invalid JSON string for KDB tree: %s", e.getMessage()); } } } while (!pageSource.isFinished()); } catch (IOException e) { throw new UncheckedIOException(e); } } if 
(splitBatch.isLastBatch()) { break; } } } checkSpatialPartitioningTable(kdbTree.isPresent(), "Expected exactly one row for table %s, but got none", name); return kdbTree.get(); } private static void checkSpatialPartitioningTable(boolean condition, String message, Object... arguments) { if (!condition) { throw new PrestoException(INVALID_SPATIAL_PARTITIONING, format(message, arguments)); } } private static QualifiedObjectName toQualifiedObjectName(String name, String catalog, String schema) { ImmutableList<String> ids = ImmutableList.copyOf(Splitter.on('.').split(name)); if (ids.size() == 3) { return new QualifiedObjectName(ids.get(0), ids.get(1), ids.get(2)); } if (ids.size() == 2) { return new QualifiedObjectName(catalog, ids.get(0), ids.get(1)); } if (ids.size() == 1) { return new QualifiedObjectName(catalog, schema, ids.get(0)); } throw new PrestoException(INVALID_SPATIAL_PARTITIONING, format("Invalid name: %s", name)); } private static int checkAlignment(JoinNode joinNode, Set<Symbol> maybeLeftSymbols, Set<Symbol> maybeRightSymbols) { List<Symbol> leftSymbols = joinNode.getLeft().getOutputSymbols(); List<Symbol> rightSymbols = joinNode.getRight().getOutputSymbols(); if (leftSymbols.containsAll(maybeLeftSymbols) && containsNone(leftSymbols, maybeRightSymbols) && rightSymbols.containsAll(maybeRightSymbols) && containsNone(rightSymbols, maybeLeftSymbols)) { return 1; } if (leftSymbols.containsAll(maybeRightSymbols) && containsNone(leftSymbols, maybeLeftSymbols) && rightSymbols.containsAll(maybeLeftSymbols) && containsNone(rightSymbols, maybeRightSymbols)) { return -1; } return 0; } private static Expression toExpression(Optional<Symbol> optionalSymbol, Expression defaultExpression) { return optionalSymbol.map(symbol -> (Expression) symbol.toSymbolReference()).orElse(defaultExpression); } private static Optional<Symbol> newGeometrySymbol(Context context, Expression expression, Metadata metadata) { if (expression instanceof SymbolReference) { return 
Optional.empty(); } return Optional.of(context.getSymbolAllocator().newSymbol(expression, metadata.getType(GEOMETRY_TYPE_SIGNATURE))); } private static Optional<Symbol> newRadiusSymbol(Context context, Expression expression) { if (expression instanceof SymbolReference) { return Optional.empty(); } return Optional.of(context.getSymbolAllocator().newSymbol(expression, DOUBLE)); } private static PlanNode addProjection(Context context, PlanNode node, Symbol symbol, Expression expression) { Assignments.Builder projections = Assignments.builder(); for (Symbol outputSymbol : node.getOutputSymbols()) { projections.putIdentity(outputSymbol); } projections.put(symbol, expression); return new ProjectNode(context.getIdAllocator().getNextId(), node, projections.build()); } private static PlanNode addPartitioningNodes(Context context, PlanNode node, Symbol partitionSymbol, KdbTree kdbTree, Expression geometry, Optional<Expression> radius) { Assignments.Builder projections = Assignments.builder(); for (Symbol outputSymbol : node.getOutputSymbols()) { projections.putIdentity(outputSymbol); } ImmutableList.Builder<Expression> partitioningArguments = ImmutableList.<Expression>builder() .add(new Cast(new StringLiteral(KdbTreeUtils.toJson(kdbTree)), KDB_TREE_TYPENAME)) .add(geometry); radius.map(partitioningArguments::add); FunctionCall partitioningFunction = new FunctionCall(QualifiedName.of("spatial_partitions"), partitioningArguments.build()); Symbol partitionsSymbol = context.getSymbolAllocator().newSymbol(partitioningFunction, new ArrayType(INTEGER)); projections.put(partitionsSymbol, partitioningFunction); return new UnnestNode( context.getIdAllocator().getNextId(), new ProjectNode(context.getIdAllocator().getNextId(), node, projections.build()), node.getOutputSymbols(), ImmutableMap.of(partitionsSymbol, ImmutableList.of(partitionSymbol)), Optional.empty()); } private static boolean containsNone(Collection<Symbol> values, Collection<Symbol> testValues) { return 
values.stream().noneMatch(ImmutableSet.copyOf(testValues)::contains); } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/gkehub/v1/feature.proto package com.google.cloud.gkehub.v1; public interface FeatureOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.gkehub.v1.Feature) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Output only. The full, unique name of this Feature resource in the format * `projects/&#42;&#47;locations/&#42;&#47;features/&#42;`. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The name. */ java.lang.String getName(); /** * * * <pre> * Output only. The full, unique name of this Feature resource in the format * `projects/&#42;&#47;locations/&#42;&#47;features/&#42;`. * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * * * <pre> * GCP labels for this Feature. * </pre> * * <code>map&lt;string, string&gt; labels = 2;</code> */ int getLabelsCount(); /** * * * <pre> * GCP labels for this Feature. * </pre> * * <code>map&lt;string, string&gt; labels = 2;</code> */ boolean containsLabels(java.lang.String key); /** Use {@link #getLabelsMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, java.lang.String> getLabels(); /** * * * <pre> * GCP labels for this Feature. 
* </pre> * * <code>map&lt;string, string&gt; labels = 2;</code> */ java.util.Map<java.lang.String, java.lang.String> getLabelsMap(); /** * * * <pre> * GCP labels for this Feature. * </pre> * * <code>map&lt;string, string&gt; labels = 2;</code> */ java.lang.String getLabelsOrDefault(java.lang.String key, java.lang.String defaultValue); /** * * * <pre> * GCP labels for this Feature. * </pre> * * <code>map&lt;string, string&gt; labels = 2;</code> */ java.lang.String getLabelsOrThrow(java.lang.String key); /** * * * <pre> * Output only. State of the Feature resource itself. * </pre> * * <code> * .google.cloud.gkehub.v1.FeatureResourceState resource_state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the resourceState field is set. */ boolean hasResourceState(); /** * * * <pre> * Output only. State of the Feature resource itself. * </pre> * * <code> * .google.cloud.gkehub.v1.FeatureResourceState resource_state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The resourceState. */ com.google.cloud.gkehub.v1.FeatureResourceState getResourceState(); /** * * * <pre> * Output only. State of the Feature resource itself. * </pre> * * <code> * .google.cloud.gkehub.v1.FeatureResourceState resource_state = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.gkehub.v1.FeatureResourceStateOrBuilder getResourceStateOrBuilder(); /** * * * <pre> * Optional. Hub-wide Feature configuration. If this Feature does not support any * Hub-wide configuration, this field may be unused. * </pre> * * <code> * .google.cloud.gkehub.v1.CommonFeatureSpec spec = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the spec field is set. */ boolean hasSpec(); /** * * * <pre> * Optional. Hub-wide Feature configuration. If this Feature does not support any * Hub-wide configuration, this field may be unused. 
* </pre> * * <code> * .google.cloud.gkehub.v1.CommonFeatureSpec spec = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The spec. */ com.google.cloud.gkehub.v1.CommonFeatureSpec getSpec(); /** * * * <pre> * Optional. Hub-wide Feature configuration. If this Feature does not support any * Hub-wide configuration, this field may be unused. * </pre> * * <code> * .google.cloud.gkehub.v1.CommonFeatureSpec spec = 4 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.gkehub.v1.CommonFeatureSpecOrBuilder getSpecOrBuilder(); /** * * * <pre> * Optional. Membership-specific configuration for this Feature. If this Feature does * not support any per-Membership configuration, this field may be unused. * The keys indicate which Membership the configuration is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} WILL match the Feature's * project. * {p} will always be returned as the project number, but the project ID is * also accepted during input. If the same Membership is specified in the map * twice (using the project ID form, and the project number form), exactly * ONE of the entries will be saved, with no guarantees as to which. For this * reason, it is recommended the same format be used for all entries when * mutating a Feature. * </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureSpec&gt; membership_specs = 5 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ int getMembershipSpecsCount(); /** * * * <pre> * Optional. Membership-specific configuration for this Feature. If this Feature does * not support any per-Membership configuration, this field may be unused. 
* The keys indicate which Membership the configuration is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} WILL match the Feature's * project. * {p} will always be returned as the project number, but the project ID is * also accepted during input. If the same Membership is specified in the map * twice (using the project ID form, and the project number form), exactly * ONE of the entries will be saved, with no guarantees as to which. For this * reason, it is recommended the same format be used for all entries when * mutating a Feature. * </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureSpec&gt; membership_specs = 5 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ boolean containsMembershipSpecs(java.lang.String key); /** Use {@link #getMembershipSpecsMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, com.google.cloud.gkehub.v1.MembershipFeatureSpec> getMembershipSpecs(); /** * * * <pre> * Optional. Membership-specific configuration for this Feature. If this Feature does * not support any per-Membership configuration, this field may be unused. * The keys indicate which Membership the configuration is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} WILL match the Feature's * project. * {p} will always be returned as the project number, but the project ID is * also accepted during input. If the same Membership is specified in the map * twice (using the project ID form, and the project number form), exactly * ONE of the entries will be saved, with no guarantees as to which. For this * reason, it is recommended the same format be used for all entries when * mutating a Feature. 
* </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureSpec&gt; membership_specs = 5 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.Map<java.lang.String, com.google.cloud.gkehub.v1.MembershipFeatureSpec> getMembershipSpecsMap(); /** * * * <pre> * Optional. Membership-specific configuration for this Feature. If this Feature does * not support any per-Membership configuration, this field may be unused. * The keys indicate which Membership the configuration is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} WILL match the Feature's * project. * {p} will always be returned as the project number, but the project ID is * also accepted during input. If the same Membership is specified in the map * twice (using the project ID form, and the project number form), exactly * ONE of the entries will be saved, with no guarantees as to which. For this * reason, it is recommended the same format be used for all entries when * mutating a Feature. * </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureSpec&gt; membership_specs = 5 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.gkehub.v1.MembershipFeatureSpec getMembershipSpecsOrDefault( java.lang.String key, com.google.cloud.gkehub.v1.MembershipFeatureSpec defaultValue); /** * * * <pre> * Optional. Membership-specific configuration for this Feature. If this Feature does * not support any per-Membership configuration, this field may be unused. * The keys indicate which Membership the configuration is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} WILL match the Feature's * project. 
* {p} will always be returned as the project number, but the project ID is * also accepted during input. If the same Membership is specified in the map * twice (using the project ID form, and the project number form), exactly * ONE of the entries will be saved, with no guarantees as to which. For this * reason, it is recommended the same format be used for all entries when * mutating a Feature. * </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureSpec&gt; membership_specs = 5 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.gkehub.v1.MembershipFeatureSpec getMembershipSpecsOrThrow(java.lang.String key); /** * * * <pre> * Output only. The Hub-wide Feature state. * </pre> * * <code> * .google.cloud.gkehub.v1.CommonFeatureState state = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the state field is set. */ boolean hasState(); /** * * * <pre> * Output only. The Hub-wide Feature state. * </pre> * * <code> * .google.cloud.gkehub.v1.CommonFeatureState state = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The state. */ com.google.cloud.gkehub.v1.CommonFeatureState getState(); /** * * * <pre> * Output only. The Hub-wide Feature state. * </pre> * * <code> * .google.cloud.gkehub.v1.CommonFeatureState state = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.gkehub.v1.CommonFeatureStateOrBuilder getStateOrBuilder(); /** * * * <pre> * Output only. Membership-specific Feature status. If this Feature does * report any per-Membership status, this field may be unused. * The keys indicate which Membership the state is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project number, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} MUST match the Feature's * project number. 
* </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureState&gt; membership_states = 7 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ int getMembershipStatesCount(); /** * * * <pre> * Output only. Membership-specific Feature status. If this Feature does * report any per-Membership status, this field may be unused. * The keys indicate which Membership the state is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project number, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} MUST match the Feature's * project number. * </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureState&gt; membership_states = 7 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ boolean containsMembershipStates(java.lang.String key); /** Use {@link #getMembershipStatesMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, com.google.cloud.gkehub.v1.MembershipFeatureState> getMembershipStates(); /** * * * <pre> * Output only. Membership-specific Feature status. If this Feature does * report any per-Membership status, this field may be unused. * The keys indicate which Membership the state is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project number, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} MUST match the Feature's * project number. * </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureState&gt; membership_states = 7 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.Map<java.lang.String, com.google.cloud.gkehub.v1.MembershipFeatureState> getMembershipStatesMap(); /** * * * <pre> * Output only. Membership-specific Feature status. If this Feature does * report any per-Membership status, this field may be unused. 
* The keys indicate which Membership the state is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project number, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} MUST match the Feature's * project number. * </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureState&gt; membership_states = 7 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.gkehub.v1.MembershipFeatureState getMembershipStatesOrDefault( java.lang.String key, com.google.cloud.gkehub.v1.MembershipFeatureState defaultValue); /** * * * <pre> * Output only. Membership-specific Feature status. If this Feature does * report any per-Membership status, this field may be unused. * The keys indicate which Membership the state is for, in the form: * projects/{p}/locations/{l}/memberships/{m} * Where {p} is the project number, {l} is a valid location and {m} is a valid * Membership in this project at that location. {p} MUST match the Feature's * project number. * </pre> * * <code> * map&lt;string, .google.cloud.gkehub.v1.MembershipFeatureState&gt; membership_states = 7 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.gkehub.v1.MembershipFeatureState getMembershipStatesOrThrow( java.lang.String key); /** * * * <pre> * Output only. When the Feature resource was created. * </pre> * * <code>.google.protobuf.Timestamp create_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the createTime field is set. */ boolean hasCreateTime(); /** * * * <pre> * Output only. When the Feature resource was created. * </pre> * * <code>.google.protobuf.Timestamp create_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The createTime. */ com.google.protobuf.Timestamp getCreateTime(); /** * * * <pre> * Output only. When the Feature resource was created. 
* </pre> * * <code>.google.protobuf.Timestamp create_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); /** * * * <pre> * Output only. When the Feature resource was last updated. * </pre> * * <code>.google.protobuf.Timestamp update_time = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the updateTime field is set. */ boolean hasUpdateTime(); /** * * * <pre> * Output only. When the Feature resource was last updated. * </pre> * * <code>.google.protobuf.Timestamp update_time = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The updateTime. */ com.google.protobuf.Timestamp getUpdateTime(); /** * * * <pre> * Output only. When the Feature resource was last updated. * </pre> * * <code>.google.protobuf.Timestamp update_time = 9 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); /** * * * <pre> * Output only. When the Feature resource was deleted. * </pre> * * <code>.google.protobuf.Timestamp delete_time = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the deleteTime field is set. */ boolean hasDeleteTime(); /** * * * <pre> * Output only. When the Feature resource was deleted. * </pre> * * <code>.google.protobuf.Timestamp delete_time = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The deleteTime. */ com.google.protobuf.Timestamp getDeleteTime(); /** * * * <pre> * Output only. When the Feature resource was deleted. * </pre> * * <code>.google.protobuf.Timestamp delete_time = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getDeleteTimeOrBuilder(); }
package ca.ualberta.cs.linkai.beep;

import android.content.Intent;
import android.content.pm.PackageManager;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.os.Bundle;
import android.app.Activity;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.FragmentActivity;
import android.support.v4.content.ContextCompat;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import android.widget.Toast;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.location.places.Place;
import com.google.android.gms.location.places.Places;
import com.google.android.gms.location.places.ui.PlaceAutocompleteFragment;
import com.google.android.gms.location.places.ui.PlaceSelectionListener;
import com.google.android.gms.maps.CameraUpdate;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;

import java.io.IOException;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

/**
 * Map screen that lets the user tap a point on a Google Map and search for
 * requests near that point via an Elasticsearch-backed async task. Results are
 * shown in a {@link ListView} below the map; tapping a result opens
 * {@code RequestDetailAndAcceptActivity}.
 *
 * <p>Lifecycle: the {@link GoogleApiClient} is built in {@link #onCreate},
 * connected in {@link #onStart} and disconnected in {@link #onPause}. The
 * user's last known position is marked once {@link #onConnected} fires.
 */
public class SearchByLocationActivity extends FragmentActivity implements
        OnMapReadyCallback,
        ActivityCompat.OnRequestPermissionsResultCallback,
        GoogleApiClient.ConnectionCallbacks,
        GoogleApiClient.OnConnectionFailedListener {

    private static final String TAG = SearchByLocationActivity.class.getSimpleName();

    private GoogleMap mMap;

    /** Google API client used for the fused-location and Places APIs. */
    protected GoogleApiClient mGoogleApiClient;

    Location mLastLocation;
    double lat = 0;
    double lng = 0;

    // Request code for the coarse-location runtime permission.
    // Fixed: constant request codes should be final.
    private static final int MY_PERMISSION_ACCESS_COURSE_LOCATION = 1;

    // Marker for the user's own position (renamed from OriginMarker).
    private Marker mOriginMarker;
    // Marker for the point the user tapped (renamed from "Marker", which
    // shadowed the Marker type).
    private Marker mSearchMarker;

    // Coordinates handed to the nearby-search task, always exactly [lat, lng].
    ArrayList<Double> list = new ArrayList<>();
    public static ArrayList<Request> requestList = new ArrayList<>();
    private RequestsAdapter adapter;
    ListView resultList;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        buildGoogleApiClient();
        setContentView(R.layout.activity_search_by_location);

        resultList = (ListView) findViewById(R.id.resultList);
        adapter = new RequestsAdapter(this, requestList);
        resultList.setAdapter(adapter);

        // Obtain the SupportMapFragment and get notified when the map is ready to be used.
        SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager()
                .findFragmentById(R.id.map);
        mapFragment.getMapAsync(this);
    }

    /** Builds the {@link GoogleApiClient} with the location and Places APIs. */
    private void buildGoogleApiClient() {
        // Fixed: the connection-failed listener was previously registered twice.
        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .addApi(LocationServices.API)
                .addApi(Places.GEO_DATA_API)
                .build();
    }

    /**
     * Manipulates the map once available. Triggered when the map is ready to
     * be used; wires up the tap-to-search and result-click listeners. If
     * Google Play services is not installed on the device, the user will be
     * prompted to install it inside the SupportMapFragment, and this callback
     * fires only after the user has returned to the app.
     */
    @Override
    public void onMapReady(GoogleMap googleMap) {
        mMap = googleMap;

        boolean hasLocationPermission = ContextCompat.checkSelfPermission(
                this, android.Manifest.permission.ACCESS_COARSE_LOCATION)
                == PackageManager.PERMISSION_GRANTED;
        if (!hasLocationPermission) {
            ActivityCompat.requestPermissions(this,
                    new String[]{android.Manifest.permission.ACCESS_COARSE_LOCATION},
                    MY_PERMISSION_ACCESS_COURSE_LOCATION);
        } else {
            // Fixed: only enable the my-location layer when the permission is
            // actually granted; calling it without the permission throws a
            // SecurityException. (Previously it was called unconditionally,
            // right after the request was fired asynchronously.)
            mMap.setMyLocationEnabled(true);
        }

        mMap.getUiSettings().setZoomControlsEnabled(true);
        mMap.getUiSettings().setCompassEnabled(true);

        mMap.setOnMapClickListener(new GoogleMap.OnMapClickListener() {
            @Override
            public void onMapClick(LatLng latLng) {
                if (mSearchMarker != null) {
                    mSearchMarker.remove();
                }
                // Local renamed from "LatLng", which shadowed the LatLng type.
                final LatLng tapped = new LatLng(latLng.latitude, latLng.longitude);
                DecimalFormat numberFormat = new DecimalFormat("#.00");
                String latText = numberFormat.format(tapped.latitude);
                String lngText = numberFormat.format(tapped.longitude);
                mSearchMarker = mMap.addMarker(new MarkerOptions().position(latLng).title(
                        "Lat:" + latText + " , " + "Lng:" + lngText));

                mMap.setOnInfoWindowClickListener(new GoogleMap.OnInfoWindowClickListener() {
                    @Override
                    public void onInfoWindowClick(Marker marker) {
                        // Fixed: reset the coordinate list before reuse. It was
                        // only ever appended to, so every info-window click made
                        // the list grow and later searches received stale
                        // coordinates from earlier taps.
                        list.clear();
                        list.add(tapped.latitude);
                        list.add(tapped.longitude);
                        ElasticsearchRequestController.GetRequestByNearbyAddressTask getRequestByLocationTask =
                                new ElasticsearchRequestController.GetRequestByNearbyAddressTask();
                        getRequestByLocationTask.execute(list);
                        try {
                            // NOTE(review): get() blocks the UI thread until the
                            // search completes — consider a callback instead.
                            requestList = getRequestByLocationTask.get();
                        } catch (Exception e) {
                            Log.i("Error", "Failed to get the Accounts out of the async object.");
                        }
                        if (requestList.isEmpty()) {
                            Toast.makeText(SearchByLocationActivity.this,
                                    "No request find", Toast.LENGTH_SHORT).show();
                        } else {
                            Toast.makeText(SearchByLocationActivity.this,
                                    "Request found", Toast.LENGTH_SHORT).show();
                            adapter.clear();
                            adapter.addAll(requestList);
                            adapter.notifyDataSetChanged();
                        }
                    }
                });
            }
        });

        resultList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                Intent intent = new Intent(SearchByLocationActivity.this,
                        RequestDetailAndAcceptActivity.class);
                intent.putExtra("request_Detail", i);
                startActivity(intent);
            }
        });
    }

    /**
     * Called once the Google API client connects; fetches the last known
     * location and marks it on the map.
     *
     * @param bundle connection hint supplied by the client (unused)
     */
    @Override
    public void onConnected(Bundle bundle) {
        if (ContextCompat.checkSelfPermission(
                this, android.Manifest.permission.ACCESS_COARSE_LOCATION)
                != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(this,
                    new String[]{android.Manifest.permission.ACCESS_COARSE_LOCATION},
                    MY_PERMISSION_ACCESS_COURSE_LOCATION);
        }
        mLastLocation = LocationServices.FusedLocationApi.getLastLocation(mGoogleApiClient);
        // Guard on mMap too: the API client can connect before the map is ready.
        if (mLastLocation != null && mMap != null) {
            lat = mLastLocation.getLatitude();
            lng = mLastLocation.getLongitude();
            // Fixed: use the position that was just fetched. Previously a
            // hard-coded coordinate (Edmonton) was shown even though lat/lng
            // had just been read from the last known location.
            LatLng loc = new LatLng(lat, lng);
            mOriginMarker = mMap.addMarker(
                    new MarkerOptions().position(loc).title("My Current Location"));
            mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(loc, 13));
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_driver_main, menu);
        return true;
    }

    /**
     * Handles action-bar item selection (settings and request-list entries).
     *
     * @param item the selected menu item
     * @return true when the selection was handled here
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            Intent intent = new Intent(this, EditProfileActivity.class);
            startActivity(intent);
            return true;
        }
        if (id == R.id.view_request) {
            Intent intent = new Intent(this, RequestsListActivity.class);
            startActivity(intent);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onConnectionSuspended(int i) {
    }

    @Override
    public void onConnectionFailed(ConnectionResult connectionResult) {
    }

    @Override
    protected void onStart() {
        super.onStart();
        mGoogleApiClient.connect();

        // Pre-load the current user's own requests by initiator.
        ElasticsearchRequestController.GetRequestByInitiatorTask getRequestByInitiatorTask =
                new ElasticsearchRequestController.GetRequestByInitiatorTask();
        getRequestByInitiatorTask.execute(RuntimeAccount.getInstance().myAccount);
        try {
            // NOTE(review): get() blocks the UI thread until the query finishes.
            RuntimeRequestList.getInstance().myRequestList = getRequestByInitiatorTask.get();
        } catch (Exception e) {
            Log.i("Error", "Failed to get the Requests out of the async object.");
            Toast.makeText(SearchByLocationActivity.this,
                    "Unable to find Requests by elastic search", Toast.LENGTH_SHORT).show();
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mGoogleApiClient.isConnected()) {
            mGoogleApiClient.disconnect();
        }
    }
}
/**
 * This class is generated by jOOQ
 */
package io.cattle.platform.core.model;

/**
 * This class is generated by jOOQ.
 *
 * JPA-annotated record interface for the <code>cattle.volume</code> table:
 * one getter/setter pair per column, plus {@link #from} / {@link #into}
 * for copying between generated Record/POJO implementations.
 *
 * NOTE: generated by jOOQ 3.3.0 — do not edit by hand; changes will be
 * overwritten on the next code generation run.
 */
@javax.annotation.Generated(value    = { "http://www.jooq.org", "3.3.0" },
                            comments = "This class is generated by jOOQ")
@java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" })
@javax.persistence.Entity
@javax.persistence.Table(name = "volume", schema = "cattle")
public interface Volume extends java.io.Serializable {

	/**
	 * Setter for <code>cattle.volume.id</code>.
	 */
	public void setId(java.lang.Long value);

	/**
	 * Getter for <code>cattle.volume.id</code>. Primary key.
	 */
	@javax.persistence.Id
	@javax.persistence.Column(name = "id", unique = true, nullable = false, precision = 19)
	public java.lang.Long getId();

	/**
	 * Setter for <code>cattle.volume.name</code>.
	 */
	public void setName(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.name</code>.
	 */
	@javax.persistence.Column(name = "name", length = 255)
	public java.lang.String getName();

	/**
	 * Setter for <code>cattle.volume.account_id</code>.
	 */
	public void setAccountId(java.lang.Long value);

	/**
	 * Getter for <code>cattle.volume.account_id</code>.
	 */
	@javax.persistence.Column(name = "account_id", precision = 19)
	public java.lang.Long getAccountId();

	/**
	 * Setter for <code>cattle.volume.kind</code>.
	 */
	public void setKind(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.kind</code>.
	 */
	@javax.persistence.Column(name = "kind", nullable = false, length = 255)
	public java.lang.String getKind();

	/**
	 * Setter for <code>cattle.volume.uuid</code>.
	 */
	public void setUuid(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.uuid</code>. Unique external identifier.
	 */
	@javax.persistence.Column(name = "uuid", unique = true, nullable = false, length = 128)
	public java.lang.String getUuid();

	/**
	 * Setter for <code>cattle.volume.description</code>.
	 */
	public void setDescription(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.description</code>.
	 */
	@javax.persistence.Column(name = "description", length = 1024)
	public java.lang.String getDescription();

	/**
	 * Setter for <code>cattle.volume.state</code>.
	 */
	public void setState(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.state</code>.
	 */
	@javax.persistence.Column(name = "state", nullable = false, length = 128)
	public java.lang.String getState();

	/**
	 * Setter for <code>cattle.volume.created</code>.
	 */
	public void setCreated(java.util.Date value);

	/**
	 * Getter for <code>cattle.volume.created</code>.
	 */
	@javax.persistence.Column(name = "created")
	public java.util.Date getCreated();

	/**
	 * Setter for <code>cattle.volume.removed</code>.
	 */
	public void setRemoved(java.util.Date value);

	/**
	 * Getter for <code>cattle.volume.removed</code>.
	 */
	@javax.persistence.Column(name = "removed")
	public java.util.Date getRemoved();

	/**
	 * Setter for <code>cattle.volume.remove_time</code>.
	 */
	public void setRemoveTime(java.util.Date value);

	/**
	 * Getter for <code>cattle.volume.remove_time</code>.
	 */
	@javax.persistence.Column(name = "remove_time")
	public java.util.Date getRemoveTime();

	/**
	 * Setter for <code>cattle.volume.data</code>.
	 */
	public void setData(java.util.Map<String,Object> value);

	/**
	 * Getter for <code>cattle.volume.data</code>.
	 */
	@javax.persistence.Column(name = "data", length = 16777215)
	public java.util.Map<String,Object> getData();

	/**
	 * Setter for <code>cattle.volume.physical_size_mb</code>.
	 */
	public void setPhysicalSizeMb(java.lang.Long value);

	/**
	 * Getter for <code>cattle.volume.physical_size_mb</code>.
	 */
	@javax.persistence.Column(name = "physical_size_mb", precision = 19)
	public java.lang.Long getPhysicalSizeMb();

	/**
	 * Setter for <code>cattle.volume.virtual_size_mb</code>.
	 */
	public void setVirtualSizeMb(java.lang.Long value);

	/**
	 * Getter for <code>cattle.volume.virtual_size_mb</code>.
	 */
	@javax.persistence.Column(name = "virtual_size_mb", precision = 19)
	public java.lang.Long getVirtualSizeMb();

	/**
	 * Setter for <code>cattle.volume.device_number</code>.
	 */
	public void setDeviceNumber(java.lang.Integer value);

	/**
	 * Getter for <code>cattle.volume.device_number</code>.
	 */
	@javax.persistence.Column(name = "device_number", precision = 10)
	public java.lang.Integer getDeviceNumber();

	/**
	 * Setter for <code>cattle.volume.format</code>.
	 */
	public void setFormat(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.format</code>.
	 */
	@javax.persistence.Column(name = "format", length = 255)
	public java.lang.String getFormat();

	/**
	 * Setter for <code>cattle.volume.allocation_state</code>.
	 */
	public void setAllocationState(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.allocation_state</code>.
	 */
	@javax.persistence.Column(name = "allocation_state", length = 255)
	public java.lang.String getAllocationState();

	/**
	 * Setter for <code>cattle.volume.attached_state</code>.
	 */
	public void setAttachedState(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.attached_state</code>.
	 */
	@javax.persistence.Column(name = "attached_state", length = 255)
	public java.lang.String getAttachedState();

	/**
	 * Setter for <code>cattle.volume.instance_id</code>.
	 */
	public void setInstanceId(java.lang.Long value);

	/**
	 * Getter for <code>cattle.volume.instance_id</code>.
	 */
	@javax.persistence.Column(name = "instance_id", precision = 19)
	public java.lang.Long getInstanceId();

	/**
	 * Setter for <code>cattle.volume.image_id</code>.
	 */
	public void setImageId(java.lang.Long value);

	/**
	 * Getter for <code>cattle.volume.image_id</code>.
	 */
	@javax.persistence.Column(name = "image_id", precision = 19)
	public java.lang.Long getImageId();

	/**
	 * Setter for <code>cattle.volume.offering_id</code>.
	 */
	public void setOfferingId(java.lang.Long value);

	/**
	 * Getter for <code>cattle.volume.offering_id</code>.
	 */
	@javax.persistence.Column(name = "offering_id", precision = 19)
	public java.lang.Long getOfferingId();

	/**
	 * Setter for <code>cattle.volume.zone_id</code>.
	 */
	public void setZoneId(java.lang.Long value);

	/**
	 * Getter for <code>cattle.volume.zone_id</code>.
	 */
	@javax.persistence.Column(name = "zone_id", precision = 19)
	public java.lang.Long getZoneId();

	/**
	 * Setter for <code>cattle.volume.uri</code>.
	 */
	public void setUri(java.lang.String value);

	/**
	 * Getter for <code>cattle.volume.uri</code>.
	 */
	@javax.persistence.Column(name = "uri", length = 512)
	public java.lang.String getUri();

	// -------------------------------------------------------------------------
	// FROM and INTO
	// -------------------------------------------------------------------------

	/**
	 * Load data from another generated Record/POJO implementing the common interface Volume
	 */
	public void from(io.cattle.platform.core.model.Volume from);

	/**
	 * Copy data into another generated Record/POJO implementing the common interface Volume
	 */
	public <E extends io.cattle.platform.core.model.Volume> E into(E into);
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.application.options.pathMacros;

import com.intellij.application.options.PathMacrosCollector;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.application.PathMacros;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.JBColor;
import com.intellij.util.ui.Table;

import javax.swing.*;
import javax.swing.table.AbstractTableModel;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.TableColumn;
import java.awt.*;
import java.io.File;
import java.util.*;
import java.util.List;

/**
 * Two-column (name, value) table that edits the user-defined path macros held
 * by {@link PathMacros}. Edits are made against an in-memory copy
 * ({@code myMacros}, kept sorted by name) and written back only on
 * {@link #commit()}; {@link #reset()} re-reads the persisted state.
 * Undefined macro names (passed to the constructor) are shown with empty
 * values, rendered in red.
 *
 * @author dsl
 */
public class PathMacroTable extends Table {
  private static final Logger LOG = Logger.getInstance("#com.intellij.application.options.pathMacros.PathMacroTable");
  private final PathMacros myPathMacros = PathMacros.getInstance();
  private final MyTableModel myTableModel = new MyTableModel();
  private static final int NAME_COLUMN = 0;
  private static final int VALUE_COLUMN = 1;
  // Working copy of the (name, value) pairs; kept sorted with MACRO_COMPARATOR.
  private final List<Couple<String>> myMacros = new ArrayList<>();
  // Orders macro pairs alphabetically by name.
  private static final Comparator<Couple<String>> MACRO_COMPARATOR = (pair, pair1) -> pair.getFirst().compareTo(pair1.getFirst());
  // Macro names referenced somewhere but not yet defined; may be null.
  private final Collection<String> myUndefinedMacroNames;

  public PathMacroTable() {
    this(null);
  }

  /**
   * @param undefinedMacroNames macro names to surface with empty values so the
   *                            user can define them; null when none are known
   */
  public PathMacroTable(final Collection<String> undefinedMacroNames) {
    myUndefinedMacroNames = undefinedMacroNames;
    setModel(myTableModel);
    TableColumn column = getColumnModel().getColumn(NAME_COLUMN);
    // Render the name in red when the macro's value is empty (i.e. undefined).
    column.setCellRenderer(new DefaultTableCellRenderer() {
      public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
        final Component component = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
        final String macroValue = getMacroValueAt(row);
        component.setForeground(macroValue.length() == 0
                                ? JBColor.RED
                                : isSelected ? table.getSelectionForeground() : table.getForeground());
        return component;
      }
    });
    setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
    //obtainData();
    getEmptyText().setText(ApplicationBundle.message("text.no.path.variables"));
  }

  /** Returns the macro value shown in the given visual row. */
  public String getMacroValueAt(int row) {
    return (String) getValueAt(row, VALUE_COLUMN);
  }

  /**
   * Opens the "add variable" dialog and, on OK, inserts the new macro, re-sorts
   * the working copy and selects the freshly added row.
   */
  public void addMacro() {
    final String title = ApplicationBundle.message("title.add.variable");
    final PathMacroEditor macroEditor = new PathMacroEditor(title, "", "", new AddValidator(title));
    if (macroEditor.showAndGet()) {
      final String name = macroEditor.getName();
      myMacros.add(Couple.of(name, macroEditor.getValue()));
      Collections.sort(myMacros, MACRO_COMPARATOR);
      // Re-locate the row after sorting so the new entry can be selected.
      final int index = indexOfMacroWithName(name);
      LOG.assertTrue(index >= 0);
      myTableModel.fireTableDataChanged();
      setRowSelectionInterval(index, index);
    }
  }

  private boolean isValidRow(int selectedRow) {
    return selectedRow >= 0 && selectedRow < myMacros.size();
  }

  /**
   * Deletes all selected rows from the working copy, then restores a sensible
   * selection (the first previously selected row, or the new last row).
   */
  public void removeSelectedMacros() {
    final int[] selectedRows = getSelectedRows();
    if(selectedRows.length == 0) return;
    Arrays.sort(selectedRows);
    final int originalRow = selectedRows[0];
    // Remove from the highest index down so earlier removals don't shift
    // the indices of rows still pending removal.
    for (int i = selectedRows.length - 1; i >= 0; i--) {
      final int selectedRow = selectedRows[i];
      if (isValidRow(selectedRow)) {
        myMacros.remove(selectedRow);
      }
    }
    myTableModel.fireTableDataChanged();
    if (originalRow < getRowCount()) {
      setRowSelectionInterval(originalRow, originalRow);
    }
    else if (getRowCount() > 0) {
      final int index = getRowCount() - 1;
      setRowSelectionInterval(index, index);
    }
  }

  /**
   * Writes the working copy back into {@link PathMacros}. Entries with blank
   * values are dropped; values are normalized to forward slashes with no
   * trailing slash.
   */
  public void commit() {
    myPathMacros.removeAllMacros();
    for (Couple<String> pair : myMacros) {
      final String value = pair.getSecond();
      if (value != null && value.trim().length() > 0) {
        String path = value.replace(File.separatorChar, '/');
        path = StringUtil.trimEnd(path, "/");
        myPathMacros.setMacro(pair.getFirst(), path);
      }
    }
  }

  /** Discards local edits and reloads the table from the persisted macros. */
  public void reset() {
    obtainData();
  }

  // True when the name is a system macro or already present in the working copy.
  private boolean hasMacroWithName(String name) {
    if (PathMacros.getInstance().getSystemMacroNames().contains(name)) {
      return true;
    }
    for (Couple<String> macro : myMacros) {
      if (name.equals(macro.getFirst())) {
        return true;
      }
    }
    return false;
  }

  // Index of the macro with the given name in the working copy, or -1.
  private int indexOfMacroWithName(String name) {
    for (int i = 0; i < myMacros.size(); i++) {
      final Couple<String> pair = myMacros.get(i);
      if (name.equals(pair.getFirst())) {
        return i;
      }
    }
    return -1;
  }

  private void obtainData() {
    obtainMacroPairs(myMacros);
    myTableModel.fireTableDataChanged();
  }

  /**
   * Fills {@code macros} with the persisted user macros (values shown with the
   * platform separator) plus any undefined macro names with empty values,
   * sorted by name.
   */
  private void obtainMacroPairs(final List<Couple<String>> macros) {
    macros.clear();
    final Set<String> macroNames = myPathMacros.getUserMacroNames();
    for (String name : macroNames) {
      macros.add(Couple.of(name, myPathMacros.getValue(name).replace('/', File.separatorChar)));
    }
    if (myUndefinedMacroNames != null) {
      for (String undefinedMacroName : myUndefinedMacroNames) {
        macros.add(Couple.of(undefinedMacroName, ""));
      }
    }
    Collections.sort(macros, MACRO_COMPARATOR);
  }

  /**
   * Opens the edit dialog for the single selected macro and, on OK, replaces
   * the entry and re-sorts. No-op unless exactly one row is selected.
   */
  public void editMacro() {
    if (getSelectedRowCount() != 1) {
      return;
    }
    final int selectedRow = getSelectedRow();
    final Couple<String> pair = myMacros.get(selectedRow);
    final String title = ApplicationBundle.message("title.edit.variable");
    final String macroName = pair.getFirst();
    final PathMacroEditor macroEditor = new PathMacroEditor(title, macroName, pair.getSecond(), new EditValidator());
    if (macroEditor.showAndGet()) {
      myMacros.remove(selectedRow);
      myMacros.add(Couple.of(macroEditor.getName(), macroEditor.getValue()));
      Collections.sort(myMacros, MACRO_COMPARATOR);
      myTableModel.fireTableDataChanged();
    }
  }

  /** True when the working copy differs from the persisted macros. */
  public boolean isModified() {
    final ArrayList<Couple<String>> macros = new ArrayList<>();
    obtainMacroPairs(macros);
    return !macros.equals(myMacros);
  }

  /** Read-only two-column table model backed directly by {@code myMacros}. */
  private class MyTableModel extends AbstractTableModel{
    public int getColumnCount() {
      return 2;
    }

    public int getRowCount() {
      return myMacros.size();
    }

    public Class getColumnClass(int columnIndex) {
      return String.class;
    }

    public Object getValueAt(int rowIndex, int columnIndex) {
      final Couple<String> pair = myMacros.get(rowIndex);
      switch (columnIndex) {
        case NAME_COLUMN: return pair.getFirst();
        case VALUE_COLUMN: return pair.getSecond();
      }
      LOG.error("Wrong indices");
      return null;
    }

    public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
      // Editing happens through the PathMacroEditor dialog, not in-place.
    }

    public String getColumnName(int columnIndex) {
      switch (columnIndex) {
        case NAME_COLUMN: return ApplicationBundle.message("column.name");
        case VALUE_COLUMN: return ApplicationBundle.message("column.value");
      }
      return null;
    }

    public boolean isCellEditable(int rowIndex, int columnIndex) {
      return false;
    }
  }

  /** Validator for the add dialog: name must be a legal macro and unused. */
  private class AddValidator implements PathMacroEditor.Validator {
    private final String myTitle;

    public AddValidator(String title) {
      myTitle = title;
    }

    public boolean checkName(String name) {
      if (name.length() == 0) return false;
      // A name is valid iff "$name$" matches the macro reference pattern.
      return PathMacrosCollector.MACRO_PATTERN.matcher("$" + name + "$").matches();
    }

    public boolean isOK(String name, String value) {
      if(name.length() == 0) return false;
      if (hasMacroWithName(name)) {
        Messages.showErrorDialog(PathMacroTable.this,
                                 ApplicationBundle.message("error.variable.already.exists", name), myTitle);
        return false;
      }
      return true;
    }
  }

  /**
   * Validator for the edit dialog: rejects empty and system macro names.
   * NOTE(review): unlike AddValidator this does not reject a rename onto an
   * existing user macro name — presumably intentional; confirm.
   */
  private static class EditValidator implements PathMacroEditor.Validator {
    public boolean checkName(String name) {
      if (name.isEmpty() || PathMacros.getInstance().getSystemMacroNames().contains(name)) {
        return false;
      }
      return PathMacrosCollector.MACRO_PATTERN.matcher("$" + name + "$").matches();
    }

    public boolean isOK(String name, String value) {
      return checkName(name);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.operations; import org.apache.flink.sql.parser.ddl.SqlCreateTable; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.api.SqlDialect; import org.apache.flink.table.api.TableColumn; import org.apache.flink.table.api.TableConfig; import org.apache.flink.table.api.TableSchema; import org.apache.flink.table.catalog.Catalog; import org.apache.flink.table.catalog.CatalogDatabaseImpl; import org.apache.flink.table.catalog.CatalogFunction; import org.apache.flink.table.catalog.CatalogFunctionImpl; import org.apache.flink.table.catalog.CatalogManager; import org.apache.flink.table.catalog.CatalogTable; import org.apache.flink.table.catalog.CatalogTableImpl; import org.apache.flink.table.catalog.FunctionCatalog; import org.apache.flink.table.catalog.GenericInMemoryCatalog; import org.apache.flink.table.catalog.ObjectIdentifier; import org.apache.flink.table.catalog.ObjectPath; import org.apache.flink.table.catalog.exceptions.DatabaseNotExistException; import org.apache.flink.table.catalog.exceptions.FunctionAlreadyExistException; import org.apache.flink.table.catalog.exceptions.TableAlreadyExistException; import 
org.apache.flink.table.catalog.exceptions.TableNotExistException; import org.apache.flink.table.module.ModuleManager; import org.apache.flink.table.operations.CatalogSinkModifyOperation; import org.apache.flink.table.operations.Operation; import org.apache.flink.table.operations.UseCatalogOperation; import org.apache.flink.table.operations.UseDatabaseOperation; import org.apache.flink.table.operations.ddl.AlterDatabaseOperation; import org.apache.flink.table.operations.ddl.AlterTablePropertiesOperation; import org.apache.flink.table.operations.ddl.AlterTableRenameOperation; import org.apache.flink.table.operations.ddl.CreateDatabaseOperation; import org.apache.flink.table.operations.ddl.CreateTableOperation; import org.apache.flink.table.operations.ddl.DropDatabaseOperation; import org.apache.flink.table.planner.calcite.CalciteParser; import org.apache.flink.table.planner.calcite.FlinkPlannerImpl; import org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema; import org.apache.flink.table.planner.delegation.PlannerContext; import org.apache.flink.table.planner.expressions.utils.Func0$; import org.apache.flink.table.planner.expressions.utils.Func1$; import org.apache.flink.table.planner.expressions.utils.Func8$; import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions; import org.apache.flink.table.types.DataType; import org.apache.flink.table.utils.CatalogManagerMocks; import org.apache.calcite.sql.SqlNode; import org.junit.After; import org.junit.Before; import org.junit.Test; import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.stream.Collectors; import static org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; /** * Test cases for {@link SqlToOperationConverter}. 
*/
public class SqlToOperationConverterTest {
	// Shared config; each test sets its SQL dialect via getPlannerBySqlDialect/getParserBySqlDialect.
	private final TableConfig tableConfig = new TableConfig();
	// In-memory catalog registered as the default catalog "builtin".
	private final Catalog catalog = new GenericInMemoryCatalog("MockCatalog", "default");
	private final CatalogManager catalogManager =
		CatalogManagerMocks.preparedCatalogManager()
			.defaultCatalog("builtin", catalog)
			.build();
	private final ModuleManager moduleManager = new ModuleManager();
	private final FunctionCatalog functionCatalog = new FunctionCatalog(
		tableConfig,
		catalogManager,
		moduleManager);
	private final PlannerContext plannerContext =
		new PlannerContext(tableConfig,
			functionCatalog,
			catalogManager,
			asRootSchema(new CatalogManagerCalciteSchema(catalogManager, false)),
			new ArrayList<>());

	/** Registers two identical source tables t1/t2 in the current database for INSERT tests. */
	@Before
	public void before() throws TableAlreadyExistException, DatabaseNotExistException {
		final ObjectPath path1 = new ObjectPath(catalogManager.getCurrentDatabase(), "t1");
		final ObjectPath path2 = new ObjectPath(catalogManager.getCurrentDatabase(), "t2");
		final TableSchema tableSchema = TableSchema.builder()
			.field("a", DataTypes.BIGINT())
			.field("b", DataTypes.VARCHAR(Integer.MAX_VALUE))
			.field("c", DataTypes.INT())
			.field("d", DataTypes.VARCHAR(Integer.MAX_VALUE))
			.build();
		Map<String, String> properties = new HashMap<>();
		properties.put("connector", "COLLECTION");
		final CatalogTable catalogTable = new CatalogTableImpl(tableSchema, properties, "");
		catalog.createTable(path1, catalogTable, true);
		catalog.createTable(path2, catalogTable, true);
	}

	/** Drops the tables created in {@link #before()} so tests stay independent. */
	@After
	public void after() throws TableNotExistException {
		final ObjectPath path1 = new ObjectPath(catalogManager.getCurrentDatabase(), "t1");
		final ObjectPath path2 = new ObjectPath(catalogManager.getCurrentDatabase(), "t2");
		catalog.dropTable(path1, true);
		catalog.dropTable(path2, true);
	}

	// USE CATALOG converts to a UseCatalogOperation carrying the catalog name.
	@Test
	public void testUseCatalog() {
		final String sql = "USE CATALOG cat1";
		Operation operation = parse(sql, SqlDialect.DEFAULT);
		assert operation instanceof UseCatalogOperation;
		assertEquals("cat1", ((UseCatalogOperation) operation).getCatalogName());
	}

	// USE db / USE cat.db converts to a UseDatabaseOperation; an unqualified name
	// resolves against the current ("builtin") catalog.
	@Test
	public void testUseDatabase() {
		final String sql1 = "USE db1";
		Operation operation1 = parse(sql1, SqlDialect.DEFAULT);
		assert operation1 instanceof UseDatabaseOperation;
		assertEquals("builtin", ((UseDatabaseOperation) operation1).getCatalogName());
		assertEquals("db1", ((UseDatabaseOperation) operation1).getDatabaseName());
		final String sql2 = "USE cat1.db1";
		Operation operation2 = parse(sql2, SqlDialect.DEFAULT);
		assert operation2 instanceof UseDatabaseOperation;
		assertEquals("cat1", ((UseDatabaseOperation) operation2).getCatalogName());
		assertEquals("db1", ((UseDatabaseOperation) operation2).getDatabaseName());
	}

	// A three-part identifier is not a valid database path and must fail conversion.
	@Test(expected = SqlConversionException.class)
	public void testUseDatabaseWithException() {
		final String sql = "USE cat1.db1.tbl1";
		Operation operation = parse(sql, SqlDialect.DEFAULT);
	}

	// CREATE DATABASE variants: IF NOT EXISTS, COMMENT, and WITH properties.
	@Test
	public void testCreateDatabase() {
		final String[] createDatabaseSqls = new String[] {
			"create database db1",
			"create database if not exists cat1.db1",
			"create database cat1.db1 comment 'db1_comment'",
			"create database cat1.db1 comment 'db1_comment' with ('k1' = 'v1', 'K2' = 'V2')"
		};
		final String[] expectedCatalogs = new String[] {"builtin", "cat1", "cat1", "cat1"};
		final String expectedDatabase = "db1";
		final String[] expectedComments = new String[] {null, null, "db1_comment", "db1_comment"};
		final boolean[] expectedIgnoreIfExists = new boolean[] {false, true, false, false};
		Map<String, String> properties = new HashMap<>();
		properties.put("k1", "v1");
		properties.put("K2", "V2");
		final Map[] expectedProperties = new Map[] {
			new HashMap<String, String>(),
			new HashMap<String, String>(),
			new HashMap<String, String>(),
			new HashMap(properties)};
		for (int i = 0; i < createDatabaseSqls.length; i++) {
			Operation operation = parse(createDatabaseSqls[i], SqlDialect.DEFAULT);
			assert operation instanceof CreateDatabaseOperation;
			final CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation;
			assertEquals(expectedCatalogs[i], createDatabaseOperation.getCatalogName());
			assertEquals(expectedDatabase, createDatabaseOperation.getDatabaseName());
			assertEquals(expectedComments[i], createDatabaseOperation.getCatalogDatabase().getComment());
			assertEquals(expectedIgnoreIfExists[i], createDatabaseOperation.isIgnoreIfExists());
			assertEquals(expectedProperties[i], createDatabaseOperation.getCatalogDatabase().getProperties());
		}
	}

	// DROP DATABASE variants: IF EXISTS plus CASCADE vs. RESTRICT semantics.
	@Test
	public void testDropDatabase() {
		final String[] dropDatabaseSqls = new String[] {
			"drop database db1",
			"drop database if exists db1",
			"drop database if exists cat1.db1 CASCADE",
			"drop database if exists cat1.db1 RESTRICT"
		};
		final String[] expectedCatalogs = new String[] {"builtin", "builtin", "cat1", "cat1"};
		final String expectedDatabase = "db1";
		final boolean[] expectedIfExists = new boolean[] {false, true, true, true};
		final boolean[] expectedIsCascades = new boolean[] {false, false, true, false};
		for (int i = 0; i < dropDatabaseSqls.length; i++) {
			Operation operation = parse(dropDatabaseSqls[i], SqlDialect.DEFAULT);
			assert operation instanceof DropDatabaseOperation;
			final DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation;
			assertEquals(expectedCatalogs[i], dropDatabaseOperation.getCatalogName());
			assertEquals(expectedDatabase, dropDatabaseOperation.getDatabaseName());
			assertEquals(expectedIfExists[i], dropDatabaseOperation.isIfExists());
			assertEquals(expectedIsCascades[i], dropDatabaseOperation.isCascade());
		}
	}

	// ALTER DATABASE ... SET keeps the original comment and carries the new properties.
	@Test
	public void testAlterDatabase() throws Exception {
		catalogManager.registerCatalog("cat1", new GenericInMemoryCatalog("default", "default"));
		catalogManager.getCatalog("cat1").get()
			.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), "db1_comment"), true);
		final String sql = "alter database cat1.db1 set ('k1'='v1', 'K2'='V2')";
		Operation operation = parse(sql, SqlDialect.DEFAULT);
		assert operation instanceof AlterDatabaseOperation;
		Map<String, String> properties = new HashMap<>();
		properties.put("k1", "v1");
		properties.put("K2", "V2");
		assertEquals("db1", ((AlterDatabaseOperation) operation).getDatabaseName());
		assertEquals("cat1", ((AlterDatabaseOperation) operation).getCatalogName());
		assertEquals("db1_comment", ((AlterDatabaseOperation) operation).getCatalogDatabase().getComment());
		assertEquals(properties, ((AlterDatabaseOperation) operation).getCatalogDatabase().getProperties());
	}

	// CREATE TABLE (HIVE dialect): partition keys, field names and types round-trip.
	@Test
	public void testCreateTable() {
		final String sql = "CREATE TABLE tbl1 (\n" +
			" a bigint,\n" +
			" b varchar, \n" +
			" c int, \n" +
			" d varchar" +
			")\n" +
			" PARTITIONED BY (a, d)\n" +
			" with (\n" +
			" 'connector' = 'kafka', \n" +
			" 'kafka.topic' = 'log.test'\n" +
			")\n";
		FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.HIVE);
		final CalciteParser parser = getParserBySqlDialect(SqlDialect.HIVE);
		Operation operation = parse(sql, planner, parser);
		assert operation instanceof CreateTableOperation;
		CreateTableOperation op = (CreateTableOperation) operation;
		CatalogTable catalogTable = op.getCatalogTable();
		assertEquals(Arrays.asList("a", "d"), catalogTable.getPartitionKeys());
		assertArrayEquals(catalogTable.getSchema().getFieldNames(), new String[] {"a", "b", "c", "d"});
		assertArrayEquals(catalogTable.getSchema().getFieldDataTypes(),
			new DataType[]{
				DataTypes.BIGINT(),
				DataTypes.VARCHAR(Integer.MAX_VALUE),
				DataTypes.INT(),
				DataTypes.VARCHAR(Integer.MAX_VALUE)});
	}

	// PRIMARY KEY / UNIQUE constraints are not supported and must raise a conversion error.
	@Test(expected = SqlConversionException.class)
	public void testCreateTableWithPkUniqueKeys() {
		FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.HIVE);
		final CalciteParser parser = getParserBySqlDialect(SqlDialect.HIVE);
		final String sql = "CREATE TABLE tbl1 (\n" +
			" a bigint,\n" +
			" b varchar, \n" +
			" c int, \n" +
			" d varchar, \n" +
			" primary key(a), \n" +
			" unique(a, b) \n" +
			")\n" +
			" PARTITIONED BY (a, d)\n" +
			" with (\n" +
			" 'connector' = 'kafka', \n" +
			" 'kafka.topic' = 'log.test'\n" +
			")\n";
		parse(sql, planner, parser);
	}

	// Option keys containing '-' and '.' must survive conversion unchanged.
	@Test
	public void testCreateTableWithMinusInOptionKey() {
		final String sql = "create table source_table(\n" +
			" a int,\n" +
			" b bigint,\n" +
			" c varchar\n" +
			") with (\n" +
			" 'a-B-c-d124' = 'Ab',\n" +
			" 'a.b-c-d.e-f.g' = 'ada',\n" +
			" 'a.b-c-d.e-f1231.g' = 'ada',\n" +
			" 'a.b-c-d.*' = 'adad')\n";
		final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
		final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
		SqlNode node = parser.parse(sql);
		assert node instanceof SqlCreateTable;
		Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
		assert operation instanceof CreateTableOperation;
		CreateTableOperation op = (CreateTableOperation) operation;
		CatalogTable catalogTable = op.getCatalogTable();
		Map<String, String> properties = catalogTable.getProperties()
			.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
		// TreeMap gives a deterministic key order for the string comparison below.
		Map<String, String> sortedProperties = new TreeMap<>(properties);
		final String expected = "{a-B-c-d124=Ab, " +
			"a.b-c-d.*=adad, " +
			"a.b-c-d.e-f.g=ada, " +
			"a.b-c-d.e-f1231.g=ada}";
		assertEquals(expected, sortedProperties.toString());
	}

	// WATERMARK expressions using a catalog UDF are expanded to a fully-qualified,
	// serializable expression string in the table properties.
	@Test
	public void testCreateTableWithWatermark() throws FunctionAlreadyExistException, DatabaseNotExistException {
		CatalogFunction cf = new CatalogFunctionImpl(
			JavaUserDefinedScalarFunctions.JavaFunc5.class.getName());
		catalog.createFunction(ObjectPath.fromString("default.myfunc"), cf, true);
		final String sql = "create table source_table(\n" +
			" a int,\n" +
			" b bigint,\n" +
			" c timestamp(3),\n" +
			" watermark for `c` as myfunc(c, 1) - interval '5' second\n" +
			") with (\n" +
			" 'connector.type' = 'kafka')\n";
		final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
		final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
		SqlNode node = parser.parse(sql);
		assert node instanceof SqlCreateTable;
		Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
		assert operation instanceof CreateTableOperation;
		CreateTableOperation op = (CreateTableOperation) operation;
		CatalogTable catalogTable = op.getCatalogTable();
		Map<String, String> properties = catalogTable.toProperties();
		Map<String, String> expected = new HashMap<>();
		expected.put("schema.0.name", "a");
		expected.put("schema.0.data-type", "INT");
		expected.put("schema.1.name", "b");
		expected.put("schema.1.data-type", "BIGINT");
		expected.put("schema.2.name", "c");
		expected.put("schema.2.data-type", "TIMESTAMP(3)");
		expected.put("schema.watermark.0.rowtime", "c");
		expected.put(
			"schema.watermark.0.strategy.expr",
			"`builtin`.`default`.`myfunc`(`c`, 1) - INTERVAL '5' SECOND");
		expected.put("schema.watermark.0.strategy.data-type", "TIMESTAMP(3)");
		expected.put("connector.type", "kafka");
		assertEquals(expected, properties);
	}

	// INSERT INTO ... PARTITION(a=1): the static partition spec is captured on the sink operation.
	@Test
	public void testSqlInsertWithStaticPartition() {
		final String sql = "insert into t1 partition(a=1) select b, c, d from t2";
		FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
		final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
		Operation operation = parse(sql, planner, parser);
		assert operation instanceof CatalogSinkModifyOperation;
		CatalogSinkModifyOperation sinkModifyOperation = (CatalogSinkModifyOperation) operation;
		final Map<String, String> expectedStaticPartitions = new HashMap<>();
		expectedStaticPartitions.put("a", "1");
		assertEquals(expectedStaticPartitions, sinkModifyOperation.getStaticPartitions());
	}

	// Exhaustive mapping from SQL type syntax to the resolved DataType; items marked
	// "Expect to be ..." document known derivation gaps.
	@Test // TODO: tweak the tests when FLINK-13604 is fixed.
	public void testCreateTableWithFullDataTypes() {
		final List<TestItem> testItems = Arrays.asList(
			createTestItem("CHAR", DataTypes.CHAR(1)),
			createTestItem("CHAR NOT NULL", DataTypes.CHAR(1).notNull()),
			createTestItem("CHAR NULL", DataTypes.CHAR(1)),
			createTestItem("CHAR(33)", DataTypes.CHAR(33)),
			createTestItem("VARCHAR", DataTypes.STRING()),
			createTestItem("VARCHAR(33)", DataTypes.VARCHAR(33)),
			createTestItem("STRING", DataTypes.STRING()),
			createTestItem("BOOLEAN", DataTypes.BOOLEAN()),
			createTestItem("BINARY", DataTypes.BINARY(1)),
			createTestItem("BINARY(33)", DataTypes.BINARY(33)),
			createTestItem("VARBINARY", DataTypes.BYTES()),
			createTestItem("VARBINARY(33)", DataTypes.VARBINARY(33)),
			createTestItem("BYTES", DataTypes.BYTES()),
			createTestItem("DECIMAL", DataTypes.DECIMAL(10, 0)),
			createTestItem("DEC", DataTypes.DECIMAL(10, 0)),
			createTestItem("NUMERIC", DataTypes.DECIMAL(10, 0)),
			createTestItem("DECIMAL(10)", DataTypes.DECIMAL(10, 0)),
			createTestItem("DEC(10)", DataTypes.DECIMAL(10, 0)),
			createTestItem("NUMERIC(10)", DataTypes.DECIMAL(10, 0)),
			createTestItem("DECIMAL(10, 3)", DataTypes.DECIMAL(10, 3)),
			createTestItem("DEC(10, 3)", DataTypes.DECIMAL(10, 3)),
			createTestItem("NUMERIC(10, 3)", DataTypes.DECIMAL(10, 3)),
			createTestItem("TINYINT", DataTypes.TINYINT()),
			createTestItem("SMALLINT", DataTypes.SMALLINT()),
			createTestItem("INTEGER", DataTypes.INT()),
			createTestItem("INT", DataTypes.INT()),
			createTestItem("BIGINT", DataTypes.BIGINT()),
			createTestItem("FLOAT", DataTypes.FLOAT()),
			createTestItem("DOUBLE", DataTypes.DOUBLE()),
			createTestItem("DOUBLE PRECISION", DataTypes.DOUBLE()),
			createTestItem("DATE", DataTypes.DATE()),
			createTestItem("TIME", DataTypes.TIME()),
			createTestItem("TIME WITHOUT TIME ZONE", DataTypes.TIME()),
			// Expect to be TIME(3).
			createTestItem("TIME(3)", DataTypes.TIME()),
			// Expect to be TIME(3).
			createTestItem("TIME(3) WITHOUT TIME ZONE", DataTypes.TIME()),
			createTestItem("TIMESTAMP", DataTypes.TIMESTAMP(6)),
			createTestItem("TIMESTAMP WITHOUT TIME ZONE", DataTypes.TIMESTAMP(6)),
			createTestItem("TIMESTAMP(3)", DataTypes.TIMESTAMP(3)),
			createTestItem("TIMESTAMP(3) WITHOUT TIME ZONE", DataTypes.TIMESTAMP(3)),
			createTestItem("TIMESTAMP WITH LOCAL TIME ZONE",
				DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(6)),
			createTestItem("TIMESTAMP(3) WITH LOCAL TIME ZONE",
				DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
			createTestItem("ARRAY<TIMESTAMP(3) WITH LOCAL TIME ZONE>",
				DataTypes.ARRAY(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3))),
			createTestItem("ARRAY<INT NOT NULL>",
				DataTypes.ARRAY(DataTypes.INT().notNull())),
			createTestItem("INT ARRAY", DataTypes.ARRAY(DataTypes.INT())),
			createTestItem("INT NOT NULL ARRAY",
				DataTypes.ARRAY(DataTypes.INT().notNull())),
			createTestItem("INT ARRAY NOT NULL",
				DataTypes.ARRAY(DataTypes.INT()).notNull()),
			createTestItem("MULTISET<INT NOT NULL>",
				DataTypes.MULTISET(DataTypes.INT().notNull())),
			createTestItem("INT MULTISET", DataTypes.MULTISET(DataTypes.INT())),
			createTestItem("INT NOT NULL MULTISET",
				DataTypes.MULTISET(DataTypes.INT().notNull())),
			createTestItem("INT MULTISET NOT NULL",
				DataTypes.MULTISET(DataTypes.INT()).notNull()),
			createTestItem("MAP<BIGINT, BOOLEAN>",
				DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BOOLEAN())),
			// Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
			createTestItem("ROW<f0 INT NOT NULL, f1 BOOLEAN>",
				DataTypes.ROW(
					DataTypes.FIELD("f0", DataTypes.INT()),
					DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
			// Expect to be ROW<`f0` INT NOT NULL, `f1` BOOLEAN>.
			createTestItem("ROW(f0 INT NOT NULL, f1 BOOLEAN)",
				DataTypes.ROW(
					DataTypes.FIELD("f0", DataTypes.INT()),
					DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
			createTestItem("ROW<`f0` INT>",
				DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
			createTestItem("ROW(`f0` INT)",
				DataTypes.ROW(DataTypes.FIELD("f0", DataTypes.INT()))),
			createTestItem("ROW<>", DataTypes.ROW()),
			createTestItem("ROW()", DataTypes.ROW()),
			// Expect to be ROW<`f0` INT NOT NULL '...', `f1` BOOLEAN '...'>.
			createTestItem("ROW<f0 INT NOT NULL 'This is a comment.'," +
					" f1 BOOLEAN 'This as well.'>",
				DataTypes.ROW(
					DataTypes.FIELD("f0", DataTypes.INT()),
					DataTypes.FIELD("f1", DataTypes.BOOLEAN()))),
			createTestItem("ARRAY<ROW<f0 INT, f1 BOOLEAN>>",
				DataTypes.ARRAY(
					DataTypes.ROW(
						DataTypes.FIELD("f0", DataTypes.INT()),
						DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
			createTestItem("ROW<f0 INT, f1 BOOLEAN> MULTISET",
				DataTypes.MULTISET(
					DataTypes.ROW(
						DataTypes.FIELD("f0", DataTypes.INT()),
						DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
			createTestItem("MULTISET<ROW<f0 INT, f1 BOOLEAN>>",
				DataTypes.MULTISET(
					DataTypes.ROW(
						DataTypes.FIELD("f0", DataTypes.INT()),
						DataTypes.FIELD("f1", DataTypes.BOOLEAN())))),
			createTestItem("ROW<f0 Row<f00 INT, f01 BOOLEAN>, " +
					"f1 INT ARRAY, " +
					"f2 BOOLEAN MULTISET>",
				DataTypes.ROW(DataTypes.FIELD("f0",
					DataTypes.ROW(
						DataTypes.FIELD("f00", DataTypes.INT()),
						DataTypes.FIELD("f01", DataTypes.BOOLEAN()))),
					DataTypes.FIELD("f1", DataTypes.ARRAY(DataTypes.INT())),
					DataTypes.FIELD("f2", DataTypes.MULTISET(DataTypes.BOOLEAN()))))
		);
		// Build a single CREATE TABLE whose column fN carries the Nth test expression.
		StringBuilder buffer = new StringBuilder("create table t1(\n");
		for (int i = 0; i < testItems.size(); i++) {
			buffer.append("f")
				.append(i)
				.append(" ")
				.append(testItems.get(i).testExpr);
			if (i == testItems.size() - 1) {
				buffer.append(")");
			} else {
				buffer.append(",\n");
			}
		}
		final String sql = buffer.toString();
		final FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
		final CalciteParser parser = getParserBySqlDialect(SqlDialect.DEFAULT);
		SqlNode node = parser.parse(sql);
		assert node instanceof SqlCreateTable;
		Operation operation = SqlToOperationConverter.convert(planner, catalogManager, node).get();
		TableSchema schema = ((CreateTableOperation) operation).getCatalogTable().getSchema();
		Object[] expectedDataTypes = testItems.stream().map(item -> item.expectedType).toArray();
		assertArrayEquals(expectedDataTypes, schema.getFieldDataTypes());
	}

	// Computed columns referencing UDFs are expanded to fully-qualified expression strings.
	@Test
	public void testCreateTableWithComputedColumn() {
		final String sql = "CREATE TABLE tbl1 (\n" +
			" a int,\n" +
			" b varchar, \n" +
			" c as a - 1, \n" +
			" d as b || '$$', \n" +
			" e as my_udf1(a)," +
			" f as `default`.my_udf2(a) + 1," +
			" g as builtin.`default`.my_udf3(a) || '##'\n" +
			")\n" +
			" with (\n" +
			" 'connector' = 'kafka', \n" +
			" 'kafka.topic' = 'log.test'\n" +
			")\n";
		functionCatalog.registerTempCatalogScalarFunction(
			ObjectIdentifier.of("builtin", "default", "my_udf1"), Func0$.MODULE$);
		functionCatalog.registerTempCatalogScalarFunction(
			ObjectIdentifier.of("builtin", "default", "my_udf2"), Func1$.MODULE$);
		functionCatalog.registerTempCatalogScalarFunction(
			ObjectIdentifier.of("builtin", "default", "my_udf3"), Func8$.MODULE$);
		FlinkPlannerImpl planner = getPlannerBySqlDialect(SqlDialect.DEFAULT);
		Operation operation = parse(sql, planner, getParserBySqlDialect(SqlDialect.DEFAULT));
		assert operation instanceof CreateTableOperation;
		CreateTableOperation op = (CreateTableOperation) operation;
		CatalogTable catalogTable = op.getCatalogTable();
		assertArrayEquals(
			new String[] {"a", "b", "c", "d", "e", "f", "g"},
			catalogTable.getSchema().getFieldNames());
		assertArrayEquals(
			new DataType[]{
				DataTypes.INT(),
				DataTypes.STRING(),
				DataTypes.INT(),
				DataTypes.STRING(),
				DataTypes.INT().notNull(),
				DataTypes.INT(),
				DataTypes.STRING()},
			catalogTable.getSchema().getFieldDataTypes());
		String[] columnExpressions =
			catalogTable.getSchema().getTableColumns().stream()
				.filter(TableColumn::isGenerated)
				.map(c -> c.getExpr().orElse(null))
				.toArray(String[]::new);
		String[] expected = new String[] {
			"`a` - 1",
			"`b` || '$$'",
			"`builtin`.`default`.`my_udf1`(`a`)",
			"`builtin`.`default`.`my_udf2`(`a`) + 1",
			"`builtin`.`default`.`my_udf3`(`a`) || '##'"
		};
		assertArrayEquals(
			expected,
			columnExpressions);
	}

	// ALTER TABLE ... RENAME TO and ALTER TABLE ... SET, with partially-qualified names
	// resolved against the current catalog/database.
	@Test
	public void testAlterTable() throws Exception {
		Catalog catalog = new GenericInMemoryCatalog("default", "default");
		catalogManager.registerCatalog("cat1", catalog);
		catalog.createDatabase("db1", new CatalogDatabaseImpl(new HashMap<>(), null), true);
		CatalogTable catalogTable = new CatalogTableImpl(
			TableSchema.builder().field("a", DataTypes.STRING()).build(),
			new HashMap<>(),
			"tb1");
		catalogManager.setCurrentCatalog("cat1");
		catalogManager.setCurrentDatabase("db1");
		catalog.createTable(new ObjectPath("db1", "tb1"), catalogTable, true);
		final String[] renameTableSqls = new String[] {
			"alter table cat1.db1.tb1 rename to tb2",
			"alter table db1.tb1 rename to tb2",
			"alter table tb1 rename to cat1.db1.tb2",
		};
		final ObjectIdentifier expectedIdentifier = ObjectIdentifier.of("cat1", "db1", "tb1");
		final ObjectIdentifier expectedNewIdentifier = ObjectIdentifier.of("cat1", "db1", "tb2");
		//test rename table converter
		for (int i = 0; i < renameTableSqls.length; i++) {
			Operation operation = parse(renameTableSqls[i], SqlDialect.DEFAULT);
			assert operation instanceof AlterTableRenameOperation;
			final AlterTableRenameOperation alterTableRenameOperation = (AlterTableRenameOperation) operation;
			assertEquals(expectedIdentifier, alterTableRenameOperation.getTableIdentifier());
			assertEquals(expectedNewIdentifier, alterTableRenameOperation.getNewTableIdentifier());
		}
		// test alter table properties
		Operation operation = parse("alter table cat1.db1.tb1 set ('k1' = 'v1', 'K2' = 'V2')",
			SqlDialect.DEFAULT);
		assert operation instanceof AlterTablePropertiesOperation;
		final AlterTablePropertiesOperation alterTablePropertiesOperation = (AlterTablePropertiesOperation) operation;
		assertEquals(expectedIdentifier, alterTablePropertiesOperation.getTableIdentifier());
		assertEquals(2, alterTablePropertiesOperation.getCatalogTable().getProperties().size());
		Map<String, String> properties = new HashMap<>();
		properties.put("k1", "v1");
		properties.put("K2", "V2");
		assertEquals(properties, alterTablePropertiesOperation.getCatalogTable().getProperties());
	}

	//~ Tool Methods ----------------------------------------------------------

	// args = (testExpr, expectedType-or-expectedError); dispatches on the second arg's type.
	private static TestItem createTestItem(Object... args) {
		assert args.length == 2;
		final String testExpr = (String) args[0];
		TestItem testItem = TestItem.fromTestExpr(testExpr);
		if (args[1] instanceof String) {
			testItem.withExpectedError((String) args[1]);
		} else {
			testItem.withExpectedType(args[1]);
		}
		return testItem;
	}

	// Parses with an explicitly supplied planner/parser pair (caller controls the dialect).
	private Operation parse(String sql, FlinkPlannerImpl planner, CalciteParser parser) {
		SqlNode node = parser.parse(sql);
		return SqlToOperationConverter.convert(planner, catalogManager, node).get();
	}

	// Convenience overload: builds planner and parser for the given dialect first.
	private Operation parse(String sql, SqlDialect sqlDialect) {
		FlinkPlannerImpl planner = getPlannerBySqlDialect(sqlDialect);
		final CalciteParser parser = getParserBySqlDialect(sqlDialect);
		SqlNode node = parser.parse(sql);
		return SqlToOperationConverter.convert(planner, catalogManager, node).get();
	}

	private FlinkPlannerImpl getPlannerBySqlDialect(SqlDialect sqlDialect) {
		tableConfig.setSqlDialect(sqlDialect);
		return plannerContext.createFlinkPlanner(catalogManager.getCurrentCatalog(),
			catalogManager.getCurrentDatabase());
	}

	private CalciteParser getParserBySqlDialect(SqlDialect sqlDialect) {
		tableConfig.setSqlDialect(sqlDialect);
		return plannerContext.createCalciteParser();
	}

	//~ Inner Classes ----------------------------------------------------------

	/** One type-derivation case: a SQL type expression plus the expected DataType or error. */
	private static class TestItem {
		private final String testExpr;
		@Nullable
		private Object expectedType;
		@Nullable
		private String expectedError;

		private TestItem(String testExpr) {
			this.testExpr = testExpr;
		}

		static TestItem fromTestExpr(String testExpr) {
			return new TestItem(testExpr);
		}

		TestItem withExpectedType(Object expectedType) {
			this.expectedType = expectedType;
			return this;
		}

		TestItem withExpectedError(String expectedError) {
			this.expectedError = expectedError;
			return this;
		}

		@Override
		public String toString() {
			return this.testExpr;
		}
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.ivy; import java.io.File; import java.io.IOException; import java.net.URL; import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.ivy.core.deliver.DeliverOptions; import org.apache.ivy.core.deliver.PublishingDependencyRevisionResolver; import org.apache.ivy.core.install.InstallOptions; import org.apache.ivy.core.module.descriptor.Artifact; import org.apache.ivy.core.module.descriptor.ModuleDescriptor; import org.apache.ivy.core.module.id.ModuleId; import org.apache.ivy.core.module.id.ModuleRevisionId; import org.apache.ivy.core.publish.PublishOptions; import org.apache.ivy.core.report.ArtifactDownloadReport; import org.apache.ivy.core.report.ResolveReport; import org.apache.ivy.core.resolve.DownloadOptions; import org.apache.ivy.core.resolve.IvyNode; import org.apache.ivy.core.resolve.ResolveOptions; import org.apache.ivy.core.resolve.ResolvedModuleRevision; import org.apache.ivy.core.retrieve.RetrieveOptions; import org.apache.ivy.core.search.ModuleEntry; import org.apache.ivy.core.search.OrganisationEntry; import 
org.apache.ivy.core.search.RevisionEntry;
import org.apache.ivy.core.sort.SilentNonMatchingVersionReporter;
import org.apache.ivy.core.sort.SortOptions;
import org.apache.ivy.plugins.matcher.PatternMatcher;
import org.apache.ivy.plugins.resolver.DependencyResolver;
import org.apache.ivy.util.Message;
import org.apache.ivy.util.filter.Filter;
import org.apache.ivy.util.filter.FilterHelper;

import static org.apache.ivy.util.StringUtils.splitToArray;

/**
 * This class can be used for easy migration from Ivy 1.4 API.
 * <p>
 * Indeed, Ivy 2.0 API has changed substantially, so it can take time to migrate existing code using
 * Ivy 1.4 API to the new API. Using this class it's really easy: replace your instance of Ivy by an
 * instance of this class.
 * <p>
 * For instance, where you were doing:
 *
 * <pre>
 * Ivy ivy = new Ivy();
 * </pre>
 *
 * do instead:
 *
 * <pre>
 * Ivy14 ivy = new Ivy14();
 * </pre>
 *
 * And that should be enough in most cases!
 */
public class Ivy14 {
    // CheckStyle:ParameterNumberCheck OFF
    // don't check parameter numbers, since this class is here for backward compatibility

    // The Ivy 2.x engine that every 1.4-style call below delegates to.
    private Ivy ivy;

    public Ivy14() {
        this(Ivy.newInstance());
    }

    public Ivy14(Ivy ivy) {
        this.ivy = ivy;
    }

    public boolean check(URL ivyFile, String resolvername) {
        return ivy.check(ivyFile, resolvername);
    }

    public void configure(File settingsFile) throws ParseException, IOException {
        ivy.configure(settingsFile);
    }

    public void configure(URL settingsURL) throws ParseException, IOException {
        ivy.configure(settingsURL);
    }

    public void configureDefault() throws ParseException, IOException {
        ivy.configureDefault();
    }

    // NOTE: the 1.4 'cache' parameter is ignored; 2.x deliver options do not take a cache dir.
    public void deliver(ModuleRevisionId mrid, String revision, File cache, String destIvyPattern,
            String status, Date pubdate, PublishingDependencyRevisionResolver pdrResolver,
            boolean validate, boolean resolveDynamicRevisions) throws IOException, ParseException {
        ivy.deliver(mrid, revision, destIvyPattern,
            new DeliverOptions(status, pubdate, pdrResolver, validate, resolveDynamicRevisions,
                null));
    }

    // Overload defaulting resolveDynamicRevisions to true (the 1.4 behaviour).
    public void deliver(ModuleRevisionId mrid, String revision, File cache, String destIvyPattern,
            String status, Date pubdate, PublishingDependencyRevisionResolver pdrResolver,
            boolean validate) throws IOException, ParseException {
        deliver(mrid, revision, cache, destIvyPattern, status, pubdate, pdrResolver, validate,
            true);
    }

    public Map<ArtifactDownloadReport, Set<String>> determineArtifactsToCopy(ModuleId moduleId,
            String[] confs, File cache, String destFilePattern, String destIvyPattern,
            Filter<Artifact> artifactFilter) throws ParseException, IOException {
        return ivy.getRetrieveEngine().determineArtifactsToCopy(
            new ModuleRevisionId(moduleId, Ivy.getWorkingRevision()), destFilePattern,
            new RetrieveOptions().setConfs(confs).setDestIvyPattern(destIvyPattern)
                .setArtifactFilter(artifactFilter));
    }

    public Map<ArtifactDownloadReport, Set<String>> determineArtifactsToCopy(ModuleId moduleId,
            String[] confs, File cache, String destFilePattern, String destIvyPattern)
            throws ParseException, IOException {
        return ivy.getRetrieveEngine().determineArtifactsToCopy(
            new ModuleRevisionId(moduleId, Ivy.getWorkingRevision()), destFilePattern,
            new RetrieveOptions().setConfs(confs).setDestIvyPattern(destIvyPattern));
    }

    // cache/useOrigin are no longer supported in 2.x; logged as deprecated and ignored.
    public ArtifactDownloadReport download(Artifact artifact, File cache, boolean useOrigin) {
        Message.deprecated("using cache and useOrigin when calling download is not supported anymore");
        return ivy.getResolveEngine().download(artifact, new DownloadOptions());
    }

    public ResolvedModuleRevision findModule(ModuleRevisionId id) {
        ResolveOptions options = new ResolveOptions();
        options.setValidate(false);
        return ivy.getResolveEngine().findModule(id, options);
    }

    public IvyNode[] getDependencies(ModuleDescriptor md, String[] confs, File cache, Date date,
            ResolveReport report, boolean validate, boolean transitive) {
        return ivy.getResolveEngine().getDependencies(md,
            newResolveOptions(confs, null, cache, date, validate, false, transitive, false, true,
                true, FilterHelper.NO_FILTER), report);
    }

    public IvyNode[] getDependencies(ModuleDescriptor md, String[] confs, File cache, Date date,
            ResolveReport report, boolean validate) {
        return ivy.getResolveEngine().getDependencies(md,
            newResolveOptions(confs, null, cache, date, validate, false, true, false, true, true,
                FilterHelper.NO_FILTER), report);
    }

    public IvyNode[] getDependencies(URL ivySource, String[] confs, File cache, Date date,
            boolean validate) throws ParseException, IOException {
        return ivy.getResolveEngine().getDependencies(ivySource,
            newResolveOptions(confs, null, cache, date, validate, false, true, false, true, true,
                FilterHelper.NO_FILTER));
    }

    public String getVariable(String name) {
        return ivy.getVariable(name);
    }

    public ResolveReport install(ModuleRevisionId mrid, String from, String to, boolean transitive,
            boolean validate, boolean overwrite, Filter<Artifact> artifactFilter, File cache,
            String matcherName) throws IOException {
        return ivy.install(mrid, from, to,
            new InstallOptions().setTransitive(transitive).setValidate(validate)
                .setOverwrite(overwrite).setArtifactFilter(artifactFilter)
                .setMatcherName(matcherName));
    }

    public void interrupt() {
        ivy.interrupt();
    }

    public void interrupt(Thread operatingThread) {
        ivy.interrupt(operatingThread);
    }

    public boolean isInterrupted() {
        return ivy.isInterrupted();
    }

    public ModuleEntry[] listModuleEntries(OrganisationEntry org) {
        return ivy.listModuleEntries(org);
    }

    public ModuleId[] listModules(ModuleId criteria, PatternMatcher matcher) {
        return ivy.listModules(criteria, matcher);
    }

    public ModuleRevisionId[] listModules(ModuleRevisionId criteria, PatternMatcher matcher) {
        return ivy.listModules(criteria, matcher);
    }

    public String[] listModules(String org) {
        return ivy.listModules(org);
    }

    public OrganisationEntry[] listOrganisationEntries() {
        return ivy.listOrganisationEntries();
    }

    public String[] listOrganisations() {
        return ivy.listOrganisations();
    }

    public RevisionEntry[] listRevisionEntries(ModuleEntry module) {
        return ivy.listRevisionEntries(module);
    }

    public String[] listRevisions(String org, String module) {
        return ivy.listRevisions(org, module);
    }

    public String[] listTokenValues(String token, Map<String, Object> otherTokenValues) {
        return ivy.listTokenValues(token, otherTokenValues);
    }

    public Collection<Artifact> publish(ModuleDescriptor md, DependencyResolver resolver,
            Collection<String> srcArtifactPattern, String srcIvyPattern, Artifact[] extraArtifacts,
            boolean overwrite, String conf) throws IOException {
        return ivy.getPublishEngine().publish(md, srcArtifactPattern, resolver,
            new PublishOptions().setSrcIvyPattern(srcIvyPattern).setExtraArtifacts(extraArtifacts)
                .setOverwrite(overwrite).setConfs(splitToArray(conf)));
    }

    public Collection<Artifact> publish(ModuleRevisionId mrid, String pubrevision, File cache,
            Collection<String> srcArtifactPattern, String resolverName, String srcIvyPattern,
            String status, Date pubdate, Artifact[] extraArtifacts, boolean validate,
            boolean overwrite, boolean update, String conf) throws IOException {
        return ivy.publish(mrid, srcArtifactPattern, resolverName,
            new PublishOptions().setStatus(status).setPubdate(pubdate).setPubrevision(pubrevision)
                .setSrcIvyPattern(srcIvyPattern).setExtraArtifacts(extraArtifacts)
                .setUpdate(update).setValidate(validate).setOverwrite(overwrite)
                .setConfs(splitToArray(conf)));
    }

    public Collection<Artifact> publish(ModuleRevisionId mrid, String pubrevision, File cache,
            String srcArtifactPattern, String resolverName, String srcIvyPattern, boolean validate,
            boolean overwrite) throws IOException {
        return ivy.publish(mrid, Collections.singleton(srcArtifactPattern), resolverName,
            new PublishOptions().setPubrevision(pubrevision).setSrcIvyPattern(srcIvyPattern)
                .setValidate(validate).setOverwrite(overwrite));
    }

    public Collection<Artifact> publish(ModuleRevisionId mrid, String pubrevision, File cache,
            String srcArtifactPattern, String resolverName, String srcIvyPattern, boolean validate)
            throws IOException {
        return
ivy.publish(mrid, Collections.singleton(srcArtifactPattern), resolverName, new PublishOptions().setPubrevision(pubrevision).setSrcIvyPattern(srcIvyPattern) .setValidate(validate)); } public Collection<Artifact> publish(ModuleRevisionId mrid, String pubrevision, File cache, String srcArtifactPattern, String resolverName, String srcIvyPattern, String status, Date pubdate, Artifact[] extraArtifacts, boolean validate, boolean overwrite, boolean update, String conf) throws IOException { return ivy.publish(mrid, Collections.singleton(srcArtifactPattern), resolverName, new PublishOptions().setStatus(status).setPubdate(pubdate).setPubrevision(pubrevision) .setSrcIvyPattern(srcIvyPattern).setExtraArtifacts(extraArtifacts) .setUpdate(update).setValidate(validate).setOverwrite(overwrite) .setConfs(splitToArray(conf))); } public ResolveReport resolve(File ivySource) throws ParseException, IOException { return ivy.resolve(ivySource); } public ResolveReport resolve(ModuleDescriptor md, String[] confs, File cache, Date date, boolean validate, boolean useCacheOnly, boolean transitive, boolean useOrigin, boolean download, boolean outputReport, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(md, newResolveOptions(confs, null, cache, date, validate, useCacheOnly, transitive, useOrigin, download, outputReport, artifactFilter)); } private ResolveOptions newResolveOptions(String[] confs, String revision, File cache, Date date, boolean validate, boolean useCacheOnly, boolean transitive, boolean useOrigin, boolean download, boolean outputReport, Filter<Artifact> artifactFilter) { if (useOrigin) { ivy.getSettings().useDeprecatedUseOrigin(); } return new ResolveOptions().setConfs(confs).setRevision(revision).setValidate(validate) .setUseCacheOnly(useCacheOnly).setTransitive(transitive).setDownload(download) .setOutputReport(outputReport).setArtifactFilter(artifactFilter); } public ResolveReport resolve(ModuleDescriptor md, String[] confs, File cache, 
Date date, boolean validate, boolean useCacheOnly, boolean transitive, boolean download, boolean outputReport, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(md, newResolveOptions(confs, null, cache, date, validate, useCacheOnly, transitive, false, download, outputReport, artifactFilter)); } public ResolveReport resolve(ModuleDescriptor md, String[] confs, File cache, Date date, boolean validate, boolean useCacheOnly, boolean transitive, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(md, newResolveOptions(confs, null, cache, date, validate, useCacheOnly, transitive, false, true, true, artifactFilter)); } public ResolveReport resolve(ModuleDescriptor md, String[] confs, File cache, Date date, boolean validate, boolean useCacheOnly, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(md, newResolveOptions(confs, null, cache, date, validate, useCacheOnly, true, false, true, true, artifactFilter)); } public ResolveReport resolve(ModuleRevisionId mrid, String[] confs, boolean transitive, boolean changing, File cache, Date date, boolean validate, boolean useCacheOnly, boolean useOrigin, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(mrid, newResolveOptions(confs, null, cache, date, validate, useCacheOnly, transitive, useOrigin, true, true, artifactFilter), changing); } public ResolveReport resolve(ModuleRevisionId mrid, String[] confs, boolean transitive, boolean changing, File cache, Date date, boolean validate, boolean useCacheOnly, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(mrid, newResolveOptions(confs, null, cache, date, validate, useCacheOnly, transitive, false, true, true, artifactFilter), changing); } public ResolveReport resolve(ModuleRevisionId mrid, String[] confs) throws ParseException, IOException { return ivy.resolve(mrid, newResolveOptions(confs, 
null, ivy.getSettings().getDefaultCache(), null, true, false, true, false, true, true, FilterHelper.NO_FILTER), false); } public ResolveReport resolve(URL ivySource, String revision, String[] confs, File cache, Date date, boolean validate, boolean useCacheOnly, boolean transitive, boolean useOrigin, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(ivySource, newResolveOptions(confs, revision, cache, date, validate, useCacheOnly, transitive, useOrigin, true, true, artifactFilter)); } public ResolveReport resolve(URL ivySource, String revision, String[] confs, File cache, Date date, boolean validate, boolean useCacheOnly, boolean transitive, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(ivySource, newResolveOptions(confs, revision, cache, date, validate, useCacheOnly, transitive, false, true, true, artifactFilter)); } public ResolveReport resolve(URL ivySource, String revision, String[] confs, File cache, Date date, boolean validate, boolean useCacheOnly, Filter<Artifact> artifactFilter) throws ParseException, IOException { return ivy.resolve(ivySource, newResolveOptions(confs, revision, cache, date, validate, useCacheOnly, true, false, true, true, artifactFilter)); } public ResolveReport resolve(URL ivySource, String revision, String[] confs, File cache, Date date, boolean validate, boolean useCacheOnly) throws ParseException, IOException { return ivy.resolve(ivySource, newResolveOptions(confs, revision, cache, date, validate, useCacheOnly, true, false, true, true, FilterHelper.NO_FILTER)); } public ResolveReport resolve(URL ivySource, String revision, String[] confs, File cache, Date date, boolean validate) throws ParseException, IOException { return ivy.resolve(ivySource, newResolveOptions(confs, revision, cache, date, validate, false, true, false, true, true, FilterHelper.NO_FILTER)); } public ResolveReport resolve(URL ivySource) throws ParseException, IOException { return 
ivy.resolve(ivySource); } public int retrieve(ModuleId moduleId, String[] confs, File cache, String destFilePattern, String destIvyPattern, Filter<Artifact> artifactFilter, boolean sync, boolean useOrigin, boolean makeSymlinks) { try { return ivy.retrieve(new ModuleRevisionId(moduleId, Ivy.getWorkingRevision()), new RetrieveOptions().setConfs(confs).setDestArtifactPattern(destFilePattern) .setDestIvyPattern(destIvyPattern) .setArtifactFilter(artifactFilter).setSync(sync).setUseOrigin(useOrigin) .setMakeSymlinks(makeSymlinks)).getNbrArtifactsCopied(); } catch (IOException e) { throw new RuntimeException(e); } } public int retrieve(ModuleId moduleId, String[] confs, File cache, String destFilePattern, String destIvyPattern, Filter<Artifact> artifactFilter, boolean sync, boolean useOrigin) { try { return ivy.retrieve(new ModuleRevisionId(moduleId, Ivy.getWorkingRevision()), new RetrieveOptions().setConfs(confs).setDestArtifactPattern(destFilePattern) .setDestIvyPattern(destIvyPattern) .setArtifactFilter(artifactFilter).setSync(sync) .setUseOrigin(useOrigin)).getNbrArtifactsCopied(); } catch (IOException e) { throw new RuntimeException(e); } } public int retrieve(ModuleId moduleId, String[] confs, File cache, String destFilePattern, String destIvyPattern, Filter<Artifact> artifactFilter) { try { return ivy.retrieve(new ModuleRevisionId(moduleId, Ivy.getWorkingRevision()), new RetrieveOptions().setConfs(confs).setDestArtifactPattern(destFilePattern) .setDestIvyPattern(destIvyPattern) .setArtifactFilter(artifactFilter)).getNbrArtifactsCopied(); } catch (IOException e) { throw new RuntimeException(e); } } public int retrieve(ModuleId moduleId, String[] confs, File cache, String destFilePattern, String destIvyPattern) { try { return ivy.retrieve(new ModuleRevisionId(moduleId, Ivy.getWorkingRevision()), new RetrieveOptions().setConfs(confs).setDestArtifactPattern(destFilePattern) .setDestIvyPattern(destIvyPattern)).getNbrArtifactsCopied(); } catch (IOException e) { throw 
new RuntimeException(e); } } public int retrieve(ModuleId moduleId, String[] confs, File cache, String destFilePattern) { try { return ivy.retrieve(new ModuleRevisionId(moduleId, Ivy.getWorkingRevision()), new RetrieveOptions().setConfs(confs) .setDestArtifactPattern(destFilePattern)).getNbrArtifactsCopied(); } catch (IOException e) { throw new RuntimeException(e); } } public void setVariable(String varName, String value) { ivy.setVariable(varName, value); } public List<ModuleDescriptor> sortModuleDescriptors( Collection<ModuleDescriptor> moduleDescriptors) { return ivy.sortModuleDescriptors(moduleDescriptors, new SortOptions() .setNonMatchingVersionReporter(new SilentNonMatchingVersionReporter())); } public List<IvyNode> sortNodes(Collection<IvyNode> nodes) { return ivy.sortNodes(nodes, new SortOptions() .setNonMatchingVersionReporter(new SilentNonMatchingVersionReporter())); } public String substitute(String str) { return ivy.substitute(str); } // CheckStyle:ParameterNumberCheck ON }
/******************************************************************************* * Copyright (c) 2006-2007 University of Toronto Database Group * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software * is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE * OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*******************************************************************************/ package simfunctions; import java.sql.ResultSet; import java.util.ArrayList; import java.util.BitSet; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Set; import java.util.TreeSet; import java.util.Vector; import dbdriver.MySqlDB; import utility.Config; public class RunIBClusteringAP2 { public static int queryTokenLength = 2; public static Vector<Vector<Integer>> subsets(Vector<Integer> set, int size){ Vector<Vector<Integer>> output = new Vector<Vector<Integer>>(); if (size == 1) { for (Integer vi: set){ Vector<Integer> v = new Vector<Integer>(); v.add(vi); output.add(v); } } else { for (int i = 1; i <= set.size()-(size-1); i++){ Vector<Integer> set2 = new Vector<Integer>(); for (int j = i+1; j <= set.size(); j++) set2.add(set.elementAt(j-1)); for (Vector<Integer> sset : subsets(set2, size-1)){ Vector<Integer> v = new Vector<Integer>(); v.add(set.elementAt(i-1)); for (Integer vi:sset) v.add(vi); output.add(v); } } } return output; } public static void findTrueClusters(String tablename, HashMap<Integer,Integer> trueCluster, HashMap<Integer,BitSet> trueMembers) { Config config = new Config(); MySqlDB mysqlDB = new MySqlDB(config.returnURL(), config.user, config.passwd); String sql = ""; try { sql = " SELECT c1.tid as tid1, c2.tid as tid2 FROM " + config.dbName + "." + tablename + " c1," + config.dbName + "." 
+ tablename + " c2" + " where c1.id=c2.id "; ResultSet rs = mysqlDB.executeQuery(sql); //t4 = System.currentTimeMillis(); //System.out.println("DEBUG INFO: " + (t4-t3) + "ms"); //BitSet isThere = new BitSet(count); BitSet isThere = new BitSet(); int maxId = 0; rs.beforeFirst(); while (rs.next()){ //System.out.println(rs.getInt(1) + " " + rs.getInt(2)); int tid1 = rs.getInt(1); int tid2 = rs.getInt(2); if (!isThere.get(tid1)) { if (!isThere.get(tid2)) { maxId++; trueCluster.put(tid1,maxId); trueCluster.put(tid2,maxId); BitSet mems = new BitSet(); mems.set(tid1); mems.set(tid2); trueMembers.put(maxId, mems); isThere.set(tid1); isThere.set(tid2); } else { int cId = trueCluster.get(tid2); trueCluster.put(tid1, cId); BitSet mems = trueMembers.get(cId); mems.set(tid1); trueMembers.put(cId, mems); isThere.set(tid1); } } else { if (!isThere.get(tid2)) { int cId = trueCluster.get(tid1); trueCluster.put(tid2, cId); BitSet mems = trueMembers.get(cId); mems.set(tid2); trueMembers.put(cId, mems); isThere.set(tid2); } } } //System.out.println(trueCluster); //System.out.println(trueMembers); } catch (Exception e) { System.out.println("Database error"); e.printStackTrace(); } } public static void findPairsWSign(String tablename, Preprocess measure, String pairTable){ Config config = new Config(); MySqlDB mysqlDB = new MySqlDB(config.returnURL(), config.user, config.passwd); long t2, t3; t2 = System.currentTimeMillis(); boolean log_pairs_to_db = true; String sql = " SELECT s1.tid as tid1, s2.tid as tid2 " + " FROM cnamesu.`sign` s1, cnamesu.`sign` s2 " + " WHERE s1.sign = s2.sign and s1.tid <= s2.tid " + " GROUP BY tid1,tid2 "; try { if (log_pairs_to_db){ String query = "drop table if exists " + config.dbName + "." + pairTable; mysqlDB.executeUpdate(query); query = "create table " + config.dbName + "." + pairTable + " (tid1 int, tid2 int)"; mysqlDB.executeUpdate(query); query = "INSERT INTO " + config.dbName + "." 
+ pairTable + "( " + sql + " )"; mysqlDB.executeUpdate(query); t3 = System.currentTimeMillis(); System.out.println("Similar Pairs Generation: " + (t3-t2) + "ms"); } } catch (Exception e) { System.out.println("Database error"); e.printStackTrace(); } } public static void evaluate(HashMap<Integer,BitSet> clusters, HashMap<Integer,BitSet> members, HashMap<Integer,Integer> trueCluster, HashMap<Integer,BitSet> trueMembers){ /* * * Evaluation of clustering - Precision and Recall * */ double SumP = 0; double SumR = 0; int totalSize = 0; int ClusterCount = trueMembers.keySet().size(); for (Integer cId:trueMembers.keySet()){ int correctCount = 0; Vector<Integer> v = new Vector<Integer>(); BitSet mems = trueMembers.get(cId); int t = mems.nextSetBit(0); while (t != -1){ v.add(t); t = mems.nextSetBit(t+1); } //System.out.println(v); int count = 0 ; for (Vector<Integer> pair:subsets(v,2)){ count ++; //System.out.println(pair); //if (cluster.get(pair.get(0)) != cluster.get(pair.get(1))) System.out.print(" " + pair.get(0) + "," + pair.get(1) + " "); //if (cluster.get(pair.get(0)) != cluster.get(pair.get(1))) System.out.println(" " + cluster.get(pair.get(0)) + "," + cluster.get(pair.get(1)) + " "); BitSet m = (BitSet) clusters.get(pair.get(0)).clone(); m.and(clusters.get(pair.get(1))); if ( m.cardinality() != 0) { correctCount ++; //System.out.print("*" + correctCount + "*" + count + "* "); } //System.out.println(pair); } double precision = (count!=0) ? 
Math.min((double)((1.0*correctCount )/(1.0*count)),1) : 0; //System.out.println( " " + precision ); SumP += precision; //if (precision != 1) System.err.println("oops!"); if (count==0) ClusterCount--; } System.out.println( " Number of Clusters: " + members.size() ); System.out.println( " Ground Truth Clusters with size >= 2: " + ClusterCount ); //System.out.println( " Average CPrecision for all records: " + (SumP/(double)trueMembers.keySet().size()) ); System.out.println( " Average CPrecision for all records: " + (SumP/(double)ClusterCount) ); double penalty = (double)trueMembers.size()/members.size(); double pcp = (penalty>=1) ? (SumP/(double)ClusterCount) : (SumP/(double)ClusterCount)*penalty; System.out.println( " Average Penalized CPrecision for all records: " + pcp ); //System.out.println( " Average CPrecision for records present in clusters: " + (SumP/(double)ClusterCount) ); ClusterCount = members.keySet().size(); SumP = 0; SumR = 0; totalSize = 0; // For each ground truth cluster g_i: for (Integer gcId:trueMembers.keySet()){ // // Find argmax_{c_j} ( | intersect(c_j,g_i) | / union(c_j,g_i) || ) // BitSet gc = trueMembers.get(gcId); int argmax = 0; double max = 0.0; BitSet inm = new BitSet(); //BitSet unm = new BitSet(); for (Integer cId:members.keySet()){ BitSet in = (BitSet) members.get(cId).clone(); BitSet un = (BitSet) members.get(cId).clone(); in.and(gc); un.or(gc); double jc = (double) in.cardinality() / (double) un.cardinality(); if (jc > max) { inm = in; //unm = un; max = jc; argmax = cId; } } if (argmax == 0){ System.err.println("Cluster " + gcId + " doesn't match "); } //else System.out.println("Cluster " + gcId + " matches " + argmax); double csize = members.get(argmax).cardinality(); double precision = ((double) inm.cardinality() / csize); double recall = ((double) inm.cardinality() / gc.cardinality()); SumP += gc.cardinality()*precision; SumR += gc.cardinality()*recall; //System.out.println("re: " + recall ); totalSize += gc.cardinality(); } 
double Pr = (SumP/totalSize); double Re = (SumR/totalSize); System.out.println( " Average Precision for all records: " + Pr ); System.out.println( " Average Recall for all records: " + Re ); System.out.println( " Average F1 for all records: " + (2*Pr*Re)/(Pr+Re) ); } public static BitSet convertToBitSet2(Set<String> stringSet){ BitSet output = new BitSet(); for (String qgram : stringSet) { output.set((qgram.charAt(1) << 7) | qgram.charAt(0)); } return output; } public static HashMap getSortedMap(HashMap hmap) { HashMap map = new LinkedHashMap(); List mapKeys = new ArrayList(hmap.keySet()); List mapValues = new ArrayList(hmap.values()); hmap.clear(); TreeSet sortedSet = new TreeSet(mapValues); Object[] sortedArray = sortedSet.toArray(); int size = sortedArray.length; // a) Ascending sort for (int i=0; i<size; i++) { map.put(mapKeys.get(mapValues.indexOf(sortedArray[i])), sortedArray[i]); } return map; } public static BitSet getCluster(HashMap<Integer, String> strs, Vector<Integer> tids, HashMap<Integer, Double> scores, BitSet pr, Double thr2){ boolean debug_mode = false; HashMap<Integer, BitSet> C = new HashMap<Integer, BitSet>(); HashMap<Integer, Double> p_x = new HashMap<Integer, Double>(); HashMap<Integer, HashMap<Integer,Double>> p_y_x = new HashMap<Integer, HashMap<Integer,Double>>(); // initialize clusters BitSet X = new BitSet(); BitSet Y = new BitSet(); HashMap<Integer, Double> Yidf = new HashMap<Integer, Double>(); HashMap<Integer, Double> SumIdf = new HashMap<Integer, Double>(); int totalsize = 0; for (int tid: strs.keySet()){ X.set(tid); String str = strs.get(tid); BitSet strSet = convertToBitSet2(RunProbabilityAssignment.getTF(str).keySet()); int i = strSet.nextSetBit(0); double sumidf = 0.0; while (i!=-1){ Y.set(i); if (!Yidf.containsKey(i)) { Yidf.put(i, Math.log(1.0 + 1.0)); } else { Yidf.put( i, (1.0 / Math.log(1.0 + ((1.0/Yidf.get(i))+1.0) )) ); } sumidf += Yidf.get(i); i = strSet.nextSetBit(i+1); } SumIdf.put(tid, sumidf); totalsize += 
strSet.cardinality(); } for (int tid: strs.keySet()){ X.set(tid); BitSet cMember = new BitSet(); cMember.set(tid); C.put(tid, cMember); String str = strs.get(tid); BitSet strSet = convertToBitSet2(RunProbabilityAssignment.getTF(str).keySet()); HashMap<Integer,Double> py = new HashMap<Integer,Double>(); int i = strSet.nextSetBit(0); while (i!=-1){ Y.set(i); //py.put(i, 1.0/strSet.cardinality()); py.put(i, Yidf.get(i)/SumIdf.get(tid)); i = strSet.nextSetBit(i+1); } p_x.put(tid, 1.0/strs.size()); //p_x.put(tid, ((double)strSet.cardinality())/totalsize*1.0 ); p_y_x.put(tid, py); } HashMap<Integer, Double> p_c = new HashMap<Integer, Double>(); HashMap<Integer, HashMap<Integer, Double>> p_y_c = new HashMap<Integer, HashMap<Integer, Double>>(); for (int cid: C.keySet()){ BitSet xs = C.get(cid); Double spx = 0.0; int i = xs.nextSetBit(0); while (i!=-1){ //Y.set(i); spx += p_x.get(i); i = xs.nextSetBit(i+1); } p_c.put(cid, spx); HashMap<Integer,Double> sum_px_pyx = new HashMap<Integer,Double>(); i = xs.nextSetBit(0); while (i!=-1){ //Y.set(i); for (int y:p_y_x.get(i).keySet()){ if (sum_px_pyx.containsKey(y)) { sum_px_pyx.put(y, sum_px_pyx.get(cid) + ( p_x.get(i)*p_y_x.get(i).get(y) / p_c.get(cid) ) ); } else sum_px_pyx.put(y, ( p_x.get(i)*p_y_x.get(i).get(y) / p_c.get(cid) ) ); } i = xs.nextSetBit(i+1); } p_y_c.put(cid, sum_px_pyx ); } if (debug_mode) { System.out.println("X: " + X); System.out.println("Y: " + Y); System.out.println("Yidf: " + Yidf); System.out.println("Sumidf: " + SumIdf); System.out.println("C: " + C); System.out.println("p_y_c: " + p_y_c); System.out.println("p_y_x: " + p_y_x); System.out.println(); } // Merge clusters ci,cj :: merge(clusters, p_c, p_y_c, i, j) int cid1 = tids.firstElement(); pr.set(cid1); //merge(C, cid1, cid2, p_c, p_y_c); double il = 0.0; //il = infoLoss(C, cid1, 2, p_c, p_y_c); //System.out.println(" 2: " + il); double avgil = 0.0; int i = 0; BitSet cluster = new BitSet(); HashMap<Integer, Double> ils = new HashMap<Integer, 
Double>(); int count = 0; for (int tid2: tids){ i++; if (scores.get(tid2) >= thr2){ merge(C, cid1, tid2, p_c, p_y_c); cluster.set(tid2); pr.set(tid2); if (debug_mode) { System.out.println(" " + tid2 + " merged."); } } else { il = infoLoss(C, cid1, tid2, p_c, p_y_c); ils.put(tid2,il); avgil += il; count++; //System.out.println(" " + tid2 + ": " + il); } } //avgil = avgil/(strs.size()-1); if (debug_mode) { avgil = avgil/(count); System.out.println("Avg: " + (avgil)); System.out.println(); } //if (count == 0) System.out.println( " ** ** " + tids); //Collections.sort(ils); if (debug_mode) { ils = getSortedMap(ils); int ii = 0; for (Integer d: ils.keySet()){ System.out.println(" " + d + ": " + ils.get(d)); ii++; } } //System.out.println(ils); double stddev = 0.0; for (Integer d: ils.keySet()){ double ili = ils.get(d); stddev += ((avgil - ili)*(avgil - ili)); } stddev = Math.sqrt((stddev/ils.size())); for (Integer tid: ils.keySet()) { if (ils.get(tid) < avgil - 1.0*stddev*stddev) cluster.set(tid); } /* merge(C, cid1, cid2, p_c, p_y_c); System.out.println("C: " + C); System.out.println("p_y_c: " + p_y_c); System.out.println("p_c: " + p_c); */ return cluster; } public static void merge (HashMap<Integer, BitSet> C, Integer cidi, Integer cidj, HashMap<Integer, Double> p_c, HashMap<Integer, HashMap<Integer, Double>> p_y_c){ BitSet cmems = C.get(cidi); cmems.or(C.get(cidj)); C.remove(cidj); C.put(cidi, cmems); double pci = p_c.get(cidi); double pcj = p_c.get(cidj); double pcs = pci + pcj; HashMap<Integer,Double> py = new HashMap<Integer,Double>(); for (int y:p_y_c.get(cidj).keySet()){ if (!p_y_c.get(cidi).containsKey(y)){ p_y_c.get(cidi).put(y,0.0); } } for (int y:p_y_c.get(cidi).keySet()){ if (!p_y_c.get(cidj).containsKey(y)){ p_y_c.get(cidj).put(y,0.0); } Double p = ( (pci/pcs) * p_y_c.get(cidi).get(y) ) + ( (pcj/pcs) * p_y_c.get(cidj).get(y) ); py.put(y,p); } p_y_c.remove(cidj); p_y_c.put(cidi, py); p_c.remove(cidj); p_c.put(cidi, pcs); } public static double 
infoLoss(HashMap<Integer, BitSet> C, Integer cidi, Integer cidj, HashMap<Integer, Double> p_c, HashMap<Integer, HashMap<Integer, Double>> p_y_c){ double il = 0.0; double pci = p_c.get(cidi); double pcj = p_c.get(cidj); double pcs = pci + pcj; HashMap<Integer,Double> pi = p_y_c.get(cidi); HashMap<Integer,Double> pj = p_y_c.get(cidj); HashMap<Integer,Double> ph = new HashMap<Integer,Double>(); for (int y:p_y_c.get(cidj).keySet()){ if (!p_y_c.get(cidi).containsKey(y)){ p_y_c.get(cidi).put(y,0.0); } } for (int y:p_y_c.get(cidi).keySet()){ if (!p_y_c.get(cidj).containsKey(y)){ p_y_c.get(cidj).put(y,0.0); } Double p = ( (pci/pcs) * pi.get(y) ) + ( (pcj/pcs) * pj.get(y) ); ph.put(y,p); } double djs = (pci/pcs)*dkl(pi,ph) + (pcj/pcs)*dkl(pj,ph); //System.out.println("pj:" + pj); //System.out.println("ph:" + ph); //System.out.println("djs:" + dkl(pj,ph)); return (pci+pcj)*djs; } public static double dkl (HashMap<Integer,Double> p1, HashMap<Integer,Double> p2){ double sum = 0.0; for (int x:p1.keySet()){ //System.out.println("*: " + sum); if ((p2.containsKey(x))&&(p1.get(x)!=0)) sum += p1.get(x) * Math.log( p1.get(x)/p2.get(x) ); } return sum; } public static void findClustersM(String tablename, Preprocess measure, HashMap<Integer,Integer> cluster, HashMap<Integer,BitSet> members, Double thr1, Double thr2){ boolean debug_mode = true; ResultSet rs; Config config = new Config(); MySqlDB mysqlDB = new MySqlDB(config.returnURL(), config.user, config.passwd); String scoreTable = "scores_" + tablename + "_" + Preprocess.extractMetricName(measure.getClass().getName()); boolean log_pairs_to_db = true; //long t2, t3; //t2 = System.currentTimeMillis(); String sql = " SELECT s.tid1, s.tid2, s.score, c.`string` " + " FROM " + scoreTable + " s, " + tablename + " c " + " where s.tid2 = c.tid and score >= " + thr1 + " order by tid1, score desc"; /* " SELECT s.tid1 as tid1, s.tid2 as tid2, score " + " FROM " + scoreTable + " s " + " WHERE s.score >= " + thr + " order by tid1, score desc"; */ 
// Score matrix: tid1 -> (tid2 -> similarity score), filled from the query result below.
HashMap<Integer, HashMap<Integer, Double>> scores = new HashMap<Integer, HashMap<Integer, Double>>();
// Per-tid1 list of candidate tid2s, in result-set order.
HashMap<Integer, Vector<Integer>> tid2sOrder = new HashMap<Integer, Vector<Integer>>();
// tid1 values in the order they were first seen (may contain duplicates).
Vector<Integer> tid1sOrder = new Vector<Integer>();
// tid2 -> raw string payload from column 4 of the result set.
HashMap<Integer, String> data = new HashMap<Integer, String>();
try {
    rs = mysqlDB.executeQuery(sql);
    while (rs.next()){
        int tid1 = rs.getInt(1);
        int tid2 = rs.getInt(2);
        double score = rs.getDouble(3);
        tid1sOrder.add(tid1);
        String str = rs.getString(4);
        data.put(tid2, str);
        if (!scores.containsKey(tid1)){
            // First pair for this tid1: create its score map and candidate list.
            HashMap<Integer, Double> tscores = new HashMap<Integer, Double>();
            tscores.put(tid2, score);
            scores.put(tid1, tscores);
            Vector<Integer> tids = new Vector<Integer>();
            tids.add(tid2);
            tid2sOrder.put(tid1, tids);
        }
        else{
            // Existing tid1: extend its score map and candidate list.
            HashMap<Integer, Double> tscores = scores.get(tid1);
            tscores.put(tid2, score);
            scores.put(tid1, tscores);
            Vector<Integer> tids = tid2sOrder.get(tid1);
            tids.add(tid2);
            tid2sOrder.put(tid1, tids);
        }
    }
} catch (Exception e) {
    // NOTE(review): broad catch hides SQL vs. other failures; consider narrowing.
    System.out.println("Database error");
    e.printStackTrace();
}
// pr marks tids already processed/assigned; bits are set inside getCluster (presumably) — TODO confirm.
BitSet pr = new BitSet();
for (int tid1: tid1sOrder){
    if (!pr.get(tid1)){
        Vector<Integer> tids = tid2sOrder.get(tid1);
        HashMap<Integer, Double> tscores = scores.get(tid1);
        // Collect candidates already claimed by a previous cluster...
        Vector<Integer> marked = new Vector<Integer>();
        for (int tid: tids){
            if (pr.get(tid)) {
                marked.add(tid);
            }
        }
        // ...and drop them from this tid1's candidate list and score map.
        for (int tid:marked){
            tids.removeElement(tid);
            tscores.remove(tid);
        }
        // Remaining candidates' payload strings, keyed by tid.
        HashMap<Integer,String> strs = new HashMap<Integer,String>();
        for (int tid:tids){
            if (!pr.get(tid))
                strs.put(tid, data.get(tid));
        }
        BitSet clusterMems = getCluster(strs, tids, tscores, pr, thr2);
        if (debug_mode)
            System.out.println("Cluster Mem.s: " + clusterMems);
        members.put(tid1, clusterMems);
        // NOTE(review): disabled alternative that marked members as processed here;
        // kept for reference — delete once confirmed obsolete.
        /* int i = clusterMems.nextSetBit(0); while (i != -1){ pr.set(i); tid2sOrder.remove(i); cluster.put(i, tid1); i = clusterMems.nextSetBit(i+1); } */
    }
}
}

/**
 * Entry point: runs (optionally) a similarity join on the given table, finds clusters,
 * merges near-identical clusters, and evaluates the result against ground truth.
 * Timings for each phase are printed to stdout.
 */
public static void main(String[] args) {
    String tablename = "cu6";
    // NOTE(review): t5 and tf are never used (tf only in commented-out code below).
    long t1, t2, t3, t4, t5, tf;
    t1 = System.currentTimeMillis();
    /* Call Similarity Join */
    Preprocess bm25WeightedJaccard = new WeightedJaccardBM25();
    Preprocess measure = bm25WeightedJaccard;
    t1 = System.currentTimeMillis();
    //SimilarityJoin.run(tablename, measure, 0.2);
    t2 = System.currentTimeMillis();
    System.out.println("Similarity Join: " + (t2-t1) + "ms");
    HashMap<Integer,Integer> cluster = new HashMap<Integer,Integer>();
    HashMap<Integer,BitSet> members = new HashMap<Integer,BitSet>();
    findClustersM(tablename, measure, cluster, members, 0.2, 0.4);
    System.out.println("Cluster Count before merge: " + members.size() );
    // Merge phase: absorb cluster c2 into c1 when both have >2 members and
    // c2 is (almost) contained in c1 (overlap within 2 of |c2|).
    Vector<Integer> cidlist = new Vector<Integer>();
    for (int cid1:members.keySet()){
        for (int cid2:members.keySet()){
            if (cid1!=cid2) {
                BitSet c1 = members.get(cid1);
                BitSet c2 = members.get(cid2);
                if ((c1.cardinality()>2)&&(c2.cardinality()>2)){
                    BitSet inter = new BitSet();
                    inter = (BitSet) c2.clone();
                    inter.and(c1);
                    if (inter.cardinality() >= (c2.cardinality()-2) ){
                        // Only absorb into clusters that were not themselves absorbed.
                        if (!cidlist.contains(cid1)){
                            c1.or(c2);
                            cidlist.add(cid2);
                            members.put(cid1, c1);
                        }
                    }
                }
            }
        }
    }
    for (int cid2:cidlist)
        members.remove(cid2);
    // Invert members: clusterNums maps each element tid -> set of cluster ids containing it.
    HashMap<Integer,BitSet> clusterNums = new HashMap<Integer,BitSet>();
    for (Integer cid:members.keySet()){
        BitSet mems = members.get(cid);
        int t = mems.nextSetBit(0);
        while (t != -1){
            if (clusterNums.containsKey(t)){
                BitSet b = clusterNums.get(t);
                b.set(cid);
                clusterNums.put(t, b);
            }
            else {
                BitSet b = new BitSet();
                b.set(cid);
                clusterNums.put(t, b);
            }
            t = mems.nextSetBit(t+1);
        }
    }
    System.out.println("Cluster Count: " + members.size() );
    t3 = System.currentTimeMillis();
    System.out.println("Finding Clusters took: " + (t3-t2) + "ms");
    // Finding clusters from pairs:
    t4 = System.currentTimeMillis();
    System.out.println("Time for clustering: " + (t4-t1) + "ms");
    //System.out.println(members.size());
    /* Evaluation of clustering - Precision and Recall */
    // Finding true clusters:
    HashMap<Integer,Integer> trueCluster = new HashMap<Integer,Integer>();
    HashMap<Integer,BitSet> trueMembers = new HashMap<Integer,BitSet>();
    findTrueClusters(tablename, trueCluster, trueMembers);
    //System.out.println(trueMembers);
    //System.out.println(trueMembers.get(223));
    //System.out.println(members);
    // NOTE(review): disabled average-cluster-size diagnostic; remove if no longer needed.
    /* int count = 0; int sumSize = 0; for (int cid:trueMembers.keySet()){ if (trueMembers.get(cid).cardinality() >= 0) count++; sumSize += trueMembers.get(cid).cardinality(); } System.out.println("** " + ((double)sumSize/count) ); */
    System.out.println();
    evaluate(clusterNums, members, trueCluster, trueMembers);
    //tf = System.currentTimeMillis();
    //System.out.println("Total Time: " + (tf-t1) + "ms");
}
}
/* * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.application.common.model; import org.apache.axiom.om.OMElement; import org.apache.commons.collections.CollectionUtils; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementWrapper; import javax.xml.bind.annotation.XmlRootElement; /** * Inbound authentication request configuration. 
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlRootElement(name = "InboundAuthenticationRequestConfig")
public class InboundAuthenticationRequestConfig implements Serializable {

    private static final long serialVersionUID = -62766721187073002L;

    @XmlElement(name = "InboundAuthKey")
    private String inboundAuthKey;

    @XmlElement(name = "InboundAuthType")
    private String inboundAuthType;

    @XmlElement(name = "InboundConfigType")
    private String inboundConfigType;

    @XmlElement(name = "friendlyName")
    private String friendlyName;

    @XmlElement(name = "inboundConfiguration")
    private String inboundConfiguration;

    // Always non-null; setProperties keeps entries sorted by display order.
    @XmlElementWrapper(name = "Properties")
    @XmlElement(name = "Property")
    private Property[] properties = new Property[0];

    /*
     * <InboundAuthenticationRequestConfig> <InboundAuthKey></InboundAuthKey>
     * <InboundAuthType></InboundAuthType> <Properties></Properties>
     * </InboundAuthenticationRequestConfig>
     */
    /**
     * Builds an {@code InboundAuthenticationRequestConfig} from its Axiom XML representation.
     * Unrecognized child elements are ignored; element-name matching is case-insensitive.
     *
     * @param inboundAuthenticationRequestConfigOM the XML element to parse, may be {@code null}
     * @return the parsed configuration, or {@code null} when the input element is {@code null}
     */
    public static InboundAuthenticationRequestConfig build(
            OMElement inboundAuthenticationRequestConfigOM) {

        if (inboundAuthenticationRequestConfigOM == null) {
            return null;
        }

        InboundAuthenticationRequestConfig inboundAuthenticationRequestConfig =
                new InboundAuthenticationRequestConfig();

        Iterator<?> members = inboundAuthenticationRequestConfigOM.getChildElements();

        while (members.hasNext()) {
            OMElement member = (OMElement) members.next();

            if ("InboundAuthKey".equalsIgnoreCase(member.getLocalName())) {
                inboundAuthenticationRequestConfig.setInboundAuthKey(member.getText());
            } else if ("InboundAuthType".equalsIgnoreCase(member.getLocalName())) {
                inboundAuthenticationRequestConfig.setInboundAuthType(member.getText());
            } else if ("InboundConfigType".equalsIgnoreCase(member.getLocalName())) {
                inboundAuthenticationRequestConfig.setInboundConfigType(member.getText());
            } else if ("friendlyName".equalsIgnoreCase(member.getLocalName())) {
                inboundAuthenticationRequestConfig.setFriendlyName(member.getText());
            } else if ("Properties".equalsIgnoreCase(member.getLocalName())) {
                Iterator<?> propertiesIter = member.getChildElements();
                List<Property> propertiesArrList = new ArrayList<Property>();
                if (propertiesIter != null) {
                    while (propertiesIter.hasNext()) {
                        OMElement propertiesElement = (OMElement) (propertiesIter.next());
                        Property prop = Property.build(propertiesElement);
                        // Property.build may return null for malformed entries; skip those.
                        if (prop != null) {
                            propertiesArrList.add(prop);
                        }
                    }
                }
                // propertiesArrList is never null here, so a plain emptiness check
                // suffices (no need for commons-collections CollectionUtils).
                if (!propertiesArrList.isEmpty()) {
                    inboundAuthenticationRequestConfig.setProperties(
                            propertiesArrList.toArray(new Property[0]));
                }
            }
        }
        return inboundAuthenticationRequestConfig;
    }

    /**
     * @return the inbound authentication key (e.g. the issuer/client identifier)
     */
    public String getInboundAuthKey() {
        return inboundAuthKey;
    }

    /**
     * @param inboundAuthKey the inbound authentication key
     */
    public void setInboundAuthKey(String inboundAuthKey) {
        this.inboundAuthKey = inboundAuthKey;
    }

    /**
     * @return the inbound authentication type
     */
    public String getInboundAuthType() {
        return inboundAuthType;
    }

    /**
     * @param inboundAuthType the inbound authentication type
     */
    public void setInboundAuthType(String inboundAuthType) {
        this.inboundAuthType = inboundAuthType;
    }

    /**
     * @return the UI type of the inbound authentication config
     */
    public String getInboundConfigType() {
        return inboundConfigType;
    }

    /**
     * Sets the UI type of the inbound authentication config.
     *
     * @param inboundConfigType the UI type to set
     */
    public void setInboundConfigType(String inboundConfigType) {
        this.inboundConfigType = inboundConfigType;
    }

    /**
     * @return the properties, sorted by display order; never {@code null}.
     *         Note: returns the internal array, not a defensive copy.
     */
    public Property[] getProperties() {
        return properties;
    }

    /**
     * Replaces the properties. Duplicates (by {@link Property#equals}) are removed and
     * the result is stored sorted by display order. A {@code null} argument is ignored.
     *
     * @param properties the properties to set
     */
    public void setProperties(Property[] properties) {
        if (properties == null) {
            return;
        }
        Set<Property> propertySet = new HashSet<Property>(Arrays.asList(properties));
        this.properties = sortPropertiesByDisplayOrder(propertySet);
    }

    /**
     * @return the human-readable name of this inbound config
     */
    public String getFriendlyName() {
        return friendlyName;
    }

    /**
     * @param friendlyName the human-readable name to set
     */
    public void setFriendlyName(String friendlyName) {
        this.friendlyName = friendlyName;
    }

    /**
     * Sorts the given properties ascending by their display order.
     *
     * @param propertySet properties to sort (not modified)
     * @return a new array sorted by {@link Property#getDisplayOrder()}
     */
    private Property[] sortPropertiesByDisplayOrder(Set<Property> propertySet) {
        // Typed list + comparingInt replaces the previous raw-typed ArrayList and
        // hand-written anonymous Comparator; ordering is identical.
        List<Property> list = new ArrayList<>(propertySet);
        list.sort(Comparator.comparingInt(Property::getDisplayOrder));
        return list.toArray(new Property[0]);
    }

    /**
     * @return the raw inbound configuration payload
     */
    public String getInboundConfiguration() {
        return inboundConfiguration;
    }

    /**
     * @param inboundConfiguration the raw inbound configuration payload to set
     */
    public void setInboundConfiguration(String inboundConfiguration) {
        this.inboundConfiguration = inboundConfiguration;
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.query; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.VersionType; import org.elasticsearch.search.fetch.source.FetchSourceContext; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Locale; /** * A more like this query that finds documents that are "like" the provided {@link #likeText(String)} * which is checked against the fields the query is constructed with. */ public class MoreLikeThisQueryBuilder extends BaseQueryBuilder implements BoostableQueryBuilder<MoreLikeThisQueryBuilder> { /** * A single get item. Pure delegate to multi get. 
*/
public static final class Item extends MultiGetRequest.Item implements ToXContent {
    // Optional inline document source; when set, it is serialized under "doc".
    private BytesReference doc;

    public Item() {
        super();
    }

    public Item(String index, @Nullable String type, String id) {
        super(index, type, id);
    }

    /** Returns the inline document source, or null when this item references an indexed document. */
    public BytesReference doc() {
        return doc;
    }

    /** Sets an inline (artificial) document to use as "like" input. */
    public Item doc(XContentBuilder doc) {
        this.doc = doc.bytes();
        return this;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (this.index() != null) {
            builder.field("_index", this.index());
        }
        if (this.id() != null) {
            builder.field("_id", this.id());
        }
        if (this.doc() != null) {
            XContentType contentType = XContentFactory.xContentType(doc);
            if (contentType == builder.contentType()) {
                // Same content type as the output builder: embed the raw bytes directly.
                builder.rawField("doc", doc);
            } else {
                // Different content type: re-parse and copy so it is re-encoded in the builder's format.
                XContentParser parser = XContentFactory.xContent(contentType).createParser(doc);
                parser.nextToken();
                builder.field("doc");
                builder.copyCurrentStructure(parser);
            }
        }
        if (this.type() != null) {
            builder.field("_type", this.type());
        }
        if (this.fields() != null) {
            builder.array("fields", this.fields());
        }
        if (this.routing() != null) {
            builder.field("_routing", this.routing());
        }
        if (this.fetchSourceContext() != null) {
            FetchSourceContext source = this.fetchSourceContext();
            String[] includes = source.includes();
            String[] excludes = source.excludes();
            if (includes.length == 0 && excludes.length == 0) {
                // No filtering configured: emit only the on/off flag.
                builder.field("_source", source.fetchSource());
            } else if (includes.length > 0 && excludes.length == 0) {
                // Includes only: shorthand array form.
                builder.array("_source", source.includes());
            } else if (excludes.length > 0) {
                // Excludes present: full object form with optional includes.
                builder.startObject("_source");
                if (includes.length > 0) {
                    builder.array("includes", source.includes());
                }
                builder.array("excludes", source.excludes());
                builder.endObject();
            }
        }
        if (this.version() != Versions.MATCH_ANY) {
            builder.field("_version", this.version());
        }
        if (this.versionType() != VersionType.INTERNAL) {
            builder.field("_version_type", this.versionType().toString().toLowerCase(Locale.ROOT));
        }
        return builder.endObject();
    }
}

// Fields to generate the query against; null means the "_all" field.
private final String[] fields;

private String likeText;
private List<String> ids = new ArrayList<>();
private List<Item> docs = new ArrayList<>();
private Boolean include = null;
private String minimumShouldMatch = null;
// -1 for the numeric settings below means "not set; use server-side default".
private int minTermFreq = -1;
private int maxQueryTerms = -1;
private String[] stopWords = null;
private int minDocFreq = -1;
private int maxDocFreq = -1;
private int minWordLength = -1;
private int maxWordLength = -1;
private float boostTerms = -1;
private float boost = -1;
private String analyzer;
private Boolean failOnUnsupportedField;
private String queryName;

/**
 * Constructs a new more like this query which uses the "_all" field.
 */
public MoreLikeThisQueryBuilder() {
    this.fields = null;
}

/**
 * Sets the field names that will be used when generating the 'More Like This' query.
 *
 * @param fields the field names that will be used when generating the 'More Like This' query.
 */
public MoreLikeThisQueryBuilder(String... fields) {
    this.fields = fields;
}

/**
 * The text to use in order to find documents that are "like" this.
 */
public MoreLikeThisQueryBuilder likeText(String likeText) {
    this.likeText = likeText;
    return this;
}

/** Replaces the document ids used as "like" input. */
public MoreLikeThisQueryBuilder ids(String... ids) {
    this.ids = Arrays.asList(ids);
    return this;
}

/** Replaces the artificial documents used as "like" input. */
public MoreLikeThisQueryBuilder docs(Item... docs) {
    this.docs = Arrays.asList(docs);
    return this;
}

/**
 * Adds a single item to the "like" documents.
 * NOTE(review): after calling {@code docs(...)}, the backing list is the fixed-size
 * Arrays.asList view, so this add would throw UnsupportedOperationException — confirm
 * callers never mix docs(...) with addItem(...).
 */
public MoreLikeThisQueryBuilder addItem(Item item) {
    this.docs.add(item);
    return this;
}

/** Whether the queried docs/ids themselves should be included in the results. */
public MoreLikeThisQueryBuilder include(boolean include) {
    this.include = include;
    return this;
}

/**
 * Number of terms that must match the generated query expressed in the
 * common syntax for minimum should match. Defaults to <tt>30%</tt>.
 *
 * @see org.elasticsearch.common.lucene.search.Queries#calculateMinShouldMatch(int, String)
 */
public MoreLikeThisQueryBuilder minimumShouldMatch(String minimumShouldMatch) {
    this.minimumShouldMatch = minimumShouldMatch;
    return this;
}

/**
 * The percentage of terms to match. Defaults to <tt>0.3</tt>.
 * Converted to a "{@code N%}" minimum-should-match expression.
 */
@Deprecated
public MoreLikeThisQueryBuilder percentTermsToMatch(float percentTermsToMatch) {
    return minimumShouldMatch(Math.round(percentTermsToMatch * 100) + "%");
}

/**
 * The frequency below which terms will be ignored in the source doc. The default
 * frequency is <tt>2</tt>.
 */
public MoreLikeThisQueryBuilder minTermFreq(int minTermFreq) {
    this.minTermFreq = minTermFreq;
    return this;
}

/**
 * Sets the maximum number of query terms that will be included in any generated query.
 * Defaults to <tt>25</tt>.
 */
public MoreLikeThisQueryBuilder maxQueryTerms(int maxQueryTerms) {
    this.maxQueryTerms = maxQueryTerms;
    return this;
}

/**
 * Set the set of stopwords.
 * <p/>
 * <p>Any word in this set is considered "uninteresting" and ignored. Even if your Analyzer allows stopwords, you
 * might want to tell the MoreLikeThis code to ignore them, as for the purposes of document similarity it seems
 * reasonable to assume that "a stop word is never interesting".
 */
public MoreLikeThisQueryBuilder stopWords(String... stopWords) {
    this.stopWords = stopWords;
    return this;
}

/**
 * Sets the frequency at which words will be ignored which do not occur in at least this
 * many docs. Defaults to <tt>5</tt>.
 */
public MoreLikeThisQueryBuilder minDocFreq(int minDocFreq) {
    this.minDocFreq = minDocFreq;
    return this;
}

/**
 * Set the maximum frequency in which words may still appear. Words that appear
 * in more than this many docs will be ignored. Defaults to unbounded.
 */
public MoreLikeThisQueryBuilder maxDocFreq(int maxDocFreq) {
    this.maxDocFreq = maxDocFreq;
    return this;
}

/**
 * Sets the minimum word length below which words will be ignored. Defaults
 * to <tt>0</tt>.
 */
public MoreLikeThisQueryBuilder minWordLength(int minWordLength) {
    this.minWordLength = minWordLength;
    return this;
}

/**
 * Sets the maximum word length above which words will be ignored. Defaults to
 * unbounded (<tt>0</tt>).
 */
public MoreLikeThisQueryBuilder maxWordLength(int maxWordLength) {
    this.maxWordLength = maxWordLength;
    return this;
}

/**
 * Sets the boost factor to use when boosting terms. Defaults to <tt>1</tt>.
 */
public MoreLikeThisQueryBuilder boostTerms(float boostTerms) {
    this.boostTerms = boostTerms;
    return this;
}

/**
 * The analyzer that will be used to analyze the text. Defaults to the analyzer associated with the field.
 */
public MoreLikeThisQueryBuilder analyzer(String analyzer) {
    this.analyzer = analyzer;
    return this;
}

/** Sets the query-level boost. Only serialized when set (not -1). */
public MoreLikeThisQueryBuilder boost(float boost) {
    this.boost = boost;
    return this;
}

/**
 * Whether to fail or return no result when this query is run against a field which is not supported such as binary/numeric fields.
 */
public MoreLikeThisQueryBuilder failOnUnsupportedField(boolean fail) {
    failOnUnsupportedField = fail;
    return this;
}

/**
 * Sets the query name for the filter that can be used when searching for matched_filters per hit.
 */
public MoreLikeThisQueryBuilder queryName(String queryName) {
    this.queryName = queryName;
    return this;
}

/**
 * Serializes this builder into its XContent (JSON-like) request form.
 * Unset settings (null / -1 sentinels) are omitted from the output.
 *
 * @throws ElasticsearchIllegalArgumentException when neither like-text nor docs/ids were provided
 */
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject(MoreLikeThisQueryParser.NAME);
    if (fields != null) {
        builder.startArray("fields");
        for (String field : fields) {
            builder.value(field);
        }
        builder.endArray();
    }
    // The query is meaningless without any "like" input; fail early.
    if (likeText == null && this.docs.isEmpty() && this.ids.isEmpty()) {
        throw new ElasticsearchIllegalArgumentException("more_like_this requires either '"+ MoreLikeThisQueryParser.Fields.LIKE_TEXT.getPreferredName() +"' or 'docs/ids' to be provided");
    }
    builder.field(MoreLikeThisQueryParser.Fields.LIKE_TEXT.getPreferredName(), likeText);
    if (minimumShouldMatch != null) {
        builder.field(MoreLikeThisQueryParser.Fields.MINIMUM_SHOULD_MATCH.getPreferredName(), minimumShouldMatch);
    }
    if (minTermFreq != -1) {
        builder.field(MoreLikeThisQueryParser.Fields.MIN_TERM_FREQ.getPreferredName(), minTermFreq);
    }
    if (maxQueryTerms != -1) {
        builder.field(MoreLikeThisQueryParser.Fields.MAX_QUERY_TERMS.getPreferredName(), maxQueryTerms);
    }
    if (stopWords != null && stopWords.length > 0) {
        builder.startArray(MoreLikeThisQueryParser.Fields.STOP_WORDS.getPreferredName());
        for (String stopWord : stopWords) {
            builder.value(stopWord);
        }
        builder.endArray();
    }
    if (minDocFreq != -1) {
        builder.field(MoreLikeThisQueryParser.Fields.MIN_DOC_FREQ.getPreferredName(), minDocFreq);
    }
    if (maxDocFreq != -1) {
        builder.field(MoreLikeThisQueryParser.Fields.MAX_DOC_FREQ.getPreferredName(), maxDocFreq);
    }
    if (minWordLength != -1) {
        builder.field(MoreLikeThisQueryParser.Fields.MIN_WORD_LENGTH.getPreferredName(), minWordLength);
    }
    if (maxWordLength != -1) {
        builder.field(MoreLikeThisQueryParser.Fields.MAX_WORD_LENGTH.getPreferredName(), maxWordLength);
    }
    if (boostTerms != -1) {
        builder.field(MoreLikeThisQueryParser.Fields.BOOST_TERMS.getPreferredName(), boostTerms);
    }
    if (boost != -1) {
        builder.field("boost", boost);
    }
    if (analyzer != null) {
        builder.field("analyzer", analyzer);
    }
    if (failOnUnsupportedField != null) {
        builder.field(MoreLikeThisQueryParser.Fields.FAIL_ON_UNSUPPORTED_FIELD.getPreferredName(), failOnUnsupportedField);
    }
    if (queryName != null) {
        builder.field("_name", queryName);
    }
    if (!ids.isEmpty()) {
        builder.array("ids", ids.toArray());
    }
    if (!docs.isEmpty()) {
        builder.array("docs", docs.toArray());
    }
    if (include != null) {
        builder.field("include", include);
    }
    builder.endObject();
}
}
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package sg.atom.core.datastructure.collection; import java.util.Iterator; import java.util.NoSuchElementException; import sg.atom.corex.math.MathUtils; /** An unordered map where the values are floats. This implementation is a cuckoo hash map using 3 hashes, random walking, and a * small stash for problematic keys. Null keys are not allowed. No allocation is done except when growing the table size. <br> * <br> * This map performs very fast get, containsKey, and remove (typically O(1), worst case O(log(n))). Put may be a bit slower, * depending on hash collisions. Load factors greater than 0.91 greatly increase the chances the map will have to rehash to the * next higher POT size. 
* @author Nathan Sweet */ public class ObjectFloatMap<K> implements Iterable<ObjectFloatMap.Entry<K>> { private static final int PRIME1 = 0xbe1f14b1; private static final int PRIME2 = 0xb4b82e39; private static final int PRIME3 = 0xced1c241; public int size; K[] keyTable; float[] valueTable; int capacity, stashSize; private float loadFactor; private int hashShift, mask, threshold; private int stashCapacity; private int pushIterations; private Entries entries1, entries2; private Values values1, values2; private Keys keys1, keys2; /** Creates a new map with an initial capacity of 32 and a load factor of 0.8. This map will hold 25 items before growing the * backing table. */ public ObjectFloatMap () { this(32, 0.8f); } /** Creates a new map with a load factor of 0.8. This map will hold initialCapacity * 0.8 items before growing the backing * table. */ public ObjectFloatMap (int initialCapacity) { this(initialCapacity, 0.8f); } /** Creates a new map with the specified initial capacity and load factor. This map will hold initialCapacity * loadFactor items * before growing the backing table. 
*/ public ObjectFloatMap (int initialCapacity, float loadFactor) { if (initialCapacity < 0) throw new IllegalArgumentException("initialCapacity must be >= 0: " + initialCapacity); if (initialCapacity > 1 << 30) throw new IllegalArgumentException("initialCapacity is too large: " + initialCapacity); capacity = MathUtils.nextPowerOfTwo(initialCapacity); if (loadFactor <= 0) throw new IllegalArgumentException("loadFactor must be > 0: " + loadFactor); this.loadFactor = loadFactor; threshold = (int)(capacity * loadFactor); mask = capacity - 1; hashShift = 31 - Integer.numberOfTrailingZeros(capacity); stashCapacity = Math.max(3, (int)Math.ceil(Math.log(capacity)) * 2); pushIterations = Math.max(Math.min(capacity, 8), (int)Math.sqrt(capacity) / 8); keyTable = (K[])new Object[capacity + stashCapacity]; valueTable = new float[keyTable.length]; } /** Creates a new map identical to the specified map. */ public ObjectFloatMap (ObjectFloatMap<? extends K> map) { this(map.capacity, map.loadFactor); stashSize = map.stashSize; System.arraycopy(map.keyTable, 0, keyTable, 0, map.keyTable.length); System.arraycopy(map.valueTable, 0, valueTable, 0, map.valueTable.length); size = map.size; } public void put (K key, float value) { if (key == null) throw new IllegalArgumentException("key cannot be null."); K[] keyTable = this.keyTable; // Check for existing keys. int hashCode = key.hashCode(); int index1 = hashCode & mask; K key1 = keyTable[index1]; if (key.equals(key1)) { valueTable[index1] = value; return; } int index2 = hash2(hashCode); K key2 = keyTable[index2]; if (key.equals(key2)) { valueTable[index2] = value; return; } int index3 = hash3(hashCode); K key3 = keyTable[index3]; if (key.equals(key3)) { valueTable[index3] = value; return; } // Update key in the stash. for (int i = capacity, n = i + stashSize; i < n; i++) { if (key.equals(keyTable[i])) { valueTable[i] = value; return; } } // Check for empty buckets. 
if (key1 == null) { keyTable[index1] = key; valueTable[index1] = value; if (size++ >= threshold) resize(capacity << 1); return; } if (key2 == null) { keyTable[index2] = key; valueTable[index2] = value; if (size++ >= threshold) resize(capacity << 1); return; } if (key3 == null) { keyTable[index3] = key; valueTable[index3] = value; if (size++ >= threshold) resize(capacity << 1); return; } push(key, value, index1, key1, index2, key2, index3, key3); } public void putAll (ObjectFloatMap<K> map) { for (Entry<K> entry : map.entries()) put(entry.key, entry.value); } /** Skips checks for existing keys. */ private void putResize (K key, float value) { // Check for empty buckets. int hashCode = key.hashCode(); int index1 = hashCode & mask; K key1 = keyTable[index1]; if (key1 == null) { keyTable[index1] = key; valueTable[index1] = value; if (size++ >= threshold) resize(capacity << 1); return; } int index2 = hash2(hashCode); K key2 = keyTable[index2]; if (key2 == null) { keyTable[index2] = key; valueTable[index2] = value; if (size++ >= threshold) resize(capacity << 1); return; } int index3 = hash3(hashCode); K key3 = keyTable[index3]; if (key3 == null) { keyTable[index3] = key; valueTable[index3] = value; if (size++ >= threshold) resize(capacity << 1); return; } push(key, value, index1, key1, index2, key2, index3, key3); } private void push (K insertKey, float insertValue, int index1, K key1, int index2, K key2, int index3, K key3) { K[] keyTable = this.keyTable; float[] valueTable = this.valueTable; int mask = this.mask; // Push keys until an empty bucket is found. K evictedKey; float evictedValue; int i = 0, pushIterations = this.pushIterations; do { // Replace the key and value for one of the hashes. 
switch (MathUtils.random(2)) { case 0: evictedKey = key1; evictedValue = valueTable[index1]; keyTable[index1] = insertKey; valueTable[index1] = insertValue; break; case 1: evictedKey = key2; evictedValue = valueTable[index2]; keyTable[index2] = insertKey; valueTable[index2] = insertValue; break; default: evictedKey = key3; evictedValue = valueTable[index3]; keyTable[index3] = insertKey; valueTable[index3] = insertValue; break; } // If the evicted key hashes to an empty bucket, put it there and stop. int hashCode = evictedKey.hashCode(); index1 = hashCode & mask; key1 = keyTable[index1]; if (key1 == null) { keyTable[index1] = evictedKey; valueTable[index1] = evictedValue; if (size++ >= threshold) resize(capacity << 1); return; } index2 = hash2(hashCode); key2 = keyTable[index2]; if (key2 == null) { keyTable[index2] = evictedKey; valueTable[index2] = evictedValue; if (size++ >= threshold) resize(capacity << 1); return; } index3 = hash3(hashCode); key3 = keyTable[index3]; if (key3 == null) { keyTable[index3] = evictedKey; valueTable[index3] = evictedValue; if (size++ >= threshold) resize(capacity << 1); return; } if (++i == pushIterations) break; insertKey = evictedKey; insertValue = evictedValue; } while (true); putStash(evictedKey, evictedValue); } private void putStash (K key, float value) { if (stashSize == stashCapacity) { // Too many pushes occurred and the stash is full, increase the table size. resize(capacity << 1); put(key, value); return; } // Store key in the stash. int index = capacity + stashSize; keyTable[index] = key; valueTable[index] = value; stashSize++; size++; } /** @param defaultValue Returned if the key was not associated with a value. 
*/ public float get (K key, float defaultValue) { int hashCode = key.hashCode(); int index = hashCode & mask; if (!key.equals(keyTable[index])) { index = hash2(hashCode); if (!key.equals(keyTable[index])) { index = hash3(hashCode); if (!key.equals(keyTable[index])) return getStash(key, defaultValue); } } return valueTable[index]; } private float getStash (K key, float defaultValue) { K[] keyTable = this.keyTable; for (int i = capacity, n = i + stashSize; i < n; i++) if (key.equals(keyTable[i])) return valueTable[i]; return defaultValue; } /** Returns the key's current value and increments the stored value. If the key is not in the map, defaultValue + increment is * put into the map. */ public float getAndIncrement (K key, float defaultValue, float increment) { int hashCode = key.hashCode(); int index = hashCode & mask; if (!key.equals(keyTable[index])) { index = hash2(hashCode); if (!key.equals(keyTable[index])) { index = hash3(hashCode); if (!key.equals(keyTable[index])) return getAndIncrementStash(key, defaultValue, increment); } } float value = valueTable[index]; valueTable[index] = value + increment; return value; } private float getAndIncrementStash (K key, float defaultValue, float increment) { K[] keyTable = this.keyTable; for (int i = capacity, n = i + stashSize; i < n; i++) if (key.equals(keyTable[i])) { float value = valueTable[i]; valueTable[i] = value + increment; return value; } put(key, defaultValue + increment); return defaultValue; } public float remove (K key, float defaultValue) { int hashCode = key.hashCode(); int index = hashCode & mask; if (key.equals(keyTable[index])) { keyTable[index] = null; float oldValue = valueTable[index]; size--; return oldValue; } index = hash2(hashCode); if (key.equals(keyTable[index])) { keyTable[index] = null; float oldValue = valueTable[index]; size--; return oldValue; } index = hash3(hashCode); if (key.equals(keyTable[index])) { keyTable[index] = null; float oldValue = valueTable[index]; size--; return oldValue; } 
return removeStash(key, defaultValue); } float removeStash (K key, float defaultValue) { K[] keyTable = this.keyTable; for (int i = capacity, n = i + stashSize; i < n; i++) { if (key.equals(keyTable[i])) { float oldValue = valueTable[i]; removeStashIndex(i); size--; return oldValue; } } return defaultValue; } void removeStashIndex (int index) { // If the removed location was not last, move the last tuple to the removed location. stashSize--; int lastIndex = capacity + stashSize; if (index < lastIndex) { keyTable[index] = keyTable[lastIndex]; valueTable[index] = valueTable[lastIndex]; } } /** Reduces the size of the backing arrays to be the specified capacity or less. If the capacity is already less, nothing is * done. If the map contains more items than the specified capacity, the next highest power of two capacity is used instead. */ public void shrink (int maximumCapacity) { if (maximumCapacity < 0) throw new IllegalArgumentException("maximumCapacity must be >= 0: " + maximumCapacity); if (size > maximumCapacity) maximumCapacity = size; if (capacity <= maximumCapacity) return; maximumCapacity = MathUtils.nextPowerOfTwo(maximumCapacity); resize(maximumCapacity); } /** Clears the map and reduces the size of the backing arrays to be the specified capacity if they are larger. */ public void clear (int maximumCapacity) { if (capacity <= maximumCapacity) { clear(); return; } size = 0; resize(maximumCapacity); } public void clear () { if (size == 0) return; K[] keyTable = this.keyTable; for (int i = capacity + stashSize; i-- > 0;) keyTable[i] = null; size = 0; stashSize = 0; } /** Returns true if the specified value is in the map. Note this traverses the entire map and compares every value, which may be * an expensive operation. 
*/ public boolean containsValue (float value) { float[] valueTable = this.valueTable; for (int i = capacity + stashSize; i-- > 0;) if (valueTable[i] == value) return true; return false; } public boolean containsKey (K key) { int hashCode = key.hashCode(); int index = hashCode & mask; if (!key.equals(keyTable[index])) { index = hash2(hashCode); if (!key.equals(keyTable[index])) { index = hash3(hashCode); if (!key.equals(keyTable[index])) return containsKeyStash(key); } } return true; } private boolean containsKeyStash (K key) { K[] keyTable = this.keyTable; for (int i = capacity, n = i + stashSize; i < n; i++) if (key.equals(keyTable[i])) return true; return false; } /** Returns the key for the specified value, or null if it is not in the map. Note this traverses the entire map and compares * every value, which may be an expensive operation. */ public K findKey (float value) { float[] valueTable = this.valueTable; for (int i = capacity + stashSize; i-- > 0;) if (valueTable[i] == value) return keyTable[i]; return null; } /** Increases the size of the backing array to accommodate the specified number of additional items. Useful before adding many * items to avoid multiple backing array resizes. 
*/ public void ensureCapacity (int additionalCapacity) { int sizeNeeded = size + additionalCapacity; if (sizeNeeded >= threshold) resize(MathUtils.nextPowerOfTwo((int)(sizeNeeded / loadFactor))); } private void resize (int newSize) { int oldEndIndex = capacity + stashSize; capacity = newSize; threshold = (int)(newSize * loadFactor); mask = newSize - 1; hashShift = 31 - Integer.numberOfTrailingZeros(newSize); stashCapacity = Math.max(3, (int)Math.ceil(Math.log(newSize)) * 2); pushIterations = Math.max(Math.min(newSize, 8), (int)Math.sqrt(newSize) / 8); K[] oldKeyTable = keyTable; float[] oldValueTable = valueTable; keyTable = (K[])new Object[newSize + stashCapacity]; valueTable = new float[newSize + stashCapacity]; int oldSize = size; size = 0; stashSize = 0; if (oldSize > 0) { for (int i = 0; i < oldEndIndex; i++) { K key = oldKeyTable[i]; if (key != null) putResize(key, oldValueTable[i]); } } } private int hash2 (int h) { h *= PRIME2; return (h ^ h >>> hashShift) & mask; } private int hash3 (int h) { h *= PRIME3; return (h ^ h >>> hashShift) & mask; } public String toString () { if (size == 0) return "{}"; StringBuilder buffer = new StringBuilder(32); buffer.append('{'); K[] keyTable = this.keyTable; float[] valueTable = this.valueTable; int i = keyTable.length; while (i-- > 0) { K key = keyTable[i]; if (key == null) continue; buffer.append(key); buffer.append('='); buffer.append(valueTable[i]); break; } while (i-- > 0) { K key = keyTable[i]; if (key == null) continue; buffer.append(", "); buffer.append(key); buffer.append('='); buffer.append(valueTable[i]); } buffer.append('}'); return buffer.toString(); } public Entries<K> iterator () { return entries(); } /** Returns an iterator for the entries in the map. Remove is supported. Note that the same iterator instance is returned each * time this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration. 
*/
public Entries<K> entries () {
	// Two iterator instances are kept and handed out alternately so repeated
	// iteration does not allocate; whichever was returned last is invalidated.
	if (entries1 == null) {
		entries1 = new Entries(this);
		entries2 = new Entries(this);
	}
	Entries<K> fresh = entries1.valid ? entries2 : entries1;
	Entries<K> stale = entries1.valid ? entries1 : entries2;
	fresh.reset();
	fresh.valid = true;
	stale.valid = false;
	return fresh;
}

/** Returns an iterator for the values in the map. Remove is supported. Note that the same iterator instance is returned each
 * time this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration. */
public Values values () {
	// Same double-buffering scheme as entries().
	if (values1 == null) {
		values1 = new Values(this);
		values2 = new Values(this);
	}
	Values fresh = values1.valid ? values2 : values1;
	Values stale = values1.valid ? values1 : values2;
	fresh.reset();
	fresh.valid = true;
	stale.valid = false;
	return fresh;
}

/** Returns an iterator for the keys in the map. Remove is supported. Note that the same iterator instance is returned each time
 * this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration.
*/
public Keys<K> keys () {
	// Two Keys instances are alternated so back-to-back iteration does not allocate;
	// handing one out invalidates the other.
	if (keys1 == null) {
		keys1 = new Keys(this);
		keys2 = new Keys(this);
	}
	if (!keys1.valid) {
		keys1.reset();
		keys1.valid = true;
		keys2.valid = false;
		return keys1;
	}
	keys2.reset();
	keys2.valid = true;
	keys1.valid = false;
	return keys2;
}

/** A key/value pair. {@link Entries#next()} reuses a single Entry instance, so copy the
 * fields out if they must outlive the next call. */
static public class Entry<K> {
	public K key;
	public float value;

	public String toString () {
		return key + "=" + value;
	}
}

/** Shared base for the map iterators: walks the key table (main region, then stash),
 * skipping empty slots. */
static private class MapIterator<K> {
	public boolean hasNext;

	final ObjectFloatMap<K> map;
	int nextIndex, currentIndex;
	// Cleared when the paired iterator instance is handed out; guards against nested use.
	boolean valid = true;

	public MapIterator (ObjectFloatMap<K> map) {
		this.map = map;
		reset();
	}

	public void reset () {
		currentIndex = -1;
		nextIndex = -1;
		findNextIndex();
	}

	// Advances nextIndex to the next occupied slot, or leaves hasNext false at the end.
	void findNextIndex () {
		hasNext = false;
		K[] keyTable = map.keyTable;
		for (int n = map.capacity + map.stashSize; ++nextIndex < n;) {
			if (keyTable[nextIndex] != null) {
				hasNext = true;
				break;
			}
		}
	}

	public void remove () {
		if (currentIndex < 0) throw new IllegalStateException("next must be called before remove.");
		if (currentIndex >= map.capacity) {
			// Stash removal - the re-scan from just before the removed index suggests
			// removeStashIndex back-fills the slot with another stash entry;
			// NOTE(review): confirm against removeStashIndex (not visible here).
			map.removeStashIndex(currentIndex);
			nextIndex = currentIndex - 1;
			findNextIndex();
		} else {
			// Main-table entries are cleared in place; the value slot is left untouched.
			map.keyTable[currentIndex] = null;
		}
		currentIndex = -1;
		map.size--;
	}
}

static public class Entries<K> extends MapIterator<K> implements Iterable<Entry<K>>, Iterator<Entry<K>> {
	// Reused for every call to next().
	private Entry<K> entry = new Entry();

	public Entries (ObjectFloatMap<K> map) {
		super(map);
	}

	/** Note the same entry instance is returned each time this method is called. */
	public Entry<K> next () {
		if (!hasNext) throw new NoSuchElementException();
		if (!valid) throw new RuntimeException("#iterator() cannot be used nested.");
		K[] keyTable = map.keyTable;
		entry.key = keyTable[nextIndex];
		entry.value = map.valueTable[nextIndex];
		currentIndex = nextIndex;
		findNextIndex();
		return entry;
	}

	public boolean hasNext () {
		if (!valid) throw new RuntimeException("#iterator() cannot be used nested.");
		return hasNext;
	}

	public Entries<K> iterator () {
		return this;
	}

	public void remove () {
		super.remove();
	}
}

static public class Values extends MapIterator<Object> {
	public Values (ObjectFloatMap<?> map) {
		super((ObjectFloatMap<Object>)map);
	}

	public boolean hasNext () {
		if (!valid) throw new RuntimeException("#iterator() cannot be used nested.");
		return hasNext;
	}

	public float next () {
		if (!hasNext) throw new NoSuchElementException();
		if (!valid) throw new RuntimeException("#iterator() cannot be used nested.");
		float value = map.valueTable[nextIndex];
		currentIndex = nextIndex;
		findNextIndex();
		return value;
	}

	/** Returns a new array containing the remaining values. */
	public FloatArray toArray () {
		FloatArray array = new FloatArray(true, map.size);
		while (hasNext)
			array.add(next());
		return array;
	}
}

static public class Keys<K> extends MapIterator<K> implements Iterable<K>, Iterator<K> {
	public Keys (ObjectFloatMap<K> map) {
		super((ObjectFloatMap<K>)map);
	}

	public boolean hasNext () {
		if (!valid) throw new RuntimeException("#iterator() cannot be used nested.");
		return hasNext;
	}

	public K next () {
		if (!hasNext) throw new NoSuchElementException();
		if (!valid) throw new RuntimeException("#iterator() cannot be used nested.");
		K key = map.keyTable[nextIndex];
		currentIndex = nextIndex;
		findNextIndex();
		return key;
	}

	public Keys<K> iterator () {
		return this;
	}

	/** Returns a new array containing the remaining keys. */
	public Array<K> toArray () {
		Array array = new Array(true, map.size);
		while (hasNext)
			array.add(next());
		return array;
	}

	/** Adds the remaining keys to the array. */
	public Array<K> toArray (Array<K> array) {
		while (hasNext)
			array.add(next());
		return array;
	}

	public void remove () {
		super.remove();
	}
}
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.run.v1alpha1.model;

/**
 * A single application container. This specifies both the container to run, the command to run in
 * the container and the arguments to supply to it. Note that additional arguments may be supplied
 * by the system to the container at runtime.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Cloud Run Admin API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Container extends com.google.api.client.json.GenericJson {

  /**
   * Arguments to the entrypoint. The docker image's CMD is used if this is not provided. Variable
   * references $(VAR_NAME) are expanded using the container's environment; unresolved references
   * are left unchanged, and $$(VAR_NAME) escapes expansion. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> args;

  /**
   * Entrypoint array. Not executed within a shell. The docker image's ENTRYPOINT is used if this
   * is not provided. Same $(VAR_NAME) expansion rules as {@code args}. Cannot be updated.
   * +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> command;

  /**
   * List of environment variables to set in the container. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<EnvVar> env;

  /**
   * List of sources to populate environment variables in the container. Keys must be a
   * C_IDENTIFIER; invalid keys are reported as an event when the container starts. When a key
   * exists in multiple sources, the last source wins; values defined by an Env with a duplicate
   * key take precedence. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<EnvFromSource> envFrom;

  /**
   * Docker image name. More info: https://kubernetes.io/docs/concepts/containers/images
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String image;

  /**
   * Image pull policy. One of Always, Never, IfNotPresent. Defaults to Always if :latest tag is
   * specified, or IfNotPresent otherwise. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String imagePullPolicy;

  /**
   * Actions that the management system should take in response to container lifecycle events.
   * Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Lifecycle lifecycle;

  /**
   * Periodic probe of container liveness. Container will be restarted if the probe fails. Cannot
   * be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Probe livenessProbe;

  /**
   * Name of the container specified as a DNS_LABEL. Each container must have a unique name
   * (DNS_LABEL). Cannot be updated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * List of ports to expose from the container. Primarily informational: not specifying a port
   * here DOES NOT prevent it from being exposed; any port listening on "0.0.0.0" inside the
   * container is reachable from the network. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<ContainerPort> ports;

  /**
   * Periodic probe of container service readiness. Container will be removed from service
   * endpoints if the probe fails. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Probe readinessProbe;

  /**
   * Compute Resources required by this container. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ResourceRequirements resources;

  /**
   * Security options the pod should run with. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private SecurityContext securityContext;

  /**
   * Whether this container should allocate a buffer for stdin in the container runtime. If not
   * set, reads from stdin in the container always return EOF. Default is false. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean stdin;

  /**
   * Whether the container runtime should close the stdin channel after it has been opened by a
   * single attach. When true, stdin opens on container start, stays empty until the first client
   * attaches, and closes when that client disconnects (until restart). When false, a container
   * process reading stdin never receives EOF. Default is false. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean stdinOnce;

  /**
   * Optional: Path at which the file holding the container's termination message is mounted into
   * the container's filesystem. Intended for a brief final status; truncated by the node above
   * 4096 bytes (12kb total across all containers). Defaults to /dev/termination-log. Cannot be
   * updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String terminationMessagePath;

  /**
   * Indicate how the termination message should be populated. "File" uses the contents of
   * terminationMessagePath on both success and failure; "FallbackToLogsOnError" uses the last
   * chunk of container log output (max 2048 bytes or 80 lines) if the message file is empty and
   * the container exited with an error. Defaults to File. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String terminationMessagePolicy;

  /**
   * Whether this container should allocate a TTY for itself; also requires 'stdin' to be true.
   * Default is false. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean tty;

  /**
   * volumeDevices is the list of block devices to be used by the container. This is an alpha
   * feature and may change in the future. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<VolumeDevice> volumeDevices;

  /**
   * Pod volumes to mount into the container's filesystem. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<VolumeMount> volumeMounts;

  /**
   * Container's working directory. If not specified, the container runtime's default is used,
   * which might be configured in the container image. Cannot be updated. +optional
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String workingDir;

  /**
   * Arguments to the entrypoint; the image's CMD is used if not provided. +optional
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getArgs() {
    return args;
  }

  /**
   * Arguments to the entrypoint; the image's CMD is used if not provided. +optional
   * @param args args or {@code null} for none
   */
  public Container setArgs(java.util.List<java.lang.String> args) {
    this.args = args;
    return this;
  }

  /**
   * Entrypoint array; the image's ENTRYPOINT is used if not provided. +optional
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getCommand() {
    return command;
  }

  /**
   * Entrypoint array; the image's ENTRYPOINT is used if not provided. +optional
   * @param command command or {@code null} for none
   */
  public Container setCommand(java.util.List<java.lang.String> command) {
    this.command = command;
    return this;
  }

  /**
   * List of environment variables to set in the container. +optional
   * @return value or {@code null} for none
   */
  public java.util.List<EnvVar> getEnv() {
    return env;
  }

  /**
   * List of environment variables to set in the container. +optional
   * @param env env or {@code null} for none
   */
  public Container setEnv(java.util.List<EnvVar> env) {
    this.env = env;
    return this;
  }

  /**
   * List of sources to populate environment variables in the container. +optional
   * @return value or {@code null} for none
   */
  public java.util.List<EnvFromSource> getEnvFrom() {
    return envFrom;
  }

  /**
   * List of sources to populate environment variables in the container. +optional
   * @param envFrom envFrom or {@code null} for none
   */
  public Container setEnvFrom(java.util.List<EnvFromSource> envFrom) {
    this.envFrom = envFrom;
    return this;
  }

  /**
   * Docker image name.
   * @return value or {@code null} for none
   */
  public java.lang.String getImage() {
    return image;
  }

  /**
   * Docker image name.
   * @param image image or {@code null} for none
   */
  public Container setImage(java.lang.String image) {
    this.image = image;
    return this;
  }

  /**
   * Image pull policy: Always, Never, or IfNotPresent. +optional
   * @return value or {@code null} for none
   */
  public java.lang.String getImagePullPolicy() {
    return imagePullPolicy;
  }

  /**
   * Image pull policy: Always, Never, or IfNotPresent. +optional
   * @param imagePullPolicy imagePullPolicy or {@code null} for none
   */
  public Container setImagePullPolicy(java.lang.String imagePullPolicy) {
    this.imagePullPolicy = imagePullPolicy;
    return this;
  }

  /**
   * Actions to take in response to container lifecycle events. +optional
   * @return value or {@code null} for none
   */
  public Lifecycle getLifecycle() {
    return lifecycle;
  }

  /**
   * Actions to take in response to container lifecycle events. +optional
   * @param lifecycle lifecycle or {@code null} for none
   */
  public Container setLifecycle(Lifecycle lifecycle) {
    this.lifecycle = lifecycle;
    return this;
  }

  /**
   * Periodic probe of container liveness. +optional
   * @return value or {@code null} for none
   */
  public Probe getLivenessProbe() {
    return livenessProbe;
  }

  /**
   * Periodic probe of container liveness. +optional
   * @param livenessProbe livenessProbe or {@code null} for none
   */
  public Container setLivenessProbe(Probe livenessProbe) {
    this.livenessProbe = livenessProbe;
    return this;
  }

  /**
   * Name of the container specified as a DNS_LABEL; must be unique.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Name of the container specified as a DNS_LABEL; must be unique.
   * @param name name or {@code null} for none
   */
  public Container setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * List of ports to expose from the container (informational). +optional
   * @return value or {@code null} for none
   */
  public java.util.List<ContainerPort> getPorts() {
    return ports;
  }

  /**
   * List of ports to expose from the container (informational). +optional
   * @param ports ports or {@code null} for none
   */
  public Container setPorts(java.util.List<ContainerPort> ports) {
    this.ports = ports;
    return this;
  }

  /**
   * Periodic probe of container service readiness. +optional
   * @return value or {@code null} for none
   */
  public Probe getReadinessProbe() {
    return readinessProbe;
  }

  /**
   * Periodic probe of container service readiness. +optional
   * @param readinessProbe readinessProbe or {@code null} for none
   */
  public Container setReadinessProbe(Probe readinessProbe) {
    this.readinessProbe = readinessProbe;
    return this;
  }

  /**
   * Compute Resources required by this container. +optional
   * @return value or {@code null} for none
   */
  public ResourceRequirements getResources() {
    return resources;
  }

  /**
   * Compute Resources required by this container. +optional
   * @param resources resources or {@code null} for none
   */
  public Container setResources(ResourceRequirements resources) {
    this.resources = resources;
    return this;
  }

  /**
   * Security options the pod should run with. +optional
   * @return value or {@code null} for none
   */
  public SecurityContext getSecurityContext() {
    return securityContext;
  }

  /**
   * Security options the pod should run with. +optional
   * @param securityContext securityContext or {@code null} for none
   */
  public Container setSecurityContext(SecurityContext securityContext) {
    this.securityContext = securityContext;
    return this;
  }

  /**
   * Whether to allocate a buffer for stdin in the container runtime. +optional
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getStdin() {
    return stdin;
  }

  /**
   * Whether to allocate a buffer for stdin in the container runtime. +optional
   * @param stdin stdin or {@code null} for none
   */
  public Container setStdin(java.lang.Boolean stdin) {
    this.stdin = stdin;
    return this;
  }

  /**
   * Whether the runtime should close the stdin channel after a single attach. +optional
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getStdinOnce() {
    return stdinOnce;
  }

  /**
   * Whether the runtime should close the stdin channel after a single attach. +optional
   * @param stdinOnce stdinOnce or {@code null} for none
   */
  public Container setStdinOnce(java.lang.Boolean stdinOnce) {
    this.stdinOnce = stdinOnce;
    return this;
  }

  /**
   * Mount path of the termination-message file (defaults to /dev/termination-log). +optional
   * @return value or {@code null} for none
   */
  public java.lang.String getTerminationMessagePath() {
    return terminationMessagePath;
  }

  /**
   * Mount path of the termination-message file (defaults to /dev/termination-log). +optional
   * @param terminationMessagePath terminationMessagePath or {@code null} for none
   */
  public Container setTerminationMessagePath(java.lang.String terminationMessagePath) {
    this.terminationMessagePath = terminationMessagePath;
    return this;
  }

  /**
   * How the termination message is populated: File or FallbackToLogsOnError. +optional
   * @return value or {@code null} for none
   */
  public java.lang.String getTerminationMessagePolicy() {
    return terminationMessagePolicy;
  }

  /**
   * How the termination message is populated: File or FallbackToLogsOnError. +optional
   * @param terminationMessagePolicy terminationMessagePolicy or {@code null} for none
   */
  public Container setTerminationMessagePolicy(java.lang.String terminationMessagePolicy) {
    this.terminationMessagePolicy = terminationMessagePolicy;
    return this;
  }

  /**
   * Whether to allocate a TTY (requires 'stdin' to be true). +optional
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getTty() {
    return tty;
  }

  /**
   * Whether to allocate a TTY (requires 'stdin' to be true). +optional
   * @param tty tty or {@code null} for none
   */
  public Container setTty(java.lang.Boolean tty) {
    this.tty = tty;
    return this;
  }

  /**
   * List of block devices to be used by the container (alpha feature). +optional
   * @return value or {@code null} for none
   */
  public java.util.List<VolumeDevice> getVolumeDevices() {
    return volumeDevices;
  }

  /**
   * List of block devices to be used by the container (alpha feature). +optional
   * @param volumeDevices volumeDevices or {@code null} for none
   */
  public Container setVolumeDevices(java.util.List<VolumeDevice> volumeDevices) {
    this.volumeDevices = volumeDevices;
    return this;
  }

  /**
   * Pod volumes to mount into the container's filesystem. +optional
   * @return value or {@code null} for none
   */
  public java.util.List<VolumeMount> getVolumeMounts() {
    return volumeMounts;
  }

  /**
   * Pod volumes to mount into the container's filesystem. +optional
   * @param volumeMounts volumeMounts or {@code null} for none
   */
  public Container setVolumeMounts(java.util.List<VolumeMount> volumeMounts) {
    this.volumeMounts = volumeMounts;
    return this;
  }

  /**
   * Container's working directory; runtime default is used if unspecified. +optional
   * @return value or {@code null} for none
   */
  public java.lang.String getWorkingDir() {
    return workingDir;
  }

  /**
   * Container's working directory; runtime default is used if unspecified. +optional
   * @param workingDir workingDir or {@code null} for none
   */
  public Container setWorkingDir(java.lang.String workingDir) {
    this.workingDir = workingDir;
    return this;
  }

  @Override
  public Container set(String fieldName, Object value) {
    return (Container) super.set(fieldName, value);
  }

  @Override
  public Container clone() {
    return (Container) super.clone();
  }

}
/* * This file is part of Total Economy, licensed under the MIT License (MIT). * * Copyright (c) Eric Grandt <https://www.ericgrandt.com> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/ package com.erigitic.jobs; import com.erigitic.config.AccountManager; import com.erigitic.config.TEAccount; import com.erigitic.main.TotalEconomy; import com.erigitic.sql.SqlManager; import com.erigitic.sql.SqlQuery; import com.erigitic.util.MessageManager; import java.io.File; import java.io.IOException; import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.TimeUnit; import ninja.leaping.configurate.ConfigurationNode; import ninja.leaping.configurate.commented.CommentedConfigurationNode; import ninja.leaping.configurate.hocon.HoconConfigurationLoader; import ninja.leaping.configurate.loader.ConfigurationLoader; import org.slf4j.Logger; import org.spongepowered.api.Sponge; import org.spongepowered.api.block.BlockState; import org.spongepowered.api.block.tileentity.Sign; import org.spongepowered.api.block.tileentity.TileEntity; import org.spongepowered.api.block.trait.BlockTrait; import org.spongepowered.api.data.Transaction; import org.spongepowered.api.data.manipulator.mutable.item.FishData; import org.spongepowered.api.data.manipulator.mutable.tileentity.SignData; import org.spongepowered.api.entity.Entity; import org.spongepowered.api.entity.living.player.Player; import org.spongepowered.api.entity.living.player.User; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.action.FishingEvent; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.event.block.InteractBlockEvent; import org.spongepowered.api.event.block.tileentity.ChangeSignEvent; import org.spongepowered.api.event.cause.Cause; import org.spongepowered.api.event.cause.EventContext; import org.spongepowered.api.event.cause.EventContextKeys; import org.spongepowered.api.event.cause.entity.damage.source.EntityDamageSource; import org.spongepowered.api.event.entity.DestructEntityEvent; import 
org.spongepowered.api.item.inventory.ItemStack;
import org.spongepowered.api.item.inventory.ItemStackSnapshot;
import org.spongepowered.api.scheduler.Scheduler;
import org.spongepowered.api.scheduler.Task;
import org.spongepowered.api.service.economy.Currency;
import org.spongepowered.api.service.economy.transaction.ResultType;
import org.spongepowered.api.service.economy.transaction.TransactionResult;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.action.TextActions;
import org.spongepowered.api.text.format.TextColors;
import org.spongepowered.api.text.format.TextStyles;

/**
 * Manages Total Economy jobs: loads the job and job-set configuration files,
 * tracks per-player job/level/experience state (backed either by a SQL
 * database or by the account configuration file), pays periodic salaries,
 * and rewards players for job actions (break/place/kill/catch) via Sponge
 * event listeners.
 */
public class JobManager {

    private TotalEconomy totalEconomy;
    private AccountManager accountManager;
    private MessageManager messageManager;
    private Logger logger;
    private SqlManager sqlManager;

    // jobsets.conf: named sets of rewardable actions that jobs reference.
    private File jobSetsFile;
    private ConfigurationLoader<CommentedConfigurationNode> jobSetsLoader;
    private ConfigurationNode jobSetsConfig;
    private Map<String, TEJobSet> jobSets;

    // jobs.conf: the job definitions themselves.
    private File jobsFile;
    private ConfigurationLoader<CommentedConfigurationNode> jobsLoader;
    private ConfigurationNode jobsConfig;
    private Map<String, TEJob> jobsMap;

    // When true, job state lives in the SQL database; otherwise in the account config file.
    private boolean databaseEnabled;

    public JobManager(TotalEconomy totalEconomy, AccountManager accountManager, MessageManager messageManager, Logger logger) {
        this.totalEconomy = totalEconomy;
        this.accountManager = accountManager;
        this.messageManager = messageManager;
        this.logger = logger;

        databaseEnabled = totalEconomy.isDatabaseEnabled();

        if (databaseEnabled) {
            sqlManager = totalEconomy.getSqlManager();
        }

        setupConfig();

        if (totalEconomy.isJobSalaryEnabled()) {
            startSalaryTask();
        }
    }

    /**
     * Start the repeating task that pays each online player their job salary.
     * Delay and interval (seconds) both come from the "salarydelay" config node.
     */
    private void startSalaryTask() {
        Scheduler scheduler = totalEconomy.getGame().getScheduler();
        Task.Builder payTask = scheduler.createTaskBuilder();

        payTask.execute(() -> {
            if (totalEconomy.getGame().isServerAvailable()) {
                for (Player player : totalEconomy.getServer().getOnlinePlayers()) {
                    Optional<TEJob> optJob = getJob(getPlayerJob(player), true);

                    if (!optJob.isPresent()) {
                        player.sendMessage(Text.of(TextColors.RED, "[TE] Cannot pay your salary! Contact your administrator!"));
                        // BUG FIX: this was `return`, which aborted the whole pay
                        // run and silently skipped every remaining online player.
                        // Only the player whose job failed to resolve is skipped.
                        continue;
                    }

                    if (optJob.get().salaryEnabled()) {
                        BigDecimal salary = optJob.get().getSalary();
                        TEAccount playerAccount = (TEAccount) accountManager.getOrCreateAccount(player.getUniqueId()).get();

                        EventContext eventContext = EventContext.builder()
                                .add(EventContextKeys.PLAYER, player)
                                .build();
                        Cause cause = Cause.builder()
                                .append(totalEconomy.getPluginContainer())
                                .build(eventContext);

                        TransactionResult result = playerAccount.deposit(totalEconomy.getDefaultCurrency(), salary, cause);

                        if (result.getResult() == ResultType.SUCCESS) {
                            Map<String, String> messageValues = new HashMap<>();
                            messageValues.put("amount", totalEconomy.getDefaultCurrency().format(salary).toPlain());

                            player.sendMessage(messageManager.getMessage("jobs.salary", messageValues));
                        } else {
                            player.sendMessage(Text.of(TextColors.RED, "[TE] Failed to pay your salary! You may want to contact your admin - TransactionResult: ", result.getResult().toString()));
                        }
                    }
                }
            }
        }).delay(jobsConfig.getNode("salarydelay").getInt(), TimeUnit.SECONDS).interval(jobsConfig.getNode("salarydelay").getInt(), TimeUnit.SECONDS).name("Pay Day").submit(totalEconomy);
    }

    /**
     * Create (on first run) and load both job configuration files:
     * jobsets.conf and jobs.conf.
     */
    public void setupConfig() {
        jobSetsFile = new File(totalEconomy.getConfigDir(), "jobsets.conf");
        jobSetsLoader = HoconConfigurationLoader.builder().setFile(jobSetsFile).build();
        jobSets = new HashMap<>(); // was a raw-type `new HashMap()`
        reloadJobSetConfig();

        jobsFile = new File(totalEconomy.getConfigDir(), "jobs.conf");
        jobsLoader = HoconConfigurationLoader.builder().setFile(jobsFile).build();
        jobsMap = new HashMap<>(); // was a raw-type `new HashMap()`
        reloadJobsConfig();
    }

    /**
     * Reload the job-set configuration, copying the bundled default file on first run.
     *
     * @return true when the file was loaded successfully, false on I/O failure
     */
    public boolean reloadJobSetConfig() {
        try {
            if (!jobSetsFile.exists()) {
                totalEconomy.getPluginContainer().getAsset("jobsets.conf").get().copyToFile(jobSetsFile.toPath());
            }

            jobSetsConfig = jobSetsLoader.load();
            ConfigurationNode sets = jobSetsConfig.getNode("sets");

            sets.getChildrenMap().forEach((setName, setNode) -> {
                if (setNode != null) {
                    TEJobSet jobSet = new TEJobSet(setNode);
                    jobSets.put((String) setName, jobSet);
                }
            });

            return true;
        } catch (IOException e) {
            logger.warn("An error occurred while creating/loading the jobSets configuration file!");
            return false;
        }
    }

    /**
     * Reloads the job configuration file. Can be used for initial creation of the
     * configuration file or for simply reloading it. Invalid job nodes are skipped.
     *
     * @return boolean Was the reload successful?
     */
    public boolean reloadJobsConfig() {
        try {
            if (!jobsFile.exists()) {
                totalEconomy.getPluginContainer().getAsset("jobs.conf").get().copyToFile(jobsFile.toPath());
            }

            jobsConfig = jobsLoader.load();
            ConfigurationNode jobsNode = jobsConfig.getNode("jobs");

            // Create a TEJob from each job node and index it by name.
            jobsNode.getChildrenMap().forEach((k, jobNode) -> {
                if (jobNode != null) {
                    TEJob job = new TEJob(jobNode);

                    if (job.isValid()) {
                        jobsMap.put(job.getName(), job);
                    }
                }
            });

            return true;
        } catch (IOException e) {
            logger.warn("An error occurred while creating/loading the jobs configuration file!");
            return false;
        }
    }

    /**
     * Reload all job configs (jobs + sets).
     *
     * @return true only when both reloads succeed
     */
    public boolean reloadJobsAndSets() {
        return reloadJobsConfig() && reloadJobSetConfig();
    }

    /**
     * Add exp to the player's current job and notify them if they have
     * job notifications enabled.
     *
     * @param player The player to give experience to
     * @param expAmount The amount of experience to add
     */
    public void addExp(Player player, int expAmount) {
        String jobName = getPlayerJob(player);
        UUID playerUniqueId = player.getUniqueId();
        boolean jobNotifications = accountManager.getJobNotificationState(player);

        Map<String, String> messageValues = new HashMap<>();
        messageValues.put("job", titleize(jobName));
        messageValues.put("exp", String.valueOf(expAmount));

        if (databaseEnabled) {
            int newExp = getJobExp(jobName, player) + expAmount;

            SqlQuery sqlQuery = SqlQuery.builder(sqlManager.dataSource)
                    .update("experience")
                    .set(jobName)
                    .equals(String.valueOf(newExp))
                    .where("uid")
                    .equals(playerUniqueId.toString())
                    .build();

            if (sqlQuery.getRowsAffected() > 0) {
                if (jobNotifications) {
                    player.sendMessage(messageManager.getMessage("jobs.addexp", messageValues));
                }
            } else {
                logger.warn("An error occurred while updating job experience in the database!");
                player.sendMessage(Text.of(TextColors.RED, "[TE] Error adding experience! Consult an administrator!"));
            }
        } else {
            ConfigurationNode accountConfig = accountManager.getAccountConfig();
            int curExp = accountConfig.getNode(playerUniqueId.toString(), "jobstats", jobName, "exp").getInt();

            accountConfig.getNode(playerUniqueId.toString(), "jobstats", jobName, "exp").setValue(curExp + expAmount);

            if (jobNotifications) {
                player.sendMessage(messageManager.getMessage("jobs.addexp", messageValues));
            }

            try {
                accountManager.getConfigManager().save(accountConfig);
            } catch (IOException e) {
                logger.warn("An error occurred while saving the account configuration file!");
            }
        }
    }

    /**
     * Grant the player a level in their current job once their total experience
     * reaches the threshold returned by {@link #getExpToLevel(User)}.
     *
     * NOTE(review): experience is written back unchanged — it is never reset on
     * level-up, so the thresholds appear to be cumulative totals. Confirm this
     * is the intended progression model before changing it.
     *
     * @param player player object
     */
    public void checkForLevel(Player player) {
        UUID playerUniqueId = player.getUniqueId();
        String jobName = getPlayerJob(player);
        int playerLevel = getJobLevel(jobName, player);
        int playerCurExp = getJobExp(jobName, player);
        int expToLevel = getExpToLevel(player);

        if (playerCurExp >= expToLevel) {
            playerLevel += 1;

            Map<String, String> messageValues = new HashMap<>();
            messageValues.put("job", titleize(jobName));
            messageValues.put("level", String.valueOf(playerLevel));

            if (databaseEnabled) {
                SqlQuery.builder(sqlManager.dataSource)
                        .update("levels")
                        .set(jobName)
                        .equals(String.valueOf(playerLevel))
                        .where("uid")
                        .equals(playerUniqueId.toString())
                        .build();

                SqlQuery.builder(sqlManager.dataSource)
                        .update("experience")
                        .set(jobName)
                        .equals(String.valueOf(playerCurExp))
                        .where("uid")
                        .equals(playerUniqueId.toString())
                        .build();
            } else {
                ConfigurationNode accountConfig = accountManager.getAccountConfig();

                accountConfig.getNode(playerUniqueId.toString(), "jobstats", jobName, "level").setValue(playerLevel);
                accountConfig.getNode(playerUniqueId.toString(), "jobstats", jobName, "exp").setValue(playerCurExp);
            }

            player.sendMessage(messageManager.getMessage("jobs.levelup", messageValues));
        }
    }

    /**
     * Checks the jobs config for the jobName.
     *
     * @param jobName name of the job
     * @return boolean if the job exists or not
     */
    public boolean jobExists(String jobName) {
        return jobsConfig.getNode("jobs", jobName.toLowerCase()).getValue() != null;
    }

    /**
     * Convert strings to titles (title -> Title). Empty input is returned
     * unchanged (previously threw StringIndexOutOfBoundsException).
     *
     * @param input the string to be titleized
     * @return String the titleized version of the input
     */
    public String titleize(String input) {
        if (input.isEmpty()) {
            return input;
        }

        return input.substring(0, 1).toUpperCase() + input.substring(1).toLowerCase();
    }

    /**
     * Whether the player with the given UUID wants job reward notifications.
     * Reads the "job_notifications" column in database mode (defaulting to the
     * global setting), or the "jobnotifications" account-config node otherwise.
     */
    private boolean getNotificationState(UUID uuid) {
        if (databaseEnabled) {
            SqlQuery sqlQuery = SqlQuery.builder(sqlManager.dataSource)
                    .select("job_notifications")
                    .from("accounts")
                    .where("uid")
                    .equals(uuid.toString())
                    .build();

            return sqlQuery.getBoolean(totalEconomy.isJobNotificationEnabled());
        }

        return accountManager.getAccountConfig().getNode(uuid.toString(), "jobnotifications").getBoolean();
    }

    /**
     * Notifies a player when they are rewarded for completing a job action.
     *
     * @param player The player to notify
     * @param amount The amount rewarded by the job action
     * @param currency The currency the amount was paid in
     */
    private void notifyPlayerOfJobReward(Player player, BigDecimal amount, Currency currency) {
        Text amountText = currency.format(amount, currency.getDefaultFractionDigits());

        Map<String, String> messageValues = new HashMap<>();
        messageValues.put("amount", amountText.toPlain());

        player.sendMessage(messageManager.getMessage("jobs.notify", messageValues));
    }

    /**
     * Set the user's job, initializing level/exp stats when stored in the config file.
     *
     * @param user User object
     * @param jobName name of the job
     * @return true when the job was changed successfully
     */
    public boolean setJob(User user, String jobName) {
        UUID userUniqueId = user.getUniqueId();

        // Just in case the job name was not passed in as lowercase, make it lowercase
        jobName = jobName.toLowerCase();

        if (databaseEnabled) {
            SqlQuery sqlQuery = SqlQuery.builder(sqlManager.dataSource)
                    .update("accounts")
                    .set("job")
                    .equals(jobName)
                    .where("uid")
                    .equals(userUniqueId.toString())
                    .build();

            if (sqlQuery.getRowsAffected() > 0) {
                return true;
            } else {
                logger.warn("An error occurred while changing the job of " + user.getUniqueId() + "/" + user.getName() + "!");
                return false;
            }
        } else {
            ConfigurationNode accountConfig = accountManager.getAccountConfig();

            accountConfig.getNode(userUniqueId.toString(), "job").setValue(jobName);
            accountConfig.getNode(userUniqueId.toString(), "jobstats", jobName, "level").setValue(
                    accountConfig.getNode(userUniqueId.toString(), "jobstats", jobName, "level").getInt(1));
            accountConfig.getNode(userUniqueId.toString(), "jobstats", jobName, "exp").setValue(
                    accountConfig.getNode(userUniqueId.toString(), "jobstats", jobName, "exp").getInt(0));

            try {
                accountManager.getConfigManager().save(accountConfig);
            } catch (IOException e) {
                logger.warn("An error occurred while changing the job of " + user.getUniqueId() + "/" + user.getName() + "!");
            }

            return true;
        }
    }

    /**
     * Get a job set by name.
     *
     * @return Optional wrapping the set; empty when no set has that name
     */
    public Optional<TEJobSet> getJobSet(String name) {
        return Optional.ofNullable(jobSets.get(name));
    }

    /**
     * Get the user's current job name (lowercase), defaulting to "unemployed".
     *
     * @param user The user to get the job of
     * @return String the job the user currently has
     */
    public String getPlayerJob(User user) {
        UUID uuid = user.getUniqueId();

        if (databaseEnabled) {
            SqlQuery sqlQuery = SqlQuery.builder(sqlManager.dataSource)
                    .select("job")
                    .from("accounts")
                    .where("uid")
                    .equals(uuid.toString())
                    .build();

            return sqlQuery.getString("unemployed").toLowerCase();
        } else {
            ConfigurationNode accountConfig = accountManager.getAccountConfig();

            return accountConfig.getNode(user.getUniqueId().toString(), "job").getString("unemployed").toLowerCase();
        }
    }

    /**
     * Get a TEJob object by a job name.
     *
     * @param jobName Name of the job
     * @param tryUnemployed Whether or not to fall back to the "unemployed" job when the job wasn't found
     * @return Optional wrapping the {@link TEJob}; empty when not found
     */
    public Optional<TEJob> getJob(String jobName, boolean tryUnemployed) {
        TEJob job = jobsMap.getOrDefault(jobName, null);

        if (job != null || !tryUnemployed) {
            return Optional.ofNullable(job);
        }

        return getJob("unemployed", false);
    }

    /**
     * Get the player's level for the passed in job. "unemployed" is always level 1.
     *
     * @param jobName The name of the job
     * @param user The user object
     * @return int The job level
     */
    public int getJobLevel(String jobName, User user) {
        UUID playerUniqueId = user.getUniqueId();

        // Just in case the job name was not passed in as lowercase, make it lowercase
        jobName = jobName.toLowerCase();

        if (!jobName.equals("unemployed")) {
            if (databaseEnabled) {
                SqlQuery sqlQuery = SqlQuery.builder(sqlManager.dataSource)
                        .select(jobName)
                        .from("levels")
                        .where("uid")
                        .equals(playerUniqueId.toString())
                        .build();

                return sqlQuery.getInt(1);
            } else {
                ConfigurationNode accountConfig = accountManager.getAccountConfig();

                return accountConfig.getNode(user.getUniqueId().toString(), "jobstats", jobName, "level").getInt(1);
            }
        }

        return 1;
    }

    /**
     * Get the player's exp for the passed in job. "unemployed" always has 0 exp.
     *
     * @param jobName the name of the job
     * @param user the user object
     * @return int the job exp
     */
    public int getJobExp(String jobName, User user) {
        UUID playerUniqueId = user.getUniqueId();

        // Just in case the job name was not passed in as lowercase, make it lowercase
        jobName = jobName.toLowerCase();

        if (!jobName.equals("unemployed")) {
            if (databaseEnabled) {
                SqlQuery sqlQuery = SqlQuery.builder(sqlManager.dataSource)
                        .select(jobName)
                        .from("experience")
                        .where("uid")
                        .equals(playerUniqueId.toString())
                        .build();

                return sqlQuery.getInt(0);
            } else {
                ConfigurationNode accountConfig = accountManager.getAccountConfig();

                return accountConfig.getNode(playerUniqueId.toString(), "jobstats", jobName, "exp").getInt(0);
            }
        }

        return 0;
    }

    /**
     * Get the total exp required to reach the next level in the user's current job.
     *
     * @param user user object
     * @return int the amount of exp needed to level
     */
    public int getExpToLevel(User user) {
        String jobName = getPlayerJob(user);
        int playerLevel = getJobLevel(jobName, user);
        int nextLevel = playerLevel + 1;
        int expToLevel = (int) ((Math.pow(nextLevel, 2) + nextLevel) / 2) * 100 - (nextLevel * 100); // TODO: Custom algorithm for this, set from config

        return expToLevel;
    }

    /**
     * Gets a comma separated string of all of the jobs currently in the jobs config.
     * Each entry is clickable and runs "/job set &lt;name&gt;".
     *
     * @return Text Comma separated string of jobs
     */
    public Text getJobList() {
        List<Text> texts = new ArrayList<>();

        jobsMap.forEach((jobName, jobObject) -> texts.add(Text.of(
                TextActions.runCommand("/job set " + jobName),
                TextActions.showText(Text.of("Click to change job")),
                jobName))
        );

        return Text.joinWith(Text.of(", "), texts.toArray(new Text[texts.size()]));
    }

    /**
     * Checks sign contents and converts it to a "Job Changing" sign if conditions are met.
     * Line two is colored black when the job exists, red otherwise.
     *
     * @param event ChangeSignEvent
     */
    @Listener
    public void onJobSignCheck(ChangeSignEvent event) {
        SignData data = event.getText();
        Text lineOne = data.lines().get(0);
        Text lineTwo = data.lines().get(1);
        String lineOnePlain = lineOne.toPlain();
        String lineTwoPlain = lineTwo.toPlain();

        if (lineOnePlain.equals("[TEJobs]")) {
            lineOne = lineOne.toBuilder().style(TextStyles.BOLD).color(TextColors.DARK_BLUE).build();

            String jobName = titleize(lineTwoPlain);

            if (jobExists(lineTwoPlain)) {
                lineTwo = Text.of(jobName).toBuilder().color(TextColors.BLACK).build();
            } else {
                lineTwo = Text.of(jobName).toBuilder().color(TextColors.RED).build();
            }

            data.set(data.lines().set(0, lineOne));
            data.set(data.lines().set(1, lineTwo));
            data.set(data.lines().set(2, Text.of()));
            data.set(data.lines().set(3, Text.of()));
        }
    }

    /**
     * Called when a player clicks a sign. If the clicked sign is a "Job Changing" sign then
     * the player's job will be changed on click, provided any job requirements
     * (required job level, required permission) are met.
     *
     * @param event InteractBlockEvent
     */
    @Listener
    public void onSignInteract(InteractBlockEvent.Secondary event) {
        Optional<Player> optPlayer = event.getCause().first(Player.class);

        if (!optPlayer.isPresent() || !event.getTargetBlock().getLocation().isPresent()) {
            return;
        }

        Player player = optPlayer.get();
        Optional<TileEntity> tileEntityOpt = event.getTargetBlock().getLocation().get().getTileEntity();

        if (!tileEntityOpt.isPresent() || !(tileEntityOpt.get() instanceof Sign)) {
            return;
        }

        Sign sign = (Sign) tileEntityOpt.get();
        Optional<SignData> data = sign.getOrCreate(SignData.class);

        if (!data.isPresent()) {
            return;
        }

        SignData signData = data.get();
        String lineOne = signData.lines().get(0).toPlain();
        String jobName = signData.lines().get(1).toPlain().toLowerCase();

        if (!lineOne.equals("[TEJobs]")) {
            return;
        }

        Map<String, String> messageValues = new HashMap<>();
        messageValues.put("job", titleize(jobName));

        Optional<TEJob> optJob = getJob(jobName, false);

        if (!optJob.isPresent()) {
            player.sendMessage(messageManager.getMessage("jobs.notfound"));
            return;
        }

        Optional<JobBasedRequirement> optRequire = optJob.get().getRequirement();

        if (optRequire.isPresent()) {
            String reqJob = optRequire.get().getRequiredJob();
            Integer reqLevel = optRequire.get().getRequiredJobLevel();
            String reqPerm = optRequire.get().getRequiredPermission();

            // BUG FIX: getJobLevel is now only called when reqJob is non-null;
            // previously it was called unconditionally and NPE'd on a null reqJob.
            if (reqJob != null && reqLevel > getJobLevel(reqJob, player)) {
                messageValues.put("job", titleize(reqJob));
                messageValues.put("level", reqLevel.toString());

                player.sendMessage(messageManager.getMessage("jobs.unmet.level", messageValues));
                return;
            }

            if (reqPerm != null && !player.hasPermission(reqPerm)) {
                player.sendMessage(messageManager.getMessage("jobs.unmet.permission", messageValues));
                return;
            }
        }

        if (setJob(player, jobName)) {
            player.sendMessage(messageManager.getMessage("jobs.sign", messageValues));
        } else {
            player.sendMessage(messageManager.getMessage("jobs.setfailed"));
        }
    }

    /**
     * Pick the reward giving the higher experience payout; used to resolve
     * duplicate action matches across multiple job sets. Either side may be empty.
     */
    private Optional<TEActionReward> higherExpReward(Optional<TEActionReward> current, Optional<TEActionReward> candidate) {
        if (!current.isPresent()) {
            return candidate;
        }

        if (!candidate.isPresent()) {
            return current;
        }

        return candidate.get().getExpReward() > current.get().getExpReward() ? candidate : current;
    }

    /**
     * Apply a job action reward to a player: resolve the reward currency
     * (falling back to the default currency), notify the player if they have
     * notifications enabled, deposit the money, then add experience and run a
     * level check. Shared by all four reward listeners.
     *
     * @param player the player being rewarded
     * @param reward the reward to pay out
     * @param cause the event cause used for the deposit transaction
     */
    private void payoutReward(Player player, TEActionReward reward, Cause cause) {
        BigDecimal payAmount = new BigDecimal(reward.getMoneyReward());
        Currency currency = totalEconomy.getDefaultCurrency();

        if (reward.getCurrencyId() != null) {
            Optional<Currency> currencyOpt = totalEconomy.getTECurrencyRegistryModule().getById("totaleconomy:" + reward.getCurrencyId());

            if (currencyOpt.isPresent()) {
                currency = currencyOpt.get();
            }
        }

        if (getNotificationState(player.getUniqueId())) {
            notifyPlayerOfJobReward(player, payAmount, currency);
        }

        TEAccount playerAccount = (TEAccount) accountManager.getOrCreateAccount(player.getUniqueId()).get();
        playerAccount.deposit(currency, payAmount, cause);

        addExp(player, reward.getExpReward());
        checkForLevel(player);
    }

    /**
     * Admin debugging aid: when the given user option flag is "1", dump the
     * block's name and trait values to the player.
     */
    private void maybeSendBlockInfo(Player player, BlockState state, String blockName, String optionKey) {
        if (!accountManager.getUserOption(optionKey, player).orElse("0").equals("1")) {
            return;
        }

        List<BlockTrait<?>> traits = new ArrayList<>(state.getTraits());
        List<Text> traitTexts = new ArrayList<>(traits.size());

        for (BlockTrait<?> trait : traits) {
            Object traitValue = state.getTraitValue(trait).orElse(null);
            traitTexts.add(Text.of(trait.getName(), '=', traitValue != null ? traitValue.toString() : "null"));
        }

        Text t = Text.of(TextColors.GRAY, "TRAITS:\n ", Text.joinWith(Text.of(",\n "), traitTexts.toArray(new Text[traitTexts.size()])));
        player.sendMessage(Text.of("Block-Name: ", blockName));
        player.sendMessage(t);
    }

    /**
     * Used for the break option in jobs. Checks the player's job sets for a
     * "break" action matching the broken block and pays out the best reward.
     *
     * @param event ChangeBlockEvent.Break
     */
    @Listener
    public void onPlayerBlockBreak(ChangeBlockEvent.Break event) {
        if (!event.getCause().first(Player.class).isPresent()) {
            return;
        }

        Player player = event.getCause().first(Player.class).get();
        String playerJob = getPlayerJob(player);
        Optional<TEJob> optPlayerJob = getJob(playerJob, true);
        BlockState state = event.getTransactions().get(0).getOriginal().getState();
        String blockName = state.getType().getName();
        Optional<UUID> blockCreator = event.getTransactions().get(0).getOriginal().getCreator();

        maybeSendBlockInfo(player, state, blockName, "totaleconomy:block-break-info");

        if (!optPlayerJob.isPresent()) {
            return;
        }

        Optional<TEActionReward> reward = Optional.empty();

        for (String s : optPlayerJob.get().getSets()) {
            Optional<TEJobSet> optSet = getJobSet(s);

            if (!optSet.isPresent()) {
                logger.warn("Job " + playerJob + " has the nonexistent set \"" + s + "\"");
                continue;
            }

            Optional<TEAction> action = optSet.get().getActionFor("break", blockName);

            if (!action.isPresent()) {
                continue;
            }

            // evaluateBreak also considers who originally placed the block (anti-farm).
            reward = higherExpReward(reward, action.get().evaluateBreak(logger, state, blockCreator.orElse(null)));
        }

        if (reward.isPresent()) {
            payoutReward(player, reward.get(), event.getCause());
        }
    }

    /**
     * Used for the place option in jobs. Checks the player's job sets for a
     * "place" action matching the placed block and pays out the best reward.
     *
     * @param event ChangeBlockEvent.Place
     */
    @Listener
    public void onPlayerPlaceBlock(ChangeBlockEvent.Place event) {
        if (!event.getCause().first(Player.class).isPresent()) {
            return;
        }

        Player player = event.getCause().first(Player.class).get();
        String playerJob = getPlayerJob(player);
        Optional<TEJob> optPlayerJob = getJob(playerJob, true);
        BlockState state = event.getTransactions().get(0).getFinal().getState();
        String blockName = state.getType().getName();

        maybeSendBlockInfo(player, state, blockName, "totaleconomy:block-place-info");

        if (!optPlayerJob.isPresent()) {
            return;
        }

        Optional<TEActionReward> reward = Optional.empty();

        for (String s : optPlayerJob.get().getSets()) {
            Optional<TEJobSet> optSet = getJobSet(s);

            if (!optSet.isPresent()) {
                logger.warn("Job " + playerJob + " has the nonexistent set \"" + s + "\"");
                continue;
            }

            Optional<TEAction> action = optSet.get().getActionFor("place", blockName);

            if (!action.isPresent()) {
                continue;
            }

            reward = higherExpReward(reward, action.get().evaluatePlace(logger, state));
        }

        if (reward.isPresent()) {
            payoutReward(player, reward.get(), event.getCause());
        }
    }

    /**
     * Used for the kill option in jobs. Checks the player's job sets for a
     * "kill" action matching the slain entity and pays out the best reward.
     * Projectile kills are attributed to the player who shot the projectile.
     *
     * @param event DestructEntityEvent.Death
     */
    @Listener
    public void onPlayerKillEntity(DestructEntityEvent.Death event) {
        Optional<EntityDamageSource> optDamageSource = event.getCause().first(EntityDamageSource.class);

        if (!optDamageSource.isPresent()) {
            return;
        }

        EntityDamageSource damageSource = optDamageSource.get();
        Entity killer = damageSource.getSource();
        Entity victim = event.getTargetEntity();

        if (!(killer instanceof Player)) {
            // If a projectile was shot to kill an entity, this will grab the player who shot it.
            // NOTE(review): .get() here throws if the shooter is no longer online — confirm.
            Optional<UUID> damageCreator = damageSource.getSource().getCreator();

            if (damageCreator.isPresent()) {
                killer = Sponge.getServer().getPlayer(damageCreator.get()).get();
            }
        }

        if (!(killer instanceof Player)) {
            return;
        }

        Player player = (Player) killer;
        String victimName = victim.getType().getName();
        String playerJob = getPlayerJob(player);
        Optional<TEJob> optPlayerJob = getJob(playerJob, true);

        // Enable admins to determine victim information by displaying it to them - WHEN they have the flag enabled
        if (accountManager.getUserOption("totaleconomy:entity-kill-info", player).orElse("0").equals("1")) {
            player.sendMessage(Text.of("Victim-Name: ", victimName));
        }

        if (!optPlayerJob.isPresent()) {
            return;
        }

        Optional<TEActionReward> reward = Optional.empty();

        for (String s : optPlayerJob.get().getSets()) {
            Optional<TEJobSet> optSet = getJobSet(s);

            if (!optSet.isPresent()) {
                logger.warn("Job " + playerJob + " has the nonexistent set \"" + s + "\"");
                continue;
            }

            Optional<TEAction> action = optSet.get().getActionFor("kill", victimName);

            if (!action.isPresent()) {
                continue;
            }

            reward = higherExpReward(reward, action.get().getReward());
        }

        if (reward.isPresent()) {
            payoutReward(player, reward.get(), event.getCause());
        }
    }

    /**
     * Used for the catch option in jobs. Checks the player's job sets for a
     * "catch" action matching the caught fish and pays out the best reward.
     *
     * @param event FishingEvent.Stop
     */
    @Listener
    public void onPlayerFish(FishingEvent.Stop event) {
        if (!event.getCause().first(Player.class).isPresent()) {
            return;
        }

        // No transaction, so execution can stop
        if (event.getTransactions().size() == 0) {
            return;
        }

        Transaction<ItemStackSnapshot> itemTransaction = event.getTransactions().get(0);
        ItemStack itemStack = itemTransaction.getFinal().createStack();
        Player player = event.getCause().first(Player.class).get();
        String playerJob = getPlayerJob(player);
        Optional<TEJob> optPlayerJob = getJob(playerJob, true);

        if (!optPlayerJob.isPresent() || !itemStack.get(FishData.class).isPresent()) {
            return;
        }

        FishData fishData = itemStack.get(FishData.class).get();
        String fishName = fishData.type().get().getName();

        // Enable admins to determine fish information by displaying it to them - WHEN they have the flag enabled
        if (accountManager.getUserOption("totaleconomy:entity-fish-info", player).orElse("0").equals("1")) {
            player.sendMessage(Text.of("Fish-Name: ", fishName));
        }

        Optional<TEActionReward> reward = Optional.empty();

        for (String s : optPlayerJob.get().getSets()) {
            Optional<TEJobSet> optSet = getJobSet(s);

            if (!optSet.isPresent()) {
                logger.warn("Job " + playerJob + " has the nonexistent set \"" + s + "\"");
                continue;
            }

            Optional<TEAction> action = optSet.get().getActionFor("catch", fishName);

            if (!action.isPresent()) {
                continue;
            }

            reward = higherExpReward(reward, action.get().getReward());
        }

        if (reward.isPresent()) {
            payoutReward(player, reward.get(), event.getCause());
        }
    }
}
package ajb.utils; /** * A class containing static math methods useful for image processing. */ public class ImageMath { public final static float PI = (float)Math.PI; public final static float HALF_PI = (float)Math.PI/2.0f; public final static float QUARTER_PI = (float)Math.PI/4.0f; public final static float TWO_PI = (float)Math.PI*2.0f; /** * Apply a bias to a number in the unit interval, moving numbers towards 0 or 1 * according to the bias parameter. * @param a the number to bias * @param b the bias parameter. 0.5 means no change, smaller values bias towards 0, larger towards 1. * @return the output value */ public static float bias(float a, float b) { // return (float)Math.pow(a, Math.log(b) / Math.log(0.5)); return a/((1.0f/b-2)*(1.0f-a)+1); } /** * A variant of the gamma function. * @param a the number to apply gain to * @param b the gain parameter. 0.5 means no change, smaller values reduce gain, larger values increase gain. * @return the output value */ public static float gain(float a, float b) { /* float p = (float)Math.log(1.0 - b) / (float)Math.log(0.5); if (a < .001) return 0.0f; else if (a > .999) return 1.0f; if (a < 0.5) return (float)Math.pow(2 * a, p) / 2; else return 1.0f - (float)Math.pow(2 * (1. - a), p) / 2; */ float c = (1.0f/b-2.0f) * (1.0f-2.0f*a); if (a < 0.5) return a/(c+1.0f); else return (c-a)/(c-1.0f); } /** * The step function. Returns 0 below a threshold, 1 above. * @param a the threshold position * @param x the input parameter * @return the output value - 0 or 1 */ public static float step(float a, float x) { return (x < a) ? 0.0f : 1.0f; } /** * The pulse function. Returns 1 between two thresholds, 0 outside. * @param a the lower threshold position * @param b the upper threshold position * @param x the input parameter * @return the output value - 0 or 1 */ public static float pulse(float a, float b, float x) { return (x < a || x >= b) ? 0.0f : 1.0f; } /** * A smoothed pulse function. 
A cubic function is used to smooth the step between two thresholds. * @param a1 the lower threshold position for the start of the pulse * @param a2 the upper threshold position for the start of the pulse * @param b1 the lower threshold position for the end of the pulse * @param b2 the upper threshold position for the end of the pulse * @param x the input parameter * @return the output value */ public static float smoothPulse(float a1, float a2, float b1, float b2, float x) { if (x < a1 || x >= b2) return 0; if (x >= a2) { if (x < b1) return 1.0f; x = (x - b1) / (b2 - b1); return 1.0f - (x*x * (3.0f - 2.0f*x)); } x = (x - a1) / (a2 - a1); return x*x * (3.0f - 2.0f*x); } /** * A smoothed step function. A cubic function is used to smooth the step between two thresholds. * @param a the lower threshold position * @param b the upper threshold position * @param x the input parameter * @return the output value */ public static float smoothStep(float a, float b, float x) { if (x < a) return 0; if (x >= b) return 1; x = (x - a) / (b - a); return x*x * (3 - 2*x); } /** * A "circle up" function. Returns y on a unit circle given 1-x. Useful for forming bevels. * @param x the input parameter in the range 0..1 * @return the output value */ public static float circleUp(float x) { x = 1-x; return (float)Math.sqrt(1-x*x); } /** * A "circle down" function. Returns 1-y on a unit circle given x. Useful for forming bevels. * @param x the input parameter in the range 0..1 * @return the output value */ public static float circleDown(float x) { return 1.0f-(float)Math.sqrt(1-x*x); } /** * Clamp a value to an interval. * @param a the lower clamp threshold * @param b the upper clamp threshold * @param x the input parameter * @return the clamped value */ public static float clamp(float x, float a, float b) { return (x < a) ? a : (x > b) ? b : x; } /** * Clamp a value to an interval. 
* @param a the lower clamp threshold * @param b the upper clamp threshold * @param x the input parameter * @return the clamped value */ public static int clamp(int x, int a, int b) { return (x < a) ? a : (x > b) ? b : x; } /** * Return a mod b. This differs from the % operator with respect to negative numbers. * @param a the dividend * @param b the divisor * @return a mod b */ public static double mod(double a, double b) { int n = (int)(a/b); a -= n*b; if (a < 0) return a + b; return a; } /** * Return a mod b. This differs from the % operator with respect to negative numbers. * @param a the dividend * @param b the divisor * @return a mod b */ public static float mod(float a, float b) { int n = (int)(a/b); a -= n*b; if (a < 0) return a + b; return a; } /** * Return a mod b. This differs from the % operator with respect to negative numbers. * @param a the dividend * @param b the divisor * @return a mod b */ public static int mod(int a, int b) { int n = a/b; a -= n*b; if (a < 0) return a + b; return a; } /** * The triangle function. Returns a repeating triangle shape in the range 0..1 with wavelength 1.0 * @param x the input parameter * @return the output value */ public static float triangle(float x) { float r = mod(x, 1.0f); return 2.0f*(r < 0.5 ? r : 1-r); } /** * Linear interpolation. * @param t the interpolation parameter * @param a the lower interpolation range * @param b the upper interpolation range * @return the interpolated value */ public static float lerp(float t, float a, float b) { return a + t * (b - a); } /** * Linear interpolation. * @param t the interpolation parameter * @param a the lower interpolation range * @param b the upper interpolation range * @return the interpolated value */ public static int lerp(float t, int a, int b) { return (int)(a + t * (b - a)); } /** * Linear interpolation of ARGB values. 
* @param t the interpolation parameter * @param rgb1 the lower interpolation range * @param rgb2 the upper interpolation range * @return the interpolated value */ public static int mixColors(float t, int rgb1, int rgb2) { int a1 = (rgb1 >> 24) & 0xff; int r1 = (rgb1 >> 16) & 0xff; int g1 = (rgb1 >> 8) & 0xff; int b1 = rgb1 & 0xff; int a2 = (rgb2 >> 24) & 0xff; int r2 = (rgb2 >> 16) & 0xff; int g2 = (rgb2 >> 8) & 0xff; int b2 = rgb2 & 0xff; a1 = lerp(t, a1, a2); r1 = lerp(t, r1, r2); g1 = lerp(t, g1, g2); b1 = lerp(t, b1, b2); return (a1 << 24) | (r1 << 16) | (g1 << 8) | b1; } /** * Bilinear interpolation of ARGB values. * @param x the X interpolation parameter 0..1 * @param y the y interpolation parameter 0..1 * @param rgb array of four ARGB values in the order NW, NE, SW, SE * @return the interpolated value */ public static int bilinearInterpolate(float x, float y, int[] p) { float m0, m1; int a0 = (p[0] >> 24) & 0xff; int r0 = (p[0] >> 16) & 0xff; int g0 = (p[0] >> 8) & 0xff; int b0 = p[0] & 0xff; int a1 = (p[1] >> 24) & 0xff; int r1 = (p[1] >> 16) & 0xff; int g1 = (p[1] >> 8) & 0xff; int b1 = p[1] & 0xff; int a2 = (p[2] >> 24) & 0xff; int r2 = (p[2] >> 16) & 0xff; int g2 = (p[2] >> 8) & 0xff; int b2 = p[2] & 0xff; int a3 = (p[3] >> 24) & 0xff; int r3 = (p[3] >> 16) & 0xff; int g3 = (p[3] >> 8) & 0xff; int b3 = p[3] & 0xff; float cx = 1.0f-x; float cy = 1.0f-y; m0 = cx * a0 + x * a1; m1 = cx * a2 + x * a3; int a = (int)(cy * m0 + y * m1); m0 = cx * r0 + x * r1; m1 = cx * r2 + x * r3; int r = (int)(cy * m0 + y * m1); m0 = cx * g0 + x * g1; m1 = cx * g2 + x * g3; int g = (int)(cy * m0 + y * m1); m0 = cx * b0 + x * b1; m1 = cx * b2 + x * b3; int b = (int)(cy * m0 + y * m1); return (a << 24) | (r << 16) | (g << 8) | b; } /** * Return the NTSC gray level of an RGB value. 
* @param rgb1 the input pixel * @return the gray level (0-255) */ public static int brightnessNTSC(int rgb) { int r = (rgb >> 16) & 0xff; int g = (rgb >> 8) & 0xff; int b = rgb & 0xff; return (int)(r*0.299f + g*0.587f + b*0.114f); } // Catmull-Rom splines private final static float m00 = -0.5f; private final static float m01 = 1.5f; private final static float m02 = -1.5f; private final static float m03 = 0.5f; private final static float m10 = 1.0f; private final static float m11 = -2.5f; private final static float m12 = 2.0f; private final static float m13 = -0.5f; private final static float m20 = -0.5f; private final static float m21 = 0.0f; private final static float m22 = 0.5f; private final static float m23 = 0.0f; private final static float m30 = 0.0f; private final static float m31 = 1.0f; private final static float m32 = 0.0f; private final static float m33 = 0.0f; /** * Compute a Catmull-Rom spline. * @param x the input parameter * @param numKnots the number of knots in the spline * @param knots the array of knots * @return the spline value */ public static float spline(float x, int numKnots, float[] knots) { int span; int numSpans = numKnots - 3; float k0, k1, k2, k3; float c0, c1, c2, c3; if (numSpans < 1) throw new IllegalArgumentException("Too few knots in spline"); x = clamp(x, 0, 1) * numSpans; span = (int)x; if (span > numKnots-4) span = numKnots-4; x -= span; k0 = knots[span]; k1 = knots[span+1]; k2 = knots[span+2]; k3 = knots[span+3]; c3 = m00*k0 + m01*k1 + m02*k2 + m03*k3; c2 = m10*k0 + m11*k1 + m12*k2 + m13*k3; c1 = m20*k0 + m21*k1 + m22*k2 + m23*k3; c0 = m30*k0 + m31*k1 + m32*k2 + m33*k3; return ((c3*x + c2)*x + c1)*x + c0; } /** * Compute a Catmull-Rom spline, but with variable knot spacing. 
* @param x the input parameter * @param numKnots the number of knots in the spline * @param xknots the array of knot x values * @param yknots the array of knot y values * @return the spline value */ public static float spline(float x, int numKnots, int[] xknots, int[] yknots) { int span; int numSpans = numKnots - 3; float k0, k1, k2, k3; float c0, c1, c2, c3; if (numSpans < 1) throw new IllegalArgumentException("Too few knots in spline"); for (span = 0; span < numSpans; span++) if (xknots[span+1] > x) break; if (span > numKnots-3) span = numKnots-3; float t = (float)(x-xknots[span]) / (xknots[span+1]-xknots[span]); span--; if (span < 0) { span = 0; t = 0; } k0 = yknots[span]; k1 = yknots[span+1]; k2 = yknots[span+2]; k3 = yknots[span+3]; c3 = m00*k0 + m01*k1 + m02*k2 + m03*k3; c2 = m10*k0 + m11*k1 + m12*k2 + m13*k3; c1 = m20*k0 + m21*k1 + m22*k2 + m23*k3; c0 = m30*k0 + m31*k1 + m32*k2 + m33*k3; return ((c3*t + c2)*t + c1)*t + c0; } /** * Compute a Catmull-Rom spline for RGB values. * @param x the input parameter * @param numKnots the number of knots in the spline * @param knots the array of knots * @return the spline value */ public static int colorSpline(float x, int numKnots, int[] knots) { int span; int numSpans = numKnots - 3; float k0, k1, k2, k3; float c0, c1, c2, c3; if (numSpans < 1) throw new IllegalArgumentException("Too few knots in spline"); x = clamp(x, 0, 1) * numSpans; span = (int)x; if (span > numKnots-4) span = numKnots-4; x -= span; int v = 0; for (int i = 0; i < 4; i++) { int shift = i * 8; k0 = (knots[span] >> shift) & 0xff; k1 = (knots[span+1] >> shift) & 0xff; k2 = (knots[span+2] >> shift) & 0xff; k3 = (knots[span+3] >> shift) & 0xff; c3 = m00*k0 + m01*k1 + m02*k2 + m03*k3; c2 = m10*k0 + m11*k1 + m12*k2 + m13*k3; c1 = m20*k0 + m21*k1 + m22*k2 + m23*k3; c0 = m30*k0 + m31*k1 + m32*k2 + m33*k3; int n = (int)(((c3*x + c2)*x + c1)*x + c0); if (n < 0) n = 0; else if (n > 255) n = 255; v |= n << shift; } return v; } /** * Compute a Catmull-Rom spline 
for RGB values, but with variable knot spacing. * @param x the input parameter * @param numKnots the number of knots in the spline * @param xknots the array of knot x values * @param yknots the array of knot y values * @return the spline value */ public static int colorSpline(int x, int numKnots, int[] xknots, int[] yknots) { int span; int numSpans = numKnots - 3; float k0, k1, k2, k3; float c0, c1, c2, c3; if (numSpans < 1) throw new IllegalArgumentException("Too few knots in spline"); for (span = 0; span < numSpans; span++) if (xknots[span+1] > x) break; if (span > numKnots-3) span = numKnots-3; float t = (float)(x-xknots[span]) / (xknots[span+1]-xknots[span]); span--; if (span < 0) { span = 0; t = 0; } int v = 0; for (int i = 0; i < 4; i++) { int shift = i * 8; k0 = (yknots[span] >> shift) & 0xff; k1 = (yknots[span+1] >> shift) & 0xff; k2 = (yknots[span+2] >> shift) & 0xff; k3 = (yknots[span+3] >> shift) & 0xff; c3 = m00*k0 + m01*k1 + m02*k2 + m03*k3; c2 = m10*k0 + m11*k1 + m12*k2 + m13*k3; c1 = m20*k0 + m21*k1 + m22*k2 + m23*k3; c0 = m30*k0 + m31*k1 + m32*k2 + m33*k3; int n = (int)(((c3*t + c2)*t + c1)*t + c0); if (n < 0) n = 0; else if (n > 255) n = 255; v |= n << shift; } return v; } /** * An implementation of Fant's resampling algorithm. 
* @param source the source pixels * @param dest the destination pixels * @param length the length of the scanline to resample * @param offset the start offset into the arrays * @param stride the offset between pixels in consecutive rows * @param out an array of output positions for each pixel */ public static void resample(int[] source, int[] dest, int length, int offset, int stride, float[] out) { int i, j; float intensity; float sizfac; float inSegment; float outSegment; int a, r, g, b, nextA, nextR, nextG, nextB; float aSum, rSum, gSum, bSum; float[] in; int srcIndex = offset; int destIndex = offset; int lastIndex = source.length; int rgb; in = new float[length+1]; i = 0; for (j = 0; j < length; j++) { while (out[i+1] < j) i++; in[j] = i + (float) (j - out[i]) / (out[i + 1] - out[i]); } in[length] = length; inSegment = 1.0f; outSegment = in[1]; sizfac = outSegment; aSum = rSum = gSum = bSum = 0.0f; rgb = source[srcIndex]; a = (rgb >> 24) & 0xff; r = (rgb >> 16) & 0xff; g = (rgb >> 8) & 0xff; b = rgb & 0xff; srcIndex += stride; rgb = source[srcIndex]; nextA = (rgb >> 24) & 0xff; nextR = (rgb >> 16) & 0xff; nextG = (rgb >> 8) & 0xff; nextB = rgb & 0xff; srcIndex += stride; i = 1; while (i < length) { float aIntensity = inSegment * a + (1.0f - inSegment) * nextA; float rIntensity = inSegment * r + (1.0f - inSegment) * nextR; float gIntensity = inSegment * g + (1.0f - inSegment) * nextG; float bIntensity = inSegment * b + (1.0f - inSegment) * nextB; if (inSegment < outSegment) { aSum += (aIntensity * inSegment); rSum += (rIntensity * inSegment); gSum += (gIntensity * inSegment); bSum += (bIntensity * inSegment); outSegment -= inSegment; inSegment = 1.0f; a = nextA; r = nextR; g = nextG; b = nextB; if (srcIndex < lastIndex) rgb = source[srcIndex]; nextA = (rgb >> 24) & 0xff; nextR = (rgb >> 16) & 0xff; nextG = (rgb >> 8) & 0xff; nextB = rgb & 0xff; srcIndex += stride; } else { aSum += (aIntensity * outSegment); rSum += (rIntensity * outSegment); gSum += (gIntensity * 
outSegment); bSum += (bIntensity * outSegment); dest[destIndex] = ((int)Math.min(aSum/sizfac, 255) << 24) | ((int)Math.min(rSum/sizfac, 255) << 16) | ((int)Math.min(gSum/sizfac, 255) << 8) | (int)Math.min(bSum/sizfac, 255); destIndex += stride; rSum = gSum = bSum = 0.0f; inSegment -= outSegment; outSegment = in[i+1] - in[i]; sizfac = outSegment; i++; } } } }
/* * Copyright 2014-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.python; import com.facebook.buck.cxx.CxxBuckConfig; import com.facebook.buck.cxx.CxxPlatform; import com.facebook.buck.cxx.NativeLinkStrategy; import com.facebook.buck.cxx.NativeLinkTarget; import com.facebook.buck.cxx.NativeLinkTargetMode; import com.facebook.buck.cxx.NativeLinkable; import com.facebook.buck.cxx.NativeLinkables; import com.facebook.buck.cxx.Omnibus; import com.facebook.buck.cxx.OmnibusLibraries; import com.facebook.buck.cxx.OmnibusLibrary; import com.facebook.buck.cxx.OmnibusRoot; import com.facebook.buck.cxx.OmnibusRoots; import com.facebook.buck.graph.AbstractBreadthFirstThrowingTraversal; import com.facebook.buck.io.MorePaths; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.HasBuildTarget; import com.facebook.buck.parser.NoSuchBuildTargetException; import com.facebook.buck.rules.BuildRule; import com.facebook.buck.rules.BuildRuleParams; import com.facebook.buck.rules.BuildRuleResolver; import com.facebook.buck.rules.SourcePath; import com.facebook.buck.rules.SourcePathResolver; import com.facebook.buck.rules.args.Arg; import com.facebook.buck.rules.coercer.SourceList; import com.facebook.buck.rules.macros.LocationMacroExpander; import com.facebook.buck.rules.macros.MacroHandler; import com.facebook.buck.util.HumanReadableException; import com.google.common.base.Preconditions; import 
com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * Static helpers shared by the Python build rules: mapping sources to module
 * paths and collecting the transitive components of a Python package.
 */
public class PythonUtil {

  // Expands "$(location ...)" macros in rule arguments.
  protected static final MacroHandler MACRO_HANDLER =
      new MacroHandler(
          ImmutableMap.of(
              "location", new LocationMacroExpander()));

  // Utility class: no instances.
  private PythonUtil() {}

  /**
   * Resolve source lists into a map from in-package module path to source path.
   */
  public static ImmutableMap<Path, SourcePath> toModuleMap(
      BuildTarget target,
      SourcePathResolver resolver,
      String parameter,
      Path baseModule,
      Iterable<SourceList> inputs) {
    ImmutableMap.Builder<Path, SourcePath> moduleNamesAndSourcePaths = ImmutableMap.builder();

    for (SourceList input : inputs) {
      ImmutableMap<String, SourcePath> namesAndSourcePaths;
      // Unnamed sources get names derived from their paths; named sources keep
      // their explicit names.
      if (input.getUnnamedSources().isPresent()) {
        namesAndSourcePaths =
            resolver.getSourcePathNames(
                target,
                parameter,
                input.getUnnamedSources().get());
      } else {
        namesAndSourcePaths = input.getNamedSources().get();
      }
      for (ImmutableMap.Entry<String, SourcePath> entry : namesAndSourcePaths.entrySet()) {
        moduleNamesAndSourcePaths.put(
            baseModule.resolve(entry.getKey()),
            entry.getValue());
      }
    }

    return moduleNamesAndSourcePaths.build();
  }

  /** Convert a path to a module to its module name as referenced in import statements. */
  public static String toModuleName(BuildTarget target, String name) {
    int ext = name.lastIndexOf('.');
    if (ext == -1) {
      throw new HumanReadableException(
          "%s: missing extension for module path: %s",
          target,
          name);
    }
    // Strip the extension, then turn path separators into dots.
    name = name.substring(0, ext);
    return MorePaths.pathWithUnixSeparators(name).replace('/', '.');
  }

  /**
   * Collect the build rules backing every input of the given package components.
   */
  public static ImmutableSortedSet<BuildRule> getDepsFromComponents(
      SourcePathResolver resolver,
      PythonPackageComponents components) {
    return ImmutableSortedSet.<BuildRule>naturalOrder()
        .addAll(resolver.filterBuildRuleInputs(components.getModules().values()))
        .addAll(resolver.filterBuildRuleInputs(components.getResources().values()))
        .addAll(resolver.filterBuildRuleInputs(components.getNativeLibraries().values()))
        .addAll(resolver.filterBuildRuleInputs(components.getPrebuiltLibraries()))
        .build();
  }

  /**
   * Walk the transitive deps of a Python binary/test rule and build the full
   * set of package components (modules, resources, native libraries), linking
   * native code either per-library or via a merged "omnibus" link depending on
   * {@code nativeLinkStrategy}.
   */
  public static PythonPackageComponents getAllComponents(
      BuildRuleParams params,
      BuildRuleResolver ruleResolver,
      SourcePathResolver pathResolver,
      final PythonPackageComponents packageComponents,
      final PythonPlatform pythonPlatform,
      CxxBuckConfig cxxBuckConfig,
      final CxxPlatform cxxPlatform,
      ImmutableList<? extends Arg> extraLdflags,
      final NativeLinkStrategy nativeLinkStrategy,
      final ImmutableSet<BuildTarget> preloadDeps)
      throws NoSuchBuildTargetException {

    final PythonPackageComponents.Builder allComponents =
        new PythonPackageComponents.Builder(params.getBuildTarget());

    // C/C++ extensions found during the walk, keyed by their link-target name.
    final Map<BuildTarget, CxxPythonExtension> extensions = new LinkedHashMap<>();
    // Native libraries that must be linked from the package roots.
    final Map<BuildTarget, NativeLinkable> nativeLinkableRoots = new LinkedHashMap<>();

    final OmnibusRoots.Builder omnibusRoots = OmnibusRoots.builder(cxxPlatform, preloadDeps);

    // Add the top-level components.
    allComponents.addComponent(packageComponents, params.getBuildTarget());

    // Walk all our transitive deps to build our complete package that we'll
    // turn into an executable.
    new AbstractBreadthFirstThrowingTraversal<BuildRule, NoSuchBuildTargetException>(
        params.getDeps()) {

      private final ImmutableList<BuildRule> empty = ImmutableList.of();

      @Override
      public Iterable<BuildRule> visit(BuildRule rule) throws NoSuchBuildTargetException {
        Iterable<BuildRule> deps = empty;
        if (rule instanceof CxxPythonExtension) {
          CxxPythonExtension extension = (CxxPythonExtension) rule;
          NativeLinkTarget target =
              ((CxxPythonExtension) rule).getNativeLinkTarget(pythonPlatform);
          extensions.put(target.getBuildTarget(), extension);
          omnibusRoots.addIncludedRoot(target);
          // Only recurse into other extensions; the extension handles its own
          // native deps.
          List<BuildRule> cxxpydeps = new ArrayList<>();
          for (BuildRule dep : rule.getDeps()) {
            if (dep instanceof CxxPythonExtension) {
              cxxpydeps.add(dep);
            }
          }
          deps = cxxpydeps;
        } else if (rule instanceof PythonPackagable) {
          PythonPackagable packagable = (PythonPackagable) rule;
          PythonPackageComponents comps =
              packagable.getPythonPackageComponents(pythonPlatform, cxxPlatform);
          allComponents.addComponent(comps, rule.getBuildTarget());
          // Packages that ship native code pin their native deps as excluded
          // (standalone) roots so the omnibus link won't merge them.
          if (comps.hasNativeCode(cxxPlatform)) {
            for (BuildRule dep : rule.getDeps()) {
              if (dep instanceof NativeLinkable) {
                NativeLinkable linkable = (NativeLinkable) dep;
                nativeLinkableRoots.put(linkable.getBuildTarget(), linkable);
                omnibusRoots.addExcludedRoot(linkable);
              }
            }
          }
          deps = rule.getDeps();
        } else if (rule instanceof NativeLinkable) {
          NativeLinkable linkable = (NativeLinkable) rule;
          nativeLinkableRoots.put(linkable.getBuildTarget(), linkable);
          omnibusRoots.addPotentialRoot(linkable);
        }
        return deps;
      }
    }.start();

    // For the merged strategy, build up the lists of included native linkable roots, and the
    // excluded native linkable roots.
    if (nativeLinkStrategy == NativeLinkStrategy.MERGED) {
      OmnibusRoots roots = omnibusRoots.build();
      OmnibusLibraries libraries =
          Omnibus.getSharedLibraries(
              params,
              ruleResolver,
              pathResolver,
              cxxBuckConfig,
              cxxPlatform,
              extraLdflags,
              roots.getIncludedRoots().values(),
              roots.getExcludedRoots().values());

      // Add all the roots from the omnibus link.  If it's an extension, add it as a module.
      // Otherwise, add it as a native library.
      for (Map.Entry<BuildTarget, OmnibusRoot> root : libraries.getRoots().entrySet()) {
        CxxPythonExtension extension = extensions.get(root.getKey());
        if (extension != null) {
          allComponents.addModule(extension.getModule(), root.getValue().getPath(), root.getKey());
        } else {
          NativeLinkTarget target =
              Preconditions.checkNotNull(
                  roots.getIncludedRoots().get(root.getKey()),
                  "%s: linked unexpected omnibus root: %s",
                  params.getBuildTarget(),
                  root.getKey());
          NativeLinkTargetMode mode = target.getNativeLinkTargetMode(cxxPlatform);
          String soname =
              Preconditions.checkNotNull(
                  mode.getLibraryName().orElse(null),
                  "%s: omnibus library for %s was built without soname",
                  params.getBuildTarget(),
                  root.getKey());
          allComponents.addNativeLibraries(
              Paths.get(soname),
              root.getValue().getPath(),
              root.getKey());
        }
      }

      // Add all remaining libraries as native libraries.
      for (OmnibusLibrary library : libraries.getLibraries()) {
        allComponents.addNativeLibraries(
            Paths.get(library.getSoname()),
            library.getPath(),
            params.getBuildTarget());
      }
    } else {

      // For regular linking, add all extensions via the package components interface.
      Map<BuildTarget, NativeLinkable> extensionNativeDeps = new LinkedHashMap<>();
      for (Map.Entry<BuildTarget, CxxPythonExtension> entry : extensions.entrySet()) {
        allComponents.addComponent(
            entry.getValue().getPythonPackageComponents(pythonPlatform, cxxPlatform),
            entry.getValue().getBuildTarget());
        extensionNativeDeps.putAll(
            Maps.uniqueIndex(
                entry.getValue().getNativeLinkTarget(pythonPlatform)
                    .getNativeLinkTargetDeps(cxxPlatform),
                HasBuildTarget::getBuildTarget));
      }

      // Add all the native libraries.
      ImmutableMap<BuildTarget, NativeLinkable> nativeLinkables =
          NativeLinkables.getTransitiveNativeLinkables(
              cxxPlatform,
              Iterables.concat(nativeLinkableRoots.values(), extensionNativeDeps.values()));
      for (NativeLinkable nativeLinkable : nativeLinkables.values()) {
        NativeLinkable.Linkage linkage = nativeLinkable.getPreferredLinkage(cxxPlatform);
        // Roots are always included; non-roots only when they won't be
        // statically linked into something else.
        if (nativeLinkableRoots.containsKey(nativeLinkable.getBuildTarget())
            || linkage != NativeLinkable.Linkage.STATIC) {
          ImmutableMap<String, SourcePath> libs = nativeLinkable.getSharedLibraries(cxxPlatform);
          for (Map.Entry<String, SourcePath> ent : libs.entrySet()) {
            allComponents.addNativeLibraries(
                Paths.get(ent.getKey()),
                ent.getValue(),
                nativeLinkable.getBuildTarget());
          }
        }
      }
    }

    return allComponents.build();
  }

  /**
   * Base path for the package's modules: an explicit dotted override, or the
   * target's own base path.
   */
  public static Path getBasePath(BuildTarget target, Optional<String> override) {
    return override.isPresent()
        ? Paths.get(override.get().replace('.', '/'))
        : target.getBasePath();
  }

  /**
   * Shared-library sonames of the given preload deps (sorted — the builder is
   * an ImmutableSortedSet.Builder, so build() yields natural ordering).
   */
  public static ImmutableSet<String> getPreloadNames(
      BuildRuleResolver resolver,
      CxxPlatform cxxPlatform,
      Iterable<BuildTarget> preloadDeps)
      throws NoSuchBuildTargetException {
    ImmutableSet.Builder<String> builder = ImmutableSortedSet.naturalOrder();
    for (NativeLinkable nativeLinkable :
        FluentIterable.from(preloadDeps)
            .transform(resolver::getRule)
            .filter(NativeLinkable.class)) {
      builder.addAll(nativeLinkable.getSharedLibraries(cxxPlatform).keySet());
    }
    return builder.build();
  }

}
/* * Copyright (c) 2015. Rick Hightower, Geoff Chandler * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web! * * Note this class was heavily influenced and inspired by the Orbitz Consul client. */ package io.advantageous.consul.endpoints; import io.advantageous.boon.core.Str; import io.advantageous.consul.domain.KeyValue; import io.advantageous.consul.domain.option.KeyValuePutOptions; import io.advantageous.consul.domain.option.RequestOptions; import io.advantageous.qbit.http.HTTP; import io.advantageous.qbit.http.request.HttpRequestBuilder; import io.advantageous.qbit.http.request.HttpTextResponse; import java.net.URI; import java.util.*; import java.util.stream.Collectors; import static io.advantageous.boon.json.JsonFactory.fromJsonArray; import static io.advantageous.consul.domain.ConsulException.die; /** * HTTP Client for /v1/kv/ endpoints. * <p> * Note this class was heavily influenced and inspired by the Orbitz Consul client. */ public class KeyValueStoreEndpoint extends Endpoint{ public KeyValueStoreEndpoint(String scheme, String host, String port, String rootPath) { super(scheme, host, port, rootPath); } public KeyValueStoreEndpoint(URI rootURI, String rootPath) { super(rootURI, rootPath); } /** * Retrieves a {@link io.advantageous.consul.domain.KeyValue} for a specific key * from the key/value store. * <p> * GET /v1/keyValueStore/{key} * * @param key The key to retrieve. 
* @return An {@link Optional} containing the value or {@link java.util.Optional#empty()} */ public Optional<KeyValue> getValue(String key) { return getValue(key, RequestOptions.BLANK); } /** * Retrieves a {@link io.advantageous.consul.domain.KeyValue} for a specific key * from the key/value store. * <p> * GET /v1/keyValueStore/{key} * * @param key The key to retrieve. * @param requestOptions The query options. * @return An {@link Optional} containing the value or {@link java.util.Optional#empty()} */ public Optional<KeyValue> getValue(final String key, RequestOptions requestOptions) { final URI uri = createURI("/" + key); final HttpRequestBuilder httpRequestBuilder = RequestUtils .getHttpRequestBuilder(null, null, requestOptions, ""); final HTTP.Response httpResponse = HTTP.getResponse(uri.toString() + "?" + httpRequestBuilder.paramString()); if (httpResponse.code() == 404) { return Optional.empty(); } if (httpResponse.code() != 200) { die("Unable to retrieve the key", key, uri, httpResponse.code(), httpResponse.body()); } return getKeyValueOptional(httpResponse); } private Optional<KeyValue> getKeyValueOptional(HttpTextResponse httpResponse) { final List<KeyValue> keyValues = fromJsonArray(httpResponse.body(), KeyValue.class); return keyValues != null && keyValues.size() > 0 ? Optional.of(keyValues.get(0)) : Optional.<KeyValue>empty(); } private Optional<KeyValue> getKeyValueOptional(HTTP.Response httpResponse) { final List<KeyValue> keyValues = fromJsonArray(httpResponse.body(), KeyValue.class); return keyValues != null && keyValues.size() > 0 ? Optional.of(keyValues.get(0)) : Optional.<KeyValue>empty(); } /** * Retrieves a list of {@link io.advantageous.consul.domain.KeyValue} objects for a specific key * from the key/value store. * <p> * GET /v1/keyValueStore/{key}?recurse * * @param key The key to retrieve. * @return A list of zero to many {@link io.advantageous.consul.domain.KeyValue} objects. 
*/ public List<KeyValue> getValues(String key) { final URI uri = createURI("/" + key); final HttpRequestBuilder httpRequestBuilder = RequestUtils .getHttpRequestBuilder(null, null, RequestOptions.BLANK, ""); httpRequestBuilder.addParam("recurse", "true"); final HTTP.Response httpResponse = HTTP.getResponse(uri.toString() + "?" + httpRequestBuilder.paramString()); if (httpResponse.code() != 200) { die("Unable to retrieve the service", uri, httpResponse.code(), httpResponse.body()); } return fromJsonArray(httpResponse.body(), KeyValue.class); } /** * Retrieves a string value for a specific key from the key/value store. * <p> * GET /v1/keyValueStore/{key} * * @param key The key to retrieve. * @return An {@link Optional} containing the value as a string or * {@link java.util.Optional#empty()} */ public Optional<String> getValueAsString(String key) { Optional<KeyValue> value = getValue(key); return value.isPresent() ? Optional.of(RequestUtils.decodeBase64(value.get().getValue())) : Optional.<String>empty(); } /** * Retrieves a list of string values for a specific key from the key/value * store. * <p> * GET /v1/keyValueStore/{key}?recurse * * @param key The key to retrieve. * @return A list of zero to many string values. */ public List<String> getValuesAsString(String key) { @SuppressWarnings("UnnecessaryLocalVariable") List<String> result = getValues(key).stream().map(keyValue -> RequestUtils.decodeBase64(keyValue.getValue())).collect(Collectors.toList()); return result; } /** * Puts a value into the key/value store. * * @param key The key to use as index. * @param value The value to index. * @return <code>true</code> if the value was successfully indexed. */ public boolean putValue(String key, String value) { return putValue(key, value, 0L, KeyValuePutOptions.BLANK); } /** * Puts a value into the key/value store. * * @param key The key to use as index. * @param value The value to index. * @param flags The flags for this key. 
* @return <code>true</code> if the value was successfully indexed. */ public boolean putValue(String key, String value, long flags) { return putValue(key, value, flags, KeyValuePutOptions.BLANK); } /** * Puts a value into the key/value store. * * @param key The key to use as index. * @param value The value to index. * @param putOptions PUT options (e.g. wait, acquire). * @return <code>true</code> if the value was successfully indexed. */ private boolean putValue(final String key, final String value, final long flags, final KeyValuePutOptions putOptions) { Integer cas = putOptions.getCas(); String release = putOptions.getRelease(); String acquire = putOptions.getAcquire(); final URI uri = createURI("/" + key); final HttpRequestBuilder httpRequestBuilder = RequestUtils .getHttpRequestBuilder(null, null, RequestOptions.BLANK, ""); // httpRequestBuilder.addParam("recurse", "true"); if (cas != null) { httpRequestBuilder.addParam("cas", cas.toString()); } if (!Str.isEmpty(release)) { httpRequestBuilder.addParam("release", release); } if (!Str.isEmpty(acquire)) { httpRequestBuilder.addParam("acquire", acquire); } if (flags != 0) { httpRequestBuilder.addParam("flags", String.valueOf(flags)); } httpRequestBuilder.setBody(value); httpRequestBuilder.setMethodPut(); final HTTP.Response httpResponse = HTTP.jsonRestCallViaPUT(uri.toString() + "?" + httpRequestBuilder.paramString(), value); if (httpResponse.code() == 200) { return Boolean.parseBoolean(httpResponse.body()); } else { die("Unable to put value", uri, putOptions, httpResponse.code(), httpResponse.body()); return false; } } /** * Retrieves a list of matching keys for the given key. * <p> * GET /v1/keyValueStore/{key}?keys * * @param key The key to retrieve. * @return A list of zero to many keys. 
*/ public List<String> getKeys(String key) { final URI uri = createURI("/" + key); final HttpRequestBuilder httpRequestBuilder = RequestUtils .getHttpRequestBuilder(null, null, RequestOptions.BLANK, ""); httpRequestBuilder.addParam("keys", "true"); final HTTP.Response httpResponse = HTTP.getResponse(uri.toString() + "?" + httpRequestBuilder.paramString()); if (httpResponse.code() == 200) { return fromJsonArray(httpResponse.body(), String.class); } else { die("Unable to get nested keys", uri, key, httpResponse.code(), httpResponse.body()); return Collections.emptyList(); } } /** * Deletes a specified key. * <p> * DELETE /v1/keyValueStore/{key} * * @param key The key to delete. */ public void deleteKey(String key) { //noinspection unchecked,unchecked delete(key, Collections.EMPTY_MAP); } /** * Deletes a specified key and any below it. * <p> * DELETE /v1/keyValueStore/{key}?recurse * * @param key The key to delete. */ public void deleteKeys(String key) { delete(key, Collections.singletonMap("recurse", "true")); } /** * Deletes a specified key. * * @param key The key to delete. * @param params Map of parameters, e.g. recurse. */ private void delete(String key, Map<String, String> params) { final URI uri = createURI("/" + key); final HttpRequestBuilder httpRequestBuilder = RequestUtils .getHttpRequestBuilder(null, null, RequestOptions.BLANK, ""); final Set<Map.Entry<String, String>> entries = params.entrySet(); for (Map.Entry<String, String> entry : entries) { httpRequestBuilder.addParam(entry.getKey(), entry.getValue()); } httpRequestBuilder.setMethodDelete(); final HTTP.Response httpResponse = HTTP.deleteResponse(uri.toString() + "?" + httpRequestBuilder.paramString()); if (httpResponse.code() != 200) { die("Unable to delete key", uri, key, httpResponse.code(), httpResponse.body()); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.cql3.statements; import java.util.*; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.collect.Iterables; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.auth.Permission; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.config.ColumnDefinition; import org.apache.cassandra.config.Schema; import org.apache.cassandra.cql3.CFName; import org.apache.cassandra.cql3.ColumnIdentifier; import org.apache.cassandra.cql3.IndexName; import org.apache.cassandra.db.marshal.MapType; import org.apache.cassandra.exceptions.InvalidRequestException; import org.apache.cassandra.exceptions.RequestValidationException; import org.apache.cassandra.exceptions.UnauthorizedException; import org.apache.cassandra.schema.IndexMetadata; import org.apache.cassandra.schema.Indexes; import org.apache.cassandra.service.ClientState; import org.apache.cassandra.service.MigrationManager; import org.apache.cassandra.thrift.ThriftValidation; import org.apache.cassandra.transport.Event; /** A <code>CREATE INDEX</code> statement parsed from a CQL query. 
 */
public class CreateIndexStatement extends SchemaAlteringStatement
{
    private static final Logger logger = LoggerFactory.getLogger(CreateIndexStatement.class);

    // Requested index name; may be empty/null, in which case a name is generated
    // in announceMigration() via Indexes.getAvailableIndexName().
    private final String indexName;
    // Raw (unresolved) index targets as parsed from the query; resolved against
    // the table metadata in validate()/announceMigration().
    private final List<IndexTarget.Raw> rawTargets;
    // WITH OPTIONS / USING properties of the index (custom class, options map).
    private final IndexPropDefs properties;
    // True when the statement carries IF NOT EXISTS.
    private final boolean ifNotExists;

    public CreateIndexStatement(CFName name,
                                IndexName indexName,
                                List<IndexTarget.Raw> targets,
                                IndexPropDefs properties,
                                boolean ifNotExists)
    {
        super(name);
        this.indexName = indexName.getIdx();
        this.rawTargets = targets;
        this.properties = properties;
        this.ifNotExists = ifNotExists;
    }

    /** Creating an index requires ALTER permission on the target table. */
    public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
    {
        state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
    }

    /**
     * Validates the statement against the current schema: rejects unsupported
     * table kinds (counter tables, materialized views, compact tables with
     * clustering columns), resolves and checks each index target, and rejects a
     * duplicate index name unless IF NOT EXISTS was given.
     */
    public void validate(ClientState state) throws RequestValidationException
    {
        CFMetaData cfm = ThriftValidation.validateColumnFamily(keyspace(), columnFamily());

        if (cfm.isCounter())
            throw new InvalidRequestException("Secondary indexes are not supported on counter tables");

        if (cfm.isView())
            throw new InvalidRequestException("Secondary indexes are not supported on materialized views");

        if (cfm.isCompactTable() && !cfm.isStaticCompactTable())
            throw new InvalidRequestException("Secondary indexes are not supported on COMPACT STORAGE tables that have clustering columns");

        // Resolve the raw targets against the table metadata.
        List<IndexTarget> targets = new ArrayList<>(rawTargets.size());
        for (IndexTarget.Raw rawTarget : rawTargets)
            targets.add(rawTarget.prepare(cfm));

        if (targets.isEmpty() && !properties.isCustom)
            throw new InvalidRequestException("Only CUSTOM indexes can be created without specifying a target column");

        if (targets.size() > 1)
            validateTargetsForMultiColumnIndex(targets);

        for (IndexTarget target : targets)
        {
            ColumnDefinition cd = cfm.getColumnDefinition(target.column);

            if (cd == null)
                throw new InvalidRequestException("No column definition found for column " + target.column);

            // TODO: we could lift that limitation
            if (cfm.isCompactTable() && cd.isPrimaryKeyColumn())
                throw new InvalidRequestException("Secondary indexes are not supported on PRIMARY KEY columns in COMPACT STORAGE tables");

            // A single-component partition key IS the partition key, so indexing
            // it would be pointless; composite partition key components may be
            // indexed individually.
            if (cd.kind == ColumnDefinition.Kind.PARTITION_KEY && cfm.getKeyValidatorAsClusteringComparator().size() == 1)
                throw new InvalidRequestException(String.format("Cannot create secondary index on partition key column %s", target.column));

            boolean isMap = cd.type instanceof MapType;
            // Frozen = collection type that is not multi-cell.
            boolean isFrozenCollection = cd.type.isCollection() && !cd.type.isMultiCell();
            if (isFrozenCollection)
            {
                validateForFrozenCollection(target);
            }
            else
            {
                validateNotFullIndex(target);
                validateIsSimpleIndexIfTargetColumnNotCollection(cd, target);
                validateTargetColumnIsMapIfIndexInvolvesKeys(isMap, target);
            }
        }

        if (!Strings.isNullOrEmpty(indexName))
        {
            if (Schema.instance.getKSMetaData(keyspace()).existingIndexNames(null).contains(indexName))
            {
                // IF NOT EXISTS turns the duplicate-name case into a no-op.
                if (ifNotExists)
                    return;
                else
                    throw new InvalidRequestException(String.format("Index %s already exists", indexName));
            }
        }

        properties.validate();
    }

    /** Frozen collections may only be indexed with full(). */
    private void validateForFrozenCollection(IndexTarget target) throws InvalidRequestException
    {
        if (target.type != IndexTarget.Type.FULL)
            throw new InvalidRequestException(String.format("Cannot create %s() index on frozen column %s. " +
                                                            "Frozen collections only support full() indexes",
                                                            target.type, target.column));
    }

    /** Conversely, full() is only valid on frozen collections. */
    private void validateNotFullIndex(IndexTarget target) throws InvalidRequestException
    {
        if (target.type == IndexTarget.Type.FULL)
            throw new InvalidRequestException("full() indexes can only be created on frozen collections");
    }

    /** Non-collection columns only support plain (simple) indexes. */
    private void validateIsSimpleIndexIfTargetColumnNotCollection(ColumnDefinition cd, IndexTarget target) throws InvalidRequestException
    {
        if (!cd.type.isCollection() && target.type != IndexTarget.Type.SIMPLE)
            throw new InvalidRequestException(String.format("Cannot create %s() index on %s. " +
                                                            "Non-collection columns support only simple indexes",
                                                            target.type.toString(), target.column));
    }

    /** keys() / entries() indexes are only meaningful on map columns. */
    private void validateTargetColumnIsMapIfIndexInvolvesKeys(boolean isMap, IndexTarget target) throws InvalidRequestException
    {
        if (target.type == IndexTarget.Type.KEYS || target.type == IndexTarget.Type.KEYS_AND_VALUES)
        {
            if (!isMap)
                throw new InvalidRequestException(String.format("Cannot create index on %s of column %s with non-map type",
                                                                target.type, target.column));
        }
    }

    /** Multi-column targets are only allowed for CUSTOM indexes, with no duplicate columns. */
    private void validateTargetsForMultiColumnIndex(List<IndexTarget> targets)
    {
        if (!properties.isCustom)
            throw new InvalidRequestException("Only CUSTOM indexes support multiple columns");

        Set<ColumnIdentifier> columns = new HashSet<>();
        for (IndexTarget target : targets)
            if (!columns.add(target.column))
                throw new InvalidRequestException("Duplicate column " + target.column + " in index target list");
    }

    /**
     * Applies the schema change: picks (or generates) the index name, rejects
     * duplicates (by name, and by definition-equality ignoring the name),
     * attaches the new index to a copy of the table metadata and announces the
     * migration. Returns null when IF NOT EXISTS suppresses a duplicate,
     * otherwise an UPDATED TABLE schema-change event.
     */
    public Event.SchemaChange announceMigration(boolean isLocalOnly) throws RequestValidationException
    {
        CFMetaData cfm = Schema.instance.getCFMetaData(keyspace(), columnFamily()).copy();
        List<IndexTarget> targets = new ArrayList<>(rawTargets.size());
        for (IndexTarget.Raw rawTarget : rawTargets)
            targets.add(rawTarget.prepare(cfm));

        String acceptedName = indexName;
        if (Strings.isNullOrEmpty(acceptedName))
        {
            // No explicit name given: derive one from the table (and, for a
            // single target, the column).
            acceptedName = Indexes.getAvailableIndexName(keyspace(),
                                                         columnFamily(),
                                                         targets.size() == 1 ? targets.get(0).column.toString() : null);
        }

        // Re-checked here (in addition to validate()) because the schema may
        // have changed in between, and the generated name must also be checked.
        if (Schema.instance.getKSMetaData(keyspace()).existingIndexNames(null).contains(acceptedName))
        {
            if (ifNotExists)
                return null;
            else
                throw new InvalidRequestException(String.format("Index %s already exists", acceptedName));
        }

        IndexMetadata.Kind kind;
        Map<String, String> indexOptions;
        if (properties.isCustom)
        {
            kind = IndexMetadata.Kind.CUSTOM;
            indexOptions = properties.getOptions();
        }
        else
        {
            indexOptions = Collections.emptyMap();
            // Legacy distinction: COMPOSITES for compound (CQL3) tables, KEYS
            // for thrift-style tables.
            kind = cfm.isCompound() ? IndexMetadata.Kind.COMPOSITES : IndexMetadata.Kind.KEYS;
        }

        IndexMetadata index = IndexMetadata.fromIndexTargets(cfm, targets, acceptedName, kind, indexOptions);

        // check to disallow creation of an index which duplicates an existing one in all but name
        Optional<IndexMetadata> existingIndex = Iterables.tryFind(cfm.getIndexes(), existing -> existing.equalsWithoutName(index));
        if (existingIndex.isPresent())
        {
            if (ifNotExists)
                return null;
            else
                throw new InvalidRequestException(String.format("Index %s is a duplicate of existing index %s",
                                                                index.name,
                                                                existingIndex.get().name));
        }

        logger.trace("Updating index definition for {}", indexName);
        cfm.indexes(cfm.getIndexes().with(index));

        MigrationManager.announceColumnFamilyUpdate(cfm, false, isLocalOnly);

        // Creating an index is akin to updating the CF
        return new Event.SchemaChange(Event.SchemaChange.Change.UPDATED, Event.SchemaChange.Target.TABLE, keyspace(), columnFamily());
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode; import java.io.File; import java.io.IOException; import java.net.ConnectException; import java.net.InetSocketAddress; import java.net.SocketTimeoutException; import java.util.AbstractList; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.AvatarZooKeeperClient; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.AvatarProtocol; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.BlockListAsLongs; import org.apache.hadoop.hdfs.protocol.DataTransferProtocol; import org.apache.hadoop.hdfs.protocol.FSConstants; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.server.common.Storage; import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption; import org.apache.hadoop.hdfs.server.namenode.AvatarNode; import org.apache.hadoop.hdfs.server.namenode.NameNode; import 
org.apache.hadoop.hdfs.server.protocol.BlockReport; import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.DisallowedDatanodeException; import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand; import org.apache.hadoop.hdfs.server.common.IncorrectVersionException; import org.apache.hadoop.hdfs.util.InjectionEvent; import org.apache.hadoop.hdfs.util.InjectionHandler; import org.apache.hadoop.hdfs.protocol.UnregisteredDatanodeException; import org.apache.hadoop.util.DiskChecker.DiskErrorException; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.DiskChecker; import org.apache.hadoop.util.StringUtils; import org.apache.zookeeper.data.Stat; /** * This is an implementation of the AvatarDataNode, a wrapper * for a regular datanode that works with AvatarNode. * * The AvatarDataNode is needed to make a vanilla DataNode send * block reports to Primary and standby namenodes. The AvatarDataNode * does not know which one of the namenodes is primary and which is * secondary. 
* * Typically, an adminstrator will have to specify the pair of * AvatarNodes via fs1.default.name and fs2.default.name * */ public class AvatarDataNode extends DataNode { static { Configuration.addDefaultResource("avatar-default.xml"); Configuration.addDefaultResource("avatar-site.xml"); } public static final Log LOG = LogFactory.getLog(AvatarDataNode.class.getName()); public AvatarDataNode(Configuration conf, AbstractList<File> dataDirs, String dnThreadName) throws IOException { super(conf, dataDirs); AvatarDataNode.dnThreadName = dnThreadName; } private static List<InetSocketAddress> getDatanodeProtocolAddresses( Configuration conf, Collection<String> serviceIds) throws IOException { // Use default address as fall back String defaultAddress; try { defaultAddress = conf.get(FileSystem.FS_DEFAULT_NAME_KEY); if (defaultAddress != null) { Configuration newConf = new Configuration(conf); newConf.set(FileSystem.FS_DEFAULT_NAME_KEY, defaultAddress); defaultAddress = NameNode.getHostPortString(NameNode.getAddress(newConf)); } } catch (IllegalArgumentException e) { defaultAddress = null; } List<InetSocketAddress> addressList = DFSUtil.getAddresses(conf, serviceIds, defaultAddress, NameNode.DATANODE_PROTOCOL_ADDRESS, FSConstants.DFS_NAMENODE_RPC_ADDRESS_KEY); if (addressList == null) { throw new IOException("Incorrect configuration: namenode address " + FSConstants.DFS_NAMENODE_RPC_ADDRESS_KEY + " is not configured."); } return addressList; } @Override void startDataNode(Configuration conf, AbstractList<File> dataDirs ) throws IOException { initGlobalSetting(conf, dataDirs); Collection<String> serviceIds = DFSUtil.getNameServiceIds(conf); List<InetSocketAddress> defaultNameAddrs = AvatarDataNode.getDatanodeProtocolAddresses(conf, serviceIds); List<InetSocketAddress> nameAddrs0 = DFSUtil.getRPCAddresses("0", conf, serviceIds, NameNode.DATANODE_PROTOCOL_ADDRESS, FSConstants.DFS_NAMENODE_RPC_ADDRESS_KEY); List<InetSocketAddress> nameAddrs1 = DFSUtil.getRPCAddresses("1", 
conf, serviceIds, NameNode.DATANODE_PROTOCOL_ADDRESS, FSConstants.DFS_NAMENODE_RPC_ADDRESS_KEY); List<InetSocketAddress> avatarAddrs0 = AvatarDataNode.getAvatarNodeAddresses("0", conf, serviceIds); List<InetSocketAddress> avatarAddrs1 = AvatarDataNode.getAvatarNodeAddresses("1", conf, serviceIds); namespaceManager = new AvatarNamespaceManager(nameAddrs0, nameAddrs1, avatarAddrs0, avatarAddrs1, defaultNameAddrs, DFSUtil.getNameServiceIds(conf)); initDataSetAndScanner(conf, dataDirs, nameAddrs0.size()); } @Override protected void notifyNamenodeReceivedBlock(int namespaceId, Block block, String delHint) throws IOException { if (block == null) { throw new IllegalArgumentException("Block is null"); } ((AvatarNamespaceManager)namespaceManager).notifyNamenodeReceivedBlock( namespaceId, block, delHint); } @Override protected void notifyNamenodeDeletedBlock(int namespaceId, Block block) throws IOException { if (block == null) { throw new IllegalArgumentException("Block is null"); } ((AvatarNamespaceManager)namespaceManager).notifyNamenodeDeletedBlock(namespaceId, block); } /** TODO: will add more details to this later on * Manages OfferService objects for the data node namespaces. * Each namespace has two OfferServices, one for pirmary and one for standby. * Creation, removal, starting, stopping, shutdown on OfferService * objects must be done via APIs in this class. */ class AvatarNamespaceManager extends NamespaceManager { private final Object refreshNamenodesLock = new Object(); AvatarNamespaceManager( List<InetSocketAddress> nameAddrs0, List<InetSocketAddress> nameAddrs1, List<InetSocketAddress> avatarAddrs0, List<InetSocketAddress> avatarAddrs1, List<InetSocketAddress> defaultAddrs, Collection<String> nameserviceIds) throws IOException { Iterator<String> it = nameserviceIds.iterator(); for ( int i = 0; i<nameAddrs0.size(); i++) { InetSocketAddress nameAddr0 = nameAddrs0.get(i); String nameserviceId = it.hasNext()? 
it.next(): null; nameNodeThreads.put(nameAddr0, new ServicePair(nameAddr0, nameAddrs1.get(i), avatarAddrs0.get(i), avatarAddrs1.get(i), defaultAddrs.get(i), nameserviceId)); } } /** * Notify both namenode(s) that we have received a block */ protected void notifyNamenodeReceivedBlock(int namespaceId, Block block, String delHint) throws IOException { NamespaceService servicePair = get(namespaceId); if (servicePair == null) { throw new IOException("Cannot locate OfferService thread for namespace=" + namespaceId); } servicePair.notifyNamenodeReceivedBlock(block, delHint); } /** * Notify both namenode(s) that we have deleted a block */ protected void notifyNamenodeDeletedBlock(int namespaceId, Block block) throws IOException { NamespaceService servicePair = this.get(namespaceId); if (servicePair == null) { throw new IOException("Cannot locate OfferService thread for namespace=" + namespaceId); } servicePair.notifyNamenodeDeletedBlock(block); } void refreshNamenodes( List<InetSocketAddress> nameAddrs0, List<InetSocketAddress> nameAddrs1, List<InetSocketAddress> avatarAddrs0, List<InetSocketAddress> avatarAddrs1, List<InetSocketAddress> defaultAddrs, Collection<String> nameserviceIds) throws IOException, InterruptedException{ List<Integer> toStart = new ArrayList<Integer>(); List<String> toStartNameserviceIds = new ArrayList<String>(); List<NamespaceService> toStop = new ArrayList<NamespaceService>(); synchronized (refreshNamenodesLock) { synchronized (this) { for (InetSocketAddress nnAddr : nameNodeThreads.keySet()) { if (!nameAddrs0.contains(nnAddr)){ toStop.add(nameNodeThreads.get(nnAddr)); } } Iterator<String> it = nameserviceIds.iterator(); for (int i = 0; i < nameAddrs0.size(); i++) { String nameserviceId = it.hasNext()? 
it.next() : null; if (!nameNodeThreads.containsKey(nameAddrs0.get(i))) { toStart.add(i); toStartNameserviceIds.add(nameserviceId); } } it = toStartNameserviceIds.iterator(); for (Integer i : toStart) { InetSocketAddress nameAddr0 = nameAddrs0.get(i); nameNodeThreads.put(nameAddr0, new ServicePair(nameAddr0, nameAddrs1.get(i), avatarAddrs0.get(i), avatarAddrs1.get(i), defaultAddrs.get(i), it.next())); } for (NamespaceService nsos : toStop) { remove(nsos); } } } for (NamespaceService nsos : toStop) { nsos.stop(); } startAll(); } } public class ServicePair extends NamespaceService { String defaultAddr; InetSocketAddress nameAddr1; InetSocketAddress nameAddr2; DatanodeProtocol namenode1; DatanodeProtocol namenode2; AvatarProtocol avatarnode1; AvatarProtocol avatarnode2; InetSocketAddress avatarAddr1; InetSocketAddress avatarAddr2; boolean doneRegister1 = false; // not yet registered with namenode1 boolean doneRegister2 = false; // not yet registered with namenode2 OfferService offerService1; OfferService offerService2; volatile OfferService primaryOfferService = null; Thread of1; Thread of2; int namespaceId; String nameserviceId; Thread spThread; AvatarZooKeeperClient zkClient; private NamespaceInfo nsInfo; DatanodeRegistration nsRegistration; private UpgradeManagerDatanode upgradeManager; private volatile boolean initialized = false; private volatile boolean shouldServiceRun = true; volatile long lastBeingAlive = now(); private ServicePair(InetSocketAddress nameAddr1, InetSocketAddress nameAddr2, InetSocketAddress avatarAddr1, InetSocketAddress avatarAddr2, InetSocketAddress defaultAddr, String nameserviceId) { this.nameAddr1 = nameAddr1; this.nameAddr2 = nameAddr2; this.avatarAddr1 = avatarAddr1; this.avatarAddr2 = avatarAddr2; this.defaultAddr = defaultAddr.getHostName() + ":" + defaultAddr.getPort(); this.nameserviceId = nameserviceId; zkClient = new AvatarZooKeeperClient(getConf(), null); this.nsRegistration = new DatanodeRegistration(getMachineName()); } private 
void setNamespaceInfo(NamespaceInfo nsinfo) { this.nsInfo = nsinfo; this.namespaceId = nsinfo.getNamespaceID(); namespaceManager.addNamespace(this); } private void setupNS() throws IOException { // handshake with NN NamespaceInfo nsInfo; nsInfo = handshake(true); setNamespaceInfo(nsInfo); synchronized(AvatarDataNode.this){ setupNSStorage(); } nsRegistration.setIpcPort(ipcServer.getListenerAddress().getPort()); nsRegistration.setInfoPort(infoServer.getPort()); } private void setupNSStorage() throws IOException { Configuration conf = getConf(); StartupOption startOpt = getStartupOption(conf); assert startOpt != null : "Startup option must be set."; boolean simulatedFSDataset = conf.getBoolean("dfs.datanode.simulateddatastorage", false); if (simulatedFSDataset) { nsRegistration.setStorageID(storage.getStorageID()); //same as DN nsRegistration.storageInfo.layoutVersion = FSConstants.LAYOUT_VERSION; nsRegistration.storageInfo.namespaceID = nsInfo.namespaceID; } else { // read storage info, lock data dirs and transition fs state if necessary // first do it at the top level dataDirs // This is done only once when among all namespaces storage.recoverTransitionRead(AvatarDataNode.this, nsInfo, dataDirs, startOpt); // Then do it for this namespace's directory storage.recoverTransitionRead(AvatarDataNode.this, nsInfo.namespaceID, nsInfo, dataDirs, startOpt, nameserviceId); LOG.info("setting up storage: namespaceId=" + namespaceId + ";lv=" + storage.layoutVersion + ";nsInfo=" + nsInfo); nsRegistration.setStorageInfo( storage.getNStorage(nsInfo.namespaceID), storage.getStorageID()); data.initialize(storage); } data.addNamespace(namespaceId, storage.getNameSpaceDataDir(namespaceId), conf); if (blockScanner != null) { blockScanner.start(); blockScanner.addNamespace(namespaceId); } } @Override public UpgradeManagerDatanode getUpgradeManager() { synchronized (AvatarDataNode.this) { if(upgradeManager == null) upgradeManager = new UpgradeManagerDatanode(AvatarDataNode.this, 
namespaceId); } return upgradeManager; } public void processUpgradeCommand(UpgradeCommand comm) throws IOException { assert upgradeManager != null : "DataNode.upgradeManager is null."; upgradeManager.processUpgradeCommand(comm); } /** * Start distributed upgrade if it should be initiated by the data-node. */ private void startDistributedUpgradeIfNeeded() throws IOException { UpgradeManagerDatanode um = getUpgradeManager(); if(!um.getUpgradeState()) return; um.setUpgradeState(false, um.getUpgradeVersion()); um.startUpgrade(); return; } public void start() { if ((spThread != null) && (spThread.isAlive())) { //Thread is started already return; } spThread = new Thread(this, dnThreadName + " for namespace " + namespaceId); spThread.setDaemon(true); spThread.start(); } public void stop() { stopServices(); if (spThread != null) { spThread.interrupt(); } } private void initProxy1() throws IOException { synchronized (avatarAddr1) { if (namenode1 == null) { namenode1 = (DatanodeProtocol) RPC.getProxy(DatanodeProtocol.class, DatanodeProtocol.versionID, nameAddr1, getConf()); } if (avatarnode1 == null) { avatarnode1 = (AvatarProtocol) RPC.getProxy(AvatarProtocol.class, AvatarProtocol.versionID, avatarAddr1, getConf()); } } } private void initProxy2() throws IOException { synchronized (avatarAddr2) { if (namenode2 == null) { namenode2 = (DatanodeProtocol) RPC.getProxy(DatanodeProtocol.class, DatanodeProtocol.versionID, nameAddr2, getConf()); } if (avatarnode2 == null) { avatarnode2 = (AvatarProtocol) RPC.getProxy(AvatarProtocol.class, AvatarProtocol.versionID, avatarAddr2, getConf()); } } } public void restartService1() throws IOException { // Rely on handshake to restart the service. 
synchronized (avatarAddr1) { stopService1(); joinService1(); doneRegister1 = false; } } private void stopService1() { RPC.stopProxy(avatarnode1); RPC.stopProxy(namenode1); avatarnode1 = null; namenode1 = null; if (offerService1 != null) { offerService1.stop(); } if (of1 != null) { of1.interrupt(); } } private void stopService2() { RPC.stopProxy(avatarnode2); RPC.stopProxy(namenode2); avatarnode2 = null; namenode2 = null; if (offerService2 != null) { offerService2.stop(); } if (of2 != null) { of2.interrupt(); } } private void joinService1() { if (of1 != null) { try { of1.join(); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } } } private void joinService2() { if (of2 != null) { try { of2.join(); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } } } public void restartService2() throws IOException { // Rely on handshake to restart the service. synchronized (avatarAddr2) { stopService2(); joinService2(); doneRegister2 = false; } } /** stop two offer services */ private void stopServices() { this.shouldServiceRun = false; LOG.info("stop services " + this.nameserviceId); stopService1(); stopService2(); if (zkClient != null) { try { zkClient.shutdown(); } catch (InterruptedException ie) { LOG.warn("Zk shutdown is interrupted: ", ie); } } } public void join() { joinServices(); if (spThread != null) { try { spThread.join(); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } spThread = null; } } /** Join two offer services */ private void joinServices() { joinService1(); joinService2(); } public void cleanUp() { if(upgradeManager != null) upgradeManager.shutdownUpgrade(); namespaceManager.remove(this); shouldServiceRun = false; try { RPC.stopProxy(namenode1); } catch (Exception e){ LOG.warn("Exception stop the namenode RPC threads", e); } try { RPC.stopProxy(namenode2); } catch (Exception e){ LOG.warn("Exception stop the namenode RPC threads", e); } if (blockScanner != null) { 
blockScanner.removeNamespace(this.getNamespaceId()); } if (data != null) { data.removeNamespace(this.getNamespaceId()); } if (storage != null) { storage.removeNamespaceStorage(this.getNamespaceId()); } } public void shutdown() { stop(); join(); } // connect to both name node if possible. // If doWait is true, then return only when at least one handshake is // successful. // private NamespaceInfo handshake(boolean startup) throws IOException { NamespaceInfo nsInfo = null; boolean firstIsPrimary = false; // When true indicates ZK is null and there is no primary. This is to // enable datanode startups during failover. The assumption is that the // layout version of the Standby and Primary would be consistent when // we failover and hence we can speak to any one of the nodes to find out // the NamespaceInfo. boolean noPrimary = false; do { if (startup) { // The startup option is used when the datanode is first created // We only need to connect to the primary at this point and as soon // as possible. 
So figure out who the primary is from the ZK Stat stat = new Stat(); try { String primaryAddress = zkClient.getPrimaryAvatarAddress(defaultAddr, stat, false); noPrimary = (primaryAddress == null); String firstNNAddress = nameAddr1.getHostName() + ":" + nameAddr1.getPort(); firstIsPrimary = firstNNAddress.equalsIgnoreCase(primaryAddress); } catch (Exception ex) { LOG.error("Could not get the primary address from ZooKeeper", ex); } } try { if ((firstIsPrimary && startup) || !startup || noPrimary) { // only try to connect to the first NN if it is not the // startup connection or if it is primary on startup // This way if it is standby we are not wasting datanode startup time initProxy1(); if (startup) { nsInfo = handshake(namenode1, nameAddr1); } } } catch(ConnectException se) { // namenode has not been started LOG.info("Server at " + nameAddr1 + " not available yet, Zzzzz..."); } catch(SocketTimeoutException te) { // namenode is busy LOG.info("Problem connecting to server timeout. " + nameAddr1); } catch (IOException ioe) { LOG.info("Problem connecting to server. " + nameAddr1, ioe); } try { if ((!firstIsPrimary && startup) || !startup || noPrimary) { initProxy2(); if (startup) { NamespaceInfo tempInfo = handshake(namenode2, nameAddr2); // During failover both layouts should match. if (noPrimary && nsInfo != null && tempInfo.getLayoutVersion() != nsInfo.getLayoutVersion()) { throw new IOException("Layout versions don't match on zero, one: " + nsInfo.getLayoutVersion() + ", " + tempInfo.getLayoutVersion()); } nsInfo = tempInfo; } } } catch(ConnectException se) { // namenode has not been started LOG.info("Server at " + nameAddr2 + " not available yet, Zzzzz..."); } catch(SocketTimeoutException te) { // namenode is busy LOG.info("Problem connecting to server timeout. " + nameAddr2); } catch (RemoteException re) { handleRegistrationError(re); } catch (IOException ioe) { LOG.info("Problem connecting to server. 
" + nameAddr2, ioe); } } while (startup && nsInfo == null && shouldServiceRun); return nsInfo; } private NamespaceInfo handshake(DatanodeProtocol node, InetSocketAddress machine) throws IOException { NamespaceInfo nsInfo = new NamespaceInfo(); while (shouldServiceRun) { try { nsInfo = node.versionRequest(); break; } catch(SocketTimeoutException e) { // namenode is busy LOG.info("Problem connecting to server: " + machine); try { Thread.sleep(1000); } catch (InterruptedException ie) {} } } String errorMsg = null; // do not fail on incompatible build version if( ! nsInfo.getBuildVersion().equals( Storage.getBuildVersion() )) { errorMsg = "Incompatible build versions: namenode BV = " + nsInfo.getBuildVersion() + "; datanode BV = " + Storage.getBuildVersion(); LOG.warn( errorMsg ); } if (FSConstants.LAYOUT_VERSION != nsInfo.getLayoutVersion()) { errorMsg = "Data-node and name-node layout versions must be the same." + "Expected: "+ FSConstants.LAYOUT_VERSION + " actual "+ nsInfo.getLayoutVersion(); LOG.fatal(errorMsg); try { node.errorReport(nsRegistration, DatanodeProtocol.NOTIFY, errorMsg ); } catch( SocketTimeoutException e ) { // namenode is busy LOG.info("Problem connecting to server: " + machine); } shutdownDN(); throw new IOException(errorMsg); } return nsInfo; } /** * Returns true if we are able to successfully register with namenode */ boolean register(DatanodeProtocol node, InetSocketAddress machine) throws IOException { if (nsRegistration.getStorageID().equals("")) { setNewStorageID(nsRegistration); } DatanodeRegistration tmp = new DatanodeRegistration(nsRegistration.getName()); tmp.setInfoPort(nsRegistration.getInfoPort()); tmp.setIpcPort(nsRegistration.getIpcPort()); boolean simulatedFSDataset = conf.getBoolean("dfs.datanode.simulateddatastorage", false); if (simulatedFSDataset) { tmp.setStorageID(storage.getStorageID()); //same as DN tmp.storageInfo.layoutVersion = FSConstants.LAYOUT_VERSION; tmp.storageInfo.namespaceID = nsInfo.namespaceID; } else { 
    tmp.setStorageInfo(storage.getNStorage(namespaceId),
        storage.getStorageID());
  }
  // reset name to machineName. Mainly for web interface.
  tmp.name = machineName + ":" + nsRegistration.getPort();
  try {
    tmp = node.register(tmp, DataTransferProtocol.DATA_TRANSFER_VERSION);
    // if we succeeded registering for the first time, then we update
    // the global registration object
    if (!doneRegister1 && !doneRegister2) {
      nsRegistration = tmp;
    }
  } catch (SocketTimeoutException e) {
    // namenode is busy
    LOG.info("Problem connecting to server: " + machine);
    return false;
  }
  assert ("".equals(storage.getStorageID())
          && !"".equals(nsRegistration.getStorageID()))
      || storage.getStorageID().equals(nsRegistration.getStorageID()) :
      "New storageID can be assigned only if data-node is not formatted";
  if (storage.getStorageID().equals("")) {
    // First registration of a freshly formatted node: persist the storage
    // id the namenode handed out.
    storage.setStorageID(nsRegistration.getStorageID());
    storage.writeAll();
    LOG.info("New storage id " + nsRegistration.getStorageID()
        + " is assigned to data-node " + nsRegistration.getName());
  }
  if (!storage.getStorageID().equals(nsRegistration.getStorageID())) {
    throw new IOException("Inconsistent storage IDs. Name-node returned "
        + nsRegistration.getStorageID() + ". Expecting "
        + storage.getStorageID());
  }
  sendBlocksBeingWrittenReport(node, namespaceId, nsRegistration);
  return true;
}

// True iff the given offer service currently tracks the primary avatar.
boolean isPrimaryOfferService(OfferService service) {
  return primaryOfferService == service;
}

// Records which offer service talks to the primary; null signals an
// in-progress failover.
void setPrimaryOfferService(OfferService service) {
  this.primaryOfferService = service;
  if (service != null)
    LOG.info("Primary namenode is set to be " + service.avatarnodeAddress);
  else {
    LOG.info("Failover has happened. Stop accessing commands from " +
        "either namenode until the new primary is completely in" +
        "sync with all the datanodes");
  }
}

// Registers with namenode 1 (avatar "zero") and, on first success, spawns
// its OfferService thread. Synchronized on avatarAddr1 so only one
// registration attempt runs at a time.
private void register1() throws IOException {
  synchronized (avatarAddr1) {
    InjectionHandler.processEvent(
        InjectionEvent.AVATARDATANODE_BEFORE_START_OFFERSERVICE1);
    if (avatarnode1 != null && namenode1 != null && !doneRegister1 &&
        register(namenode1, nameAddr1)) {
      InjectionHandler.processEvent(
          InjectionEvent.AVATARDATANODE_START_OFFERSERVICE1);
      doneRegister1 = true;
      offerService1 = new OfferService(AvatarDataNode.this, this,
          namenode1, nameAddr1, avatarnode1, avatarAddr1);
      of1 = new Thread(offerService1, "OfferService1 " + nameAddr1);
      of1.start();
    }
  }
}

// Same as register1() but for namenode 2 (avatar "one").
private void register2() throws IOException {
  synchronized (avatarAddr2) {
    if (avatarnode2 != null && namenode2 != null && !doneRegister2 &&
        register(namenode2, nameAddr2)) {
      InjectionHandler.processEvent(
          InjectionEvent.AVATARDATANODE_START_OFFERSERVICE2);
      doneRegister2 = true;
      offerService2 = new OfferService(AvatarDataNode.this, this,
          namenode2, nameAddr2, avatarnode2, avatarAddr2);
      of2 = new Thread(offerService2, "OfferService2 " + nameAddr2);
      of2.start();
    }
  }
}

// Service main loop: set up the namespace, then keep trying to handshake
// and register with both namenodes every 5 seconds until told to stop.
@Override
public void run() {
  LOG.info(nsRegistration + "In AvatarDataNode.run, data = " + data);
  try {
    // set up namespace
    try {
      setupNS();
    } catch (IOException ioe) {
      // Initial handshake, storage recovery or registration failed
      LOG.fatal(nsRegistration + " initialization failed for namespaceId "
          + namespaceId, ioe);
      return;
    }
    while (shouldServiceRun && shouldRun) {
      try {
        // try handshaking with any namenode that we have not yet tried
        handshake(false);
        // register2() runs in the finally so a failure in register1()
        // cannot prevent registration with the second namenode.
        try {
          register1();
        } finally {
          register2();
        }
        this.initialized = true;
        startDistributedUpgradeIfNeeded();
      } catch (RemoteException re) {
        handleRegistrationError(re);
      } catch (Exception ex) {
        LOG.error("Exception: ", ex);
      }
      if (shouldServiceRun && shouldRun) {
        try {
          Thread.sleep(5000);
        } catch (InterruptedException ie) {
        }
      }
    }
  } finally {
    LOG.info(nsRegistration + ":Finishing AvatarDataNode in: " + data);
    stopServices();
    joinServices();
    cleanUp();
  }
}

/**
 * Notify both namenode(s) that we have received a block
 */
@Override
public void notifyNamenodeReceivedBlock(Block block, String delHint) {
  if (offerService1 != null) {
    offerService1.notifyNamenodeReceivedBlock(block, delHint);
  }
  if (offerService2 != null) {
    offerService2.notifyNamenodeReceivedBlock(block, delHint);
  }
}

/**
 * Notify both namenode(s) that we have deleted a block
 */
@Override
public void notifyNamenodeDeletedBlock(Block block) {
  if (offerService1 != null) {
    offerService1.notifyNamenodeDeletedBlock(block);
  }
  if (offerService2 != null) {
    offerService2.notifyNamenodeDeletedBlock(block);
  }
}

/**
 * Update received and retry list, when blocks are deleted
 */
void removeReceivedBlocks(Block[] list) {
  if (offerService1 != null) {
    offerService1.removeReceivedBlocks(list);
  }
  if (offerService2 != null) {
    offerService2.removeReceivedBlocks(list);
  }
}

@Override
public DatanodeRegistration getNsRegistration() {
  return nsRegistration;
}

// NOTE(review): NPEs if no primary offer service has been set yet (e.g.
// during failover when setPrimaryOfferService(null) was called) — confirm
// callers guard against that.
@Override
public DatanodeProtocol getDatanodeProtocol() {
  return this.primaryOfferService.namenode;
}

@Override
public InetSocketAddress getNNSocketAddress() {
  return this.nameAddr1;
}

@Override
public int getNamespaceId() {
  return this.namespaceId;
}

@Override
public String getNameserviceId() {
  return this.nameserviceId;
}

@Override
public boolean initialized() {
  return initialized;
}

@Override
public boolean isAlive() {
  return shouldServiceRun && spThread.isAlive();
}

// Forwards bad-block reports to both namenodes.
@Override
public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
  if (this.offerService1 != null)
    this.offerService1.reportBadBlocks(blocks);
  if (this.offerService2 != null)
    this.offerService2.reportBadBlocks(blocks);
}

// Delegates block synchronization to whichever offer service is currently
// primary; returns null when neither service is primary.
@Override
public LocatedBlock syncBlock(Block block, List<BlockRecord> syncList,
    boolean closeFile, List<InterDatanodeProtocol> datanodeProxies,
    long deadline) throws IOException {
  if (offerService1 != null && isPrimaryOfferService(offerService1))
    return
        offerService1.syncBlock(block, syncList, closeFile,
            datanodeProxies, deadline);
  if (offerService2 != null && isPrimaryOfferService(offerService2))
    return offerService2.syncBlock(block, syncList, closeFile,
        datanodeProxies, deadline);
  return null;
}

// Forwards the block-report scheduling request to both offer services.
@Override
public void scheduleBlockReport(long delay) {
  if (this.offerService1 != null)
    this.offerService1.scheduleBlockReport(delay);
  if (this.offerService2 != null)
    this.offerService2.scheduleBlockReport(delay);
}

// Only use for testing
public void scheduleBlockReceivedAndDeleted(long delay) {
  if (this.offerService1 != null)
    this.offerService1.scheduleBlockReceivedAndDeleted(delay);
  if (this.offerService2 != null)
    this.offerService2.scheduleBlockReceivedAndDeleted(delay);
}
}

/**
 * Tells the datanode to start the shutdown process.
 */
public synchronized void shutdownDN() {
  shouldRun = false;
  if (namespaceManager != null) {
    namespaceManager.stopAll();
  }
}

DataStorage getStorage() {
  return storage;
}

// Prints command-line usage to stderr.
private static void printUsage() {
  System.err.println("Usage: java DataNode");
  System.err.println(" [-rollback]");
}

/**
 * Parse and verify command line arguments and set configuration parameters.
 *
 * @return false if passed arguments are incorrect
 */
private static boolean parseArguments(String args[], Configuration conf) {
  int argsLen = (args == null) ? 0 : args.length;
  StartupOption startOpt = StartupOption.REGULAR;
  for (int i = 0; i < argsLen; i++) {
    String cmd = args[i];
    if ("-r".equalsIgnoreCase(cmd) || "--rack".equalsIgnoreCase(cmd)) {
      LOG.error("-r, --rack arguments are not supported anymore. RackID " +
          "resolution is handled by the NameNode.");
      System.exit(-1);
    } else if ("-rollback".equalsIgnoreCase(cmd)) {
      startOpt = StartupOption.ROLLBACK;
    } else if ("-regular".equalsIgnoreCase(cmd)) {
      startOpt = StartupOption.REGULAR;
    } else
      return false;
  }
  setStartupOption(conf, startOpt);
  return true;
}

// Records the chosen startup option in the configuration for later reads.
private static void setStartupOption(Configuration conf, StartupOption opt) {
  conf.set("dfs.datanode.startup", opt.toString());
}

/**
 * Returns the IP address of the namenode
 */
static InetSocketAddress getNameNodeAddress(Configuration conf,
    String cname, String rpcKey, String cname2) {
  String fs = conf.get(cname);
  String fs1 = conf.get(rpcKey);
  String fs2 = conf.get(cname2);
  // Build a throwaway conf pointing the default FS at the requested
  // avatar so DataNode's resolution logic can be reused unchanged.
  Configuration newconf = new Configuration(conf);
  newconf.set("fs.default.name", fs);
  if (fs1 != null) {
    newconf.set(DFS_NAMENODE_RPC_ADDRESS_KEY, fs1);
  }
  if (fs2 != null) {
    newconf.set("dfs.namenode.dn-address", fs2);
  }
  return DataNode.getNameNodeAddress(newconf);
}

@Override
public InetSocketAddress getNameNodeAddr() {
  return NameNode.getAddress(getConf());
}

/**
 * Returns the IP:port address of the avatar node
 */
private static InetSocketAddress getAvatarNodeAddress(Configuration conf,
    String cname) {
  String fs = conf.get(cname);
  Configuration newconf = new Configuration(conf);
  newconf.set("fs.default.name", fs);
  return AvatarNode.getAddress(newconf);
}

/**
 * Returns the IP:port address of the avatar node
 */
public static List<InetSocketAddress> getAvatarNodeAddresses(String suffix,
    Configuration conf, Collection<String> serviceIds) throws IOException {
  List<InetSocketAddress> namenodeAddresses = DFSUtil.getRPCAddresses(
      suffix, conf, serviceIds, FSConstants.DFS_NAMENODE_RPC_ADDRESS_KEY);
  List<InetSocketAddress> avatarnodeAddresses =
      new ArrayList<InetSocketAddress>(namenodeAddresses.size());
  for (InetSocketAddress namenodeAddress : namenodeAddresses) {
    // Avatar port defaults to the namenode RPC port + 1 unless configured.
    avatarnodeAddresses.add(
        new InetSocketAddress(namenodeAddress.getHostName(), conf.getInt(
            "dfs.avatarnode.port", namenodeAddress.getPort() +
1))); } return avatarnodeAddresses; } public static AvatarDataNode makeInstance(String[] dataDirs, Configuration conf) throws IOException { ArrayList<File> dirs = new ArrayList<File>(); for (int i = 0; i < dataDirs.length; i++) { File data = new File(dataDirs[i]); try { DiskChecker.checkDir(data); dirs.add(data); } catch(DiskErrorException e) { LOG.warn("Invalid directory in dfs.data.dir: " + e.getMessage()); } } if (dirs.size() > 0) { String dnThreadName = "AvatarDataNode: [" + StringUtils.arrayToString(dataDirs) + "]"; return new AvatarDataNode(conf, dirs, dnThreadName); } LOG.error("All directories in dfs.data.dir are invalid."); return null; } /** Instantiate a single datanode object. This must be run by invoking * {@link DataNode#runDatanodeDaemon(DataNode)} subsequently. */ public static AvatarDataNode instantiateDataNode(String args[], Configuration conf) throws IOException { if (conf == null) conf = new Configuration(); if (!parseArguments(args, conf)) { printUsage(); return null; } if (conf.get("dfs.network.script") != null) { LOG.error("This configuration for rack identification is not supported" + " anymore. 
RackID resolution is handled by the NameNode."); System.exit(-1); } String[] dataDirs = conf.getStrings("dfs.data.dir"); return makeInstance(dataDirs, conf); } public static AvatarDataNode createDataNode(String args[], Configuration conf) throws IOException { AvatarDataNode dn = instantiateDataNode(args, conf); dn.runDatanodeDaemon(); return dn; } @Override public void refreshNamenodes(Configuration conf) throws IOException { LOG.info("refresh namenodes"); try { Collection<String> serviceIds = DFSUtil.getNameServiceIds(conf); List<InetSocketAddress> defaultNameAddrs = AvatarDataNode.getDatanodeProtocolAddresses(conf, serviceIds); List<InetSocketAddress> nameAddrs0 = DFSUtil.getRPCAddresses("0", conf, serviceIds, NameNode.DATANODE_PROTOCOL_ADDRESS, FSConstants.DFS_NAMENODE_RPC_ADDRESS_KEY); List<InetSocketAddress> nameAddrs1 = DFSUtil.getRPCAddresses("1", conf, serviceIds, NameNode.DATANODE_PROTOCOL_ADDRESS, FSConstants.DFS_NAMENODE_RPC_ADDRESS_KEY); List<InetSocketAddress> avatarAddrs0 = AvatarDataNode.getAvatarNodeAddresses("0", conf, serviceIds); List<InetSocketAddress> avatarAddrs1 = AvatarDataNode.getAvatarNodeAddresses("1", conf, serviceIds); ((AvatarNamespaceManager)namespaceManager).refreshNamenodes( nameAddrs0, nameAddrs1, avatarAddrs0, avatarAddrs1, defaultNameAddrs, serviceIds); } catch (InterruptedException e) { throw new IOException(e.getCause()); } } void handleRegistrationError(RemoteException re) { // If either the primary or standby NN throws these exceptions, this // datanode will exit. I think this is the right behaviour because // the excludes list on both namenode better be the same. 
    String reClass = re.getClassName();
    if (UnregisteredDatanodeException.class.getName().equals(reClass) ||
        DisallowedDatanodeException.class.getName().equals(reClass) ||
        IncorrectVersionException.class.getName().equals(reClass)) {
      // Fatal registration problems: this node is unknown, excluded or
      // version-incompatible, so take the whole datanode down.
      LOG.warn("DataNode is shutting down: ", re);
      shutdownDN();
    } else {
      // Anything else is just logged; the registration retry loop goes on.
      LOG.warn(re);
    }
  }

  // Entry point: creates the datanode and blocks until it shuts down.
  public static void main(String argv[]) {
    try {
      StringUtils.startupShutdownMessage(AvatarDataNode.class, argv, LOG);
      AvatarDataNode avatarnode = createDataNode(argv, null);
      if (avatarnode != null) {
        avatarnode.waitAndShutdown();
      }
    } catch (Throwable e) {
      LOG.error(StringUtils.stringifyException(e));
      System.exit(-1);
    }
  }
}
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apiman.manager.api.core.config; import io.apiman.common.config.ConfigFactory; import io.apiman.manager.api.core.logging.IApimanLogger; import java.net.MalformedURLException; import java.net.URL; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import org.apache.commons.configuration.Configuration; /** * Configuration object for the API Manager. 
* * @author eric.wittmann@redhat.com */ public abstract class ApiManagerConfig { public static final String APIMAN_MANAGER_CONFIG_LOGGER = "apiman-manager.config.logger"; //$NON-NLS-1$ public static final String APIMAN_API_KEY_GENERATOR_TYPE = "apiman-manager.api-keys.generator.type"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_NEW_USER_BOOTSTRAPPER_TYPE = "apiman-manager.user-bootstrapper.type"; //$NON-NLS-1$ /* ------------------------------------------------------- * Storage * ------------------------------------------------------- */ public static final String APIMAN_MANAGER_STORAGE_TYPE = "apiman-manager.storage.type"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_ES_PROTOCOL = "apiman-manager.storage.es.protocol"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_ES_HOST = "apiman-manager.storage.es.host"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_ES_PORT = "apiman-manager.storage.es.port"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_ES_CLUSTER_NAME = "apiman-manager.storage.es.cluster-name"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_ES_USERNAME = "apiman-manager.storage.es.username"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_ES_PASSWORD = "apiman-manager.storage.es.password"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_ES_INITIALIZE = "apiman-manager.storage.es.initialize"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_ES_TIMEOUT = "apiman-manager.storage.es.timeout"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_STORAGE_QUERY_TYPE = "apiman-manager.storage-query.type"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_SERVICE_CATALOG_TYPE = "apiman-manager.service-catalog.type"; //$NON-NLS-1$ /* ------------------------------------------------------- * Metrics * ------------------------------------------------------- */ public static final String APIMAN_MANAGER_METRICS_TYPE = 
"apiman-manager.metrics.type"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_METRICS_ES_PROTOCOL = "apiman-manager.metrics.es.protocol"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_METRICS_ES_HOST = "apiman-manager.metrics.es.host"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_METRICS_ES_PORT = "apiman-manager.metrics.es.port"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_METRICS_ES_CLUSTER_NAME = "apiman-manager.metrics.es.cluster-name"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_METRICS_ES_USERNAME = "apiman-manager.metrics.es.username"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_METRICS_ES_PASSWORD = "apiman-manager.metrics.es.password"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_METRICS_ES_TIMEOUT = "apiman-manager.metrics.es.timeout"; //$NON-NLS-1$ public static final String APIMAN_MANAGER_SECURITY_CONTEXT_TYPE = "apiman-manager.security-context.type"; //$NON-NLS-1$ public static final String APIMAN_PLUGIN_REPOSITORIES = "apiman.plugins.repositories"; //$NON-NLS-1$ public static final String APIMAN_PLUGIN_REGISTRIES = "apiman-manager.plugins.registries"; //$NON-NLS-1$ public static final String DEFAULT_ES_CLUSTER_NAME = "apiman"; //$NON-NLS-1$ public static final int DEFAULT_JEST_TIMEOUT = 6000; private final Configuration config; /** * Constructor. */ public ApiManagerConfig() { config = loadProperties(); } /** * Loads the config properties. 
*/ protected Configuration loadProperties() { return ConfigFactory.createConfig(); } /** * @return the configured plugin repositories */ public Set<URL> getPluginRepositories() { Set<URL> rval = new HashSet<>(); String repositories = config.getString(APIMAN_PLUGIN_REPOSITORIES); if (repositories != null) { String[] split = repositories.split(","); //$NON-NLS-1$ for (String repository : split) { try { rval.add(new URL(repository.trim())); } catch (MalformedURLException e) { throw new RuntimeException(e); } } } return rval; } /** * @return the configured plugin registries */ public Set<URL> getPluginRegistries() { Set<URL> rval = new HashSet<>(); String registries = config.getString(APIMAN_PLUGIN_REGISTRIES); if (registries != null) { String[] split = registries.split(","); //$NON-NLS-1$ for (String registry : split) { try { rval.add(new URL(registry.trim())); } catch (MalformedURLException e) { throw new RuntimeException(e); } } } return rval; } public String getSecurityContextType() { return config.getString(APIMAN_MANAGER_SECURITY_CONTEXT_TYPE, "default"); //$NON-NLS-1$ } /** * @return the configured user bootstrapper type */ public String getNewUserBootstrapperType() { return config.getString(APIMAN_MANAGER_NEW_USER_BOOTSTRAPPER_TYPE, null); } /** * @return any custom properties associated with the user bootstrapper (useful for custom impls) */ public Map<String, String> getNewUserBootstrapperProperties() { return getPrefixedProperties("apiman-manager.user-bootstrapper."); //$NON-NLS-1$ } /** * @return the configured storage type */ public String getStorageType() { return config.getString(APIMAN_MANAGER_STORAGE_TYPE, "jpa"); //$NON-NLS-1$ } /** * @return the configured storage query type */ public String getStorageQueryType() { return config.getString(APIMAN_MANAGER_STORAGE_QUERY_TYPE, getStorageType()); } /** * @return the configured service catalog query type */ public String getServiceCatalogType() { return config.getString(APIMAN_MANAGER_SERVICE_CATALOG_TYPE, 
null); } /** * @return the elasticsearch protocol */ public String getStorageESProtocol() { return config.getString(APIMAN_MANAGER_STORAGE_ES_PROTOCOL, "http"); //$NON-NLS-1$ } /** * @return the elasticsearch host */ public String getStorageESHost() { return config.getString(APIMAN_MANAGER_STORAGE_ES_HOST, "localhost"); //$NON-NLS-1$ } /** * @return the elasticsearch port */ public int getStorageESPort() { return config.getInt(APIMAN_MANAGER_STORAGE_ES_PORT, 19200); } /** * @return the elasticsearch cluster name */ public String getStorageESClusterName() { return config.getString(APIMAN_MANAGER_STORAGE_ES_CLUSTER_NAME, DEFAULT_ES_CLUSTER_NAME); } /** * @return the elasticsearch username */ public String getStorageESUsername() { return config.getString(APIMAN_MANAGER_STORAGE_ES_USERNAME, null); } /** * @return the elasticsearch username */ public String getStorageESPassword() { return config.getString(APIMAN_MANAGER_STORAGE_ES_PASSWORD, null); } public int getStorageESTimeout() { return config.getInt(APIMAN_MANAGER_STORAGE_ES_TIMEOUT, DEFAULT_JEST_TIMEOUT); } /** * @return true if the elasticsearch index should be initialized if not found */ public boolean isInitializeStorageES() { return config.getBoolean(APIMAN_MANAGER_STORAGE_ES_INITIALIZE, true); } /** * @return the configured API key generator type */ public String getApiKeyGeneratorType() { return config.getString(APIMAN_API_KEY_GENERATOR_TYPE, "uuid"); //$NON-NLS-1$ } /** * @return the configured storage type */ public String getMetricsType() { return config.getString(APIMAN_MANAGER_METRICS_TYPE, "es"); //$NON-NLS-1$ } /** * @return the elasticsearch protocol */ public String getMetricsESProtocol() { return config.getString(APIMAN_MANAGER_METRICS_ES_PROTOCOL, "http"); //$NON-NLS-1$ } /** * @return the elasticsearch host */ public String getMetricsESHost() { return config.getString(APIMAN_MANAGER_METRICS_ES_HOST, "localhost"); //$NON-NLS-1$ } /** * @return the elasticsearch port */ public int 
getMetricsESPort() { return config.getInt(APIMAN_MANAGER_METRICS_ES_PORT, 19200); } /** * @return the elasticsearch cluster name */ public String getMetricsESClusterName() { return config.getString(APIMAN_MANAGER_METRICS_ES_CLUSTER_NAME, DEFAULT_ES_CLUSTER_NAME); } /** * @return the elasticsearch username */ public String getMetricsESUsername() { return config.getString(APIMAN_MANAGER_METRICS_ES_USERNAME, null); } /** * @return the elasticsearch password */ public String getMetricsESPassword() { return config.getString(APIMAN_MANAGER_METRICS_ES_PASSWORD, null); } public int getMetricsESTimeout() { return config.getInt(APIMAN_MANAGER_METRICS_ES_TIMEOUT, DEFAULT_JEST_TIMEOUT); } /** * @return any custom properties associated with the storage (useful for custom impls) */ public Map<String, String> getStorageProperties() { return getPrefixedProperties("apiman-manager.storage."); //$NON-NLS-1$ } /** * @return any custom properties associated with the storage query impl */ public Map<String, String> getStorageQueryProperties() { return getPrefixedProperties("apiman-manager.storage-query.type."); //$NON-NLS-1$ } /** * @return any custom properties associated with the IDM storage impl (useful for custom impls) */ public Map<String, String> getIdmStorageProperties() { return getPrefixedProperties("apiman-manager.idm-storage."); //$NON-NLS-1$ } /** * @return any custom properties associated with the metrics accessor impl */ public Map<String, String> getMetricsProperties() { return getPrefixedProperties("apiman-manager.metrics."); //$NON-NLS-1$ } /** * @return any custom properties associated with the custom API Key generator */ public Map<String, String> getApiKeyGeneratorProperties() { return getPrefixedProperties("apiman-manager.api-keys.generator."); //$NON-NLS-1$ } /** * @return any custom properties associated with the Service Catalog impl */ public Map<String, String> getServiceCatalogProperties() { return getPrefixedProperties("apiman-manager.service-catalog."); 
//$NON-NLS-1$ } /** * Gets a map of properties prefixed by the given string. */ protected Map<String, String> getPrefixedProperties(String prefix) { Map<String, String> rval = new HashMap<>(); Iterator<String> keys = getConfig().getKeys(); while (keys.hasNext()) { String key = keys.next(); if (key.startsWith(prefix)) { String value = getConfig().getString(key); key = key.substring(prefix.length()); rval.put(key, value); } } return rval; } /** * @return the configuration */ public Configuration getConfig() { return config; } /** * 'Simple', 'JSON' or FQDN with {@link IApimanLogger} implementation. * * @return Logger name or FQDN */ public String getLoggerName() { return config.getString(APIMAN_MANAGER_CONFIG_LOGGER); } }
/* * Copyright (c) 2002-2004, Martian Software, Inc. * This file is made available under the LGPL as described in the accompanying * LICENSE.TXT file. */ package com.martiansoftware.jsap; import java.io.IOException; import java.net.URL; import java.util.Map; import java.util.List; import java.util.Iterator; import com.martiansoftware.jsap.stringparsers.BigDecimalStringParser; import com.martiansoftware.jsap.stringparsers.BigIntegerStringParser; import com.martiansoftware.jsap.stringparsers.BooleanStringParser; import com.martiansoftware.jsap.stringparsers.ByteStringParser; import com.martiansoftware.jsap.stringparsers.CharacterStringParser; import com.martiansoftware.jsap.stringparsers.ClassStringParser; import com.martiansoftware.jsap.stringparsers.ColorStringParser; import com.martiansoftware.jsap.stringparsers.DoubleStringParser; import com.martiansoftware.jsap.stringparsers.FloatStringParser; import com.martiansoftware.jsap.stringparsers.InetAddressStringParser; import com.martiansoftware.jsap.stringparsers.IntSizeStringParser; import com.martiansoftware.jsap.stringparsers.IntegerStringParser; import com.martiansoftware.jsap.stringparsers.LongSizeStringParser; import com.martiansoftware.jsap.stringparsers.LongStringParser; import com.martiansoftware.jsap.stringparsers.PackageStringParser; import com.martiansoftware.jsap.stringparsers.ShortStringParser; import com.martiansoftware.jsap.stringparsers.StringStringParser; import com.martiansoftware.jsap.stringparsers.URLStringParser; import com.martiansoftware.jsap.xml.JSAPConfig; import com.martiansoftware.util.StringUtils; /** * The core class of the JSAP (Java Simple Argument Parser) API. 
 *
 * <p>A JSAP is responsible for converting an array of Strings, typically
 * received from a command line in the main class' main() method, into a
 * collection of Objects that are retrievable by a unique ID assigned by the
 * developer.</p>
 *
 * <p>Before a JSAP parses a command line, it is configured with the Switches,
 * FlaggedOptions, and UnflaggedOptions it will accept.  As a result, the
 * developer can rest assured that if no Exceptions are thrown by the JSAP's
 * parse() method, the entire command line was parsed successfully.</p>
 *
 * <p>For example, to parse a command line with the syntax "[--verbose]
 * {-n|--number} Mynumber", the following code could be used:</p>
 *
 * <code><pre>
 * JSAP myJSAP = new JSAP();
 * myJSAP.registerParameter( new Switch( "verboseSwitch", JSAP.NO_SHORTFLAG,
 * "verbose" ) );
 * myJSAP.registerParameter( new FlaggedOption( "numberOption", new
 * IntegerStringParser(), JSAP.NO_DEFAULT,
 * JSAP.NOT_REQUIRED, 'n', "number" ) );
 * JSAPResult result = myJSAP.parse(args);
 * </pre></code>
 *
 * <p>The results of the parse could then be obtained with:</p>
 *
 * <code><pre>
 * int n = result.getInt("numberOption");
 * boolean isVerbose = result.getBoolean("verboseSwitch");
 * </pre></code>
 *
 * <h3>Generating a JSAP from ANT</h3>
 * <p>If you don't want to register all your parameters manually as shown
 * above, the JSAP API provides a custom ANT task that will generate a
 * custom JSAP subclass to suit your needs.  See
 * JSAPAntTask for details.</p>
 * <p>See the accompanying documentation for examples and further information.
 *
 * @author <a href="http://www.com.martiansoftware.com/contact.html">Marty Lamb</a>
 * @author Klaus Berg (bug fixes in help generation)
 * @author Wolfram Esser (contributed code for custom line separators in help)
 * @see com.martiansoftware.jsap.ant.JSAPAntTask
 */
public class JSAP {

    /**
     * Map of this JSAP's AbstractParameters keyed on their unique ID.
     */
    private Map paramsByID = null;

    /**
     * Map of this JSAP's AbstractParameters keyed on their short flag.
     */
    private Map paramsByShortFlag = null;

    /**
     * Map of this JSAP's AbstractParameters keyed on their long flag.
     */
    private Map paramsByLongFlag = null;

    /**
     * List of this JSAP's UnflaggedOptions, in order of declaration.
     */
    private List unflaggedOptions = null;

    /**
     * List of all of this JSAP's AbstractParameters, in order of
     * declaration.
     */
    private List paramsByDeclarationOrder = null;

    /**
     * List of all of this JSAP's DefaultSources, in order of declaration.
     */
    private List defaultSources = null;

    /**
     * If not null, overrides the automatic usage info.
     */
    private String usage = null;

    /**
     * If not null, overrides the automatic help info.
     */
    private String help = null;

    /**
     * Does not have a short flag.
     *
     * @see FlaggedOption
     * @see UnflaggedOption
     */
    public static final char NO_SHORTFLAG = '\0';

    /**
     * Does not have a long flag.
     *
     * @see FlaggedOption
     * @see UnflaggedOption
     */
    public static final String NO_LONGFLAG = null;

    /**
     * The default separator for list parameters (equivalent to
     * java.io.File.pathSeparatorChar)
     *
     * @see FlaggedOption#setListSeparator(char)
     */
    public static final char DEFAULT_LISTSEPARATOR =
        java.io.File.pathSeparatorChar;

    /**
     * The default separator between parameters in generated help (a newline
     * by default)
     */
    public static final String DEFAULT_PARAM_HELP_SEPARATOR = "\n";

    /**
     * The parameter is required.
     *
     * @see FlaggedOption#setRequired(boolean)
     */
    public static final boolean REQUIRED = true;

    /**
     * The parameter is not required.
     *
     * @see FlaggedOption#setRequired(boolean)
     */
    public static final boolean NOT_REQUIRED = false;

    /**
     * The parameter is a list.
     *
     * @see FlaggedOption#setList(boolean)
     */
    public static final boolean LIST = true;

    /**
     * The parameter is not a list.
     *
     * @see FlaggedOption#setList(boolean)
     */
    public static final boolean NOT_LIST = false;

    /**
     * The parameter allows multiple declarations.
     *
     * @see FlaggedOption#setAllowMultipleDeclarations(boolean)
     */
    public static final boolean MULTIPLEDECLARATIONS = true;

    /**
     * The parameter does not allow multiple declarations.
     *
     * @see FlaggedOption#setAllowMultipleDeclarations(boolean)
     */
    public static final boolean NO_MULTIPLEDECLARATIONS = false;

    /**
     * The parameter consumes the command line.
     *
     * @see UnflaggedOption#setGreedy(boolean)
     */
    public static final boolean GREEDY = true;

    /**
     * The parameter does not consume the command line.
     *
     * @see UnflaggedOption#setGreedy(boolean)
     */
    public static final boolean NOT_GREEDY = false;

    /** The parameter has no default value. */
    public static final String NO_DEFAULT = null;

    /**
     * The parameter has no help text.
     *
     * @see Parameter#setHelp(String)
     */
    public static final String NO_HELP = null;

    /**
     * The only instance of a {@link com.martiansoftware.jsap.stringparsers.BigDecimalStringParser}.
     */
    public static final BigDecimalStringParser BIGDECIMAL_PARSER =
        BigDecimalStringParser.getParser();

    /**
     * The only instance of a {@link com.martiansoftware.jsap.stringparsers.BigIntegerStringParser}.
     */
    public static final BigIntegerStringParser BIGINTEGER_PARSER =
        BigIntegerStringParser.getParser();

    /**
     * The only instance of a {@link com.martiansoftware.jsap.stringparsers.BooleanStringParser}.
     */
    public static final BooleanStringParser BOOLEAN_PARSER =
        BooleanStringParser.getParser();

    /**
     * The only instance of a {@link com.martiansoftware.jsap.stringparsers.ByteStringParser}.
     */
    public static final ByteStringParser BYTE_PARSER =
        ByteStringParser.getParser();

    /**
     * The only instance of a {@link com.martiansoftware.jsap.stringparsers.CharacterStringParser}.
     */
    public static final CharacterStringParser CHARACTER_PARSER =
        CharacterStringParser.getParser();

    /**
     * The only instance of a {@link com.martiansoftware.jsap.stringparsers.ClassStringParser}.
*/ public static final ClassStringParser CLASS_PARSER = ClassStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.ColorStringParser}. */ public static final ColorStringParser COLOR_PARSER = ColorStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.DoubleStringParser}. */ public static final DoubleStringParser DOUBLE_PARSER = DoubleStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.FloatStringParser}. */ public static final FloatStringParser FLOAT_PARSER = FloatStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.InetAddressStringParser}. */ public static final InetAddressStringParser INETADDRESS_PARSER = InetAddressStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.IntegerStringParser}. */ public static final IntegerStringParser INTEGER_PARSER = IntegerStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.IntSizeStringParser}. */ public static final IntSizeStringParser INTSIZE_PARSER = IntSizeStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.LongSizeStringParser}. */ public static final LongSizeStringParser LONGSIZE_PARSER = LongSizeStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.LongStringParser}. */ public static final LongStringParser LONG_PARSER = LongStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.PackageStringParser}. */ public static final PackageStringParser PACKAGE_PARSER = PackageStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.ShortStringParser}. 
*/ public static final ShortStringParser SHORT_PARSER = ShortStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.StringStringParser}. */ public static final StringStringParser STRING_PARSER = StringStringParser.getParser(); /** * The only instance of a {@link com.martiansoftware.jsap.stringparsers.URLStringParser}. */ public static final URLStringParser URL_PARSER = URLStringParser.getParser(); /** * The default screen width used for formatting help. */ public static final int DEFAULT_SCREENWIDTH = 80; /** * Temporary fix for bad console encodings screwing up non-breaking spaces. */ static char SYNTAX_SPACECHAR = ' '; static { if (Boolean.valueOf(System.getProperty("com.martiansoftware.jsap.usenbsp", "false")).booleanValue()) { SYNTAX_SPACECHAR = '\u00a0'; } } /** * Creates a new JSAP with an empty configuration. It must be configured * with registerParameter() before its parse() methods may be called. */ public JSAP() { init(); } /** * Creates a new JSAP configured as specified in the referenced xml. * @param jsapXML reference to xml representation of the JSAP configuration * @throws IOException if an I/O error occurs * @throws JSAPException if the configuration is not valid */ public JSAP(URL jsapXML) throws IOException, JSAPException { init(); JSAPConfig.configure(this, jsapXML); } /** * Creates a new JSAP configured as specified in the referenced xml. 
* @param resourceName name of the resource (accessible via this JSAP's classloader)
 * containing the xml representation of the JSAP configuration
 * @throws IOException if an I/O error occurs
 * @throws JSAPException if the configuration is not valid
 */
public JSAP(String resourceName) throws IOException, JSAPException {
    // Resolve the resource via this class's own classloader, then delegate
    // to the URL-based constructor.
    this(JSAP.class.getClassLoader().getResource(resourceName));
}

/**
 * Initializes the internal parameter registries.  Called by every
 * constructor before any configuration is applied.
 */
private void init() {
    paramsByID = new java.util.HashMap();
    paramsByShortFlag = new java.util.HashMap();
    paramsByLongFlag = new java.util.HashMap();
    unflaggedOptions = new java.util.ArrayList();
    paramsByDeclarationOrder = new java.util.ArrayList();
    defaultSources = new java.util.ArrayList();
}

/**
 * Sets the usage string manually, overriding the automatically-
 * generated String.  To remove the override, call setUsage(null).
 * @param usage the manually-set usage string.
 */
public void setUsage(String usage) {
    this.usage = usage;
}

/**
 * Sets the help string manually, overriding the automatically-
 * generated String.  To remove the override, call setHelp(null).
 * @param help the manually-set help string.
 */
public void setHelp(String help) {
    this.help = help;
}

/**
 * A shortcut method for calling getHelp(80, "\n").
 * @see #getHelp(int,String)
 * @return the same as getHelp(80, "\n")
 */
public String getHelp() {
    return (getHelp(DEFAULT_SCREENWIDTH, DEFAULT_PARAM_HELP_SEPARATOR));
}

/**
 * A shortcut method for calling getHelp(screenWidth, "\n").
 * @param screenWidth the screen width for which to format the help.
 * @see #getHelp(int,String)
 * @return the same as getHelp(screenWidth, "\n")
 */
public String getHelp(int screenWidth) {
    return (getHelp(screenWidth, DEFAULT_PARAM_HELP_SEPARATOR));
}

/**
 * If the help text has been manually set, this method simply
 * returns it, ignoring the screenWidth parameter.  Otherwise,
 * an automatically-formatted help message is returned, tailored
 * to the specified screen width.
 * @param screenWidth the screen width (in characters) for which
 * the help text will be formatted.  If zero, help will not be
 * line-wrapped.
 * @param paramSeparator the String inserted between the help entries
 * of consecutive parameters.
 * @return complete help text for this JSAP.
 */
public String getHelp(int screenWidth, String paramSeparator) {
    String result = help;
    if (result == null) {
        StringBuffer buf = new StringBuffer();

        // We'll wrap at screenWidth - 8
        int wrapWidth = screenWidth - 8;

        // now loop through all the params again and display their help info
        for (Iterator i = paramsByDeclarationOrder.iterator(); i.hasNext();) {
            Parameter param = (Parameter) i.next();
            StringBuffer defaultText = new StringBuffer();
            String[] defaultValue = param.getDefault();
            // Switches never display defaults; options list all of theirs.
            if ( !(param instanceof Switch) && defaultValue != null ) {
                defaultText.append(" (default: ");
                for(int j = 0; j < defaultValue.length; j++ ) {
                    if (j > 0) defaultText.append( ", " );
                    defaultText.append(defaultValue[ j ]);
                }
                defaultText.append(")");
            }
            Iterator helpInfo =
                StringUtils
                    .wrapToList(param.getHelp() + defaultText, wrapWidth)
                    .iterator();

            buf.append(" "); // the two leading spaces
            buf.append(param.getSyntax());
            buf.append("\n");

            while (helpInfo.hasNext()) {
                buf.append(" ");
                buf.append( helpInfo.next() );
                buf.append("\n");
            }
            if (i.hasNext()) {
                buf.append(paramSeparator);
            }
        }
        result = buf.toString();
    }
    return (result);
}

/**
 * Returns an automatically generated usage description based upon this
 * JSAP's current configuration.
 *
 * @return an automatically generated usage description based upon this
 * JSAP's current configuration.
 */
public String getUsage() {
    String result = usage;
    if (result == null) {
        // No manual override: concatenate every parameter's syntax,
        // separated by single spaces, in declaration order.
        StringBuffer buf = new StringBuffer();
        for (Iterator i = paramsByDeclarationOrder.iterator(); i.hasNext();) {
            Parameter param = (Parameter) i.next();
            if (buf.length() > 0) {
                buf.append(" ");
            }
            buf.append(param.getSyntax());
        }
        result = buf.toString();
    }
    return (result);
}

/**
 * Returns an automatically generated usage description based upon this
 * JSAP's current configuration.
This returns exactly the same result
 * as getUsage().
 *
 * @return an automatically generated usage description based upon this
 * JSAP's current configuration.
 */
public String toString() {
    return (getUsage());
}

/**
 * Returns an IDMap associating long and short flags with their associated
 * parameters' IDs, and allowing the listing of IDs.  This is probably only
 * useful for developers creating their own DefaultSource classes.
 * @return an IDMap based upon this JSAP's current configuration.
 */
public IDMap getIDMap() {
    // Snapshot the IDs in declaration order.
    List ids = new java.util.ArrayList(paramsByDeclarationOrder.size());
    for (Iterator i = paramsByDeclarationOrder.iterator(); i.hasNext();) {
        Parameter param = (Parameter) i.next();
        ids.add(param.getID());
    }

    // Map each short flag character to the owning parameter's ID.
    Map byShortFlag = new java.util.HashMap();
    for (Iterator i = paramsByShortFlag.keySet().iterator(); i.hasNext();) {
        Character c = (Character) i.next();
        byShortFlag.put(
            c,
            ((Parameter) paramsByShortFlag.get(c)).getID());
    }

    // Map each long flag string to the owning parameter's ID.
    Map byLongFlag = new java.util.HashMap();
    for (Iterator i = paramsByLongFlag.keySet().iterator(); i.hasNext();) {
        String s = (String) i.next();
        byLongFlag.put(
            s,
            ((Parameter) paramsByLongFlag.get(s)).getID());
    }

    return (new IDMap(ids, byShortFlag, byLongFlag));
}

/**
 * Returns the requested Switch, FlaggedOption, or UnflaggedOption with the
 * specified ID.  Depending upon what you intend to do with the result, it
 * may be necessary to re-cast the result as a Switch, FlaggedOption, or
 * UnflaggedOption as appropriate.
 *
 * @param id the ID of the requested Switch, FlaggedOption, or
 * UnflaggedOption.
 * @return the requested Switch, FlaggedOption, or UnflaggedOption, or null
 * if no Parameter with the specified ID is defined in this JSAP.
 */
public Parameter getByID(String id) {
    return ((Parameter) paramsByID.get(id));
}

/**
 * Returns the requested Switch or FlaggedOption with the specified long
 * flag.  Depending upon what you intend to do with the result, it may be
 * necessary to re-cast the result as a Switch or FlaggedOption as
 * appropriate.
 *
 * @param longFlag the long flag of the requested Switch or FlaggedOption.
 * @return the requested Switch or FlaggedOption, or null if no Flagged
 * object with the specified long flag is defined in this JSAP.
 */
public Flagged getByLongFlag(String longFlag) {
    return ((Flagged) paramsByLongFlag.get(longFlag));
}

/**
 * Returns the requested Switch or FlaggedOption with the specified short
 * flag.  Depending upon what you intend to do with the result, it may be
 * necessary to re-cast the result as a Switch or FlaggedOption as
 * appropriate.
 *
 * @param shortFlag the short flag of the requested Switch or FlaggedOption.
 * @return the requested Switch or FlaggedOption, or null if no Flagged
 * object with the specified short flag is defined in this JSAP.
 */
public Flagged getByShortFlag(Character shortFlag) {
    return ((Flagged) paramsByShortFlag.get(shortFlag));
}

/**
 * Returns the requested Switch or FlaggedOption with the specified short
 * flag.  Convenience overload that boxes the primitive char and delegates
 * to {@link #getByShortFlag(Character)}.
 *
 * @param shortFlag the short flag of the requested Switch or FlaggedOption.
 * @return the requested Switch or FlaggedOption, or null if no Flagged
 * object with the specified short flag is defined in this JSAP.
 */
public Flagged getByShortFlag(char shortFlag) {
    return (getByShortFlag(new Character(shortFlag)));
}

/**
 * Returns an Iterator over all UnflaggedOptions currently registered with
 * this JSAP.
 *
 * @return an Iterator over all UnflaggedOptions currently registered with
 * this JSAP.
* @see java.util.Iterator
 */
public Iterator getUnflaggedOptionsIterator() {
    return (unflaggedOptions.iterator());
}

/**
 * Registers a new DefaultSource with this JSAP, at the end of the current
 * DefaultSource chain, but before the defaults defined within the
 * AbstractParameters themselves.
 *
 * @param ds the DefaultSource to append to the DefaultSource chain.
 * @see DefaultSource
 */
public void registerDefaultSource(DefaultSource ds) {
    defaultSources.add(ds);
}

/**
 * Removes the specified DefaultSource from this JSAP's DefaultSource chain.
 * If this specified DefaultSource is not currently in this JSAP's
 * DefaultSource chain, this method does nothing.
 *
 * @param ds the DefaultSource to remove from the DefaultSource chain.
 */
public void unregisterDefaultSource(DefaultSource ds) {
    defaultSources.remove(ds);
}

/**
 * Returns a Defaults object representing the default values defined within
 * this JSAP's AbstractParameters themselves.
 *
 * @return a Defaults object representing the default values defined within
 * this JSAP's AbstractParameters themselves.
 */
private Defaults getSystemDefaults() {
    Defaults defaults = new Defaults();
    for (Iterator i = paramsByDeclarationOrder.iterator(); i.hasNext();) {
        Parameter param = (Parameter) i.next();
        defaults.setDefault(param.getID(), param.getDefault());
    }
    return (defaults);
}

/**
 * Merges the specified Defaults objects, only copying Default values from
 * the source to the destination if they are NOT currently defined in the
 * destination.  Earlier sources therefore take precedence over later ones.
 *
 * @param dest the destination Defaults object into which the source should
 * be merged.
 * @param src the source Defaults object; may be null, in which case this
 * method does nothing.
 */
private void combineDefaults(Defaults dest, Defaults src) {
    if (src != null) {
        for (Iterator i = src.idIterator();
            i.hasNext();) {
            String paramID = (String) i.next();
            dest.setDefaultIfNeeded(paramID, src.getDefault(paramID));
        }
    }
}

/**
 * Returns a Defaults object representing the merged Defaults of every
 * DefaultSource in the DefaultSource chain and the default values specified
 * in the AbstractParameters themselves.  Sources registered earlier in the
 * chain win; the parameters' own defaults are merged last.
 *
 * @param exceptionMap the ExceptionMap object within which any encountered
 * exceptions will be returned.
 * @return a Defaults object representing the Defaults of the entire JSAP.
 * @see DefaultSource#getDefaults(IDMap, ExceptionMap)
 */
protected Defaults getDefaults(ExceptionMap exceptionMap) {
    Defaults defaults = new Defaults();
    IDMap idMap = getIDMap();
    for (Iterator dsi = defaultSources.iterator(); dsi.hasNext();) {
        DefaultSource ds = (DefaultSource) dsi.next();
        combineDefaults(defaults, ds.getDefaults(idMap, exceptionMap));
    }
    // Parameter-level defaults come last so they never override a source.
    combineDefaults(defaults, getSystemDefaults());
    return (defaults);
}

/**
 * Registers the specified Parameter (i.e., Switch, FlaggedOption,
 * or UnflaggedOption) with this JSAP.
 *
 * <p>Registering a Parameter <b>locks</b> the parameter.
 * Attempting to change its properties (ID, flags, etc.) while it is locked
 * will result in a JSAPException.  To unlock a Parameter, it must
 * be unregistered from the JSAP.
 *
 * @param param the Parameter to register.
 * @throws JSAPException if this Parameter cannot be added.
Possible
 * reasons include:
 * <ul>
 * <li>Another Parameter with the same ID has already been
 * registered.</li>
 * <li>You are attempting to register a Switch or FlaggedOption with
 * neither a short nor long flag.</li>
 * <li>You are attempting to register a Switch or FlaggedOption with a long
 * or short flag that is already
 * defined in this JSAP.</li>
 * <li>You are attempting to register a second greedy UnflaggedOption</li>
 * </ul>
 */
public void registerParameter(Parameter param)
    throws JSAPException {
    String paramID = param.getID();

    // Reject duplicate IDs before touching any registry.
    if (paramsByID.containsKey(paramID)) {
        throw (
            new JSAPException(
                "A parameter with ID '"
                    + paramID
                    + "' has already been registered."));
    }

    if (param instanceof Flagged) {
        Flagged f = (Flagged) param;
        // A flagged parameter is unreachable without at least one flag.
        if ((f.getShortFlagCharacter() == null)
            && (f.getLongFlag() == null)) {
            throw (
                new JSAPException(
                    "FlaggedOption '" + paramID + "' has no flags defined."));
        }
        if (paramsByShortFlag.containsKey(f.getShortFlagCharacter())) {
            throw (
                new JSAPException(
                    "A parameter with short flag '"
                        + f.getShortFlag()
                        + "' has already been registered."));
        }
        if (paramsByLongFlag.containsKey(f.getLongFlag())) {
            throw (
                new JSAPException(
                    "A parameter with long flag '"
                        + f.getLongFlag()
                        + "' has already been registered."));
        }
    } else {
        // Unflagged: nothing may follow a greedy option, since the greedy
        // one consumes the entire remaining command line.
        if ((unflaggedOptions.size() > 0)
            && (((UnflaggedOption) unflaggedOptions
                .get(unflaggedOptions.size() - 1))
                .isGreedy())) {
            throw (
                new JSAPException(
                    "A greedy unflagged option has already been registered;"
                        + " option '"
                        + paramID
                        + "' will never be reached."));
        }
    }

    if (param instanceof Option) {
        ((Option) param).register();
    }

    // if we got this far, it's safe to insert it.
    param.setLocked(true);
    paramsByID.put(paramID, param);
    paramsByDeclarationOrder.add(param);
    if (param instanceof Flagged) {
        Flagged f = (Flagged) param;
        if (f.getShortFlagCharacter() != null) {
            paramsByShortFlag.put(f.getShortFlagCharacter(), param);
        }
        if (f.getLongFlag() != null) {
            paramsByLongFlag.put(f.getLongFlag(), param);
        }
    } else if (param instanceof Option) {
        unflaggedOptions.add(param);
    }
}

/**
 * Unregisters the specified Parameter (i.e., Switch, FlaggedOption,
 * or UnflaggedOption) from this JSAP.  Unregistering a Parameter
 * also unlocks it, allowing changes to its properties (ID, flags, etc.).
 * Does nothing if the Parameter is not currently registered.
 *
 * @param param the Parameter to unregister from this JSAP.
 */
public void unregisterParameter(Parameter param) {
    if (paramsByID.containsKey(param.getID())) {

        if (param instanceof Option) {
            ((Option) param).unregister();
        }

        paramsByID.remove(param.getID());
        paramsByDeclarationOrder.remove(param);
        if (param instanceof Flagged) {
            Flagged f = (Flagged) param;
            paramsByShortFlag.remove(f.getShortFlagCharacter());
            paramsByLongFlag.remove(f.getLongFlag());
        } else if (param instanceof UnflaggedOption) {
            unflaggedOptions.remove(param);
        }
        // Unlock last, once all registries have released the parameter.
        param.setLocked(false);
    }
}

/**
 * Parses the specified command line array.  If no Exception is thrown, the
 * entire command line has been parsed successfully, and its results have
 * been successfully instantiated.
 *
 * @param args An array of command line arguments to parse.  This array is
 * typically provided in the application's main class' main() method.
 * @return a JSAPResult containing the resulting Objects.
 */
public JSAPResult parse(String[] args) {
    Parser p = new Parser(this, args);
    return (p.parse());
}

/**
 * Parses the specified command line.  The specified command line is first
 * parsed into an array, much like the operating system does for the JVM
 * prior to calling your application's main class' main() method.  If no
 * Exception is thrown, the entire command line has been parsed
 * successfully, and its results have been successfully instantiated.
 *
 * @param cmdLine An array of command line arguments to parse.  This array
 * is typically provided in the application's main class' main() method.
 * @return a JSAPResult containing the resulting Objects.
 */
public JSAPResult parse(String cmdLine) {
    String[] args = CommandLineTokenizer.tokenize(cmdLine);
    return (parse(args));
}

/**
 * Unregisters all registered AbstractParameters, allowing them to perform
 * their cleanup.
 * NOTE(review): relying on finalize() for cleanup is fragile (its
 * invocation is not guaranteed); callers should unregister explicitly.
 */
public void finalize() {
    // Copy to an array first: unregisterParameter mutates
    // paramsByDeclarationOrder, so we must not iterate it directly.
    Parameter[] params =
        (Parameter[]) paramsByDeclarationOrder.toArray(
            new Parameter[0]);
    int paramCount = params.length;
    for (int i = 0; i < paramCount; ++i) {
        unregisterParameter(params[i]);
    }
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.datatorrent.bufferserver.internal;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datatorrent.bufferserver.packet.MessageType;
import com.datatorrent.bufferserver.packet.Tuple;
import com.datatorrent.bufferserver.util.SerializedData;

/**
 * <p>FastDataList class.</p>
 *
 * A {@link DataList} variant whose buffer encodes each tuple as a 2-byte
 * length prefix (low byte first) followed by the payload.  Inherited fields
 * such as {@code size}, {@code processingOffset}, {@code baseSeconds} and
 * {@code last} are declared in DataList and are not visible here — see the
 * superclass for their exact semantics.
 *
 * @since 0.3.2
 */
public class FastDataList extends DataList
{
  public FastDataList(String identifier)
  {
    super(identifier);
  }

  public FastDataList(String identifier, int blocksize, int numberOfCacheBlocks, boolean backPressureEnabled)
  {
    super(identifier, blocksize, numberOfCacheBlocks, backPressureEnabled);
  }

  // Running tuple counter; only referenced from the commented-out debug log
  // in flush() below.
  long item;

  /**
   * Scans the last block's buffer up to writeOffset, consuming complete
   * length-prefixed tuples and updating window bookkeeping for
   * BEGIN_WINDOW / RESET_WINDOW control tuples, then publishes writeOffset
   * and notifies listeners.
   *
   * @param writeOffset offset up to which data has been written into
   *        {@code last.data}
   */
  @Override
  public void flush(final int writeOffset)
  {
    flush:
    do {
      // Read the next 2-byte little-endian length prefix, if available.
      while (size == 0) {
        if (writeOffset - processingOffset >= 2) {
          size = last.data[processingOffset];
          size |= (last.data[processingOffset + 1] << 8);
          // logger.debug("read item = {} of size = {} at offset = {}", item++, size, processingOffset);
        } else {
          if (writeOffset == last.data.length) {
            // Buffer exhausted exactly at its end: wrap bookkeeping.
            processingOffset = 0;
            size = 0;
          }
          break flush;
        }
      }

      processingOffset += 2;
      if (processingOffset + size <= writeOffset) {
        // Full tuple available; first payload byte is the message type.
        switch (last.data[processingOffset]) {
          case MessageType.BEGIN_WINDOW_VALUE:
            Tuple btw = Tuple.getTuple(last.data, processingOffset, size);
            if (last.starting_window == -1) {
              last.starting_window = baseSeconds | btw.getWindowId();
              last.ending_window = last.starting_window;
            } else {
              last.ending_window = baseSeconds | btw.getWindowId();
            }
            break;

          case MessageType.RESET_WINDOW_VALUE:
            Tuple rwt = Tuple.getTuple(last.data, processingOffset, size);
            // High 32 bits of every window id come from the reset tuple.
            baseSeconds = (long)rwt.getBaseSeconds() << 32;
            break;

          default:
            break;
        }
        processingOffset += size;
        size = 0;
      } else {
        // Partial tuple: leave it for the next flush.
        if (writeOffset == last.data.length) {
          processingOffset = 0;
          size = 0;
        }
        break;
      }
    } while (true);

    last.writingOffset = writeOffset;

    notifyListeners();
  }

  @Override
  protected FastDataListIterator getIterator(Block block)
  {
    return new FastDataListIterator(block);
  }

  /*
  TODO: Are these functions required?

  @Override
  public void purge(int baseSeconds, int windowId)
  {
    long longWindowId = (long)baseSeconds << 32 | windowId;
    Block prev = null;
    for (Block temp = first; temp != null && temp.starting_window <= longWindowId; temp = temp.next) {
      if (temp.ending_window > longWindowId || temp == last) {
        if (prev != null) {
          first = temp;
        }

        first.purge(longWindowId, true);
        break;
      }

      if (storage != null && temp.uniqueIdentifier > 0) {
        // logger.debug("discarding {} {} in purge", identifier, temp.uniqueIdentifier);
        storage.discard(identifier, temp.uniqueIdentifier);
      }

      prev = temp;
    }
  }

  @Override
  public void rewind(int baseSeconds, int windowId) throws IOException
  {
    long longWindowId = (long)baseSeconds << 32 | windowId;
    for (Block temp = first; temp != null; temp = temp.next) {
      if (temp.starting_window >= longWindowId || temp.ending_window > longWindowId) {
        if (temp != last) {
          temp.next = null;
          last = temp;
        }

        if(temp.data == null){
          temp.acquire(storage, false);
        }
        this.baseSeconds = temp.rewind(longWindowId, true);
        processingOffset = temp.writingOffset;
        size = 0;
      }
    }

    for (DataListIterator dli: iterators.values()) {
      dli.rewind(processingOffset);
    }
  }
  */

  /**
   * <p>FastDataListIterator class.</p>
   *
   * Iterator over the same 2-byte length-prefixed tuple encoding used by
   * {@link FastDataList#flush(int)}.
   *
   * @since 0.3.2
   */
  protected class FastDataListIterator extends DataListIterator
  {
    FastDataListIterator(Block da)
    {
      super(da);
    }

    /**
     * Returns true when a complete tuple is available at readOffset,
     * positioning {@code current} on it (including its 2-byte prefix).
     * Follows the block chain via switchToNextBlock() when the current
     * block has been fully written and consumed.
     */
    @Override
    public boolean hasNext()
    {
      // Read the next length prefix, crossing block boundaries as needed.
      while (size == 0) {
        if (da.writingOffset - readOffset >= 2) {
          size = buffer[readOffset];
          size |= (buffer[readOffset + 1] << 8);
        } else {
          if (da.writingOffset == buffer.length && switchToNextBlock()) {
            continue;
          } else {
            return false;
          }
        }
      }

      if (readOffset + size + 2 <= da.writingOffset) {
        // current spans prefix + payload; dataOffset skips the prefix.
        current = new SerializedData(buffer, readOffset, size + 2);
        current.dataOffset = readOffset + 2;
        return true;
      } else {
        if (da.writingOffset == buffer.length) {
          if (!switchToNextBlock()) {
            return false;
          }
          nextOffset.integer = da.readingOffset;
          return hasNext();
        } else {
          return false;
        }
      }
    }
  }

  private static final Logger logger = LoggerFactory.getLogger(FastDataList.class);
}
package com.vapourdrive.magtools.items.tools; import java.util.List; import net.minecraft.block.Block; import net.minecraft.block.material.Material; import net.minecraft.client.Minecraft; import net.minecraft.client.renderer.texture.IIconRegister; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.item.ItemPickaxe; import net.minecraft.item.ItemStack; import net.minecraft.network.play.client.C07PacketPlayerDigging; import net.minecraft.network.play.server.S23PacketBlockChange; import net.minecraft.util.EnumChatFormatting; import net.minecraft.util.MovingObjectPosition; import net.minecraft.util.StatCollector; import net.minecraft.world.World; import net.minecraftforge.common.ForgeHooks; import net.minecraftforge.event.world.BlockEvent.BreakEvent; import com.vapourdrive.magtools.MagTools; import com.vapourdrive.magtools.Reference; import com.vapourdrive.magtools.items.MagItemRef; import com.vapourdrive.magtools.utils.RandomUtils; import cpw.mods.fml.relauncher.Side; import cpw.mods.fml.relauncher.SideOnly; public class MagHammer extends ItemPickaxe { public MagHammer(ToolMaterial material) { super(material); this.setUnlocalizedName(MagItemRef.MagHammerName); this.setCreativeTab(MagTools.MagCreativeTab); } @Override @SideOnly(Side.CLIENT) public void registerIcons(IIconRegister register) { itemIcon = register.registerIcon(Reference.ResourcePath + MagItemRef.MagHammerName); } @Override @SideOnly(Side.CLIENT) public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean useExtraInformation) { list.add(EnumChatFormatting.GREEN + StatCollector.translateToLocal("phrase.magtools.hammerinfo")); } @Override public boolean onBlockStartBreak(ItemStack stack, int x, int y, int z, EntityPlayer player) { World world = player.worldObj; Block block = world.getBlock(x, y, z); MovingObjectPosition object = RandomUtils.raytraceFromEntity(world, player, false, 4.5D); if (object == null) { return 
super.onBlockDestroyed(stack, world, block, x, y, z, player); } int side = object.sideHit; int xmove = 0; int ymove = 0; int zmove = 0; if (side == 0 || side == 1) { xmove = 1; zmove = 1; } else { ymove = 1; if (side == 4 || side == 5) { zmove = 1; } else { xmove = 1; } } float strength = ForgeHooks.blockStrength(block, player, world, x, y, z); if(player.isSneaking() && (player.experienceLevel >= 20 || player.capabilities.isCreativeMode)) { checkBlockBreak(world, player, x, y, z, stack, strength, block, side); } else { for (int i = -xmove; i <= xmove; i++) { for (int j = -ymove; j <= ymove; j++) { for (int k = -zmove; k <= zmove; k++) { if (i != x && j != y && k != z) { checkBlockBreak(world, player, x + i, y + j, z + k, stack, strength, block, side); } } } } } return false; } public void checkBlockBreak(World world, EntityPlayer player, int x, int y, int z, ItemStack stack, float strength, Block originalBlock, int side) { Block breakBlock = world.getBlock(x, y, z); if (this.canHarvestBlock(breakBlock, stack)) { float newStrength = ForgeHooks.blockStrength(breakBlock, player, world, x, y, z); Material material = originalBlock.getMaterial(); if (newStrength > 0f && strength / newStrength <= 10f && breakBlock.getMaterial() == material) { breakBlock(world, breakBlock, x, y, z, side, player); if ((double) breakBlock.getBlockHardness(world, x, y, z) != 0.0D) { stack.damageItem(1, player); } } } } public boolean breakBlock(World world, Block block, int x, int y, int z, int side, EntityPlayer player) { if (world.isAirBlock(x, y, z)) { return false; } EntityPlayerMP playerMP = null; if (player instanceof EntityPlayerMP) { playerMP = (EntityPlayerMP) player; } int meta = world.getBlockMetadata(x, y, z); if (block.getHarvestTool(meta) != "pickaxe" || !canHarvestBlock(block, player.getCurrentEquippedItem()) || !ForgeHooks.canHarvestBlock(block, player, meta)) { return false; } if (playerMP != null) { BreakEvent event = ForgeHooks.onBlockBreakEvent(world, 
playerMP.theItemInWorldManager.getGameType(), playerMP, x, y, z); int drop = event.getExpToDrop(); block.dropXpOnBlockBreak(world, x, y, z, drop); world.playAuxSFX(2001, x, y, z, Block.getIdFromBlock(block) | (world.getBlockMetadata(x, y, z) << 12)); if (event.isCanceled()) { return false; } } if (player.capabilities.isCreativeMode) { if (!world.isRemote) { block.onBlockHarvested(world, x, y, z, meta, player); } if (block.removedByPlayer(world, player, x, y, z, false)) { block.onBlockDestroyedByPlayer(world, x, y, z, meta); } if (!world.isRemote) { playerMP.playerNetServerHandler.sendPacket(new S23PacketBlockChange(x, y, z, world)); } else { Minecraft.getMinecraft().getNetHandler().addToSendQueue(new C07PacketPlayerDigging(2, x, y, z, side)); } return true; } if (!world.isRemote) { block.onBlockHarvested(world, x, y, z, meta, player); if (block.removedByPlayer(world, player, x, y, z, true)) { block.onBlockDestroyedByPlayer(world, x, y, z, meta); if (!player.capabilities.isCreativeMode) { block.harvestBlock(world, player, x, y, z, meta); } } playerMP.playerNetServerHandler.sendPacket(new S23PacketBlockChange(x, y, z, world)); } else { if (block.removedByPlayer(world, player, x, y, z, true)) { block.onBlockDestroyedByPlayer(world, x, y, z, meta); } Minecraft.getMinecraft().getNetHandler().addToSendQueue(new C07PacketPlayerDigging(2, x, y, z, side)); } return true; } }
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.pkgcache; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Verify; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.packages.BuildFileName; import com.google.devtools.build.lib.packages.BuildFileNotFoundException; import com.google.devtools.build.lib.packages.NoSuchPackageException; import com.google.devtools.build.lib.vfs.FileStatus; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.Root; import com.google.devtools.build.lib.vfs.Symlinks; import com.google.devtools.build.lib.vfs.UnixGlob; import java.io.IOException; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; /** * A mapping from the name of a package to the location of its BUILD file. * The implementation composes an ordered sequence of directories according to * the package-path rules. 
* * <p>All methods are thread-safe, and (assuming no change to the underlying * filesystem) idempotent. */ public class PathPackageLocator implements Serializable { private static final String WORKSPACE_WILDCARD = "%workspace%"; private final ImmutableList<Root> pathEntries; // Transient because this is an injected value in Skyframe, and as such, its serialized // representation is used as a key. We want a change to output base not to invalidate things. private final transient Path outputBase; private final ImmutableList<BuildFileName> buildFilesByPriority; @VisibleForTesting public PathPackageLocator( Path outputBase, List<Root> pathEntries, List<BuildFileName> buildFilesByPriority) { this.outputBase = outputBase; this.pathEntries = ImmutableList.copyOf(pathEntries); this.buildFilesByPriority = ImmutableList.copyOf(buildFilesByPriority); } /** * Returns the path to the build file for this package. * * <p>The package's root directory may be computed by calling getParentFile() * on the result of this function. * * <p>Instances of this interface do not attempt to do any caching, nor * implement checks for package-boundary crossing logic; the PackageCache * does that. * * <p>If the same package exists beneath multiple package path entries, the * first path that matches always wins. */ public Path getPackageBuildFile(PackageIdentifier packageName) throws NoSuchPackageException { Path buildFile = getPackageBuildFileNullable(packageName, UnixGlob.DEFAULT_SYSCALLS_REF); if (buildFile == null) { throw new BuildFileNotFoundException(packageName, "BUILD file not found on package path"); } return buildFile; } /** * Like #getPackageBuildFile(), but returns null instead of throwing. * * @param packageIdentifier the name of the package. * @param cache a filesystem-level cache of stat() calls. * @return the {@link Path} to the correct build file, or {@code null} if none was found */ public Path getPackageBuildFileNullable( PackageIdentifier packageIdentifier, AtomicReference<? 
extends UnixGlob.FilesystemCalls> cache) { Preconditions.checkArgument(!packageIdentifier.getRepository().isDefault()); if (packageIdentifier.getRepository().isMain()) { for (BuildFileName buildFileName : buildFilesByPriority) { Path buildFilePath = getFilePath( packageIdentifier .getPackageFragment() .getRelative(buildFileName.getFilenameFragment()), cache); if (buildFilePath != null) { return buildFilePath; } } } else { Verify.verify(outputBase != null, String.format( "External package '%s' needs to be loaded but this PathPackageLocator instance does not " + "support external packages", packageIdentifier)); // This works only to some degree, because it relies on the presence of the repository under // $OUTPUT_BASE/external, which is created by the appropriate RepositoryDirectoryValue. This // is true for the invocation in GlobCache, but not for the locator.getBuildFileForPackage() // invocation in Parser#include(). for (BuildFileName buildFileName : buildFilesByPriority) { Path buildFile = outputBase .getRelative(packageIdentifier.getSourceRoot()) .getRelative(buildFileName.getFilenameFragment()); try { FileStatus stat = cache.get().statIfFound(buildFile, Symlinks.FOLLOW); if (stat != null && stat.isFile()) { return buildFile; } } catch (IOException e) { return null; } } } return null; } /** Returns an immutable ordered list of the directories on the package path. */ public ImmutableList<Root> getPathEntries() { return pathEntries; } @Override public String toString() { return "PathPackageLocator" + pathEntries; } public static String maybeReplaceWorkspaceInString(String pathElement, Path workspace) { return pathElement.replace(WORKSPACE_WILDCARD, workspace.getPathString()); } /** * A factory of PathPackageLocators from a list of path elements. Elements may contain * "%workspace%", indicating the workspace. * * <p>If any of the paths given do not exist, an exception will be thrown. * * @param outputBase the output base. 
Can be null if remote repositories are not in use. * @param pathElements Each element must be an absolute path, relative path, or some string * "%workspace%" + relative, where relative is itself a relative path. The special symbol * "%workspace%" means to interpret the path relative to the nearest enclosing workspace. * Relative paths are interpreted relative to the client's working directory, which may be * below the workspace. * @param eventHandler The eventHandler. * @param workspace The nearest enclosing package root directory. * @param clientWorkingDirectory The client's working directory. * @param buildFilesByPriority The ordered collection of {@link BuildFileName}s to check in each * potential package directory. * @return a {@link PathPackageLocator} that uses the {@code outputBase} and {@code pathElements} * provided. */ public static PathPackageLocator create( Path outputBase, List<String> pathElements, EventHandler eventHandler, Path workspace, Path clientWorkingDirectory, List<BuildFileName> buildFilesByPriority) { return createInternal( outputBase, pathElements, eventHandler, workspace, clientWorkingDirectory, buildFilesByPriority, true); } /** * A factory of PathPackageLocators from a list of path elements. * * @param outputBase the output base. Can be null if remote repositories are not in use. * @param pathElements Each element must be a {@link Root} object. * @param buildFilesByPriority The ordered collection of {@link BuildFileName}s to check in each * potential package directory. * @return a {@link PathPackageLocator} that uses the {@code outputBase} and {@code pathElements} * provided. 
*/ public static PathPackageLocator createWithoutExistenceCheck( Path outputBase, List<Root> pathElements, List<BuildFileName> buildFilesByPriority) { return new PathPackageLocator(outputBase, pathElements, buildFilesByPriority); } private static PathPackageLocator createInternal( Path outputBase, List<String> pathElements, EventHandler eventHandler, Path workspace, Path clientWorkingDirectory, List<BuildFileName> buildFilesByPriority, boolean checkExistence) { List<Root> resolvedPaths = new ArrayList<>(); for (String pathElement : pathElements) { // Replace "%workspace%" with the path of the enclosing workspace directory. pathElement = maybeReplaceWorkspaceInString(pathElement, workspace); PathFragment pathElementFragment = PathFragment.create(pathElement); // If the path string started with "%workspace%" or "/", it is already absolute, // so the following line is a no-op. Path rootPath = clientWorkingDirectory.getRelative(pathElementFragment); if (!pathElementFragment.isAbsolute() && !clientWorkingDirectory.equals(workspace)) { eventHandler.handle( Event.warn( "The package path element '" + pathElementFragment + "' will be taken relative to your working directory. You may have intended to" + " have the path taken relative to your workspace directory. If so, please use" + "the '" + WORKSPACE_WILDCARD + "' wildcard.")); } if (!checkExistence || rootPath.exists()) { resolvedPaths.add(Root.fromPath(rootPath)); } } return new PathPackageLocator(outputBase, resolvedPaths, buildFilesByPriority); } /** * Returns the path to the WORKSPACE file for this build. * * <p>If there are WORKSPACE files beneath multiple package path entries, the first one always * wins. */ public Path getWorkspaceFile() { AtomicReference<? extends UnixGlob.FilesystemCalls> cache = UnixGlob.DEFAULT_SYSCALLS_REF; // TODO(bazel-team): correctness in the presence of changes to the location of the WORKSPACE // file. 
return getFilePath(Label.WORKSPACE_FILE_NAME, cache); } private Path getFilePath(PathFragment suffix, AtomicReference<? extends UnixGlob.FilesystemCalls> cache) { for (Root pathEntry : pathEntries) { Path buildFile = pathEntry.getRelative(suffix); try { FileStatus stat = cache.get().statIfFound(buildFile, Symlinks.FOLLOW); if (stat != null && stat.isFile()) { return buildFile; } } catch (IOException ignored) { // Treat IOException as a missing file. } } return null; } @Override public int hashCode() { return Objects.hash(pathEntries, outputBase); } @Override public boolean equals(Object other) { if (this == other) { return true; } if (!(other instanceof PathPackageLocator)) { return false; } PathPackageLocator pathPackageLocator = (PathPackageLocator) other; return Objects.equals(getPathEntries(), pathPackageLocator.getPathEntries()) && Objects.equals(outputBase, pathPackageLocator.outputBase); } public Path getOutputBase() { return outputBase; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.scxml2.io;

import java.io.StringReader;

import org.apache.commons.scxml2.SCXMLExecutor;
import org.apache.commons.scxml2.SCXMLTestHelper;
import org.apache.commons.scxml2.model.ModelException;
import org.apache.commons.scxml2.model.SCXML;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Test enforcement of required SCXML element attributes, spec http://www.w3.org/TR/2013/WD-scxml-20130801
 * <p>
 * TODO required attributes for elements:
 * <ul>
 * <li>&lt;raise&gt; required attribute: 'id'</li>
 * </ul>
 * </p>
 */
public class SCXMLRequiredAttributesTest {

    private static final String VALID_SCXML =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" datamodel=\"jexl\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <transition target=\"fine\">\n" +
            "      <if cond=\"true\"><log expr=\"'hello'\"/></if>\n" +
            "    </transition>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_MISSING_VERSION =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\">\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_INVALID_VERSION =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"2.0\">\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_MISSING_IF_COND =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <transition target=\"fine\">\n" +
            "      <if><log expr=\"'hello'\"/></if>\n" +
            "    </transition>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_MISSING_ELSEIF_COND =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <transition target=\"fine\">\n" +
            "      <if cond=\"false\"><elseif/><log expr=\"'hello'\"/></if>\n" +
            "    </transition>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_MISSING_DATA_ID =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"1.0\">\n" +
            "  <datamodel><data></data></datamodel>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_MISSING_ASSIGN_LOCATION =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <transition target=\"fine\">\n" +
            "      <assign expr=\"1\"/>\n" +
            "    </transition>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_MISSING_PARAM_NAME =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <invoke type=\"scxml\" src=\"foo\">\n" + // Note: invalid src, but not executed during test
            "      <param expr=\"1\"/>\n" +
            "    </invoke>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_PARAM_AND_NAME =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <invoke type=\"scxml\" src=\"foo\">\n" + // Note: invalid src, but not executed during test
            "      <param name=\"bar\" expr=\"1\"/>\n" +
            "    </invoke>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_MISSING_FOREACH_ARRAY =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <transition target=\"fine\">\n" +
            "      <foreach item=\"y\"></foreach>\n" +
            "    </transition>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_MISSING_FOREACH_ITEM =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <transition target=\"fine\">\n" +
            "      <foreach array=\"[1,2]\"></foreach>\n" +
            "    </transition>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    private static final String SCXML_WITH_FOREACH =
            "<scxml xmlns=\"http://www.w3.org/2005/07/scxml\" datamodel=\"jexl\" version=\"1.0\">\n" +
            "  <state id=\"s1\">\n" +
            "    <transition target=\"fine\">\n" +
            "      <foreach array=\"[1,2]\" item=\"x\"></foreach>\n" +
            "    </transition>\n" +
            "  </state>\n" +
            "  <final id=\"fine\"/>\n" +
            "</scxml>";

    /**
     * Parses the given SCXML document and asserts that parsing fails with a {@link ModelException}
     * whose message starts with {@code expectedMessagePrefix}; fails the test with
     * {@code failMessage} if parsing unexpectedly succeeds.
     */
    private static void assertParseFailure(final String scxml, final String failMessage,
                                           final String expectedMessagePrefix) throws Exception {
        try {
            SCXMLTestHelper.parse(new StringReader(scxml), null);
            fail(failMessage);
        } catch (ModelException e) {
            assertTrue(e.getMessage().startsWith(expectedMessagePrefix));
        }
    }

    @Test
    public void testValidSCXML() throws Exception {
        SCXML scxml = SCXMLTestHelper.parse(new StringReader(VALID_SCXML), null);
        SCXMLExecutor exec = SCXMLTestHelper.getExecutor(scxml);
        exec.go();
        assertTrue(exec.getStatus().isFinal());
    }

    @Test
    public void testSCXMLMissingVersion() throws Exception {
        assertParseFailure(SCXML_WITH_MISSING_VERSION,
                "SCXML reading should have failed due to missing version in SCXML",
                "<scxml> is missing required attribute \"version\" value");
    }

    @Test
    public void testSCXMLInvalidVersion() throws Exception {
        try {
            SCXMLTestHelper.parse(new StringReader(SCXML_WITH_INVALID_VERSION), null);
            // FIX: the failure message used to say "missing version" (copy-paste from the
            // previous test); this test exercises an *invalid* version.
            fail("SCXML reading should have failed due to invalid version in SCXML");
        } catch (ModelException e) {
            assertEquals("The <scxml> element defines an unsupported version \"2.0\", only version \"1.0\" is supported.", e.getMessage());
        }
    }

    @Test
    public void testSCXMLMissingIfCond() throws Exception {
        assertParseFailure(SCXML_WITH_MISSING_IF_COND,
                "SCXML reading should have failed due to missing if condition in SCXML",
                "<if> is missing required attribute \"cond\" value");
    }

    @Test
    public void testSCXMLMissingElseIfCond() throws Exception {
        assertParseFailure(SCXML_WITH_MISSING_ELSEIF_COND,
                "SCXML reading should have failed due to missing elseif condition in SCXML",
                "<elseif> is missing required attribute \"cond\" value");
    }

    @Test
    public void testSCXMLMissingDataId() throws Exception {
        assertParseFailure(SCXML_WITH_MISSING_DATA_ID,
                "SCXML reading should have failed due to missing data id in SCXML",
                "<data> is missing required attribute \"id\" value");
    }

    @Test
    public void testSCXMLMissingAssignLocation() throws Exception {
        assertParseFailure(SCXML_WITH_MISSING_ASSIGN_LOCATION,
                "SCXML reading should have failed due to missing assign location in SCXML",
                "<assign> is missing required attribute \"location\" value");
    }

    @Test
    public void testSCXMLMissingParamName() throws Exception {
        assertParseFailure(SCXML_WITH_MISSING_PARAM_NAME,
                "SCXML reading should have failed due to missing param name in SCXML",
                "<param> is missing required attribute \"name\" value");
    }

    @Test
    public void testSCXMLParamWithName() throws Exception {
        SCXMLTestHelper.parse(new StringReader(SCXML_WITH_PARAM_AND_NAME), null);
        // Note: cannot execute this instance without providing proper <invoke> src attribute
    }

    @Test
    public void testSCXMLMissingForeachArray() throws Exception {
        assertParseFailure(SCXML_WITH_MISSING_FOREACH_ARRAY,
                "SCXML reading should have failed due to missing foreach array in SCXML",
                "<foreach> is missing required attribute \"array\" value");
    }

    @Test
    public void testSCXMLMissingForeachItem() throws Exception {
        assertParseFailure(SCXML_WITH_MISSING_FOREACH_ITEM,
                "SCXML reading should have failed due to missing foreach item in SCXML",
                "<foreach> is missing required attribute \"item\" value");
    }

    @Test
    public void testSCXMLWithForEach() throws Exception {
        SCXML scxml = SCXMLTestHelper.parse(new StringReader(SCXML_WITH_FOREACH), null);
        SCXMLExecutor exec = SCXMLTestHelper.getExecutor(scxml);
        exec.go();
        assertTrue(exec.getStatus().isFinal());
    }
}
/*
 * Copyright 2008 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import com.google.javascript.jscomp.ControlFlowGraph.Branch;
import com.google.javascript.jscomp.Scope.Var;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphEdge;
import com.google.javascript.jscomp.graph.LatticeElement;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;

import java.util.BitSet;
import java.util.List;
import java.util.Set;

/**
 * Compute the "liveness" of all local variables. A variable is "live" at a
 * point of a program if the value it is currently holding might be read later.
 * Otherwise, the variable is considered "dead" if we know for sure that it will
 * no longer be read. Dead variables are candidates for dead assignment
 * elimination and variable name sharing. The worst case safe assumption is to
 * assume that all variables are live. In that case, we will have no opportunity
 * for optimizations. This is especially the case within a TRY block when an
 * assignment is not guaranteed to take place. We bail out by assuming that
 * all variables are live.
 * <p>
 * Due to the possibility of inner functions and closures, certain "local"
 * variables can escape the function. These variables will be considered as
 * global and they can be retrieved with {@link #getEscapedLocals()}.
 *
 */
class LiveVariablesAnalysis extends
    DataFlowAnalysis<Node, LiveVariablesAnalysis.LiveVariableLattice> {

  // 100 = ((# of original Power Rangers) ^
  //        (# years of Warren Harding in office)) *
  //       (# of Ninja Turtles)
  static final int MAX_VARIABLES_TO_ANALYZE = 100;

  public static final String ARGUMENT_ARRAY_ALIAS = "arguments";

  /** Join operation: a variable is live-in if it is live on any successor path. */
  private static class LiveVariableJoinOp
      implements JoinOp<LiveVariableLattice> {
    @Override
    public LiveVariableLattice apply(List<LiveVariableLattice> in) {
      LiveVariableLattice result = new LiveVariableLattice(in.get(0));
      for (int i = 1; i < in.size(); i++) {
        result.liveSet.or(in.get(i).liveSet);
      }
      return result;
    }
  }

  /**
   * The lattice that stores the liveness of all local variables at a given
   * point in the program. The whole lattice is the power set of all local
   * variables and a variable is live if it is in the set.
   */
  static class LiveVariableLattice implements LatticeElement {
    private final BitSet liveSet;

    /**
     * @param numVars Number of all local variables.
     */
    private LiveVariableLattice(int numVars) {
      this.liveSet = new BitSet(numVars);
    }

    private LiveVariableLattice(LiveVariableLattice other) {
      Preconditions.checkNotNull(other);
      this.liveSet = (BitSet) other.liveSet.clone();
    }

    @Override
    public boolean equals(Object other) {
      // FIX: previously this called Preconditions.checkNotNull(other), throwing NPE for a
      // null argument. The Object.equals contract requires equals(null) to return false,
      // which the instanceof check below already provides.
      return (other instanceof LiveVariableLattice) &&
          this.liveSet.equals(((LiveVariableLattice) other).liveSet);
    }

    public boolean isLive(Var v) {
      Preconditions.checkNotNull(v);
      return liveSet.get(v.index);
    }

    public boolean isLive(int index) {
      return liveSet.get(index);
    }

    @Override
    public String toString() {
      return liveSet.toString();
    }

    @Override
    public int hashCode() {
      return liveSet.hashCode();
    }
  }

  // The scope of the function that we are analyzing.
  private final Scope jsScope;
  private final Set<Var> escaped;

  LiveVariablesAnalysis(ControlFlowGraph<Node> cfg, Scope jsScope,
      AbstractCompiler compiler) {
    super(cfg, new LiveVariableJoinOp());
    this.jsScope = jsScope;
    this.escaped = Sets.newHashSet();
    computeEscaped(jsScope, escaped, compiler);
  }

  /** Returns the set of local variables that escape into closures or inner functions. */
  public Set<Var> getEscapedLocals() {
    return escaped;
  }

  /** Returns the bit index assigned to the named variable in this scope. */
  public int getVarIndex(String var) {
    return jsScope.getVar(var).index;
  }

  @Override
  boolean isForward() {
    // Liveness is a backward dataflow problem: information flows from uses back to defs.
    return false;
  }

  @Override
  LiveVariableLattice createEntryLattice() {
    return new LiveVariableLattice(jsScope.getVarCount());
  }

  @Override
  LiveVariableLattice createInitialEstimateLattice() {
    return new LiveVariableLattice(jsScope.getVarCount());
  }

  @Override
  LiveVariableLattice flowThrough(Node node, LiveVariableLattice input) {
    final BitSet gen = new BitSet(input.liveSet.size());
    final BitSet kill = new BitSet(input.liveSet.size());

    // Make kills conditional if the node can end abruptly by an exception.
    boolean conditional = false;
    List<DiGraphEdge<Node, Branch>> edgeList = getCfg().getOutEdges(node);
    for (DiGraphEdge<Node, Branch> edge : edgeList) {
      if (Branch.ON_EX.equals(edge.getValue())) {
        conditional = true;
      }
    }
    computeGenKill(node, gen, kill, conditional);
    LiveVariableLattice result = new LiveVariableLattice(input);
    // L_in = L_out - Kill + Gen
    result.liveSet.andNot(kill);
    result.liveSet.or(gen);
    return result;
  }

  /**
   * Computes the GEN and KILL set.
   *
   * @param n Root node.
   * @param gen Local variables that are live because of the instruction at
   *        {@code n} will be added to this set.
   * @param kill Local variables that are killed because of the instruction at
   *        {@code n} will be added to this set.
   * @param conditional {@code true} if any assignments encountered are
   *        conditionally executed. These assignments might not kill a variable.
   */
  private void computeGenKill(Node n, BitSet gen, BitSet kill,
      boolean conditional) {

    switch (n.getType()) {
      case Token.SCRIPT:
      case Token.BLOCK:
      case Token.FUNCTION:
        return;

      case Token.WHILE:
      case Token.DO:
      case Token.IF:
        computeGenKill(NodeUtil.getConditionExpression(n), gen, kill,
            conditional);
        return;

      case Token.FOR:
        if (!NodeUtil.isForIn(n)) {
          computeGenKill(NodeUtil.getConditionExpression(n), gen, kill,
              conditional);
        } else {
          // for(x in y) {...}
          Node lhs = n.getFirstChild();
          if (lhs.isVar()) {
            // for(var x in y) {...}
            lhs = lhs.getLastChild();
          }
          if (lhs.isName()) {
            addToSetIfLocal(lhs, kill);
            addToSetIfLocal(lhs, gen);
          } else {
            computeGenKill(lhs, gen, kill, conditional);
          }

          // rhs is executed only once so we don't go into it every loop.
        }
        return;

      case Token.VAR:
        for (Node c = n.getFirstChild(); c != null; c = c.getNext()) {
          if (c.hasChildren()) {
            computeGenKill(c.getFirstChild(), gen, kill, conditional);
            if (!conditional) {
              addToSetIfLocal(c, kill);
            }
          }
        }
        return;

      case Token.AND:
      case Token.OR:
        computeGenKill(n.getFirstChild(), gen, kill, conditional);
        // May short circuit.
        computeGenKill(n.getLastChild(), gen, kill, true);
        return;

      case Token.HOOK:
        computeGenKill(n.getFirstChild(), gen, kill, conditional);
        // Assume both sides are conditional.
        computeGenKill(n.getFirstChild().getNext(), gen, kill, true);
        computeGenKill(n.getLastChild(), gen, kill, true);
        return;

      case Token.NAME:
        if (isArgumentsName(n)) {
          markAllParametersEscaped();
        } else {
          addToSetIfLocal(n, gen);
        }
        return;

      default:
        if (NodeUtil.isAssignmentOp(n) && n.getFirstChild().isName()) {
          Node lhs = n.getFirstChild();
          if (!conditional) {
            addToSetIfLocal(lhs, kill);
          }
          if (!n.isAssign()) {
            // assignments such as a += 1 reads a.
            addToSetIfLocal(lhs, gen);
          }
          computeGenKill(lhs.getNext(), gen, kill, conditional);
        } else {
          for (Node c = n.getFirstChild(); c != null; c = c.getNext()) {
            computeGenKill(c, gen, kill, conditional);
          }
        }
        return;
    }
  }

  /** Sets the bit for {@code node}'s variable if it is a non-escaped local of this scope. */
  private void addToSetIfLocal(Node node, BitSet set) {
    Preconditions.checkState(node.isName());
    String name = node.getString();
    if (!jsScope.isDeclared(name, false)) {
      return;
    }
    Var var = jsScope.getVar(name);
    if (!escaped.contains(var)) {
      set.set(var.index);
    }
  }

  /**
   * Give up computing liveness of formal parameter by putting all the parameter
   * names in the escaped set.
   */
  // FIX: resolved an unresolved Git merge conflict here (<<<<<<< HEAD ... >>>>>>> 5c522db6);
  // both sides differed only in whitespace, so the formatted side was kept.
  void markAllParametersEscaped() {
    Node lp = jsScope.getRootNode().getFirstChild().getNext();
    for (Node arg = lp.getFirstChild(); arg != null; arg = arg.getNext()) {
      escaped.add(jsScope.getVar(arg.getString()));
    }
  }

  // FIX: resolved a second unresolved merge conflict (whitespace-only difference) in the
  // condition below.
  private boolean isArgumentsName(Node n) {
    if (!n.isName() ||
        !n.getString().equals(ARGUMENT_ARRAY_ALIAS) ||
        jsScope.isDeclared(ARGUMENT_ARRAY_ALIAS, false)) {
      return false;
    } else {
      return true;
    }
  }
}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.compute.v1; import com.google.api.core.ApiFuture; import com.google.api.core.ApiFutures; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.paging.AbstractFixedSizeCollection; import com.google.api.gax.paging.AbstractPage; import com.google.api.gax.paging.AbstractPagedListResponse; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.compute.v1.stub.RegionsStub; import com.google.cloud.compute.v1.stub.RegionsStubSettings; import com.google.common.util.concurrent.MoreExecutors; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: The Regions API. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * try (RegionsClient regionsClient = RegionsClient.create()) { * String project = "project-309310695"; * String region = "region-934795532"; * Region response = regionsClient.get(project, region); * } * }</pre> * * <p>Note: close() needs to be called on the RegionsClient object to clean up resources such as * threads. In the example above, try-with-resources is used, which automatically calls close(). 
* * <p>The surface of this class includes several types of Java methods for each of the API's * methods: * * <ol> * <li>A "flattened" method. With this type of method, the fields of the request type have been * converted into function parameters. It may be the case that not all fields are available as * parameters, and not every API method will have a flattened method entry point. * <li>A "request object" method. This type of method only takes one parameter, a request object, * which must be constructed before the call. Not every API method will have a request object * method. * <li>A "callable" method. This type of method takes no parameters and returns an immutable API * callable object, which can be used to initiate calls to the service. * </ol> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of RegionsSettings to create(). * For example: * * <p>To customize credentials: * * <pre>{@code * RegionsSettings regionsSettings = * RegionsSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * RegionsClient regionsClient = RegionsClient.create(regionsSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * RegionsSettings regionsSettings = RegionsSettings.newBuilder().setEndpoint(myEndpoint).build(); * RegionsClient regionsClient = RegionsClient.create(regionsSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. 
*/
@Generated("by gapic-generator-java")
public class RegionsClient implements BackgroundResource {
  /** Client-level configuration; {@code null} when the client was built directly from a stub. */
  private final RegionsSettings settings;

  /** Transport stub that performs the actual RPC calls. */
  private final RegionsStub stub;

  /** Constructs an instance of RegionsClient with default settings. */
  public static final RegionsClient create() throws IOException {
    return create(RegionsSettings.newBuilder().build());
  }

  /**
   * Constructs an instance of RegionsClient, using the given settings. The channels are created
   * based on the settings passed in, or defaults for any settings that are not set.
   */
  public static final RegionsClient create(RegionsSettings settings) throws IOException {
    return new RegionsClient(settings);
  }

  /**
   * Constructs an instance of RegionsClient, using the given stub for making calls. This is for
   * advanced usage - prefer using create(RegionsSettings).
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public static final RegionsClient create(RegionsStub stub) {
    return new RegionsClient(stub);
  }

  /**
   * Constructs an instance of RegionsClient, using the given settings. This is protected so that it
   * is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   */
  protected RegionsClient(RegionsSettings settings) throws IOException {
    this.settings = settings;
    this.stub = ((RegionsStubSettings) settings.getStubSettings()).createStub();
  }

  /** Stub-based constructor for advanced usage; note that {@link #getSettings()} returns null. */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  protected RegionsClient(RegionsStub stub) {
    this.settings = null;
    this.stub = stub;
  }

  public final RegionsSettings getSettings() {
    return settings;
  }

  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public RegionsStub getStub() {
    return stub;
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the specified Region resource. Gets a list of available regions by making a list()
   * request. To decrease latency for this method, you can optionally omit any unneeded information
   * from the response by using a field mask. This practice is especially recommended for unused
   * quota information (the `quotas` field). To exclude one or more fields, set your request's
   * `fields` query parameter to only include the fields you need. For example, to only include the
   * `id` and `selfLink` fields, add the query parameter `?fields=id,selfLink` to your request.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (RegionsClient regionsClient = RegionsClient.create()) {
   *   String project = "project-309310695";
   *   String region = "region-934795532";
   *   Region response = regionsClient.get(project, region);
   * }
   * }</pre>
   *
   * @param project Project ID for this request.
   * @param region Name of the region resource to return.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Region get(String project, String region) {
    GetRegionRequest request =
        GetRegionRequest.newBuilder().setProject(project).setRegion(region).build();
    return get(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the specified Region resource. Gets a list of available regions by making a list()
   * request. To decrease latency for this method, you can optionally omit any unneeded information
   * from the response by using a field mask. This practice is especially recommended for unused
   * quota information (the `quotas` field). To exclude one or more fields, set your request's
   * `fields` query parameter to only include the fields you need. For example, to only include the
   * `id` and `selfLink` fields, add the query parameter `?fields=id,selfLink` to your request.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (RegionsClient regionsClient = RegionsClient.create()) {
   *   GetRegionRequest request =
   *       GetRegionRequest.newBuilder()
   *           .setProject("project-309310695")
   *           .setRegion("region-934795532")
   *           .build();
   *   Region response = regionsClient.get(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final Region get(GetRegionRequest request) {
    return getCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Returns the specified Region resource. Gets a list of available regions by making a list()
   * request. To decrease latency for this method, you can optionally omit any unneeded information
   * from the response by using a field mask. This practice is especially recommended for unused
   * quota information (the `quotas` field). To exclude one or more fields, set your request's
   * `fields` query parameter to only include the fields you need. For example, to only include the
   * `id` and `selfLink` fields, add the query parameter `?fields=id,selfLink` to your request.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (RegionsClient regionsClient = RegionsClient.create()) {
   *   GetRegionRequest request =
   *       GetRegionRequest.newBuilder()
   *           .setProject("project-309310695")
   *           .setRegion("region-934795532")
   *           .build();
   *   ApiFuture<Region> future = regionsClient.getCallable().futureCall(request);
   *   // Do something.
   *   Region response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<GetRegionRequest, Region> getCallable() {
    return stub.getCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the list of region resources available to the specified project. To decrease latency
   * for this method, you can optionally omit any unneeded information from the response by using a
   * field mask. This practice is especially recommended for unused quota information (the
   * `items.quotas` field). To exclude one or more fields, set your request's `fields` query
   * parameter to only include the fields you need. For example, to only include the `id` and
   * `selfLink` fields, add the query parameter `?fields=id,selfLink` to your request.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (RegionsClient regionsClient = RegionsClient.create()) {
   *   String project = "project-309310695";
   *   for (Region element : regionsClient.list(project).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param project Project ID for this request.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListPagedResponse list(String project) {
    ListRegionsRequest request = ListRegionsRequest.newBuilder().setProject(project).build();
    return list(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the list of region resources available to the specified project. To decrease latency
   * for this method, you can optionally omit any unneeded information from the response by using a
   * field mask. This practice is especially recommended for unused quota information (the
   * `items.quotas` field). To exclude one or more fields, set your request's `fields` query
   * parameter to only include the fields you need. For example, to only include the `id` and
   * `selfLink` fields, add the query parameter `?fields=id,selfLink` to your request.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (RegionsClient regionsClient = RegionsClient.create()) {
   *   ListRegionsRequest request =
   *       ListRegionsRequest.newBuilder()
   *           .setFilter("filter-1274492040")
   *           .setMaxResults(1128457243)
   *           .setOrderBy("orderBy-1207110587")
   *           .setPageToken("pageToken873572522")
   *           .setProject("project-309310695")
   *           .setReturnPartialSuccess(true)
   *           .build();
   *   for (Region element : regionsClient.list(request).iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListPagedResponse list(ListRegionsRequest request) {
    return listPagedCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the list of region resources available to the specified project. To decrease latency
   * for this method, you can optionally omit any unneeded information from the response by using a
   * field mask. This practice is especially recommended for unused quota information (the
   * `items.quotas` field). To exclude one or more fields, set your request's `fields` query
   * parameter to only include the fields you need. For example, to only include the `id` and
   * `selfLink` fields, add the query parameter `?fields=id,selfLink` to your request.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (RegionsClient regionsClient = RegionsClient.create()) {
   *   ListRegionsRequest request =
   *       ListRegionsRequest.newBuilder()
   *           .setFilter("filter-1274492040")
   *           .setMaxResults(1128457243)
   *           .setOrderBy("orderBy-1207110587")
   *           .setPageToken("pageToken873572522")
   *           .setProject("project-309310695")
   *           .setReturnPartialSuccess(true)
   *           .build();
   *   ApiFuture<Region> future = regionsClient.listPagedCallable().futureCall(request);
   *   // Do something.
   *   for (Region element : future.get().iterateAll()) {
   *     // doThingsWith(element);
   *   }
   * }
   * }</pre>
   */
  public final UnaryCallable<ListRegionsRequest, ListPagedResponse> listPagedCallable() {
    return stub.listPagedCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Retrieves the list of region resources available to the specified project. To decrease latency
   * for this method, you can optionally omit any unneeded information from the response by using a
   * field mask. This practice is especially recommended for unused quota information (the
   * `items.quotas` field). To exclude one or more fields, set your request's `fields` query
   * parameter to only include the fields you need. For example, to only include the `id` and
   * `selfLink` fields, add the query parameter `?fields=id,selfLink` to your request.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * try (RegionsClient regionsClient = RegionsClient.create()) {
   *   ListRegionsRequest request =
   *       ListRegionsRequest.newBuilder()
   *           .setFilter("filter-1274492040")
   *           .setMaxResults(1128457243)
   *           .setOrderBy("orderBy-1207110587")
   *           .setPageToken("pageToken873572522")
   *           .setProject("project-309310695")
   *           .setReturnPartialSuccess(true)
   *           .build();
   *   while (true) {
   *     RegionList response = regionsClient.listCallable().call(request);
   *     for (Region element : response.getResponsesList()) {
   *       // doThingsWith(element);
   *     }
   *     String nextPageToken = response.getNextPageToken();
   *     if (!Strings.isNullOrEmpty(nextPageToken)) {
   *       request = request.toBuilder().setPageToken(nextPageToken).build();
   *     } else {
   *       break;
   *     }
   *   }
   * }
   * }</pre>
   */
  public final UnaryCallable<ListRegionsRequest, RegionList> listCallable() {
    return stub.listCallable();
  }

  // All lifecycle methods below simply delegate to the underlying stub.

  @Override
  public final void close() {
    stub.close();
  }

  @Override
  public void shutdown() {
    stub.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }

  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }

  /** Paged response wrapper returned by {@link #list}; supports lazy iteration over all pages. */
  public static class ListPagedResponse
      extends AbstractPagedListResponse<
          ListRegionsRequest, RegionList, Region, ListPage, ListFixedSizeCollection> {

    public static ApiFuture<ListPagedResponse> createAsync(
        PageContext<ListRegionsRequest, RegionList, Region> context,
        ApiFuture<RegionList> futureResponse) {
      ApiFuture<ListPage> futurePage =
          ListPage.createEmptyPage().createPageAsync(context, futureResponse);
      return ApiFutures.transform(
          futurePage, input -> new ListPagedResponse(input), MoreExecutors.directExecutor());
    }

    private ListPagedResponse(ListPage page) {
      super(page, ListFixedSizeCollection.createEmptyCollection());
    }
  }

  /** A single page of {@link #list} results. */
  public static class ListPage
      extends AbstractPage<ListRegionsRequest, RegionList, Region, ListPage> {

    private ListPage(
        PageContext<ListRegionsRequest, RegionList, Region> context, RegionList response) {
      super(context, response);
    }

    private static ListPage createEmptyPage() {
      return new ListPage(null, null);
    }

    @Override
    protected ListPage createPage(
        PageContext<ListRegionsRequest, RegionList, Region> context, RegionList response) {
      return new ListPage(context, response);
    }

    @Override
    public ApiFuture<ListPage> createPageAsync(
        PageContext<ListRegionsRequest, RegionList, Region> context,
        ApiFuture<RegionList> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }

  /** Fixed-size collection view over {@link #list} pages. */
  public static class ListFixedSizeCollection
      extends AbstractFixedSizeCollection<
          ListRegionsRequest, RegionList, Region, ListPage, ListFixedSizeCollection> {

    private ListFixedSizeCollection(List<ListPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }

    private static ListFixedSizeCollection createEmptyCollection() {
      return new ListFixedSizeCollection(null, 0);
    }

    @Override
    protected ListFixedSizeCollection createCollection(List<ListPage> pages, int collectionSize) {
      return new ListFixedSizeCollection(pages, collectionSize);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.spi.security.authentication.external;

import java.security.Principal;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import javax.jcr.SimpleCredentials;
import javax.jcr.ValueFactory;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.jackrabbit.api.security.user.Group;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.oak.api.ContentSession;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.spi.security.authentication.external.basic.DefaultSyncConfig;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.DefaultSyncConfigImpl;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.DefaultSyncHandler;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.ExternalLoginModule;
import org.apache.jackrabbit.oak.spi.security.authentication.external.impl.SyncHandlerMapping;
import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalImpl;
import org.apache.jackrabbit.oak.spi.whiteboard.Registration;
import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils;
import org.apache.sling.testing.mock.osgi.junit.OsgiContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

/**
 * Tests the interaction of the auto-membership configuration with dynamic
 * membership for multiple IDP/sync-handler setups: login must expose the
 * configured (and existing) auto-membership groups as principals without
 * ever persisting the membership (unless dynamic membership is disabled,
 * see setup3).
 */
public class ExternalLoginAutoMembershipTest extends ExternalLoginTestBase {

    private static final String NON_EXISTING_NAME = "nonExisting";

    private Root r;
    private UserManager userManager;
    private ValueFactory valueFactory;

    private ExternalSetup setup1;
    private ExternalSetup setup2;
    private ExternalSetup setup3;
    private ExternalSetup setup4;
    private ExternalSetup setup5;

    @Override
    public void before() throws Exception {
        super.before();

        r = getSystemRoot();
        userManager = getUserManager(r);
        valueFactory = getValueFactory(r);

        // first configuration based on test base-setup with
        // - dynamic membership = true
        // - auto-membership = 'gr_default' and 'nonExisting'
        syncConfig.user().setDynamicMembership(true);
        setup1 = new ExternalSetup(idp, syncConfig, WhiteboardUtils.getService(whiteboard, SyncHandler.class), "gr" + UUID.randomUUID());

        // second configuration with different IDP ('idp2') and
        // - dynamic membership = true
        // - auto-membership = 'gr_name2' and 'nonExisting'
        DefaultSyncConfig sc2 = new DefaultSyncConfig();
        sc2.setName("name2").user().setDynamicMembership(true);
        setup2 = new ExternalSetup(new TestIdentityProvider("idp2"), sc2);

        // third configuration with different IDP ('idp3') and
        // - dynamic membership = false
        // - auto-membership = 'gr_name3' and 'nonExisting'
        DefaultSyncConfig sc3 = new DefaultSyncConfig();
        sc3.setName("name3");
        setup3 = new ExternalSetup(new TestIdentityProvider("idp3"), sc3);

        // forth configuration based on different IDP ('idp4') but re-using
        // sync-handler configuration (sc2)
        setup4 = new ExternalSetup(new TestIdentityProvider("idp4"), sc2);

        // fifth configuration with different IDP ('idp5') and
        // - dynamic membership = true
        // - auto-membership => nothing configured
        DefaultSyncConfig sc5 = new DefaultSyncConfig();
        sc5.setName("name5").user().setDynamicMembership(true);
        setup5 = new ExternalSetup(new TestIdentityProvider("idp5"), sc5, new DefaultSyncHandler(sc5), null);
    }

    @Override
    public void after() throws Exception {
        options.clear();
        try {
            // reset the shared base-setup config modified by setup1
            syncConfig.user().setAutoMembership().setExpirationTime(0);
            setup1.close();
            setup2.close();
            setup3.close();
            setup4.close();
            // fix: setup5 was previously never closed, leaking its whiteboard
            // registrations into subsequent tests
            setup5.close();
        } finally {
            super.after();
        }
    }

    @Override
    protected Configuration getConfiguration() {
        return new Configuration() {
            @Override
            public AppConfigurationEntry[] getAppConfigurationEntry(String s) {
                // one SUFFICIENT login-module entry per setup
                AppConfigurationEntry[] entries = new AppConfigurationEntry[5];
                int i = 0;
                for (ExternalSetup setup : new ExternalSetup[] {setup1, setup2, setup3, setup4, setup5}) {
                    entries[i++] = setup.asConfigurationEntry();
                }
                return entries;
            }
        };
    }

    /**
     * Registers the given setup's sync handler plus the IDP-to-handler mapping
     * with the OSGi test context.
     */
    private static void registerSyncHandlerMapping(@NotNull OsgiContext ctx, @NotNull ExternalSetup setup) {
        String syncHandlerName = setup.sc.getName();
        Map<String, Object> props = ImmutableMap.of(
                DefaultSyncConfigImpl.PARAM_NAME, syncHandlerName,
                DefaultSyncConfigImpl.PARAM_USER_DYNAMIC_MEMBERSHIP, setup.sc.user().getDynamicMembership(),
                DefaultSyncConfigImpl.PARAM_USER_AUTO_MEMBERSHIP, setup.sc.user().getAutoMembership());
        ctx.registerService(SyncHandler.class, setup.sh, props);

        Map<String, String> mappingProps = ImmutableMap.of(
                SyncHandlerMapping.PARAM_IDP_NAME, setup.idp.getName(),
                SyncHandlerMapping.PARAM_SYNC_HANDLER_NAME, syncHandlerName);
        ctx.registerService(SyncHandlerMapping.class, new SyncHandlerMapping() {}, mappingProps);
    }

    @Test
    public void testLoginSyncAutoMembershipSetup1() throws Exception {
        try (ContentSession cs = login(new SimpleCredentials(USER_ID, new char[0]))){
            // the login must set the existing auto-membership principals to the subject
            Set<Principal> principals = cs.getAuthInfo().getPrincipals();
            assertTrue(principals.contains(setup1.gr.getPrincipal()));
            assertFalse(principals.contains(new PrincipalImpl(NON_EXISTING_NAME)));
            assertFalse(principals.contains(setup2.gr.getPrincipal()));
            assertFalse(principals.contains(setup3.gr.getPrincipal()));

            // however, the existing auto-membership group must _not_ have changed
            // and the test user must not be a stored member of this group.
            root.refresh();
            UserManager uMgr = getUserManager(root);

            User user = uMgr.getAuthorizable(USER_ID, User.class);
            Group gr = uMgr.getAuthorizable(setup1.gr.getID(), Group.class);

            assertFalse(gr.isDeclaredMember(user));
            assertFalse(gr.isMember(user));
        }
    }

    @Test
    public void testLoginAfterSyncSetup1() throws Exception {
        setup1.sync(USER_ID, false);

        try (ContentSession cs = login(new SimpleCredentials(USER_ID, new char[0]))) {
            // the login must set the configured + existing auto-membership principals
            // to the subject; non-existing auto-membership entries must be ignored.
            Set<Principal> principals = cs.getAuthInfo().getPrincipals();
            assertTrue(principals.contains(setup1.gr.getPrincipal()));
            assertFalse(principals.contains(new PrincipalImpl(NON_EXISTING_NAME)));
            assertFalse(principals.contains(setup2.gr.getPrincipal()));
            assertFalse(principals.contains(setup3.gr.getPrincipal()));

            // however, the existing auto-membership group must _not_ have changed
            // and the test user must not be a stored member of this group.
            root.refresh();
            UserManager uMgr = getUserManager(root);

            User user = uMgr.getAuthorizable(USER_ID, User.class);
            Group gr = uMgr.getAuthorizable(setup1.gr.getID(), Group.class);

            assertFalse(gr.isDeclaredMember(user));
            assertFalse(gr.isMember(user));
        }
    }

    @Test
    public void testLoginAfterSyncSetup2() throws Exception {
        setup2.sync(USER_ID, false);

        try (ContentSession cs = login(new SimpleCredentials(USER_ID, new char[0]))) {
            // the login must set the existing auto-membership principals to the subject
            Set<Principal> principals = cs.getAuthInfo().getPrincipals();
            assertTrue(principals.contains(setup2.gr.getPrincipal()));
            assertFalse(principals.contains(new PrincipalImpl(NON_EXISTING_NAME)));
            assertFalse(principals.contains(setup1.gr.getPrincipal()));
            assertFalse(principals.contains(setup3.gr.getPrincipal()));

            // however, the existing auto-membership group must _not_ have changed
            // and the test user must not be a stored member of this group.
            root.refresh();
            UserManager uMgr = getUserManager(root);

            User user = uMgr.getAuthorizable(USER_ID, User.class);
            Group gr = uMgr.getAuthorizable(setup2.gr.getID(), Group.class);

            assertFalse(gr.isDeclaredMember(user));
            assertFalse(gr.isMember(user));
        }
    }

    @Test
    public void testLoginAfterSyncSetup3() throws Exception {
        setup3.sync(USER_ID, false);

        try (ContentSession cs = login(new SimpleCredentials(USER_ID, new char[0]))) {
            // the login must set the existing auto-membership principals to the subject
            Set<Principal> principals = cs.getAuthInfo().getPrincipals();
            assertTrue(principals.contains(setup3.gr.getPrincipal()));
            assertFalse(principals.contains(new PrincipalImpl(NON_EXISTING_NAME)));
            assertFalse(principals.contains(setup1.gr.getPrincipal()));
            assertFalse(principals.contains(setup2.gr.getPrincipal()));

            // setup3 has dynamic membership disabled: here the auto-membership
            // IS persisted, so the user must be a stored member of the group.
            root.refresh();
            UserManager uMgr = getUserManager(root);

            User user = uMgr.getAuthorizable(USER_ID, User.class);
            Group gr = uMgr.getAuthorizable(setup3.gr.getID(), Group.class);

            assertTrue(gr.isDeclaredMember(user));
            assertTrue(gr.isMember(user));
        }
    }

    @Test
    public void testLoginAfterSyncSetup4() throws Exception {
        setup4.sync(USER_ID, false);

        try (ContentSession cs = login(new SimpleCredentials(USER_ID, new char[0]))) {
            // the login must set the existing auto-membership principals to the subject
            Set<Principal> principals = cs.getAuthInfo().getPrincipals();
            assertTrue(principals.contains(setup4.gr.getPrincipal()));
            // setup4 shares sc2 with setup2, so setup2's group is expected as well
            assertTrue(principals.contains(setup2.gr.getPrincipal()));
            assertFalse(principals.contains(new PrincipalImpl(NON_EXISTING_NAME)));
            assertFalse(principals.contains(setup1.gr.getPrincipal()));
            assertFalse(principals.contains(setup3.gr.getPrincipal()));

            // however, the existing auto-membership group must _not_ have changed
            // and the test user must not be a stored member of this group.
            root.refresh();
            UserManager uMgr = getUserManager(root);

            User user = uMgr.getAuthorizable(USER_ID, User.class);
            Group gr = uMgr.getAuthorizable(setup4.gr.getID(), Group.class);

            assertFalse(gr.isDeclaredMember(user));
            assertFalse(gr.isMember(user));
        }
    }

    @Test
    public void testLoginAfterSyncSetup5() throws Exception {
        setup5.sync(USER_ID, false);

        try (ContentSession cs = login(new SimpleCredentials(USER_ID, new char[0]))) {
            // the login must not set any auto-membership principals to the subject
            // as auto-membership is not configured on this setup.
            Set<Principal> principals = cs.getAuthInfo().getPrincipals();
            Set<Principal> expected = ImmutableSet.of(EveryonePrincipal.getInstance(), userManager.getAuthorizable(USER_ID).getPrincipal());
            assertEquals(expected, principals);
            assertFalse(principals.contains(new PrincipalImpl(NON_EXISTING_NAME)));
            assertFalse(principals.contains(setup1.gr.getPrincipal()));
            assertFalse(principals.contains(setup2.gr.getPrincipal()));
            assertFalse(principals.contains(setup3.gr.getPrincipal()));
            assertFalse(principals.contains(setup4.gr.getPrincipal()));
        }
    }

    /**
     * Bundles one IDP + sync-handler combination: registers both with the
     * whiteboard and (optionally) creates an auto-membership test group.
     */
    private final class ExternalSetup {

        private final ExternalIdentityProvider idp;
        private final Registration idpRegistration;

        private final DefaultSyncConfig sc;
        private final SyncHandler sh;
        private final Registration shRegistration;

        private final Group gr;

        private SyncContext ctx;

        private ExternalSetup(@NotNull ExternalIdentityProvider idp, @NotNull DefaultSyncConfig sc) throws Exception {
            this(idp, sc, new DefaultSyncHandler(sc), "gr_" + sc.getName());
        }

        private ExternalSetup(@NotNull ExternalIdentityProvider idp, @NotNull DefaultSyncConfig sc, @NotNull SyncHandler sh, @Nullable String groupId) throws Exception {
            this.idp = idp;
            this.sc = sc;
            this.sh = sh;

            if (groupId != null) {
                Group g = userManager.getAuthorizable(groupId, Group.class);
                if (g != null) {
                    gr = g;
                } else {
                    gr = userManager.createGroup(groupId);
                }
                r.commit();
                // configure an existing + a non-existing auto-membership entry
                sc.user().setAutoMembership(gr.getID(), NON_EXISTING_NAME).setExpirationTime(Long.MAX_VALUE);
            } else {
                gr = null;
            }

            idpRegistration = whiteboard.register(ExternalIdentityProvider.class, idp, Collections.<String, Object>emptyMap());
            // NOTE(review): the user auto-membership value is registered under
            // PARAM_GROUP_AUTO_MEMBERSHIP here, while registerSyncHandlerMapping
            // uses PARAM_USER_AUTO_MEMBERSHIP -- confirm this asymmetry is intended
            shRegistration = whiteboard.register(SyncHandler.class, sh, ImmutableMap.of(
                    DefaultSyncConfigImpl.PARAM_NAME, sh.getName(),
                    DefaultSyncConfigImpl.PARAM_USER_DYNAMIC_MEMBERSHIP, sc.user().getDynamicMembership(),
                    DefaultSyncConfigImpl.PARAM_GROUP_AUTO_MEMBERSHIP, sc.user().getAutoMembership()));
            registerSyncHandlerMapping(context, this);
        }

        private void sync(@NotNull String id, boolean isGroup) throws Exception {
            ctx = sh.createContext(idp, userManager, valueFactory);
            ExternalIdentity exIdentity = (isGroup) ? idp.getGroup(id) : idp.getUser(id);
            assertNotNull(exIdentity);

            SyncResult res = ctx.sync(exIdentity);
            assertEquals(idp.getName(), res.getIdentity().getExternalIdRef().getProviderName());
            assertSame(SyncResult.Status.ADD, res.getStatus());
            r.commit();
        }

        private void close() {
            if (ctx != null) {
                ctx.close();
            }
            if (idpRegistration != null) {
                idpRegistration.unregister();
            }
            if (shRegistration != null) {
                shRegistration.unregister();
            }
        }

        private AppConfigurationEntry asConfigurationEntry() {
            return new AppConfigurationEntry(
                    ExternalLoginModule.class.getName(),
                    AppConfigurationEntry.LoginModuleControlFlag.SUFFICIENT,
                    ImmutableMap.<String, String>of(
                            SyncHandlerMapping.PARAM_SYNC_HANDLER_NAME, sh.getName(),
                            SyncHandlerMapping.PARAM_IDP_NAME, idp.getName()
                    ));
        }
    }
}
package org.greenlaw110.atmsim;

import org.greenlaw110.atmsim.dispense.BalancedNoteCount;
import org.osgl._;
import org.osgl.exception.NotAppliedException;
import org.osgl.util.C;
import org.osgl.util.E;

import java.util.Comparator;
import java.util.EnumMap;
import java.util.List;

/**
 * Simulate the ATM note dispense logic
 *
 * @see org.greenlaw110.atmsim.NoteType
 * @see org.greenlaw110.atmsim.Bucket
 * @see org.greenlaw110.atmsim.DispenseStrategy
 */
public class ATM {

    /**
     * Map the {@link Bucket buckets} in this ATM to the {@link org.greenlaw110.atmsim.NoteType types}
     * <p/>
     * <p>That says we have one bucket per note type in this ATM</p>
     */
    private EnumMap<NoteType, Bucket> buckets = new EnumMap<NoteType, Bucket>(NoteType.class);

    /**
     * Organize all bucket instances in a list so that we can easily iterate through them
     */
    private C.List<Bucket> bucketList;

    /**
     * A readonly view of {@link #bucketList}
     */
    private C.List<BucketView> bucketListView;

    /**
     * The dispense strategy. Default value is
     * {@link org.greenlaw110.atmsim.dispense.BalancedNoteCount}
     */
    private DispenseStrategy strategy;

    /**
     * The format that help to print out the state
     * of this ATM. Default value is
     * {@link org.greenlaw110.atmsim.NoteDeckFormat#INSTANCE}
     */
    private NoteDeckFormat fmt = NoteDeckFormat.INSTANCE;

    /**
     * Keep track of the sum of all buckets values
     */
    private int value;

    /**
     * Construct an empty ATM without any notes
     */
    public ATM() {
        this(new BalancedNoteCount());
    }

    /**
     * Construct an empty ATM with {@link DispenseStrategy} specified
     */
    public ATM(DispenseStrategy strategy) {
        this(C.emptyListOf(Bucket.class), strategy);
    }

    /**
     * Construct an ATM with a list of buckets in which the notes will
     * be transferred to this ATM
     *
     * @param buckets a list of buckets contains notes
     */
    public ATM(Iterable<? extends Bucket> buckets) {
        this(buckets, new BalancedNoteCount());
    }

    /**
     * Construct an ATM with a list of buckets in which the notes will
     * be transferred to this ATM and with dispense strategy specified
     *
     * @param buckets a list of buckets contains notes
     * @param strategy the note dispense strategy
     */
    public ATM(Iterable<? extends Bucket> buckets, DispenseStrategy strategy) {
        init(strategy, buckets);
    }

    /**
     * Initialize the bucket instances in this ATM: creates one (initially
     * empty) bucket per note type, then transfers the supplied notes in.
     *
     * @param algorithm the dispense strategy
     * @param buckets a list of buckets contains notes to be filled
     *                into buckets of this ATM
     */
    private void init(DispenseStrategy algorithm, Iterable<? extends Bucket> buckets) {
        setStrategy(algorithm);
        for (NoteType type : NoteType.values()) {
            this.buckets.put(type, Bucket.of(type));
        }
        // this will be an readonly immutable list
        bucketList = C.list(this.buckets.values());
        // use lazy map view so that we always get backed by
        // bucketList
        bucketListView = bucketList.lazy().map(BucketView.F.CONSTRUCTOR);
        transferFrom(buckets);
    }

    // Move all notes from the given buckets into this ATM's buckets,
    // keeping the cached total value in sync.
    private void transferFrom(Iterable<? extends Bucket> buckets) {
        for (Bucket bucket : buckets) {
            value += bucket.value();
            this.buckets.get(bucket.type()).transferFrom(bucket);
        }
    }

    /**
     * Set the {@link org.greenlaw110.atmsim.NoteDeckFormat format}
     *
     * @param format the format used by {@link #toString()}; must not be null
     * @return this ATM instance
     */
    public ATM setFormat(NoteDeckFormat format) {
        E.NPE(format);
        fmt = format;
        return this;
    }

    /**
     * Set notes dispense strategy to this ATM
     *
     * @param strategy the dispense strategy; must not be null
     * @return the ATM instance
     */
    public ATM setStrategy(DispenseStrategy strategy) {
        E.NPE(strategy);
        this.strategy = strategy;
        return this;
    }

    /**
     * Get the dispense strategy associated with the ATM
     *
     * @return the strategy
     */
    public DispenseStrategy getStrategy() {
        return strategy;
    }

    /**
     * Returns a read only view to all buckets of this ATM
     *
     * @return a list of {@link org.greenlaw110.atmsim.BucketView} of all
     *         buckets in this ATM
     */
    public List<BucketView> buckets() {
        return bucketListView;
    }

    // revert the dispense operation from
    // a collection of buckets (transfers the notes back into the ATM)
    private void revert(Iterable<? extends Bucket> buckets) {
        transferFrom(buckets);
    }

    /**
     * Calculate remainder that should be deduct from the value specified, so that
     * the rest value is divisible by the note type value, and the quotient shall
     * not exceed the {@code maxNotes} specified.
     * <p/>
     * <p>The {@code others} is note type of other bucket in the
     * ATM with notes support the dispense of the value. This
     * parameter is used to adjust the remainder calculation</p>
     * <p/>
     * <p>It is possible to identify the value cannot be dispensed
     * during the calculation. Then it should return an negative
     * number indicate service fail</p>
     *
     * @param value the value to be dispensed from the ATM
     * @param noteValue the value of the note type
     * @param maxNotes the maximum number of notes in the bucket of the
     *                 type specified
     * @param otherTypes contains note value of other available buckets
     * @return the remainder, or a negative number when the value cannot
     *         be dispensed with this note type
     */
    private int findRemainder(int value, int noteValue, int maxNotes, List<Integer> otherTypes) {
        int quotient = value / noteValue;
        int remainder = value % noteValue;
        boolean needsFurtherCheck = remainder != 0;
        if (quotient > maxNotes) {
            // not enough notes of this type: push the excess into the remainder
            remainder = remainder + noteValue * (quotient - maxNotes);
            // NOTE(review): this inner condition is always true here
            // (quotient > maxNotes implies quotient >= maxNotes)
            if (quotient >= maxNotes) needsFurtherCheck = true;
        }
        if (needsFurtherCheck) {
            // we need to make sure remainder be a multiplication of
            // any one of other note types.
            // otherwise we will fail to dispense some simple
            // value like 80
            boolean ok = false;
            for (Integer v : otherTypes) {
                if (remainder % v == 0) {
                    ok = true;
                    break;
                }
            }
            if (!ok) {
                // we need to increase the remainder by N times noteValue
                // so that it can be divided by any one in the other types
                for (remainder = remainder + noteValue; remainder <= value; remainder += noteValue) {
                    for (Integer v : otherTypes) {
                        if (remainder % v == 0) {
                            quotient = (value - remainder) / noteValue;
                            if (quotient > maxNotes) {
                                // NOTE(review): this 'continue' only skips to the next
                                // otherTypes entry after bumping remainder -- confirm
                                // the intent was not to restart the outer loop
                                remainder = remainder + noteValue * (quotient - maxNotes - 1);
                                continue;
                            }
                            ok = true;
                            break;
                        }
                    }
                    if (ok) break;
                }
                if (!ok) return -1;
            }
        }
        return remainder;
    }

    /**
     * Dispense notes that add up to the value specified. At the end of
     * the operation this method returns a list of Bucket represents the
     * notes been dispensed from this ATM.
     *
     * @param value the money value to be dispensed from the ATM
     * @return a list of buckets contains notes been dispensed from the ATM
     * @throws NoteDispenseException if the ATM failed to dispense the
     *                               required money value
     * @see DispenseStrategy#comparator()
     */
    public List<Bucket> dispense(int value) throws NoteDispenseException {
        E.illegalArgumentIf(value < 0, "oops, can't dispense notes for negative value");
        // fail fast when the requested value exceeds the ATM total or is not
        // a multiple of the greatest common divisor of all note values
        if (value > this.value || value % NoteType.GCD_VALUE != 0) {
            throw new NoteDispenseException(value);
        }
        C.List<Bucket> cash = C.newList();
        int originalValue = value;
        try {
            while (value > 0) {
                // sort/filter available buckets for notes dispense
                Comparator<Bucket> cmp = strategy.comparator();
                C.List<Bucket> l = bucketList.sort(cmp).filter(F.filter(value));
                if (l.isEmpty()) {
                    throw new NoteDispenseException(originalValue);
                }
                int v0 = value;
                for (Bucket atmBucket : l) {
                    int maxNotes = atmBucket.noteCount();
                    int noteValue = atmBucket.noteTypeValue();
                    // NOTE(review): l.drop(1) always drops the FIRST bucket of the
                    // sorted list, regardless of which atmBucket the loop is on --
                    // confirm this is intended rather than dropping atmBucket itself
                    List<Integer> others = l.drop(1).map(new _.F1<Bucket, Integer>() {
                        @Override
                        public Integer apply(Bucket bucket) throws NotAppliedException, _.Break {
                            return bucket.noteTypeValue();
                        }
                    });
                    int remainder = findRemainder(value, noteValue, maxNotes, others);
                    if (remainder < 0) throw new NoteDispenseException(originalValue);
                    if (remainder >= value) continue;
                    int dispenseValue = value - remainder;
                    int transferCount = dispenseValue / noteValue;
                    // prepare the dispense bucket and commit notes transfer
                    Bucket bucket = Bucket.of(atmBucket.type());
                    bucket.transferFrom(atmBucket, transferCount);
                    cash.add(bucket);
                    this.value -= dispenseValue;
                    value = remainder;
                }
                // no progress in a full pass means the value cannot be served
                if (v0 == value) {
                    throw new NoteDispenseException(originalValue);
                }
            }
        } catch (NoteDispenseException e) {
            // put already-dispensed notes back before propagating
            revert(cash);
            throw e;
        } catch (RuntimeException e) {
            revert(cash);
            throw e;
        }
        return cash;
    }

    /**
     * Returns the total value of all notes in this ATM
     */
    public int value() {
        return value;
    }

    @Override
    public String toString() {
        return "ATM state\n" + fmt.format(bucketList);
    }

    // for unit test purpose
    public Bucket _byType(NoteType type) {
        return bucketList.findFirst(F.byType(type)).get();
    }

    /**
     * The function object namespace
     */
    private static enum F {
        ;

        /**
         * Returns a filter function that test the whether a bucket support
         * dispense notes for the specified value. A bucket is considered to
         * be able to dispense if:
         * <ul>
         * <li>there are notes in the bucket</li>
         * <li>the note type value is lesser than the value to be dispensed</li>
         * </ul>
         *
         * @param value the value (sum of notes) needs to be dispensed
         * @return a predicate to test the bucket
         */
        static final _.Predicate<Bucket> filter(final int value) {
            return new _.Predicate<Bucket>() {
                @Override
                public boolean test(Bucket bucket) {
                    return bucket.noteCount() > 0 && bucket.noteTypeValue() <= value;
                }
            };
        }

        /**
         * Returns a predicate that find bucket by note type
         *
         * @param type the note type to match
         * @return a Predicate that test if a bucket matches the type specified
         */
        static final _.Predicate<Bucket> byType(final NoteType type) {
            return new _.Predicate<Bucket>() {
                @Override
                public boolean test(Bucket bucket) {
                    return bucket.type() == type;
                }
            };
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.yarn.core; import java.io.IOException; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.client.api.YarnClientApplication; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.security.AMRMTokenIdentifier; /** * YARN resource manager client implementation for Drill. Provides a wrapper * around the YARN client interface to the Resource Manager. Used by the client * app to start the Drill application master. 
* <p> * Based on * <a href="https://github.com/hortonworks/simple-yarn-app">simple-yarn-app</a> */ public class YarnRMClient { private final YarnConfiguration conf; private final YarnClient yarnClient; /** * Application ID. Semantics are such that each session of Drill-on-YARN works * with no more than one application ID. */ private ApplicationId appId; private YarnClientApplication app; public YarnRMClient() { this(new YarnConfiguration()); } public YarnRMClient(ApplicationId appId) { this(); this.appId = appId; } public YarnRMClient(YarnConfiguration conf) { this.conf = conf; yarnClient = YarnClient.createYarnClient(); yarnClient.init(conf); yarnClient.start(); } public GetNewApplicationResponse createAppMaster() throws YarnClientException { // Create application via yarnClient // Response is a new application ID along with cluster capacity info try { app = yarnClient.createApplication(); } catch (YarnException | IOException e) { throw new YarnClientException("Create application failed", e); } GetNewApplicationResponse response = app.getNewApplicationResponse(); appId = response.getApplicationId(); return response; } public void submitAppMaster(AppSpec spec) throws YarnClientException { if (app == null) { throw new IllegalStateException("call createAppMaster( ) first"); } ApplicationSubmissionContext appContext; try { appContext = spec.createAppLaunchContext(conf, app); } catch (IOException e) { throw new YarnClientException("Create app launch context failed", e); } // Submit application try { yarnClient.submitApplication(appContext); } catch (YarnException | IOException e) { throw new YarnClientException("Submit application failed", e); } } public ApplicationId getAppId() { return appId; } public ApplicationReport getAppReport() throws YarnClientException { try { return yarnClient.getApplicationReport(appId); } catch (YarnException | IOException e) { throw new YarnClientException("Get application report failed", e); } } /** * Waits for the application to start. 
This version is somewhat informal, the * intended use is when debugging unmanaged applications. * * @throws YarnClientException */ public ApplicationAttemptId waitForStart() throws YarnClientException { ApplicationReport appReport; YarnApplicationState appState; ApplicationAttemptId attemptId; while (true) { appReport = getAppReport(); appState = appReport.getYarnApplicationState(); attemptId = appReport.getCurrentApplicationAttemptId(); if (appState != YarnApplicationState.NEW && appState != YarnApplicationState.NEW_SAVING && appState != YarnApplicationState.SUBMITTED) { break; } System.out.println("App State: " + appState); try { Thread.sleep(1000); } catch (InterruptedException e) { // Should never occur. } } if (appState != YarnApplicationState.ACCEPTED) { throw new YarnClientException( "Application start failed with status " + appState); } return attemptId; } /** * Wait for the application to enter one of the completion states. This is an * informal implementation useful for testing. * * @throws YarnClientException */ public void waitForCompletion() throws YarnClientException { ApplicationReport appReport; YarnApplicationState appState; while (true) { appReport = getAppReport(); appState = appReport.getYarnApplicationState(); if (appState == YarnApplicationState.FINISHED || appState == YarnApplicationState.KILLED || appState == YarnApplicationState.FAILED) { break; } try { Thread.sleep(100); } catch (InterruptedException e) { // Should never occur. } } System.out.println("Application " + appId + " finished with" + " state " + appState + " at " + appReport.getFinishTime()); } public Token<AMRMTokenIdentifier> getAMRMToken() throws YarnClientException { try { return yarnClient.getAMRMToken(appId); } catch (YarnException | IOException e) { throw new YarnClientException("Get AM/RM token failed", e); } } /** * Return standard class path entries from the YARN application class path. 
*/ public String[] getYarnAppClassPath() { return conf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH, YarnConfiguration.DEFAULT_YARN_APPLICATION_CLASSPATH); } public void killApplication() throws YarnClientException { try { yarnClient.killApplication(appId); } catch (YarnException | IOException e) { throw new YarnClientException( "Kill failed for application: " + appId.toString()); } } }
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2018_04_01.implementation;

import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.AzureServiceFuture;
import com.microsoft.azure.ListOperationCallback;
import com.microsoft.azure.management.network.v2018_04_01.ErrorException;
import com.microsoft.azure.Page;
import com.microsoft.azure.PagedList;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.http.Url;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in HubVirtualNetworkConnections.
 */
public class HubVirtualNetworkConnectionsInner {
    /** The Retrofit service used to actually perform the REST calls. */
    private HubVirtualNetworkConnectionsService service;
    /** The service client containing this operation class. */
    private NetworkManagementClientImpl client;

    /**
     * Initializes an instance of HubVirtualNetworkConnectionsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public HubVirtualNetworkConnectionsInner(Retrofit retrofit, NetworkManagementClientImpl client) {
        this.service = retrofit.create(HubVirtualNetworkConnectionsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for HubVirtualNetworkConnections to be
     * used by Retrofit to actually perform the REST calls.
     */
    interface HubVirtualNetworkConnectionsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_04_01.HubVirtualNetworkConnections get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/hubVirtualNetworkConnections/{connectionName}")
        Observable<Response<ResponseBody>> get(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("virtualHubName") String virtualHubName, @Path("connectionName") String connectionName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_04_01.HubVirtualNetworkConnections list" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/hubVirtualNetworkConnections")
        Observable<Response<ResponseBody>> list(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("virtualHubName") String virtualHubName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.network.v2018_04_01.HubVirtualNetworkConnections listNext" })
        @GET
        Observable<Response<ResponseBody>> listNext(@Url String nextUrl,
@Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    }

    /**
     * Retrieves the details of a HubVirtualNetworkConnection.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @param connectionName The name of the vpn connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the HubVirtualNetworkConnectionInner object if successful.
     */
    public HubVirtualNetworkConnectionInner get(String resourceGroupName, String virtualHubName, String connectionName) {
        // Blocking variant: waits for the async call to produce its single response.
        return getWithServiceResponseAsync(resourceGroupName, virtualHubName, connectionName).toBlocking().single().body();
    }

    /**
     * Retrieves the details of a HubVirtualNetworkConnection.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @param connectionName The name of the vpn connection.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<HubVirtualNetworkConnectionInner> getAsync(String resourceGroupName, String virtualHubName, String connectionName, final ServiceCallback<HubVirtualNetworkConnectionInner> serviceCallback) {
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, virtualHubName, connectionName), serviceCallback);
    }

    /**
     * Retrieves the details of a HubVirtualNetworkConnection.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @param connectionName The name of the vpn connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the HubVirtualNetworkConnectionInner object
     */
    public Observable<HubVirtualNetworkConnectionInner> getAsync(String resourceGroupName, String virtualHubName, String connectionName) {
        return getWithServiceResponseAsync(resourceGroupName, virtualHubName, connectionName).map(new Func1<ServiceResponse<HubVirtualNetworkConnectionInner>, HubVirtualNetworkConnectionInner>() {
            @Override
            public HubVirtualNetworkConnectionInner call(ServiceResponse<HubVirtualNetworkConnectionInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Retrieves the details of a HubVirtualNetworkConnection.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @param connectionName The name of the vpn connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the HubVirtualNetworkConnectionInner object
     */
    public Observable<ServiceResponse<HubVirtualNetworkConnectionInner>> getWithServiceResponseAsync(String resourceGroupName, String virtualHubName, String connectionName) {
        // Validate required parameters before issuing the REST call.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (virtualHubName == null) {
            throw new IllegalArgumentException("Parameter virtualHubName is required and cannot be null.");
        }
        if (connectionName == null) {
            throw new IllegalArgumentException("Parameter connectionName is required and cannot be null.");
        }
        final String apiVersion = "2018-04-01";
        return service.get(this.client.subscriptionId(), resourceGroupName, virtualHubName, connectionName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>,
Observable<ServiceResponse<HubVirtualNetworkConnectionInner>>>() {
                @Override
                public Observable<ServiceResponse<HubVirtualNetworkConnectionInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<HubVirtualNetworkConnectionInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a raw REST response for get(): 200 -> HubVirtualNetworkConnectionInner,
    // anything else -> ErrorException.
    private ServiceResponse<HubVirtualNetworkConnectionInner> getDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<HubVirtualNetworkConnectionInner, ErrorException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<HubVirtualNetworkConnectionInner>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;HubVirtualNetworkConnectionInner&gt; object if successful.
     */
    public PagedList<HubVirtualNetworkConnectionInner> list(final String resourceGroupName, final String virtualHubName) {
        ServiceResponse<Page<HubVirtualNetworkConnectionInner>> response = listSinglePageAsync(resourceGroupName, virtualHubName).toBlocking().single();
        return new PagedList<HubVirtualNetworkConnectionInner>(response.body()) {
            @Override
            public Page<HubVirtualNetworkConnectionInner> nextPage(String nextPageLink) {
                // Lazily fetches subsequent pages as the PagedList is iterated.
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<HubVirtualNetworkConnectionInner>> listAsync(final String resourceGroupName, final String virtualHubName, final ListOperationCallback<HubVirtualNetworkConnectionInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listSinglePageAsync(resourceGroupName, virtualHubName),
            new Func1<String, Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;HubVirtualNetworkConnectionInner&gt; object
     */
    public Observable<Page<HubVirtualNetworkConnectionInner>> listAsync(final String resourceGroupName, final String virtualHubName) {
        return listWithServiceResponseAsync(resourceGroupName, virtualHubName)
            .map(new Func1<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>, Page<HubVirtualNetworkConnectionInner>>() {
                @Override
                public Page<HubVirtualNetworkConnectionInner> call(ServiceResponse<Page<HubVirtualNetworkConnectionInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;HubVirtualNetworkConnectionInner&gt; object
     */
    public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> listWithServiceResponseAsync(final String resourceGroupName, final String virtualHubName) {
        return listSinglePageAsync(resourceGroupName, virtualHubName)
            .concatMap(new Func1<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>, Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> call(ServiceResponse<Page<HubVirtualNetworkConnectionInner>> page) {
                    // Recursively chase nextPageLink until the server reports no more pages.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param resourceGroupName The resource group name of the VirtualHub.
     * @param virtualHubName The name of the VirtualHub.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;HubVirtualNetworkConnectionInner&gt; object wrapped in {@link ServiceResponse} if successful.
 */
    public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> listSinglePageAsync(final String resourceGroupName, final String virtualHubName) {
        // Validate required parameters before issuing the REST call.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (virtualHubName == null) {
            throw new IllegalArgumentException("Parameter virtualHubName is required and cannot be null.");
        }
        final String apiVersion = "2018-04-01";
        return service.list(this.client.subscriptionId(), resourceGroupName, virtualHubName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<HubVirtualNetworkConnectionInner>> result = listDelegate(response);
                        return Observable.just(new ServiceResponse<Page<HubVirtualNetworkConnectionInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a raw REST response for list(): 200 -> a page of
    // HubVirtualNetworkConnectionInner, anything else -> ErrorException.
    private ServiceResponse<PageImpl<HubVirtualNetworkConnectionInner>> listDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<HubVirtualNetworkConnectionInner>, ErrorException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<HubVirtualNetworkConnectionInner>>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;HubVirtualNetworkConnectionInner&gt; object if successful.
     */
    public PagedList<HubVirtualNetworkConnectionInner> listNext(final String nextPageLink) {
        ServiceResponse<Page<HubVirtualNetworkConnectionInner>> response = listNextSinglePageAsync(nextPageLink).toBlocking().single();
        return new PagedList<HubVirtualNetworkConnectionInner>(response.body()) {
            @Override
            public Page<HubVirtualNetworkConnectionInner> nextPage(String nextPageLink) {
                return listNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<HubVirtualNetworkConnectionInner>> listNextAsync(final String nextPageLink, final ServiceFuture<List<HubVirtualNetworkConnectionInner>> serviceFuture, final ListOperationCallback<HubVirtualNetworkConnectionInner> serviceCallback) {
        return AzureServiceFuture.fromPageResponse(
            listNextSinglePageAsync(nextPageLink),
            new Func1<String, Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> call(String nextPageLink) {
                    return listNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;HubVirtualNetworkConnectionInner&gt; object
     */
    public Observable<Page<HubVirtualNetworkConnectionInner>> listNextAsync(final String nextPageLink) {
        return listNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>, Page<HubVirtualNetworkConnectionInner>>() {
                @Override
                public Page<HubVirtualNetworkConnectionInner> call(ServiceResponse<Page<HubVirtualNetworkConnectionInner>> response) {
                    return response.body();
                }
            });
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;HubVirtualNetworkConnectionInner&gt; object
     */
    public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> listNextWithServiceResponseAsync(final String nextPageLink) {
        return listNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>, Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> call(ServiceResponse<Page<HubVirtualNetworkConnectionInner>> page) {
                    // Recursively chase nextPageLink until the server reports no more pages.
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }

    /**
     * Retrieves the details of all HubVirtualNetworkConnections.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;HubVirtualNetworkConnectionInner&gt; object wrapped in {@link ServiceResponse} if successful.
     */
    public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> listNextSinglePageAsync(final String nextPageLink) {
        if (nextPageLink == null) {
            throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null.");
        }
        String nextUrl = String.format("%s", nextPageLink);
        return service.listNext(nextUrl, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<HubVirtualNetworkConnectionInner>>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PageImpl<HubVirtualNetworkConnectionInner>> result = listNextDelegate(response);
                        return Observable.just(new ServiceResponse<Page<HubVirtualNetworkConnectionInner>>(result.body(), result.response()));
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes a raw REST response for listNext(): 200 -> a page of
    // HubVirtualNetworkConnectionInner, anything else -> ErrorException.
    private ServiceResponse<PageImpl<HubVirtualNetworkConnectionInner>> listNextDelegate(Response<ResponseBody> response) throws ErrorException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<HubVirtualNetworkConnectionInner>, ErrorException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<HubVirtualNetworkConnectionInner>>() { }.getType())
                .registerError(ErrorException.class)
                .build(response);
    }

}
/* * Copyright 2016-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.rules; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import com.facebook.buck.event.ActionGraphEvent; import com.facebook.buck.event.BuckEvent; import com.facebook.buck.event.BuckEventBus; import com.facebook.buck.event.BuckEventBusFactory; import com.facebook.buck.event.listener.BroadcastEventListener; import com.facebook.buck.jvm.java.JavaLibraryBuilder; import com.facebook.buck.model.BuildTarget; import com.facebook.buck.model.BuildTargetFactory; import com.facebook.buck.rules.keys.ContentAgnosticRuleKeyFactory; import com.facebook.buck.rules.keys.RuleKeyFieldLoader; import com.facebook.buck.testutil.TargetGraphFactory; import com.facebook.buck.testutil.WatchEventsForTests; import com.facebook.buck.testutil.integration.TemporaryPaths; import com.facebook.buck.timing.IncrementingFakeClock; import com.facebook.buck.util.WatchmanWatcher; import com.google.common.collect.ImmutableSet; import com.google.common.eventbus.Subscribe; import org.hamcrest.Matchers; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.io.IOException; import java.nio.file.Path; import java.nio.file.StandardWatchEventKinds; import java.util.HashMap; import java.util.Map; import 
java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class ActionGraphCacheTest {
  private static final boolean CHECK_GRAPHS = true;
  private static final boolean NOT_CHECK_GRAPHS = false;

  private TargetNode<?, ?> nodeA;
  private TargetNode<?, ?> nodeB;
  private TargetGraph targetGraph;
  private BuckEventBus eventBus;
  private BroadcastEventListener broadcastEventListener;
  // Collects every ActionGraphEvent.Cache event posted to the bus during a test.
  private BlockingQueue<BuckEvent> trackedEvents = new LinkedBlockingQueue<>();
  private final int keySeed = 0;

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  @Rule
  public TemporaryPaths tmpFilePath = new TemporaryPaths();

  @Before
  public void setUp() {
    // Creates the following target graph:
    //    A
    //   /
    //  B
    nodeB = createTargetNode("B");
    nodeA = createTargetNode("A", nodeB);
    targetGraph = TargetGraphFactory.newInstance(nodeA, nodeB);

    eventBus = BuckEventBusFactory.newInstance(
        new IncrementingFakeClock(TimeUnit.SECONDS.toNanos(1)));
    broadcastEventListener = new BroadcastEventListener();
    broadcastEventListener.addEventBus(eventBus);
    // Subscribe so each cache hit/miss event lands in trackedEvents for counting.
    eventBus.register(new Object() {
      @Subscribe
      public void actionGraphCacheEvent(ActionGraphEvent.Cache event) {
        trackedEvents.add(event);
      }
    });
  }

  @Test
  public void hitOnCache() throws InterruptedException {
    ActionGraphCache cache = new ActionGraphCache(broadcastEventListener);

    ActionGraphAndResolver resultRun1 = cache.getActionGraph(
        eventBus, CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed);
    // The 1st time you query the ActionGraph it's a cache miss.
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Hit.class), 0);
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Miss.class), 1);

    ActionGraphAndResolver resultRun2 = cache.getActionGraph(
        eventBus, CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed);
    // The 2nd time it should be a cache hit and the ActionGraphs should be exactly the same.
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Hit.class), 1);
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Miss.class), 1);

    // Check all the RuleKeys are the same between the 2 ActionGraphs.
    Map<BuildRule, RuleKey> resultRun1RuleKeys = getRuleKeysFromBuildRules(
        resultRun1.getActionGraph().getNodes(), resultRun1.getResolver());
    Map<BuildRule, RuleKey> resultRun2RuleKeys = getRuleKeysFromBuildRules(
        resultRun2.getActionGraph().getNodes(), resultRun2.getResolver());

    assertThat(resultRun1RuleKeys, Matchers.equalTo(resultRun2RuleKeys));
  }

  @Test
  public void missOnCache() {
    ActionGraphCache cache = new ActionGraphCache(broadcastEventListener);
    ActionGraphAndResolver resultRun1 = cache.getActionGraph(
        eventBus, CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed);
    // Each run queries with a different TargetGraph than the cached one, so
    // every call below should be a miss (the cache only keeps the last graph;
    // run3 reuses the original graph but run2 has evicted it by then).
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Hit.class), 0);
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Miss.class), 1);

    ActionGraphAndResolver resultRun2 = cache.getActionGraph(
        eventBus, CHECK_GRAPHS, /* skipActionGraphCache */ false,
        targetGraph.getSubgraph(ImmutableSet.of(nodeB)), keySeed);
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Hit.class), 0);
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Miss.class), 2);

    ActionGraphAndResolver resultRun3 = cache.getActionGraph(
        eventBus, CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed);
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Hit.class), 0);
    assertEquals(countEventsOf(ActionGraphEvent.Cache.Miss.class), 3);

    // Run1 and Run2 should not match, but Run1 and Run3 should
    Map<BuildRule, RuleKey> resultRun1RuleKeys = getRuleKeysFromBuildRules(
        resultRun1.getActionGraph().getNodes(), resultRun1.getResolver());
    Map<BuildRule, RuleKey> resultRun2RuleKeys = getRuleKeysFromBuildRules(
        resultRun2.getActionGraph().getNodes(), resultRun2.getResolver());
    Map<BuildRule, RuleKey> resultRun3RuleKeys =
getRuleKeysFromBuildRules( resultRun3.getActionGraph().getNodes(), resultRun3.getResolver()); // Run2 is done in a subgraph and it should not have the same ActionGraph. assertThat(resultRun1RuleKeys, Matchers.not(Matchers.equalTo(resultRun2RuleKeys))); // Run1 and Run3 should match. assertThat(resultRun1RuleKeys, Matchers.equalTo(resultRun3RuleKeys)); } @Test public void missWithTargetGraphHashMatch() { ActionGraphCache cache = new ActionGraphCache(broadcastEventListener); cache.getActionGraph( eventBus, CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed); assertEquals(1, countEventsOf(ActionGraphEvent.Cache.Miss.class)); cache.getActionGraph( eventBus, CHECK_GRAPHS, /* skipActionGraphCache */ false, TargetGraphFactory.newInstance(nodeA, createTargetNode("B")), keySeed); assertEquals(1, countEventsOf(ActionGraphEvent.Cache.MissWithTargetGraphHashMatch.class)); assertEquals(2, countEventsOf(ActionGraphEvent.Cache.Miss.class)); } // If this breaks it probably means the ActionGraphCache checking also breaks. @Test public void compareActionGraphsBasedOnRuleKeys() { ActionGraphAndResolver resultRun1 = ActionGraphCache.getFreshActionGraph( eventBus, new DefaultTargetNodeToBuildRuleTransformer(), targetGraph); ActionGraphAndResolver resultRun2 = ActionGraphCache.getFreshActionGraph( eventBus, new DefaultTargetNodeToBuildRuleTransformer(), targetGraph); // Check all the RuleKeys are the same between the 2 ActionGraphs. 
Map<BuildRule, RuleKey> resultRun1RuleKeys = getRuleKeysFromBuildRules( resultRun1.getActionGraph().getNodes(), resultRun1.getResolver()); Map<BuildRule, RuleKey> resultRun2RuleKeys = getRuleKeysFromBuildRules( resultRun2.getActionGraph().getNodes(), resultRun2.getResolver()); assertThat(resultRun1RuleKeys, Matchers.equalTo(resultRun2RuleKeys)); } @Test public void cacheInvalidationBasedOnEvents() throws IOException, InterruptedException { ActionGraphCache cache = new ActionGraphCache(broadcastEventListener); Path file = tmpFilePath.newFile("foo.txt"); // Fill the cache. An overflow event should invalidate the cache. cache.getActionGraph( eventBus, NOT_CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed); assertFalse(cache.isCacheEmpty()); cache.invalidateBasedOn(WatchmanWatcher.createOverflowEvent("testing")); assertTrue(cache.isCacheEmpty()); // Fill the cache. Add a file and ActionGraphCache should be invalidated. cache.getActionGraph( eventBus, NOT_CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed); assertFalse(cache.isCacheEmpty()); cache.invalidateBasedOn( WatchEventsForTests.createPathEvent(file, StandardWatchEventKinds.ENTRY_CREATE)); assertTrue(cache.isCacheEmpty()); //Re-fill cache. Remove a file and ActionGraphCache should be invalidated. cache.getActionGraph( eventBus, NOT_CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed); assertFalse(cache.isCacheEmpty()); cache.invalidateBasedOn( WatchEventsForTests.createPathEvent(file, StandardWatchEventKinds.ENTRY_DELETE)); assertTrue(cache.isCacheEmpty()); // Re-fill cache. Modify contents of a file, ActionGraphCache should NOT be invalidated. 
cache.getActionGraph( eventBus, CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed); assertFalse(cache.isCacheEmpty()); cache.invalidateBasedOn( WatchEventsForTests.createPathEvent(file, StandardWatchEventKinds.ENTRY_MODIFY)); cache.getActionGraph( eventBus, NOT_CHECK_GRAPHS, /* skipActionGraphCache */ false, targetGraph, keySeed); assertFalse(cache.isCacheEmpty()); // We should have 4 cache misses and 1 hit from when you request the same graph after a file // modification. assertEquals(countEventsOf(ActionGraphEvent.Cache.Hit.class), 1); assertEquals(countEventsOf(ActionGraphEvent.Cache.Miss.class), 4); } private TargetNode<?, ?> createTargetNode(String name, TargetNode<?, ?>... deps) { BuildTarget buildTarget = BuildTargetFactory.newInstance("//foo:" + name); JavaLibraryBuilder targetNodeBuilder = JavaLibraryBuilder.createBuilder(buildTarget); for (TargetNode<?, ?> dep : deps) { targetNodeBuilder.addDep(dep.getBuildTarget()); } return targetNodeBuilder.build(); } private int countEventsOf(Class<? extends ActionGraphEvent> trackedClass) { int i = 0; for (BuckEvent event : trackedEvents) { if (trackedClass.isInstance(event)) { i++; } } return i; } private Map<BuildRule, RuleKey> getRuleKeysFromBuildRules( Iterable<BuildRule> buildRules, BuildRuleResolver buildRuleResolver) { RuleKeyFieldLoader ruleKeyFieldLoader = new RuleKeyFieldLoader(0); SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(buildRuleResolver); SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder); ContentAgnosticRuleKeyFactory factory = new ContentAgnosticRuleKeyFactory(ruleKeyFieldLoader, pathResolver, ruleFinder); HashMap<BuildRule, RuleKey> ruleKeysMap = new HashMap<>(); for (BuildRule rule : buildRules) { ruleKeysMap.put(rule, factory.build(rule)); } return ruleKeysMap; } }
package edu.bu.android.hiddendata;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.io.PatternFilenameFilter;

import edu.bu.android.hiddendata.model.JsonUtils;
import edu.bu.android.hiddendata.model.Results;

/**
 * Command-line reporter that scans a directory of per-APK analysis result directories
 * and summarizes outcomes: finished runs, runs with results, crashes, out-of-memory
 * runs, and keyword-based candidates.
 *
 * <p>Fixes in this revision:
 * <ul>
 *   <li>{@link File#list} can return {@code null} (missing path, not a directory,
 *       I/O error); every call is now null-safe instead of throwing NPE.</li>
 *   <li>{@link #getOutOfMemory(boolean)} previously ignored its {@code display} flag,
 *       so the {@code -M} option never listed the files.</li>
 *   <li>{@link #loadKeywords(String)} returned {@code null} on I/O failure, crashing
 *       {@link #getCandidates(String, boolean)}; it now returns an empty list.</li>
 *   <li>{@link #isKeyword(List, String)} now lowercases both sides of the comparison,
 *       so uppercase keywords in the keyword file can match.</li>
 * </ul>
 */
public class BatchResultReporter {
    private static final Logger logger = LoggerFactory.getLogger("Result");

    private static final String[] NO_FILES = new String[0];

    // One entry per APK result directory to inspect.
    private List<File> resultDirs = new ArrayList<File>();

    public BatchResultReporter() {
    }

    /**
     * Null-safe wrapper around {@link File#list(FilenameFilter)}: returns an empty
     * array when the path does not exist or is not a directory.
     */
    private static String[] listOrEmpty(File dir, FilenameFilter filter) {
        String[] names = dir.list(filter);
        return names == null ? NO_FILES : names;
    }

    /**
     * We want to only look at full paths from the file, ie the file we use to feed into the
     * analysis.
     *
     * @param file path to a text file with one APK result-directory path per line
     */
    public void loadFromFile(String file) {
        Path path = FileSystems.getDefault().getPath(file);
        try {
            List<String> apks = Files.readAllLines(path, Charset.defaultCharset());
            for (String apk : apks) {
                resultDirs.add(new File(apk));
            }
        } catch (IOException e) {
            // Best effort: an unreadable list file just leaves resultDirs empty.
            e.printStackTrace();
        }
    }

    /**
     * Loads every child of the given results directory as an APK result directory.
     *
     * @param resultDirFilePath path of the top-level results directory
     */
    public void loadAll(String resultDirFilePath) {
        File resultsDir = new File(resultDirFilePath);
        String[] dirFiles = resultsDir.list();
        if (dirFiles == null) {
            // Path missing or not a directory; previously this caused an NPE.
            logger.error("Results directory {} cannot be listed", resultDirFilePath);
            return;
        }
        for (String dir : dirFiles) {
            resultDirs.add(new File(resultsDir, dir));
        }
    }

    /**
     * Results found with json results file.
     *
     * @param display whether to print each parsed result
     * @return the list of results.json files found
     */
    public List<File> getFinishedWithResult(boolean display) {
        List<File> resultFiles = getResultFiles();
        logger.info("{} results.json file found", resultFiles.size());
        if (display) {
            processResults(resultFiles);
        }
        return resultFiles;
    }

    /** Collects the single results file (if present) from each APK directory. */
    private List<File> getResultFiles() {
        List<File> resultFiles = new ArrayList<File>();
        for (File apk : resultDirs) {
            String[] resultFile = listOrEmpty(apk, new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return (name.endsWith(FindHidden.RESULTS_SUFFIX));
                }
            });
            if (resultFile.length == 1) {
                resultFiles.add(new File(apk, resultFile[0]));
            }
        }
        return resultFiles;
    }

    /** Prints the name of every directory containing a results file. */
    public void listFinished() {
        // NOTE(review): RESULTS_SUFFIX is embedded in a regex unquoted; if it contains
        // regex metacharacters (e.g. '.'), this matches slightly more than a literal
        // suffix. Preserved as-is for behavioral compatibility.
        List<File> resultFiles = getFilesByRegex(FindHidden.RESULTS_SUFFIX + "$");
        display(resultFiles, true);
    }

    /**
     * Finished, may be have results, may not. But did not crash.
     *
     * @param display whether to log the count
     * @return APK directories containing the done flag
     */
    public List<File> getFinished(boolean display) {
        List<File> resultFiles = new ArrayList<File>();
        for (File apkDirFile : resultDirs) {
            String[] resultFile = listOrEmpty(apkDirFile, new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return (name.equals(FindHidden.FLAG_DONE));
                }
            });
            if (resultFile.length == 1) {
                resultFiles.add(apkDirFile);
            }
        }
        if (display) {
            logger.info("{} finished", resultFiles.size());
        }
        return resultFiles;
    }

    /**
     * Get ones that looked like they crashed because they don't have the correct flags.
     * Also want to make sure it didn't run out of memory.
     *
     * @param display whether to print the crashed directories
     */
    public void getCrashed(boolean display) {
        List<File> crashedFiles = new ArrayList<File>();
        for (File apkDirFile : resultDirs) {
            String[] doneFlag = listOrEmpty(apkDirFile, new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return (name.equals(FindHidden.FLAG_DONE));
                }
            });
            String[] oom = listOrEmpty(apkDirFile, new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return name.startsWith("java_error") && name.endsWith("log");
                }
            });
            if (doneFlag.length == 0 && oom.length == 0) {
                // No flags at all, so the run crashed early for some reason.
                crashedFiles.add(apkDirFile);
            }
        }
        logger.info("{} probably crashes.", crashedFiles.size());
        if (display) {
            display(crashedFiles, false);
        }
    }

    /**
     * Out of memory if java_error files exist and the done flag does NOT exist. This is
     * because it may have eventually finished on another run.
     *
     * @param display whether to print the affected directories
     */
    public void getOutOfMemory(boolean display) {
        List<File> files = getFiles("java_error", "log");
        List<File> finished = getFinished(false);
        Iterator<File> it = files.iterator();
        while (it.hasNext()) {
            File f = it.next();
            if (finished.contains(f)) {
                it.remove();
            }
        }
        logger.info("{} ran out of memory", files.size());
        if (display) {
            // BUG FIX: the display flag was previously ignored, so -M never listed files.
            display(files, false);
        }
    }

    /**
     * The APKs that were able to successfully finish the first pass and have a model found.
     *
     * @param display whether to print the matching directories
     */
    public void getFoundNetworkToModelFlows(boolean display) {
        List<File> files = getFiles(FindHidden.FLAG_MODEL, "");
        logger.info("{} APKS have network to model flows", files.size());
        if (display) {
            display(files, false);
        }
    }

    /**
     * Get APK directories containing at least one file matching the prefix/suffix pair.
     *
     * @param filePrefix required file-name prefix
     * @param fileSuffix required file-name suffix (empty string matches any)
     * @return matching APK directories
     */
    private List<File> getFiles(final String filePrefix, final String fileSuffix) {
        List<File> files = new ArrayList<File>();
        for (File apk : resultDirs) {
            String[] resultFile = listOrEmpty(apk, new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return name.startsWith(filePrefix) && name.endsWith(fileSuffix);
                }
            });
            if (resultFile.length > 0) {
                files.add(apk);
            }
        }
        return files;
    }

    /** Get APK directories containing at least one file name matching the regex. */
    private List<File> getFilesByRegex(String regex) {
        final Pattern p = Pattern.compile(regex);
        List<File> files = new ArrayList<File>();
        for (File apk : resultDirs) {
            String[] resultFile = listOrEmpty(apk, new FilenameFilter() {
                @Override
                public boolean accept(File dir, String name) {
                    return p.matcher(name).find();
                }
            });
            if (resultFile.length > 0) {
                files.add(apk);
            }
        }
        return files;
    }

    /** Parses and prints each results file. */
    private void processResults(List<File> results) {
        for (File f : results) {
            Results result = new JsonUtils<Results>().load(f, Results.class);
            displayResult(result);
        }
    }

    /** Prints either the file name or the absolute path for each file. */
    private void display(List<File> files, boolean justName) {
        for (File f : files) {
            if (justName) {
                System.out.println(f.getName());
            } else {
                System.out.println(f.getAbsolutePath());
            }
        }
    }

    /** Logs the get-methods of a result that are in neither confidence list. */
    private void displayResult(Results result) {
        logger.info(result.getApkName());
        Iterator<String> it = result.getGetMethodsInApp().keySet().iterator();
        while (it.hasNext()) {
            String key = it.next();
            int count = result.getGetMethodsInApp().get(key);
            if (!result.getUsedConfidenceHigh().contains(key)
                    && !result.getUsedConfidenceLow().contains(key)) {
                logger.info(key + " -> " + count);
            }
        }
    }

    /**
     * From the results see which are candidates based on a list of keywords.
     *
     * @param keywordFile path to the keyword file, one keyword per line
     * @param display whether to print each candidate APK name
     */
    public void getCandidates(String keywordFile, boolean display) {
        List<String> keywords = loadKeywords(keywordFile);
        List<File> results = getResultFiles();
        int count = 0;
        for (File f : results) {
            boolean isCandidate = false;
            Results result = new JsonUtils<Results>().load(f, Results.class);
            Map<String, Integer> methods = result.getGetMethodsInApp();
            Iterator<String> it = methods.keySet().iterator();
            while (it.hasNext()) {
                String key = it.next();
                // Only methods in neither confidence list are candidate material.
                if (result.getUsedConfidenceHigh().contains(key)) {
                    continue;
                }
                if (result.getUsedConfidenceLow().contains(key)) {
                    continue;
                }
                if (isKeyword(keywords, key)) {
                    isCandidate = true;
                }
            }
            if (isCandidate) {
                if (display) {
                    System.out.println(result.getApkName());
                }
                count++;
            }
        }
        logger.info("{} Candidates found.", count);
    }

    /** Case-insensitive substring match of any keyword against the method name. */
    private boolean isKeyword(List<String> list, String name) {
        String lowerName = name.toLowerCase();
        for (String l : list) {
            // BUG FIX: the keyword is also lowercased so uppercase keywords can match.
            if (lowerName.contains(l.toLowerCase())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Loads keywords, one per line.
     *
     * @return the keywords, or an empty list if the file cannot be read
     *     (previously returned {@code null}, causing an NPE in getCandidates)
     */
    private List<String> loadKeywords(String file) {
        Path path = FileSystems.getDefault().getPath(file);
        try {
            return Files.readAllLines(path, Charset.defaultCharset());
        } catch (IOException e) {
            logger.error("Unable to read keyword file {}", file, e);
            return new ArrayList<String>();
        }
    }

    /** CLI entry point; see the option descriptions below for the supported reports. */
    public static void main(String[] args) {
        CommandLineParser parser = new DefaultParser();
        Options options = new Options();
        options.addOption("r", "results", true, "Path to results directory");
        options.addOption("d", "display", true, "Display status of apks in directory");
        options.addOption("s", "Number of APKs with results");
        options.addOption("S", "Detail list of models for APKs with results");
        options.addOption("f", "List of all apk files with results");
        options.addOption("l", "List runs that crashed");
        options.addOption("L", "Detail list displaying all apks that crashed");
        options.addOption("m", "Out of memory");
        options.addOption("M", "Out of memory, list those files");
        options.addOption("n", "Network to model flows found");
        options.addOption("N", "List of APKs where network to model flows found");
        options.addOption("c", "candidates", true, "List number candidates");
        options.addOption("C", "candidateslist", true, "List candidates");
        options.addOption("t", "Total number of APKs processed");

        try {
            CommandLine line = parser.parse(options, args);
            BatchResultReporter report = new BatchResultReporter();
            if (line.hasOption("display")) {
                report.loadFromFile(line.getOptionValue("display"));
            } else {
                report.loadAll(line.getOptionValue("results"));
            }

            if (line.hasOption("n")) {
                report.getFoundNetworkToModelFlows(false);
            } else if (line.hasOption("N")) {
                report.getFoundNetworkToModelFlows(true);
            }
            if (line.hasOption("l")) {
                report.getCrashed(false);
            } else if (line.hasOption("L")) {
                report.getCrashed(true);
            }
            if (line.hasOption("t")) {
                report.getFinished(true);
            }
            if (line.hasOption("s")) {
                report.getFinishedWithResult(false);
            } else if (line.hasOption("S")) {
                report.getFinishedWithResult(true);
            } else if (line.hasOption("f")) {
                report.listFinished();
            }
            if (line.hasOption("m")) {
                report.getOutOfMemory(false);
            } else if (line.hasOption("M")) {
                report.getOutOfMemory(true);
            }
            if (line.hasOption("c")) {
                report.getCandidates(line.getOptionValue("c"), false);
            } else if (line.hasOption("C")) {
                report.getCandidates(line.getOptionValue("C"), true);
            }
        } catch (ParseException e) {
            e.printStackTrace();
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.as2.api;

import java.io.IOException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.SecureRandom;
import java.security.Security;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;

import org.apache.camel.component.as2.api.entity.ApplicationEDIEntity;
import org.apache.camel.component.as2.api.entity.ApplicationEDIFACTEntity;
import org.apache.camel.component.as2.api.entity.ApplicationPkcs7SignatureEntity;
import org.apache.camel.component.as2.api.entity.MultipartSignedEntity;
import org.apache.http.HttpEntity;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.HttpVersion;
import org.apache.http.entity.ContentType;
import org.apache.http.message.BasicHttpEntityEnclosingRequest;
import org.apache.http.protocol.HttpContext;
import org.apache.http.protocol.HttpCoreContext;
import org.apache.http.protocol.HttpRequestHandler;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.cms.AttributeTable;
import org.bouncycastle.asn1.cms.IssuerAndSerialNumber;
import org.bouncycastle.asn1.smime.SMIMECapabilitiesAttribute;
import org.bouncycastle.asn1.smime.SMIMECapability;
import org.bouncycastle.asn1.smime.SMIMECapabilityVector;
import org.bouncycastle.asn1.smime.SMIMEEncryptionKeyPreferenceAttribute;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.cert.jcajce.JcaCertStore;
import org.bouncycastle.cms.jcajce.JcaSimpleSignerInfoGeneratorBuilder;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

/**
 * End-to-end tests for the AS2 client/server API: a local {@code AS2ServerConnection}
 * is started once for the class, and each test sends an EDIFACT message through
 * {@code AS2ClientManager}, then inspects the captured HTTP request/response.
 *
 * <p>NOTE(review): certificate generation, signer setup, and server lifecycle here are
 * order-sensitive (BC provider must be registered before key generation; the server
 * must be listening before any client send), so the code is left untouched and only
 * documented.
 */
public class AS2MessageTest {

    // Sample EDIFACT interchange used as the payload in every test.
    public static final String EDI_MESSAGE = "UNB+UNOA:1+005435656:1+006415160:1+060515:1434+00000000000778'\n"
            + "UNH+00000000000117+INVOIC:D:97B:UN'\n"
            + "BGM+380+342459+9'\n"
            + "DTM+3:20060515:102'\n"
            + "RFF+ON:521052'\n"
            + "NAD+BY+792820524::16++CUMMINS MID-RANGE ENGINE PLANT'\n"
            + "NAD+SE+005435656::16++GENERAL WIDGET COMPANY'\n"
            + "CUX+1:USD'\n"
            + "LIN+1++157870:IN'\n"
            + "IMD+F++:::WIDGET'\n"
            + "QTY+47:1020:EA'\n"
            + "ALI+US'\n"
            + "MOA+203:1202.58'\n"
            + "PRI+INV:1.179'\n"
            + "LIN+2++157871:IN'\n"
            + "IMD+F++:::DIFFERENT WIDGET'\n"
            + "QTY+47:20:EA'\n"
            + "ALI+JP'\n"
            + "MOA+203:410'\n"
            + "PRI+INV:20.5'\n"
            + "UNS+S'\n"
            + "MOA+39:2137.58'\n"
            + "ALC+C+ABG'\n"
            + "MOA+8:525'\n"
            + "UNT+23+00000000000117'\n"
            + "UNZ+1+00000000000778'";

    @SuppressWarnings("unused")
    private static final Logger LOG = LoggerFactory.getLogger(AS2MessageTest.class);

    // Fixed client/server identities and endpoint parameters shared by all tests.
    private static final String METHOD = "POST";
    private static final String TARGET_HOST = "localhost";
    private static final int TARGET_PORT = 8080;
    private static final String AS2_VERSION = "1.1";
    private static final String USER_AGENT = "Camel AS2 Endpoint";
    private static final String REQUEST_URI = "/";
    private static final String AS2_NAME = "878051556";
    private static final String SUBJECT = "Test Case";
    private static final String FROM = "mrAS@example.org";
    private static final String CLIENT_FQDN = "client.example.org";
    private static final String SERVER_FQDN = "server.example.org";
    private static final String DISPOSITION_NOTIFICATION_TO = "mrAS@example.org";
    private static final String[] SIGNED_RECEIPT_MIC_ALGORITHMS = new String[] {"sha1", "md5"};

    // Shared receiving server; created in setUpOnce(), closed in tearDownOnce().
    private static AS2ServerConnection testServer;

    // Per-test signing state, rebuilt in setUp().
    private AS2SignedDataGenerator gen;
    private KeyPair issueKP;
    private X509Certificate issueCert;
    private KeyPair signingKP;
    private X509Certificate signingCert;
    private List<X509Certificate> certList;

    /**
     * Generates a self-signed issuer key pair/certificate and a signing key
     * pair/certificate signed by that issuer, populating the instance fields
     * used by the signing tests.
     */
    private void setupKeysAndCertificates() throws Exception {
        //
        // set up our certificates
        //
        KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA", "BC");

        kpg.initialize(1024, new SecureRandom());

        String issueDN = "O=Punkhorn Software, C=US";
        issueKP = kpg.generateKeyPair();
        issueCert = Utils.makeCertificate(
                issueKP, issueDN, issueKP, issueDN);

        //
        // certificate we sign against
        //
        String signingDN = "CN=William J. Collins, E=punkhornsw@gmail.com, O=Punkhorn Software, C=US";
        signingKP = kpg.generateKeyPair();
        signingCert = Utils.makeCertificate(
                signingKP, signingDN, issueKP, issueDN);

        certList = new ArrayList<X509Certificate>();

        certList.add(signingCert);
        certList.add(issueCert);
    }

    /**
     * Registers the BC provider, builds an independent server-side certificate chain,
     * and starts the shared AS2 test server. The request handler parses each incoming
     * AS2 message and stashes subject/from attributes on the HTTP context.
     */
    @BeforeClass
    public static void setUpOnce() throws Exception {
        Security.addProvider(new BouncyCastleProvider());

        //
        // set up our certificates
        //
        KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA", "BC");

        kpg.initialize(1024, new SecureRandom());

        String issueDN = "O=Punkhorn Software, C=US";
        KeyPair issueKP = kpg.generateKeyPair();
        X509Certificate issueCert = Utils.makeCertificate(
                issueKP, issueDN, issueKP, issueDN);

        //
        // certificate we sign against
        //
        String signingDN = "CN=William J. Collins, E=punkhornsw@gmail.com, O=Punkhorn Software, C=US";
        KeyPair signingKP = kpg.generateKeyPair();
        X509Certificate signingCert = Utils.makeCertificate(
                signingKP, signingDN, issueKP, issueDN);

        List<X509Certificate> certList = new ArrayList<X509Certificate>();

        certList.add(signingCert);
        certList.add(issueCert);

        testServer = new AS2ServerConnection(AS2_VERSION, "MyServer-HTTP/1.1", SERVER_FQDN, 8080,
                certList.toArray(new Certificate[0]), signingKP.getPrivate());
        testServer.listen("*", new HttpRequestHandler() {
            @Override
            public void handle(HttpRequest request, HttpResponse response, HttpContext context)
                    throws HttpException, IOException {
                try {
                    // Parse the raw request into AS2 entities; failures surface as HttpException.
                    org.apache.camel.component.as2.api.entity.EntityParser.parseAS2MessageEntity(request);
                    context.setAttribute(SUBJECT, SUBJECT);
                    context.setAttribute(FROM, AS2_NAME);
                } catch (Exception e) {
                    throw new HttpException("Failed to parse AS2 Message Entity", e);
                }
            }
        });
    }

    /** Shuts down the shared test server. */
    @AfterClass
    public static void tearDownOnce() throws Exception {
        testServer.close();
    }

    /**
     * Builds fresh certificates and an {@code AS2SignedDataGenerator} configured with
     * S/MIME capability and encryption-key-preference signed attributes. Tries each
     * signature algorithm supported for the signing key until one succeeds.
     */
    @Before
    public void setUp() throws Exception {
        Security.addProvider(new BouncyCastleProvider());

        setupKeysAndCertificates();

        // Create and populate certificate store.
        JcaCertStore certs = new JcaCertStore(certList);

        // Create capabilities vector
        SMIMECapabilityVector capabilities = new SMIMECapabilityVector();
        capabilities.addCapability(SMIMECapability.dES_EDE3_CBC);
        capabilities.addCapability(SMIMECapability.rC2_CBC, 128);
        capabilities.addCapability(SMIMECapability.dES_CBC);

        // Create signing attributes
        ASN1EncodableVector attributes = new ASN1EncodableVector();
        attributes.add(new SMIMEEncryptionKeyPreferenceAttribute(new IssuerAndSerialNumber(new X500Name(signingCert.getIssuerDN().getName()), signingCert.getSerialNumber())));
        attributes.add(new SMIMECapabilitiesAttribute(capabilities));

        for (String signingAlgorithmName : AS2SignedDataGenerator
                .getSupportedSignatureAlgorithmNamesForKey(signingKP.getPrivate())) {
            try {
                this.gen = new AS2SignedDataGenerator();
                this.gen.addSignerInfoGenerator(new JcaSimpleSignerInfoGeneratorBuilder().setProvider("BC")
                        .setSignedAttributeGenerator(new AttributeTable(attributes))
                        .build(signingAlgorithmName, signingKP.getPrivate(), signingCert));
                this.gen.addCertificates(certs);
                break;
            } catch (Exception e) {
                // This algorithm failed to build a signer; reset and try the next one.
                this.gen = null;
                continue;
            }
        }

        if (this.gen == null) {
            throw new Exception("failed to create signing generator");
        }
    }

    /**
     * Sends a PLAIN (unsigned) EDIFACT message and verifies every AS2/HTTP header plus
     * the request entity type and main-body flag.
     */
    @Test
    public void plainEDIMessageTest() throws Exception {
        AS2ClientConnection clientConnection = new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN,
                TARGET_HOST, TARGET_PORT);
        AS2ClientManager clientManager = new AS2ClientManager(clientConnection);

        HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME, AS2_NAME,
                AS2MessageStructure.PLAIN, ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII),
                null, null, null, DISPOSITION_NOTIFICATION_TO, SIGNED_RECEIPT_MIC_ALGORITHMS);

        HttpRequest request = httpContext.getRequest();
        assertEquals("Unexpected method value", METHOD, request.getRequestLine().getMethod());
        assertEquals("Unexpected request URI value", REQUEST_URI, request.getRequestLine().getUri());
        assertEquals("Unexpected HTTP version value", HttpVersion.HTTP_1_1, request.getRequestLine().getProtocolVersion());

        assertEquals("Unexpected subject value", SUBJECT, request.getFirstHeader(AS2Header.SUBJECT).getValue());
        assertEquals("Unexpected from value", FROM, request.getFirstHeader(AS2Header.FROM).getValue());
        assertEquals("Unexpected AS2 version value", AS2_VERSION, request.getFirstHeader(AS2Header.AS2_VERSION).getValue());
        assertEquals("Unexpected AS2 from value", AS2_NAME, request.getFirstHeader(AS2Header.AS2_FROM).getValue());
        assertEquals("Unexpected AS2 to value", AS2_NAME, request.getFirstHeader(AS2Header.AS2_TO).getValue());
        assertTrue("Unexpected message id value", request.getFirstHeader(AS2Header.MESSAGE_ID).getValue().endsWith(CLIENT_FQDN + ">"));
        assertEquals("Unexpected target host value", TARGET_HOST + ":" + TARGET_PORT, request.getFirstHeader(AS2Header.TARGET_HOST).getValue());
        assertEquals("Unexpected user agent value", USER_AGENT, request.getFirstHeader(AS2Header.USER_AGENT).getValue());
        assertNotNull("Date value missing", request.getFirstHeader(AS2Header.DATE));
        assertNotNull("Content length value missing", request.getFirstHeader(AS2Header.CONTENT_LENGTH));
        assertTrue("Unexpected content type for message", request.getFirstHeader(AS2Header.CONTENT_TYPE).getValue().startsWith(AS2MediaType.APPLICATION_EDIFACT));

        assertTrue("Request does not contain entity", request instanceof BasicHttpEntityEnclosingRequest);
        HttpEntity entity = ((BasicHttpEntityEnclosingRequest)request).getEntity();
        assertNotNull("Request does not contain entity", entity);
        assertTrue("Unexpected request entity type", entity instanceof ApplicationEDIFACTEntity);
        ApplicationEDIFACTEntity ediEntity = (ApplicationEDIFACTEntity) entity;
        assertTrue("Unexpected content type for entity", ediEntity.getContentType().getValue().startsWith(AS2MediaType.APPLICATION_EDIFACT));
        assertTrue("Entity not set as main body of request", ediEntity.isMainBody());
    }

    /**
     * Sends a SIGNED EDIFACT message and verifies headers, the multipart/signed
     * structure, and the types of both mime parts (EDI payload + PKCS7 signature).
     */
    @Test
    public void multipartSignedMessageTest() throws Exception {
        AS2ClientConnection clientConnection = new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN,
                TARGET_HOST, TARGET_PORT);
        AS2ClientManager clientManager = new AS2ClientManager(clientConnection);

        HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME, AS2_NAME,
                AS2MessageStructure.SIGNED, ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII),
                null, certList.toArray(new Certificate[0]), signingKP.getPrivate(), DISPOSITION_NOTIFICATION_TO,
                SIGNED_RECEIPT_MIC_ALGORITHMS);

        HttpRequest request = httpContext.getRequest();
        assertEquals("Unexpected method value", METHOD, request.getRequestLine().getMethod());
        assertEquals("Unexpected request URI value", REQUEST_URI, request.getRequestLine().getUri());
        assertEquals("Unexpected HTTP version value", HttpVersion.HTTP_1_1, request.getRequestLine().getProtocolVersion());

        assertEquals("Unexpected subject value", SUBJECT, request.getFirstHeader(AS2Header.SUBJECT).getValue());
        assertEquals("Unexpected from value", FROM, request.getFirstHeader(AS2Header.FROM).getValue());
        assertEquals("Unexpected AS2 version value", AS2_VERSION, request.getFirstHeader(AS2Header.AS2_VERSION).getValue());
        assertEquals("Unexpected AS2 from value", AS2_NAME, request.getFirstHeader(AS2Header.AS2_FROM).getValue());
        assertEquals("Unexpected AS2 to value", AS2_NAME, request.getFirstHeader(AS2Header.AS2_TO).getValue());
        assertTrue("Unexpected message id value", request.getFirstHeader(AS2Header.MESSAGE_ID).getValue().endsWith(CLIENT_FQDN + ">"));
        assertEquals("Unexpected target host value", TARGET_HOST + ":" + TARGET_PORT, request.getFirstHeader(AS2Header.TARGET_HOST).getValue());
        assertEquals("Unexpected user agent value", USER_AGENT, request.getFirstHeader(AS2Header.USER_AGENT).getValue());
        assertNotNull("Date value missing", request.getFirstHeader(AS2Header.DATE));
        assertNotNull("Content length value missing", request.getFirstHeader(AS2Header.CONTENT_LENGTH));
        assertTrue("Unexpected content type for message", request.getFirstHeader(AS2Header.CONTENT_TYPE).getValue().startsWith(AS2MediaType.MULTIPART_SIGNED));

        assertTrue("Request does not contain entity", request instanceof BasicHttpEntityEnclosingRequest);
        HttpEntity entity = ((BasicHttpEntityEnclosingRequest)request).getEntity();
        assertNotNull("Request does not contain entity", entity);
        assertTrue("Unexpected request entity type", entity instanceof MultipartSignedEntity);
        MultipartSignedEntity signedEntity = (MultipartSignedEntity)entity;
        assertTrue("Entity not set as main body of request", signedEntity.isMainBody());
        assertTrue("Request contains invalid number of mime parts", signedEntity.getPartCount() == 2);

        // Validate first mime part.
        assertTrue("First mime part incorrect type ", signedEntity.getPart(0) instanceof ApplicationEDIFACTEntity);
        ApplicationEDIFACTEntity ediEntity = (ApplicationEDIFACTEntity) signedEntity.getPart(0);
        assertTrue("Unexpected content type for first mime part", ediEntity.getContentType().getValue().startsWith(AS2MediaType.APPLICATION_EDIFACT));
        assertFalse("First mime type set as main body of request", ediEntity.isMainBody());

        // Validate second mime part.
        assertTrue("Second mime part incorrect type ", signedEntity.getPart(1) instanceof ApplicationPkcs7SignatureEntity);
        ApplicationPkcs7SignatureEntity signatureEntity = (ApplicationPkcs7SignatureEntity) signedEntity.getPart(1);
        assertTrue("Unexpected content type for second mime part", signatureEntity.getContentType().getValue().startsWith(AS2MediaType.APPLICATION_PKCS7_SIGNATURE));
        assertFalse("First mime type set as main body of request", signatureEntity.isMainBody());
    }

    /**
     * Sends a SIGNED message and verifies that the multipart/signed entity's
     * signature validates against the signed data.
     */
    @Test
    public void signatureVerificationTest() throws Exception {
        AS2ClientConnection clientConnection = new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN,
                TARGET_HOST, TARGET_PORT);
        AS2ClientManager clientManager = new AS2ClientManager(clientConnection);

        HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME, AS2_NAME,
                AS2MessageStructure.SIGNED, ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII),
                null, certList.toArray(new Certificate[0]), signingKP.getPrivate(), DISPOSITION_NOTIFICATION_TO,
                SIGNED_RECEIPT_MIC_ALGORITHMS);

        HttpRequest request = httpContext.getRequest();
        assertTrue("Request does not contain entity", request instanceof BasicHttpEntityEnclosingRequest);
        HttpEntity entity = ((BasicHttpEntityEnclosingRequest)request).getEntity();
        assertNotNull("Request does not contain entity", entity);
        assertTrue("Unexpected request entity type", entity instanceof MultipartSignedEntity);
        MultipartSignedEntity signedEntity = (MultipartSignedEntity)entity;
        ApplicationEDIEntity ediMessageEntity = signedEntity.getSignedDataEntity();
        assertNotNull("Multipart signed entity does not contain EDI message entity", ediMessageEntity);
        ApplicationPkcs7SignatureEntity signatureEntity = signedEntity.getSignatureEntity();
        assertNotNull("Multipart signed entity does not contain signature entity", signatureEntity);

        // Validate Signature
        assertTrue("Signature is invalid", signedEntity.isValid());
    }

    /**
     * Sends a PLAIN message requesting a disposition notification (no signed-receipt
     * MIC algorithms) and retrieves the MDN response from the context.
     */
    @Test
    public void mdnMessageTest() throws Exception {
        AS2ClientConnection clientConnection = new AS2ClientConnection(AS2_VERSION, USER_AGENT, CLIENT_FQDN,
                TARGET_HOST, TARGET_PORT);
        AS2ClientManager clientManager = new AS2ClientManager(clientConnection);

        HttpCoreContext httpContext = clientManager.send(EDI_MESSAGE, REQUEST_URI, SUBJECT, FROM, AS2_NAME, AS2_NAME,
                AS2MessageStructure.PLAIN, ContentType.create(AS2MediaType.APPLICATION_EDIFACT, AS2Charset.US_ASCII),
                null, null, null, DISPOSITION_NOTIFICATION_TO, null);

        @SuppressWarnings("unused")
        HttpResponse response = httpContext.getResponse();
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.cache.Cache;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.typedef.P2;
import org.apache.ignite.testframework.MvccFeatureChecker;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.Transaction;
import org.jetbrains.annotations.Nullable;
import org.junit.Before;
import org.junit.Test;

import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.transactions.TransactionConcurrency.OPTIMISTIC;
import static org.apache.ignite.transactions.TransactionIsolation.REPEATABLE_READ;

/**
 * Basic store test.
 * <p>
 * Exercises read-through / write-through behavior of a cache backed by a single
 * shared {@link GridCacheTestStore}. The store records the name of the last
 * store method invoked; {@link #checkLastMethod(String)} asserts on that record,
 * so the exact ordering of cache operations in each test is significant.
 */
public abstract class GridCacheBasicStoreAbstractTest extends GridCommonAbstractTest {
    /** Cache store. Shared across all tests; reset in {@link #afterTest()}. */
    private static final GridCacheTestStore store = new GridCacheTestStore();

    /** Skips the whole fixture when MVCC does not support cache stores. */
    @Before
    public void beforeGridCacheBasicStoreAbstractTest() {
        MvccFeatureChecker.skipIfNotSupported(MvccFeatureChecker.Feature.CACHE_STORE);
    }

    /**
     * Starts the grid as part of fixture construction.
     */
    protected GridCacheBasicStoreAbstractTest() {
        super(true /*start grid. */);
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        store.resetTimestamp();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        jcache().clear();

        store.reset();
    }

    /** @return Caching mode. */
    protected abstract CacheMode cacheMode();

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        MvccFeatureChecker.skipIfNotSupported(MvccFeatureChecker.Feature.CACHE_STORE);

        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);

        CacheConfiguration cc = defaultCacheConfiguration();

        cc.setCacheMode(cacheMode());
        cc.setWriteSynchronizationMode(FULL_SYNC);
        cc.setAtomicityMode(atomicityMode());
        cc.setRebalanceMode(SYNC);
        // Read-through, write-through and load-previous-value are all enabled so
        // every test can observe store interaction through the shared test store.
        cc.setCacheStoreFactory(singletonFactory(store));
        cc.setReadThrough(true);
        cc.setWriteThrough(true);
        cc.setLoadPreviousValue(true);

        c.setCacheConfiguration(cc);

        return c;
    }

    /**
     * @return Cache atomicity mode.
     */
    protected CacheAtomicityMode atomicityMode() {
        return TRANSACTIONAL;
    }

    /**
     * Checks which store methods fire for puts, evicted reads, removes of
     * missing keys and gets of missing keys.
     *
     * @throws IgniteCheckedException If failed.
     */
    @Test
    public void testNotExistingKeys() throws IgniteCheckedException {
        IgniteCache<Integer, String> cache = jcache();

        Map<Integer, String> map = store.getMap();

        cache.put(100, "hacuna matata");

        assertEquals(1, map.size());

        // Eviction drops the on-heap entry but not the store copy.
        cache.localEvict(Collections.singleton(100));

        assertEquals(1, map.size());

        assertEquals("hacuna matata", cache.getAndRemove(100));

        assertTrue(map.isEmpty());

        store.resetLastMethod();

        assertNull(store.getLastMethod());

        // Removing an absent key still goes through to the store.
        cache.remove(200);

        assertEquals("remove", store.getLastMethod());

        // Getting an absent key triggers a read-through load.
        cache.get(300);

        assertEquals("load", store.getLastMethod());
    }

    /** @throws Exception If test fails. */
    @Test
    public void testWriteThrough() throws Exception {
        IgniteCache<Integer, String> cache = jcache();

        Map<Integer, String> map = store.getMap();

        assert map.isEmpty();

        if (atomicityMode() == TRANSACTIONAL) {
            try (Transaction tx = grid().transactions().txStart(OPTIMISTIC, REPEATABLE_READ)) {
                for (int i = 1; i <= 10; i++) {
                    cache.put(i, Integer.toString(i));

                    // Nothing hits the store until commit.
                    checkLastMethod(null);
                }

                tx.commit();
            }
        }
        else {
            Map<Integer, String> putMap = new HashMap<>();

            for (int i = 1; i <= 10; i++)
                putMap.put(i, Integer.toString(i));

            cache.putAll(putMap);
        }

        // Both the transactional commit and the atomic putAll surface as a
        // single batched putAll in the store.
        checkLastMethod("putAll");

        assert cache.size() == 10;

        for (int i = 1; i <= 10; i++) {
            String val = map.get(i);

            assert val != null;
            assert val.equals(Integer.toString(i));
        }

        store.resetLastMethod();

        if (atomicityMode() == TRANSACTIONAL) {
            try (Transaction tx = grid().transactions().txStart()) {
                for (int i = 1; i <= 10; i++) {
                    String val = cache.getAndRemove(i);

                    checkLastMethod(null);

                    assert val != null;
                    assert val.equals(Integer.toString(i));
                }

                tx.commit();

                checkLastMethod("removeAll");
            }
        }
        else {
            Set<Integer> keys = new HashSet<>();

            for (int i = 1; i <= 10; i++)
                keys.add(i);

            cache.removeAll(keys);

            checkLastMethod("removeAll");
        }

        assert map.isEmpty();
    }

    /** @throws Exception If test failed. */
    @Test
    public void testReadThrough() throws Exception {
        IgniteCache<Integer, String> cache = jcache();

        Map<Integer, String> map = store.getMap();

        assert map.isEmpty();

        if (atomicityMode() == TRANSACTIONAL) {
            try (Transaction tx = grid().transactions().txStart(OPTIMISTIC, REPEATABLE_READ)) {
                for (int i = 1; i <= 10; i++)
                    cache.put(i, Integer.toString(i));

                checkLastMethod(null);

                tx.commit();
            }
        }
        else {
            Map<Integer, String> putMap = new HashMap<>();

            for (int i = 1; i <= 10; i++)
                putMap.put(i, Integer.toString(i));

            cache.putAll(putMap);
        }

        checkLastMethod("putAll");

        for (int i = 1; i <= 10; i++) {
            String val = map.get(i);

            assert val != null;
            assert val.equals(Integer.toString(i));
        }

        // Clearing the cache leaves the store intact so subsequent gets must
        // load through.
        cache.clear();

        assert cache.localSize() == 0;
        assert cache.localSize() == 0;

        assert map.size() == 10;

        for (int i = 1; i <= 10; i++) {
            // Read through.
            String val = cache.get(i);

            checkLastMethod("load");

            assert val != null;
            assert val.equals(Integer.toString(i));
        }

        assert cache.size() == 10;

        cache.clear();

        assert cache.localSize() == 0;
        assert cache.localSize() == 0;

        assert map.size() == 10;

        Set<Integer> keys = new HashSet<>();

        for (int i = 1; i <= 10; i++)
            keys.add(i);

        // Read through.
        Map<Integer, String> vals = cache.getAll(keys);

        checkLastMethod("loadAll");

        assert vals != null;
        assert vals.size() == 10;

        for (int i = 1; i <= 10; i++) {
            String val = vals.get(i);

            assert val != null;
            assert val.equals(Integer.toString(i));
        }

        // Write through.
        cache.removeAll(keys);

        checkLastMethod("removeAll");

        assert cache.localSize() == 0;
        assert cache.localSize() == 0;

        assert map.isEmpty();
    }

    /** @throws Exception If test failed. */
    @Test
    public void testLoadCache() throws Exception {
        IgniteCache<Integer, String> cache = jcache();

        int cnt = 1;

        cache.loadCache(null, cnt);

        checkLastMethod("loadAllFull");

        assert !(cache.localSize() == 0);

        Map<Integer, String> map = cache.getAll(keySet(cache));

        assert map.size() == cnt : "Invalid map size: " + map.size();

        // Recheck last method to make sure
        // values were read from cache.
        checkLastMethod("loadAllFull");

        int start = store.getStart();

        for (int i = start; i < start + cnt; i++) {
            String val = map.get(i);

            assert val != null;
            assert val.equals(Integer.toString(i));
        }
    }

    /** @throws Exception If test failed. */
    @Test
    public void testLoadCacheWithPredicate() throws Exception {
        IgniteCache<Integer, String> cache = jcache();

        int cnt = 10;

        cache.loadCache(new P2<Integer, String>() {
            @Override public boolean apply(Integer key, String val) {
                // Accept only even numbers.
                return key % 2 == 0;
            }
        }, cnt);

        checkLastMethod("loadAllFull");

        Map<Integer, String> map = cache.getAll(keySet(cache));

        // The predicate filters out odd keys, so only half the range loads.
        assert map.size() == cnt / 2 : "Invalid map size: " + map.size();

        // Recheck last method to make sure
        // values were read from cache.
        checkLastMethod("loadAllFull");

        int start = store.getStart();

        for (int i = start; i < start + cnt; i++) {
            String val = map.get(i);

            if (i % 2 == 0) {
                assert val != null;
                assert val.equals(Integer.toString(i));
            }
            else
                assert val == null;
        }
    }

    /** @throws Exception If test failed. */
    @Test
    public void testReloadCache() throws Exception {
        IgniteCache<Integer, String> cache = jcache();

        // Empty store: full load brings in nothing.
        cache.loadCache(null, 0);

        assert cache.size() == 0;

        checkLastMethod("loadAllFull");

        Set<Integer> keys = new HashSet<>();

        for (int i = 1; i <= 10; i++) {
            keys.add(i);

            cache.put(i, Integer.toString(i));

            checkLastMethod("put");
        }

        assert cache.size() == 10;

        loadAll(cache, keys, true);

        checkLastMethod("loadAll");

        assert cache.size() == 10;

        store.resetLastMethod();

        for (int i = 1; i <= 10; i++) {
            String val = cache.get(i);

            assert val != null;
            assert val.equals(Integer.toString(i));

            // Make sure that value is coming from cache, not from store.
            checkLastMethod(null);
        }

        cache.clear();

        cache.loadCache(new P2<Integer, String>() {
            @Override public boolean apply(Integer k, String v) {
                // Only accept even numbers.
                return k % 2 == 0;
            }
        }, 10);

        checkLastMethod("loadAllFull");

        store.resetLastMethod();

        assertEquals(5, cache.size());

        for (Cache.Entry<Integer, String> entry : cache) {
            String val = entry.getValue();

            assert val != null;
            assert val.equals(Integer.toString(entry.getKey()));
            assert entry.getKey() % 2 == 0;

            // Make sure that value is coming from cache, not from store.
            checkLastMethod(null);
        }

        // Make sure that value is coming from cache, not from store.
        checkLastMethod(null);
    }

    /** @throws Exception If test failed. */
    @Test
    public void testReloadAll() throws Exception {
        IgniteCache<Integer, String> cache = jcache();

        assert cache.size() == 0;

        Map<Integer, String> vals = new HashMap<>();

        for (int i = 1; i <= 10; i++)
            vals.put(i, Integer.toString(i));

        // Store is empty, so the load brings nothing into the cache.
        loadAll(cache, vals.keySet(), true);

        assert cache.size() == 0 : "Cache is not empty.";

        checkLastMethod("loadAll");

        cache.putAll(vals);

        checkLastMethod("putAll");

        assert cache.size() == 10;

        loadAll(cache, vals.keySet(), true);

        checkLastMethod("loadAll");

        assert cache.size() == 10;

        store.resetLastMethod();

        for (int i = 1; i <= 10; i++) {
            String val = cache.get(i);

            assert val != null;
            assert val.equals(Integer.toString(i));

            // Make sure that value is coming from cache, not from store.
            checkLastMethod(null);
        }

        // Overwrite store contents behind the cache's back, then force reload.
        for (int i = 1; i <= 10; i++)
            store.write(new CacheEntryImpl<>(i, "reloaded-" + i));

        loadAll(cache, vals.keySet(), true);

        checkLastMethod("loadAll");

        store.resetLastMethod();

        assert cache.size() == 10;

        for (int i = 1; i <= 10; i++) {
            String val = cache.get(i);

            assert val != null;
            assert val.equals("reloaded-" + i);

            // Make sure that value is coming from cache, not from store.
            checkLastMethod(null);
        }
    }

    /** @throws Exception If test failed. */
    @Test
    public void testReload() throws Exception {
        IgniteCache<Integer, String> cache = jcache();

        assert cache.size() == 0;

        Map<Integer, String> vals = new HashMap<>();

        for (int i = 1; i <= 10; i++)
            vals.put(i, Integer.toString(i));

        loadAll(cache, vals.keySet(), true);

        assert cache.size() == 0;

        checkLastMethod("loadAll");

        cache.putAll(vals);

        checkLastMethod("putAll");

        assert cache.size() == 10;

        load(cache, 1, true);

        String val = cache.localPeek(1);

        assert val != null;
        assert "1".equals(val);

        checkLastMethod("load");

        assert cache.size() == 10;

        store.resetLastMethod();

        for (int i = 1; i <= 10; i++) {
            val = cache.get(i);

            assert val != null;
            assert val.equals(Integer.toString(i));

            // Make sure that value is coming from cache, not from store.
            checkLastMethod(null);
        }

        // Overwrite store contents, then reload key by key.
        for (int i = 1; i <= 10; i++)
            store.write(new CacheEntryImpl<>(i, "reloaded-" + i));

        store.resetLastMethod();

        assert cache.size() == 10;

        for (int i = 1; i <= 10; i++) {
            load(cache, i, true);

            val = cache.localPeek(i);

            checkLastMethod("load");

            assertEquals("reloaded-" + i, val);

            store.resetLastMethod();

            String cached = cache.get(i);

            assert cached != null;
            assert cached.equals(val) : "Cached value mismatch [expected=" + val + ", cached=" + cached + ']';

            // Make sure that value is coming from cache, not from store.
            checkLastMethod(null);
        }
    }

    /**
     * Asserts the store's last recorded method name.
     *
     * @param mtd Expected last method value ({@code null} means "no store call").
     */
    private void checkLastMethod(@Nullable String mtd) {
        String lastMtd = store.getLastMethod();

        if (mtd == null)
            assert lastMtd == null : "Last method must be null: " + lastMtd;
        else {
            assert lastMtd != null : "Last method must be not null";
            assert lastMtd.equals(mtd) : "Last method does not match [expected=" + mtd + ", lastMtd=" + lastMtd + ']';
        }
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vcs.changes.committed;

import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vcs.versionBrowser.ChangeBrowserSettings;
import com.intellij.openapi.vcs.versionBrowser.ChangesBrowserSettingsEditor;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.openapi.vcs.versionBrowser.DateFilterComponent;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.AsynchConsumer;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.*;
import java.util.List;

/**
 * A {@link CommittedChangesProvider} that aggregates the committed-changes
 * providers of several VCSes into one. Per-VCS settings are kept in a
 * {@link CompositeChangeBrowserSettings} map; several query methods
 * ({@link #getZipper()}, {@link #getCommittedChanges}, etc.) are deliberately
 * unsupported for the composite case.
 *
 * @author yole
 */
public class CompositeCommittedChangesProvider implements CommittedChangesProvider<CommittedChangeList, CompositeCommittedChangesProvider.CompositeChangeBrowserSettings> {
  private final Project myProject;
  private List<AbstractVcs> myBaseVcss = new ArrayList<AbstractVcs>();

  public CompositeCommittedChangesProvider(final Project project, final AbstractVcs... baseVcss) {
    myProject = project;
    myBaseVcss = new ArrayList<AbstractVcs>();
    Collections.addAll(myBaseVcss, baseVcss);
  }

  public CompositeCommittedChangesProvider.CompositeChangeBrowserSettings createDefaultSettings() {
    // One default settings object per underlying VCS; every VCS here is
    // expected to have a committed-changes provider.
    Map<AbstractVcs, ChangeBrowserSettings> map = new HashMap<AbstractVcs, ChangeBrowserSettings>();
    for(AbstractVcs vcs: myBaseVcss) {
      final CommittedChangesProvider provider = vcs.getCommittedChangesProvider();
      assert provider != null;
      map.put(vcs, provider.createDefaultSettings());
    }
    return new CompositeChangeBrowserSettings(map);
  }

  public ChangesBrowserSettingsEditor<CompositeCommittedChangesProvider.CompositeChangeBrowserSettings> createFilterUI(final boolean showDateFilter) {
    return new CompositeChangesBrowserSettingsEditor();
  }

  public CompositeRepositoryLocation getLocationFor(final FilePath root) {
    // Delegate to whichever VCS actually manages this root.
    final AbstractVcs vcs = ProjectLevelVcsManager.getInstance(myProject).getVcsFor(root);
    if (vcs != null) {
      final CommittedChangesProvider committedChangesProvider = vcs.getCommittedChangesProvider();
      if (committedChangesProvider != null) {
        return new CompositeRepositoryLocation(committedChangesProvider,
                                               CommittedChangesCache.getInstance(myProject).getLocationCache().getLocation(vcs, root, false));
      }
    }
    return null;
  }

  public RepositoryLocation getLocationFor(final FilePath root, final String repositoryPath) {
    // The repository path is ignored; location is resolved from the root only.
    return getLocationFor(root);
  }

  public VcsCommittedListsZipper getZipper() {
    throw new UnsupportedOperationException();
  }

  public List<CommittedChangeList> getCommittedChanges(CompositeCommittedChangesProvider.CompositeChangeBrowserSettings settings,
                                                       RepositoryLocation location, final int maxCount) throws VcsException {
    throw new UnsupportedOperationException();
  }

  public void loadCommittedChanges(CompositeChangeBrowserSettings settings,
                                   RepositoryLocation location,
                                   int maxCount,
                                   AsynchConsumer<CommittedChangeList> consumer) throws VcsException {
    throw new UnsupportedOperationException();
  }

  public ChangeListColumn[] getColumns() {
    // Only columns common to all providers are kept: date, description, name
    // and the change-list-number column family. LinkedHashSet preserves the
    // first-seen order while de-duplicating shared column instances.
    Set<ChangeListColumn> columns = new LinkedHashSet<ChangeListColumn>();
    for(AbstractVcs vcs: myBaseVcss) {
      final CommittedChangesProvider provider = vcs.getCommittedChangesProvider();
      assert provider != null;
      ChangeListColumn[] providerColumns = provider.getColumns();
      for(ChangeListColumn col: providerColumns) {
        if (col == ChangeListColumn.DATE || col == ChangeListColumn.DESCRIPTION || col == ChangeListColumn.NAME ||
            col instanceof ChangeListColumn.ChangeListNumberColumn) {
          columns.add(col);
        }
      }
    }
    return columns.toArray(new ChangeListColumn[columns.size()]);
  }

  @Nullable
  public VcsCommittedViewAuxiliary createActions(final DecoratorManager manager, final RepositoryLocation location) {
    // Collect popup/toolbar actions and dispose hooks from every VCS that
    // contributes a view; the tabbed pane is lazily created on first hit.
    JTabbedPane tabbedPane = null;
    List<AnAction> actions = null;
    List<AnAction> toolbarActions = null;
    final List<Runnable> calledOnDispose = new ArrayList<Runnable>();
    for (AbstractVcs baseVcs : myBaseVcss) {
      final CommittedChangesProvider provider = baseVcs.getCommittedChangesProvider();
      if (provider != null) {
        VcsCommittedViewAuxiliary auxiliary = provider.createActions(manager, location);
        if (auxiliary != null) {
          if (tabbedPane == null) {
            tabbedPane = new JTabbedPane();
            actions = new ArrayList<AnAction>();
            toolbarActions = new ArrayList<AnAction>();
          }
          actions.addAll(auxiliary.getPopupActions());
          toolbarActions.addAll(auxiliary.getToolbarActions());
          calledOnDispose.add(auxiliary.getCalledOnViewDispose());
        }
      }
    }
    if (tabbedPane != null) {
      final JPanel panel = new JPanel();
      panel.add(tabbedPane);
      return new VcsCommittedViewAuxiliary(actions, new Runnable() {
        public void run() {
          for (Runnable runnable : calledOnDispose) {
            runnable.run();
          }
        }
      }, toolbarActions);
    }
    return null;
  }

  public int getUnlimitedCountValue() {
    throw new UnsupportedOperationException();
  }

  @Override
  public Pair<CommittedChangeList, FilePath> getOneList(VirtualFile file, VcsRevisionNumber number) {
    throw new UnsupportedOperationException();
  }

  /**
   * Per-VCS settings holder. Tracks which of the aggregated VCSes are
   * currently enabled (all of them by default).
   */
  public static class CompositeChangeBrowserSettings extends ChangeBrowserSettings {
    private final Map<AbstractVcs, ChangeBrowserSettings> myMap;
    private final Set<AbstractVcs> myEnabledVcs = new HashSet<AbstractVcs>();

    public CompositeChangeBrowserSettings(final Map<AbstractVcs, ChangeBrowserSettings> map) {
      myMap = map;
      myEnabledVcs.addAll(map.keySet());
    }

    public void put(final AbstractVcs vcs, final ChangeBrowserSettings settings) {
      myMap.put(vcs, settings);
    }

    public ChangeBrowserSettings get(final AbstractVcs vcs) {
      return myMap.get(vcs);
    }

    public void setEnabledVcss(Collection<AbstractVcs> vcss) {
      myEnabledVcs.clear();
      myEnabledVcs.addAll(vcss);
    }

    public Collection<AbstractVcs> getEnabledVcss() {
      return myEnabledVcs;
    }
  }

  /**
   * Settings editor that stacks each VCS's own filter UI vertically, each
   * wrapped in a titled panel with an enable/disable checkbox, under a single
   * shared date filter.
   */
  private class CompositeChangesBrowserSettingsEditor implements ChangesBrowserSettingsEditor<CompositeChangeBrowserSettings> {
    private final JPanel myCompositePanel;
    private final DateFilterComponent myDateFilter;
    private CompositeChangeBrowserSettings mySettings;
    private final Map<AbstractVcs, ChangesBrowserSettingsEditor> myEditors = new HashMap<AbstractVcs, ChangesBrowserSettingsEditor>();
    private final Map<AbstractVcs, JCheckBox> myEnabledCheckboxes = new HashMap<AbstractVcs, JCheckBox>();

    public CompositeChangesBrowserSettingsEditor() {
      myCompositePanel = new JPanel();
      myCompositePanel.setLayout(new BoxLayout(myCompositePanel, BoxLayout.Y_AXIS));
      myDateFilter = new DateFilterComponent();
      myCompositePanel.add(myDateFilter.getPanel());
      for(AbstractVcs vcs: myBaseVcss) {
        final CommittedChangesProvider provider = vcs.getCommittedChangesProvider();
        assert provider != null;
        // Nested editors get no date filter of their own (false): the
        // composite editor owns the single shared one.
        final ChangesBrowserSettingsEditor editor = provider.createFilterUI(false);
        myEditors.put(vcs, editor);

        JPanel wrapperPane = new JPanel(new BorderLayout());
        wrapperPane.setBorder(BorderFactory.createTitledBorder(vcs.getDisplayName()));
        final JCheckBox checkBox = new JCheckBox(VcsBundle.message("composite.change.provider.include.vcs.checkbox", vcs.getDisplayName()),
                                                 true);
        checkBox.addActionListener(new ActionListener() {
          public void actionPerformed(final ActionEvent e) {
            updateVcsEnabled(checkBox, editor);
          }
        });
        wrapperPane.add(checkBox, BorderLayout.NORTH);
        myEnabledCheckboxes.put(vcs, checkBox);
        wrapperPane.add(editor.getComponent(), BorderLayout.CENTER);
        myCompositePanel.add(wrapperPane);
      }
    }

    private void updateVcsEnabled(JCheckBox checkBox, ChangesBrowserSettingsEditor editor) {
      UIUtil.setEnabled(editor.getComponent(), checkBox.isSelected(), true);
      if (checkBox.isSelected()) {
        // Re-enabling must let the editor restore its own control state.
        editor.updateEnabledControls();
      }
    }

    public JComponent getComponent() {
      return myCompositePanel;
    }

    public CompositeChangeBrowserSettings getSettings() {
      // Push the shared date filter into every per-VCS settings object and
      // record which VCSes are checkbox-enabled.
      Set<AbstractVcs> enabledVcss = new HashSet<AbstractVcs>();
      for(AbstractVcs vcs: myEditors.keySet()) {
        ChangeBrowserSettings settings = myEditors.get(vcs).getSettings();
        myDateFilter.saveValues(settings);
        mySettings.put(vcs, settings);
        if (myEnabledCheckboxes.get(vcs).isSelected()) {
          enabledVcss.add(vcs);
        }
      }
      mySettings.setEnabledVcss(enabledVcss);
      return mySettings;
    }

    public void setSettings(CompositeChangeBrowserSettings settings) {
      mySettings = settings;
      // The date filter is seeded from the first VCS's settings only; the
      // shared filter is then saved back to all of them in getSettings().
      boolean dateFilterInitialized = false;
      for(AbstractVcs vcs: myEditors.keySet()) {
        final ChangeBrowserSettings vcsSettings = mySettings.get(vcs);
        final ChangesBrowserSettingsEditor editor = myEditors.get(vcs);
        //noinspection unchecked
        editor.setSettings(vcsSettings);
        if (!dateFilterInitialized) {
          myDateFilter.initValues(vcsSettings);
          dateFilterInitialized = true;
        }
        final JCheckBox checkBox = myEnabledCheckboxes.get(vcs);
        checkBox.setSelected(settings.getEnabledVcss().contains(vcs));
        updateVcsEnabled(checkBox, editor);
      }
    }

    @Nullable
    public String validateInput() {
      // First nested-editor error wins.
      for(ChangesBrowserSettingsEditor editor: myEditors.values()) {
        String result = editor.validateInput();
        if (result != null) return result;
      }
      return null;
    }

    public void updateEnabledControls() {
      for(ChangesBrowserSettingsEditor editor: myEditors.values()) {
        editor.updateEnabledControls();
      }
    }

    public String getDimensionServiceKey() {
      @NonNls StringBuilder result = new StringBuilder();
      result.append("Composite");
      for(AbstractVcs vcs: myBaseVcss) {
        result.append(".").append(vcs.getDisplayName());
      }
      return result.toString();
    }
  }
}
/*
 * Copyright 2009 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

/**
 * Tests for {@link MinimizeExitPoints}: removal of redundant
 * {@code break}/{@code continue}/{@code return} statements at the ends of
 * blocks, loops and functions.
 *
 * @author johnlenz@google.com (John Lenz)
 */
public final class MinimizeExitPointsTest extends CompilerTestCase {

  @Override
  protected CompilerPass getProcessor(final Compiler compiler) {
    return new MinimizeExitPoints(compiler);
  }

  @Override
  protected int getNumRepetitions() {
    // The pass is expected to reach a fixed point in a single run.
    return 1;
  }

  /** Asserts the pass leaves {@code js} unchanged. */
  void foldSame(String js) {
    testSame(js);
  }

  /** Asserts the pass rewrites {@code js} into {@code expected}. */
  void fold(String js, String expected) {
    test(js, expected);
  }

  public void testBreakOptimization() throws Exception {
    fold("f:{if(true){a();break f;}else;b();}",
         "f:{if(true){a()}else{b()}}");
    fold("f:{if(false){a();break f;}else;b();break f;}",
         "f:{if(false){a()}else{b()}}");
    fold("f:{if(a()){b();break f;}else;c();}",
         "f:{if(a()){b();}else{c();}}");
    fold("f:{if(a()){b()}else{c();break f;}}",
         "f:{if(a()){b()}else{c();}}");
    fold("f:{if(a()){b();break f;}else;}",
         "f:{if(a()){b();}else;}");
    fold("f:{if(a()){break f;}else;}",
         "f:{if(a()){}else;}");

    fold("f:while(a())break f;",
         "f:while(a())break f");
    foldSame("f:for(x in a())break f");
    fold("f:{while(a())break;}",
         "f:{while(a())break;}");
    foldSame("f:{for(x in a())break}");

    fold("f:try{break f;}catch(e){break f;}",
         "f:try{}catch(e){}");
    fold("f:try{if(a()){break f;}else{break f;} break f;}catch(e){}",
         "f:try{if(a()){}else{}}catch(e){}");

    fold("f:g:break f",
         "");
    fold("f:g:{if(a()){break f;}else{break f;} break f;}",
         "f:g:{if(a()){}else{}}");
  }

  public void testFunctionReturnOptimization1() throws Exception {
    fold("function f(){return}",
         "function f(){}");
  }

  public void testFunctionReturnOptimization2() throws Exception {
    fold("function f(){if(a()){b();if(c())return;}}",
         "function f(){if(a()){b();if(c());}}");
    fold("function f(){if(x)return; x=3; return; }",
         "function f(){if(x); else x=3}");
    fold("function f(){if(true){a();return;}else;b();}",
         "function f(){if(true){a();}else{b();}}");
    fold("function f(){if(false){a();return;}else;b();return;}",
         "function f(){if(false){a();}else{b();}}");
    fold("function f(){if(a()){b();return;}else;c();}",
         "function f(){if(a()){b();}else{c();}}");
    fold("function f(){if(a()){b()}else{c();return;}}",
         "function f(){if(a()){b()}else{c();}}");
    fold("function f(){if(a()){b();return;}else;}",
         "function f(){if(a()){b();}else;}");
    fold("function f(){if(a()){return;}else{return;} return;}",
         "function f(){if(a()){}else{}}");
    fold("function f(){if(a()){return;}else{return;} b();}",
         "function f(){if(a()){}else{return;b()}}");
    fold("function f(){ if (x) return; if (y) return; if (z) return; w(); }",
         " function f() {" +
         "   if (x) {} else { if (y) {} else { if (z) {} else w(); }}" +
         " }");

    fold("function f(){while(a())return;}",
         "function f(){while(a())return}");
    foldSame("function f(){for(x in a())return}");
    fold("function f(){while(a())break;}",
         "function f(){while(a())break}");
    foldSame("function f(){for(x in a())break}");

    fold("function f(){try{return;}catch(e){throw 9;}finally{return}}",
         "function f(){try{}catch(e){throw 9;}finally{return}}");
    foldSame("function f(){try{throw 9;}finally{return;}}");
    fold("function f(){try{return;}catch(e){return;}}",
         "function f(){try{}catch(e){}}");
    fold("function f(){try{if(a()){return;}else{return;} return;}catch(e){}}",
         "function f(){try{if(a()){}else{}}catch(e){}}");

    fold("function f(){g:return}",
         "function f(){}");
    fold("function f(){g:if(a()){return;}else{return;} return;}",
         "function f(){g:if(a()){}else{}}");
    fold("function f(){try{g:if(a()){throw 9;} return;}finally{return}}",
         "function f(){try{g:if(a()){throw 9;}}finally{return}}");
  }

  public void testWhileContinueOptimization() throws Exception {
    fold("while(true){if(x)continue; x=3; continue; }",
         "while(true)if(x);else x=3");
    foldSame("while(true){a();continue;b();}");
    fold("while(true){if(true){a();continue;}else;b();}",
         "while(true){if(true){a();}else{b()}}");
    fold("while(true){if(false){a();continue;}else;b();continue;}",
         "while(true){if(false){a()}else{b();}}");
    fold("while(true){if(a()){b();continue;}else;c();}",
         "while(true){if(a()){b();}else{c();}}");
    fold("while(true){if(a()){b();}else{c();continue;}}",
         "while(true){if(a()){b();}else{c();}}");
    fold("while(true){if(a()){b();continue;}else;}",
         "while(true){if(a()){b();}else;}");
    fold("while(true){if(a()){continue;}else{continue;} continue;}",
         "while(true){if(a()){}else{}}");
    fold("while(true){if(a()){continue;}else{continue;} b();}",
         "while(true){if(a()){}else{continue;b();}}");

    fold("while(true)while(a())continue;",
         "while(true)while(a());");
    fold("while(true)for(x in a())continue",
         "while(true)for(x in a());");

    fold("while(true)while(a())break;",
         "while(true)while(a())break");
    fold("while(true)for(x in a())break",
         "while(true)for(x in a())break");

    fold("while(true){try{continue;}catch(e){continue;}}",
         "while(true){try{}catch(e){}}");
    fold("while(true){try{if(a()){continue;}else{continue;}" +
         "continue;}catch(e){}}",
         "while(true){try{if(a()){}else{}}catch(e){}}");

    fold("while(true){g:continue}",
         "while(true){}");
    // This case could be improved.
    fold("while(true){g:if(a()){continue;}else{continue;} continue;}",
         "while(true){g:if(a());else;}");
  }

  public void testDoContinueOptimization() throws Exception {
    fold("do{if(x)continue; x=3; continue; }while(true)",
         "do if(x); else x=3; while(true)");
    foldSame("do{a();continue;b()}while(true)");
    fold("do{if(true){a();continue;}else;b();}while(true)",
         "do{if(true){a();}else{b();}}while(true)");
    fold("do{if(false){a();continue;}else;b();continue;}while(true)",
         "do{if(false){a();}else{b();}}while(true)");
    fold("do{if(a()){b();continue;}else;c();}while(true)",
         "do{if(a()){b();}else{c()}}while(true)");
    fold("do{if(a()){b();}else{c();continue;}}while(true)",
         "do{if(a()){b();}else{c();}}while(true)");
    fold("do{if(a()){b();continue;}else;}while(true)",
         "do{if(a()){b();}else;}while(true)");
    fold("do{if(a()){continue;}else{continue;} continue;}while(true)",
         "do{if(a()){}else{}}while(true)");
    fold("do{if(a()){continue;}else{continue;} b();}while(true)",
         "do{if(a()){}else{continue; b();}}while(true)");

    fold("do{while(a())continue;}while(true)",
         "do while(a());while(true)");
    fold("do{for(x in a())continue}while(true)",
         "do for(x in a());while(true)");

    fold("do{while(a())break;}while(true)",
         "do while(a())break;while(true)");
    fold("do for(x in a())break;while(true)",
         "do for(x in a())break;while(true)");

    fold("do{try{continue;}catch(e){continue;}}while(true)",
         "do{try{}catch(e){}}while(true)");
    fold("do{try{if(a()){continue;}else{continue;}" +
         "continue;}catch(e){}}while(true)",
         "do{try{if(a()){}else{}}catch(e){}}while(true)");

    fold("do{g:continue}while(true)",
         "do{}while(true)");
    // This case could be improved.
    fold("do{g:if(a()){continue;}else{continue;} continue;}while(true)",
         "do{g:if(a());else;}while(true)");

    // A 'continue'/'break' as the last statement of a do-while(false) body is
    // removable because the loop runs exactly once either way.
    fold("do { foo(); continue; } while(false)",
         "do { foo(); } while(false)");
    fold("do { foo(); break; } while(false)",
         "do { foo(); } while(false)");
    fold("do{break}while(!new Date());",
         "do{}while(!new Date());");

    foldSame("do { foo(); switch (x) { case 1: break; default: f()}; } while(false)");
  }

  public void testForContinueOptimization() throws Exception {
    fold("for(x in y){if(x)continue; x=3; continue; }",
         "for(x in y)if(x);else x=3");
    foldSame("for(x in y){a();continue;b()}");
    fold("for(x in y){if(true){a();continue;}else;b();}",
         "for(x in y){if(true)a();else b();}");
    fold("for(x in y){if(false){a();continue;}else;b();continue;}",
         "for(x in y){if(false){a();}else{b()}}");
    fold("for(x in y){if(a()){b();continue;}else;c();}",
         "for(x in y){if(a()){b();}else{c();}}");
    fold("for(x in y){if(a()){b();}else{c();continue;}}",
         "for(x in y){if(a()){b();}else{c();}}");
    fold("for(x=0;x<y;x++){if(a()){b();continue;}else;}",
         "for(x=0;x<y;x++){if(a()){b();}else;}");
    fold("for(x=0;x<y;x++){if(a()){continue;}else{continue;} continue;}",
         "for(x=0;x<y;x++){if(a()){}else{}}");
    fold("for(x=0;x<y;x++){if(a()){continue;}else{continue;} b();}",
         "for(x=0;x<y;x++){if(a()){}else{continue; b();}}");

    fold("for(x=0;x<y;x++)while(a())continue;",
         "for(x=0;x<y;x++)while(a());");
    fold("for(x=0;x<y;x++)for(x in a())continue",
         "for(x=0;x<y;x++)for(x in a());");

    fold("for(x=0;x<y;x++)while(a())break;",
         "for(x=0;x<y;x++)while(a())break");
    foldSame("for(x=0;x<y;x++)for(x in a())break");

    fold("for(x=0;x<y;x++){try{continue;}catch(e){continue;}}",
         "for(x=0;x<y;x++){try{}catch(e){}}");
    fold("for(x=0;x<y;x++){try{if(a()){continue;}else{continue;}" +
         "continue;}catch(e){}}",
         "for(x=0;x<y;x++){try{if(a()){}else{}}catch(e){}}");

    fold("for(x=0;x<y;x++){g:continue}",
         "for(x=0;x<y;x++){}");
    fold("for(x=0;x<y;x++){g:if(a()){continue;}else{continue;} continue;}",
         "for(x=0;x<y;x++){g:if(a());else;}");
  }

  public void testCodeMotionDoesntBreakFunctionHoisting() throws Exception {
    setAcceptedLanguage(CompilerOptions.LanguageMode.ECMASCRIPT6);
    fold("function f() { if (x) return; foo(); function foo() {} }",
         "function f() { if (x); else { function foo() {} foo(); } }");
  }

  public void testDontRemoveBreakInTryFinally() throws Exception {
    foldSame("function f() {b:try{throw 9} finally {break b} return 1;}");
  }

  /**
   * See https://github.com/google/closure-compiler/issues/554
   * The 'break' prevents the 'b=false' from being evaluated.
   * If we fold the do-while to 'do;while(b=false)' the code will
   * be incorrect.
   */
  public void testDontFoldBreakInDoWhileIfConditionHasSideEffects() {
    foldSame("var b=true;do{break}while(b=false);");
  }

  public void testSwitchExitPoints1() {
    fold(
        "switch (x) { case 1: f(); break; }",
        "switch (x) { case 1: f(); }");
    fold(
        "switch (x) { case 1: f(); break; case 2: g(); break; }",
        "switch (x) { case 1: f(); break; case 2: g(); }");
    fold(
        "switch (x) { case 1: if (x) { f(); break; } break; default: g(); break; }",
        "switch (x) { case 1: if (x) { f(); } break; default: g(); }");
  }
}
package water.rapids;

import water.DKV;
import water.H2O;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.VecUtils;

import java.util.ArrayList;
import java.util.Arrays;

/** A collection of Strings only.  This is a syntactic form only, and never
 *  executes and never gets on the execution stack. */
public class ASTStrList extends ASTParameter {
  public String[] _strs;        // parsed string constants, in source order

  // Parse a Rapids string list '["a" "b" ...]'; the opening '[' has already
  // been consumed by the caller.
  ASTStrList( Exec e ) {
    ArrayList<String> strs = new ArrayList<>();
    while( true ) {
      char c = e.skipWS();
      if( c==']' ) break;                        // end of list
      if( e.isQuote(c) ) strs.add(e.match(c));   // quoted string element
      else throw new IllegalArgumentException("Expecting the start of a string");
    }
    e.xpeek(']');
    _strs = strs.toArray(new String[strs.size()]);
  }
  // Strange count of args, due to custom parsing
  @Override int nargs() { return -1; }
  // This is a special syntactic form; the string-list never executes and never
  // hits the execution stack
  @Override public Val exec(Env env) { throw H2O.fail(); }
  @Override public String str() { return Arrays.toString(_strs); }
  // Select columns by number or String.
  // Maps each listed name to its index in 'names'; throws if any is missing.
  @Override int[] columns( String[] names ) {
    int[] idxs = new int[_strs.length];
    for( int i=0; i < _strs.length; i++ ) {
      int idx = idxs[i] = water.util.ArrayUtils.find(names,_strs[i]);
      if( idx == -1 ) throw new IllegalArgumentException("Column "+_strs[i]+" not found");
    }
    return idxs;
  }
}

/** Assign column names */
class ASTColNames extends ASTPrim {
  @Override public String[] args() { return new String[]{"ary", "cols", "names"}; }
  @Override int nargs() { return 1+3; } // (colnames frame [#cols] ["names"])
  @Override public String str() { return "colnames="; }
  @Override Val apply( Env env, Env.StackHelp stk, AST asts[] ) {
    Frame fr = stk.track(asts[1].exec(env)).getFrame();
    if( asts[2] instanceof ASTNumList ) {
      // Rename a list of columns: the index list and name list must pair up.
      if( !(asts[3] instanceof ASTStrList) )
        throw new IllegalArgumentException("Column naming requires a string-list, but found a "+asts[3].getClass());
      ASTNumList cols = ((ASTNumList)asts[2]);
      ASTStrList nams = ((ASTStrList)asts[3]);
      int d[] = cols.expand4();
      if( d.length != nams._strs.length )
        throw new IllegalArgumentException("Must have the same number of column choices as names");
      for( int i=0; i<d.length; i++ )
        fr._names[d[i]] = nams._strs[i];
    } else if( (asts[2] instanceof ASTNum) ) {
      // Rename a single column by index.
      int col = (int)(asts[2].exec(env).getNum());
      String name = asts[3].exec(env).getStr();
      fr._names[col] = name;
    } else
      throw new IllegalArgumentException("Column naming requires a number-list, but found a "+asts[2].getClass());
    if( fr._key != null ) DKV.put(fr); // Update names in DKV
    return new ValFrame(fr);
  }
}

/** Convert to StringVec */
class ASTAsCharacter extends ASTPrim {
  @Override public String[] args() { return new String[]{"ary"}; }
  @Override int nargs() { return 1+1; } // (as.character col)
  @Override public String str() { return "as.character"; }
  @Override Val apply( Env env, Env.StackHelp stk, AST asts[] ) {
    Frame ary = stk.track(asts[1].exec(env)).getFrame();
    Vec[] nvecs = new Vec[ary.numCols()];
    Vec vv;
    for(int c=0;c<nvecs.length;++c) {
      vv = ary.vec(c);
      try {
        nvecs[c] = vv.toStringVec();
      } catch (Exception e) {
        // Conversion failed part-way: free the vecs made so far, then rethrow.
        VecUtils.deleteVecs(nvecs, c);
        throw e;
      }
    }
    return new ValFrame(new Frame(ary._names, nvecs));
  }
}

/** Convert to a factor/categorical */
class ASTAsFactor extends ASTPrim {
  @Override public String[] args() { return new String[]{"ary"}; }
  @Override int nargs() { return 1+1; } // (as.factor col)
  @Override public String str() { return "as.factor"; }
  @Override Val apply( Env env, Env.StackHelp stk, AST asts[] ) {
    Frame ary = stk.track(asts[1].exec(env)).getFrame();
    Vec[] nvecs = new Vec[ary.numCols()];
    // Type check - prescreen for correct types before converting anything
    for (Vec v : ary.vecs())
      if (!(v.isCategorical() || v.isString()|| v.isNumeric()))
        throw new IllegalArgumentException("asfactor() requires a string, categorical, or numeric column. "
            +"Received "+ary.anyVec().get_type_str()
            +". Please convert column to a string or categorical first.");
    Vec vv;
    for(int c=0;c<nvecs.length;++c) {
      vv = ary.vec(c);
      try {
        nvecs[c] = vv.toCategoricalVec();
      } catch (Exception e) {
        // Free any vecs created before the failure, then rethrow.
        VecUtils.deleteVecs(nvecs, c);
        throw e;
      }
    }
    return new ValFrame(new Frame(ary._names, nvecs));
  }
}

/** Convert to a numeric */
class ASTAsNumeric extends ASTPrim {
  @Override public String[] args() { return new String[]{"ary"}; }
  @Override int nargs() { return 1+1; } // (as.numeric col)
  @Override public String str() { return "as.numeric"; }
  @Override Val apply( Env env, Env.StackHelp stk, AST asts[] ) {
    Frame fr = stk.track(asts[1].exec(env)).getFrame();
    Vec[] nvecs = new Vec[fr.numCols()];
    Vec vv;
    for(int c=0;c<nvecs.length;++c) {
      vv = fr.vec(c);
      try {
        nvecs[c] = vv.toNumericVec();
      } catch (Exception e) {
        // Free any vecs created before the failure, then rethrow.
        VecUtils.deleteVecs(nvecs, c);
        throw e;
      }
    }
    return new ValFrame(new Frame(fr._names, nvecs));
  }
}

/** Is String Vec?
*/
class ASTIsCharacter extends ASTPrim {
  @Override public String[] args() { return new String[]{"ary"}; }
  @Override int nargs() { return 1+1; } // (is.character col)
  @Override public String str() { return "is.character"; }
  // Returns one 0/1 flag per column: 1 if the column is a string column.
  @Override ValNums apply( Env env, Env.StackHelp stk, AST asts[] ) {
    Frame fr = stk.track(asts[1].exec(env)).getFrame();
    if( fr.numCols() == 1 ) return new ValNums(new double[]{fr.anyVec().isString()?1:0});
    double ds[] = new double[fr.numCols()];
    for( int i=0; i<fr.numCols(); i++ )
      ds[i] = fr.vec(i).isString() ? 1 : 0;
    return new ValNums(ds);
  }
}

/** Is a factor/categorical? */
class ASTIsFactor extends ASTPrim {
  @Override public String[] args() { return new String[]{"ary"}; }
  @Override int nargs() { return 1+1; } // (is.factor col)
  @Override public String str() { return "is.factor"; }
  // Returns one 0/1 flag per column: 1 if the column is categorical.
  @Override ValNums apply( Env env, Env.StackHelp stk, AST asts[] ) {
    Frame fr = stk.track(asts[1].exec(env)).getFrame();
    if( fr.numCols() == 1 ) return new ValNums(new double[]{fr.anyVec().isCategorical()?1:0});
    double ds[] = new double[fr.numCols()];
    for( int i=0; i<fr.numCols(); i++ )
      ds[i] = fr.vec(i).isCategorical() ? 1 : 0;
    return new ValNums(ds);
  }
}

/** Is a numeric? */
class ASTIsNumeric extends ASTPrim {
  @Override public String[] args() { return new String[]{"ary"}; }
  @Override int nargs() { return 1+1; } // (is.numeric col)
  @Override public String str() { return "is.numeric"; }
  // Returns one 0/1 flag per column: 1 if the column is numeric.
  @Override ValNums apply( Env env, Env.StackHelp stk, AST asts[] ) {
    Frame fr = stk.track(asts[1].exec(env)).getFrame();
    if( fr.numCols() == 1 ) return new ValNums(new double[]{fr.anyVec().isNumeric()?1:0});
    double ds[] = new double[fr.numCols()];
    for( int i=0; i<fr.numCols(); i++ )
      ds[i] = fr.vec(i).isNumeric() ? 1 : 0;
    return new ValNums(ds);
  }
}

/** Any columns factor/categorical?
*/ class ASTAnyFactor extends ASTPrim { @Override public String[] args() { return new String[]{"ary"}; } @Override int nargs() { return 1+1; } // (any.factor frame) @Override public String str() { return "any.factor"; } @Override ValNum apply( Env env, Env.StackHelp stk, AST asts[] ) { Frame fr = stk.track(asts[1].exec(env)).getFrame(); for( Vec vec : fr.vecs() ) if( vec.isCategorical()) return new ValNum(1); return new ValNum(0); } }
/*-
 * Copyright (c) 2005, 2020 Oracle and/or its affiliates.  All rights reserved.
 *
 * See the file EXAMPLES-LICENSE for license information.
 *
 */

package db.txn;

import com.sleepycat.bind.EntryBinding;
import com.sleepycat.bind.serial.StoredClassCatalog;
import com.sleepycat.bind.serial.SerialBinding;
import com.sleepycat.bind.tuple.StringBinding;

import com.sleepycat.db.Cursor;
import com.sleepycat.db.CursorConfig;
import com.sleepycat.db.Database;
import com.sleepycat.db.DatabaseEntry;
import com.sleepycat.db.DatabaseException;
import com.sleepycat.db.DeadlockException;
import com.sleepycat.db.Environment;
import com.sleepycat.db.LockMode;
import com.sleepycat.db.OperationStatus;
import com.sleepycat.db.Transaction;

import java.util.Random;

// Example writer thread: repeatedly writes batches of records under
// transaction protection, demonstrating deadlock detection and retry.
public class DBWriter extends Thread {
    private Database myDb = null;                          // target database
    private Environment myEnv = null;                      // owning environment
    private EntryBinding<PayloadData> dataBinding = null;  // serializes PayloadData values
    private final Random generator;                        // per-thread payload randomness
    private boolean passTxn = false;                       // hand our txn to countRecords()?

    private static final int MAX_RETRY = 20;               // deadlock retry budget per txn

    // Fixed key set; each transaction rewrites all ten keys.
    private static String[] keys = {"key 1", "key 2", "key 3",
                                    "key 4", "key 5", "key 6",
                                    "key 7", "key 8", "key 9",
                                    "key 10"};

    // Constructor. Get our DB handles from here
    // This constructor allows us to indicate whether the
    // txn handle should be handed to countRecords()
    DBWriter(Environment env, Database db, StoredClassCatalog scc,
             boolean passtxn)
        throws DatabaseException {
        this.generator = new Random();
        myDb = db;
        myEnv = env;
        dataBinding = new SerialBinding<>(scc, PayloadData.class);
        passTxn = passtxn;
    }

    // Constructor. Get our DB handles from here
    DBWriter(Environment env, Database db, StoredClassCatalog scc)
        throws DatabaseException {
        this.generator = new Random();
        myDb = db;
        myEnv = env;
        dataBinding = new SerialBinding<>(scc, PayloadData.class);
    }

    // Thread method that writes a series of records
    // to the database using transaction protection.
    // Deadlock handling is demonstrated here.
    @Override
    public void run () {
        Transaction txn = null;

        // Perform 50 transactions
        for (int i=0; i<50; i++) {

            boolean retry = true;
            int retry_count = 0;
            // while loop is used for deadlock retries
            while (retry) {
                // try block used for deadlock detection and
                // general db exception handling
                try {

                    // Get a transaction
                    txn = myEnv.beginTransaction(null, null);

                    // Write 10 records to the db
                    // for each transaction
                    for (int j = 0; j < 10; j++) {
                        // Get the key
                        DatabaseEntry key = new DatabaseEntry();
                        StringBinding.stringToEntry(keys[j], key);

                        // Get the data
                        PayloadData pd = new PayloadData(i+j, getName(),
                            generator.nextDouble());
                        DatabaseEntry data = new DatabaseEntry();
                        dataBinding.objectToEntry(pd, data);

                        // Do the put
                        myDb.put(txn, key, data);
                    }

                    // commit
                    System.out.println(getName() + " : committing txn : " + i);

                    // This code block allows us to decide if txn handle is
                    // passed to countRecords()
                    //
                    // TxnGuideInMemory requires a txn handle be handed to
                    // countRecords(). The code self deadlocks if you don't.
                    // TxnGuide has no such requirement because it supports
                    // uncommitted reads.
                    Transaction txnHandle = null;
                    if (passTxn) { txnHandle = txn; }

                    System.out.println(getName() + " : Found " +
                        countRecords(txnHandle) + " records in the database.");

                    // NOTE(review): if commit() throws, txn stays non-null and
                    // the finally block below aborts it — confirm that abort
                    // after a failed commit is the intended cleanup here.
                    try {
                        txn.commit();
                        txn = null;      // success: prevent abort in finally
                    } catch (DatabaseException e) {
                        System.err.println("Error on txn commit: " +
                            e.toString());
                    }
                    retry = false;

                } catch (DeadlockException de) {
                    System.out.println("################# " + getName() +
                        " : caught deadlock");
                    // retry if necessary
                    if (retry_count < MAX_RETRY) {
                        System.err.println(getName() +
                            " : Retrying operation.");
                        retry = true;
                        retry_count++;
                    } else {
                        System.err.println(getName() +
                            " : out of retries. Giving up.");
                        retry = false;
                    }
                } catch (DatabaseException e) {
                    // abort and don't retry
                    retry = false;
                    System.err.println(getName() +
                        " : caught exception: " + e.toString());
                    System.err.println(getName() +
                        " : errno: " + e.getErrno());
                    e.printStackTrace();
                } finally {
                    // Abort any transaction still open (deadlock, error, or
                    // failed commit); the key/value pairs are rewritten on retry.
                    if (txn != null) {
                        try {
                            txn.abort();
                        } catch (Exception e) {
                            System.err.println("Error aborting transaction: " +
                                e.toString());
                            e.printStackTrace();
                        }
                    }
                }
            }
        }
    }

    // This simply counts the number of records contained in the
    // database and returns the result. You can use this method
    // in three ways:
    //
    // First call it with an active txn handle.
    //
    // Secondly, configure the cursor for dirty reads
    //
    // Third, call countRecords AFTER the writer has committed
    // its transaction.
    //
    // If you do none of these things, the writer thread will
    // self-deadlock.
    //
    // Note that this method exists only for illustrative purposes.
    // A more straight-forward way to count the number of records in
    // a database is to use the Database.getStats() method.
    private int countRecords(Transaction txn) throws DatabaseException {
        DatabaseEntry key = new DatabaseEntry();
        DatabaseEntry data = new DatabaseEntry();
        int count = 0;
        Cursor cursor = null;

        try {
            // Get the cursor
            CursorConfig cc = new CursorConfig();
            // setReadUncommitted is ignored if the database was not
            // opened for uncommitted read support. TxnGuide opens
            // its database in this way, TxnGuideInMemory does not.
            cc.setReadUncommitted(true);
            cursor = myDb.openCursor(txn, cc);
            while (cursor.getNext(key, data, LockMode.DEFAULT) ==
                    OperationStatus.SUCCESS) {
                count++;
            }
        } finally {
            // Always release the cursor, even on exception.
            if (cursor != null) {
                cursor.close();
            }
        }

        return count;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.rex; import org.apache.calcite.avatica.util.ByteString; import org.apache.calcite.avatica.util.DateTimeUtils; import org.apache.calcite.avatica.util.TimeUnit; import org.apache.calcite.config.CalciteSystemProperty; import org.apache.calcite.linq4j.function.Functions; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.runtime.FlatLists; import org.apache.calcite.runtime.GeoFunctions; import org.apache.calcite.runtime.Geometries; import org.apache.calcite.sql.SqlCollation; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.parser.SqlParserUtil; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.util.CompositeList; import org.apache.calcite.util.ConversionUtil; import org.apache.calcite.util.DateString; import org.apache.calcite.util.Litmus; import org.apache.calcite.util.NlsString; import org.apache.calcite.util.Sarg; import org.apache.calcite.util.TimeString; import org.apache.calcite.util.TimestampString; import 
org.apache.calcite.util.Util; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import org.checkerframework.checker.initialization.qual.UnknownInitialization; import org.checkerframework.checker.nullness.qual.Nullable; import org.checkerframework.checker.nullness.qual.PolyNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull; import org.checkerframework.dataflow.qual.Pure; import java.io.PrintWriter; import java.math.BigDecimal; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.TimeZone; import static org.apache.calcite.linq4j.Nullness.castNonNull; import static org.apache.calcite.rel.type.RelDataTypeImpl.NON_NULLABLE_SUFFIX; import static java.util.Objects.requireNonNull; /** * Constant value in a row-expression. * * <p>There are several methods for creating literals in {@link RexBuilder}: * {@link RexBuilder#makeLiteral(boolean)} and so forth.</p> * * <p>How is the value stored? In that respect, the class is somewhat of a black * box. There is a {@link #getValue} method which returns the value as an * object, but the type of that value is implementation detail, and it is best * that your code does not depend upon that knowledge. It is better to use * task-oriented methods such as {@link #getValue2} and * {@link #toJavaString}.</p> * * <p>The allowable types and combinations are:</p> * * <table> * <caption>Allowable types for RexLiteral instances</caption> * <tr> * <th>TypeName</th> * <th>Meaning</th> * <th>Value type</th> * </tr> * <tr> * <td>{@link SqlTypeName#NULL}</td> * <td>The null value. 
It has its own special type.</td> * <td>null</td> * </tr> * <tr> * <td>{@link SqlTypeName#BOOLEAN}</td> * <td>Boolean, namely <code>TRUE</code>, <code>FALSE</code> or <code> * UNKNOWN</code>.</td> * <td>{@link Boolean}, or null represents the UNKNOWN value</td> * </tr> * <tr> * <td>{@link SqlTypeName#DECIMAL}</td> * <td>Exact number, for example <code>0</code>, <code>-.5</code>, <code> * 12345</code>.</td> * <td>{@link BigDecimal}</td> * </tr> * <tr> * <td>{@link SqlTypeName#DOUBLE}</td> * <td>Approximate number, for example <code>6.023E-23</code>.</td> * <td>{@link BigDecimal}</td> * </tr> * <tr> * <td>{@link SqlTypeName#DATE}</td> * <td>Date, for example <code>DATE '1969-04'29'</code></td> * <td>{@link Calendar}; * also {@link Calendar} (UTC time zone) * and {@link Integer} (days since POSIX epoch)</td> * </tr> * <tr> * <td>{@link SqlTypeName#TIME}</td> * <td>Time, for example <code>TIME '18:37:42.567'</code></td> * <td>{@link Calendar}; * also {@link Calendar} (UTC time zone) * and {@link Integer} (milliseconds since midnight)</td> * </tr> * <tr> * <td>{@link SqlTypeName#TIMESTAMP}</td> * <td>Timestamp, for example <code>TIMESTAMP '1969-04-29 * 18:37:42.567'</code></td> * <td>{@link TimestampString}; * also {@link Calendar} (UTC time zone) * and {@link Long} (milliseconds since POSIX epoch)</td> * </tr> * <tr> * <td>{@link SqlTypeName#INTERVAL_DAY}, * {@link SqlTypeName#INTERVAL_DAY_HOUR}, * {@link SqlTypeName#INTERVAL_DAY_MINUTE}, * {@link SqlTypeName#INTERVAL_DAY_SECOND}, * {@link SqlTypeName#INTERVAL_HOUR}, * {@link SqlTypeName#INTERVAL_HOUR_MINUTE}, * {@link SqlTypeName#INTERVAL_HOUR_SECOND}, * {@link SqlTypeName#INTERVAL_MINUTE}, * {@link SqlTypeName#INTERVAL_MINUTE_SECOND}, * {@link SqlTypeName#INTERVAL_SECOND}</td> * <td>Interval, for example <code>INTERVAL '4:3:2' HOUR TO SECOND</code></td> * <td>{@link BigDecimal}; * also {@link Long} (milliseconds)</td> * </tr> * <tr> * <td>{@link SqlTypeName#INTERVAL_YEAR}, * {@link SqlTypeName#INTERVAL_YEAR_MONTH}, * 
{@link SqlTypeName#INTERVAL_MONTH}</td>
 * <td>Interval, for example <code>INTERVAL '2-3' YEAR TO MONTH</code></td>
 * <td>{@link BigDecimal};
 * also {@link Integer} (months)</td>
 * </tr>
 * <tr>
 * <td>{@link SqlTypeName#CHAR}</td>
 * <td>Character constant, for example <code>'Hello, world!'</code>, <code>
 * ''</code>, <code>_N'Bonjour'</code>, <code>_ISO-8859-1'It''s superman!'
 * COLLATE SHIFT_JIS$ja_JP$2</code>. These are always CHAR, never VARCHAR.</td>
 * <td>{@link NlsString};
 * also {@link String}</td>
 * </tr>
 * <tr>
 * <td>{@link SqlTypeName#BINARY}</td>
 * <td>Binary constant, for example <code>X'7F34'</code>. (The number of hexits
 * must be even; see above.) These constants are always BINARY, never
 * VARBINARY.</td>
 * <td>{@link ByteBuffer};
 * also {@code byte[]}</td>
 * </tr>
 * <tr>
 * <td>{@link SqlTypeName#SYMBOL}</td>
 * <td>A symbol is a special type used to make parsing easier; it is not part of
 * the SQL standard, and is not exposed to end-users. It is used to hold a flag,
 * such as the LEADING flag in a call to the function <code>
 * TRIM([LEADING|TRAILING|BOTH] chars FROM string)</code>.</td>
 * <td>An enum class</td>
 * </tr>
 * </table>
 */
public class RexLiteral extends RexNode {
  //~ Instance fields --------------------------------------------------------

  /**
   * The value of this literal. Must be consistent with its type, as per
   * {@link #valueMatchesType}. For example, you can't store an
   * {@link Integer} value here just because you feel like it -- all numbers are
   * represented by a {@link BigDecimal}. But since this field is private, it
   * doesn't really matter how the values are stored.
   */
  private final @Nullable Comparable value;

  /**
   * The real type of this literal, as reported by {@link #getType}.
   */
  private final RelDataType type;

  // TODO jvs 26-May-2006: Use SqlTypeFamily instead; it exists
  // for exactly this purpose (to avoid the confusion which results
  // from overloading SqlTypeName).
  /**
   * An indication of the broad type of this literal -- even if its type isn't
   * a SQL type. Sometimes this will be different than the SQL type; for
   * example, all exact numbers, including integers have typeName
   * {@link SqlTypeName#DECIMAL}. See {@link #valueMatchesType} for the
   * definitive story.
   */
  private final SqlTypeName typeName;

  // All time units in declaration order; sub-listed by getTimeUnits().
  private static final ImmutableList<TimeUnit> TIME_UNITS =
      ImmutableList.copyOf(TimeUnit.values());

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates a <code>RexLiteral</code>.
   *
   * <p>Enforces the invariants that the value matches the broad typeName,
   * that only a nullable type may hold a null value, and that ANY is never
   * a literal type. The digest is computed eagerly and cached.
   */
  RexLiteral(
      @Nullable Comparable value,
      RelDataType type,
      SqlTypeName typeName) {
    this.value = value;
    this.type = requireNonNull(type, "type");
    this.typeName = requireNonNull(typeName, "typeName");
    Preconditions.checkArgument(valueMatchesType(value, typeName, true));
    Preconditions.checkArgument((value == null) == type.isNullable());
    Preconditions.checkArgument(typeName != SqlTypeName.ANY);
    this.digest = computeDigest(RexDigestIncludeType.OPTIONAL);
  }

  //~ Methods ----------------------------------------------------------------

  /**
   * Returns a string which concisely describes the definition of this
   * rex literal. Two literals are equivalent if and only if their digests are the same.
   *
   * <p>The digest does not contain the expression's identity, but does include the identity
   * of children.
   *
   * <p>Technically speaking 1:INT differs from 1:FLOAT, so we need data type in the literal's
   * digest, however we want to avoid extra verbosity of the {@link RelNode#getDigest()} for
   * readability purposes, so we omit type info in certain cases.
* For instance, 1:INT becomes 1 (INT is implied by default), however 1:BIGINT always holds
   * the type
   *
   * <p>Here's a non-exhaustive list of the "well known cases":
   * <ul><li>Hide "NOT NULL" for not null literals
   * <li>Hide INTEGER, BOOLEAN, SYMBOL, TIME(0), TIMESTAMP(0), DATE(0) types
   * <li>Hide collation when it matches IMPLICIT/COERCIBLE
   * <li>Hide charset when it matches default
   * <li>Hide CHAR(xx) when literal length is equal to the precision of the type.
   * In other words, use 'Bob' instead of 'Bob':CHAR(3)
   * <li>Hide BOOL for AND/OR arguments. In other words, AND(true, null) means
   * null is BOOL.
   * <li>Hide types for literals in simple binary operations (e.g. +, -, *, /,
   * comparison) when type of the other argument is clear.
   * See {@link RexCall#computeDigest(boolean)}
   * For instance: =(true. null) means null is BOOL. =($0, null) means the type
   * of null matches the type of $0.
   * </ul>
   *
   * @param includeType whether the digest should include type or not
   * @return digest
   */
  @RequiresNonNull({"typeName", "type"})
  public final String computeDigest(
      @UnknownInitialization RexLiteral this,
      RexDigestIncludeType includeType) {
    if (includeType == RexDigestIncludeType.OPTIONAL) {
      if (digest != null) {
        // digest is initialized with OPTIONAL, so cached value matches for
        // includeType=OPTIONAL as well
        return digest;
      }
      // Compute whether we should include the type or not
      includeType = digestIncludesType();
    } else if (digest != null && includeType == digestIncludesType()) {
      // The digest is always computed with includeType=OPTIONAL
      // If it happened to omit the type, we want to optimize computeDigest(NO_TYPE) as well
      // If the digest includes the type, we want to optimize computeDigest(ALWAYS)
      return digest;
    }
    return toJavaString(value, typeName, type, includeType);
  }

  /**
   * Returns true if {@link RexDigestIncludeType#OPTIONAL} digest would include data type.
*
   * @see RexCall#computeDigest(boolean)
   * @return true if {@link RexDigestIncludeType#OPTIONAL} digest would include data type
   */
  @RequiresNonNull("type")
  RexDigestIncludeType digestIncludesType(
      @UnknownInitialization RexLiteral this
  ) {
    return shouldIncludeType(value, type);
  }

  /** Returns whether a value is appropriate for its type. (We have rules about
   * these things!) */
  public static boolean valueMatchesType(
      @Nullable Comparable value,
      SqlTypeName typeName,
      boolean strict) {
    if (value == null) {
      return true;
    }
    switch (typeName) {
    case BOOLEAN:
      // Unlike SqlLiteral, we do not allow boolean null.
      return value instanceof Boolean;
    case NULL:
      return false; // value should have been null
    case INTEGER: // not allowed -- use Decimal
    case TINYINT:
    case SMALLINT:
      if (strict) {
        throw Util.unexpected(typeName);
      }
      // fall through
    case DECIMAL:
    case DOUBLE:
    case FLOAT:
    case REAL:
    case BIGINT:
      return value instanceof BigDecimal;
    case DATE:
      return value instanceof DateString;
    case TIME:
      return value instanceof TimeString;
    case TIME_WITH_LOCAL_TIME_ZONE:
      return value instanceof TimeString;
    case TIMESTAMP:
      return value instanceof TimestampString;
    case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
      return value instanceof TimestampString;
    case INTERVAL_YEAR:
    case INTERVAL_YEAR_MONTH:
    case INTERVAL_MONTH:
    case INTERVAL_DAY:
    case INTERVAL_DAY_HOUR:
    case INTERVAL_DAY_MINUTE:
    case INTERVAL_DAY_SECOND:
    case INTERVAL_HOUR:
    case INTERVAL_HOUR_MINUTE:
    case INTERVAL_HOUR_SECOND:
    case INTERVAL_MINUTE:
    case INTERVAL_MINUTE_SECOND:
    case INTERVAL_SECOND:
      // The value of a DAY-TIME interval (whatever the start and end units,
      // even say HOUR TO MINUTE) is in milliseconds (perhaps fractional
      // milliseconds). The value of a YEAR-MONTH interval is in months.
      return value instanceof BigDecimal;
    case VARBINARY: // not allowed -- use Binary
      if (strict) {
        throw Util.unexpected(typeName);
      }
      // fall through
    case BINARY:
      return value instanceof ByteString;
    case VARCHAR: // not allowed -- use Char
      if (strict) {
        throw Util.unexpected(typeName);
      }
      // fall through
    case CHAR:
      // A SqlLiteral's charset and collation are optional; not so a
      // RexLiteral.
      return (value instanceof NlsString)
          && (((NlsString) value).getCharset() != null)
          && (((NlsString) value).getCollation() != null);
    case SARG:
      return value instanceof Sarg;
    case SYMBOL:
      return value instanceof Enum;
    case ROW:
    case MULTISET:
      return value instanceof List;
    case GEOMETRY:
      return value instanceof Geometries.Geom;
    case ANY:
      // Literal of type ANY is not legal. "CAST(2 AS ANY)" remains
      // an integer literal surrounded by a cast function.
      return false;
    default:
      throw Util.unexpected(typeName);
    }
  }

  /**
   * Returns the strict literal type for a given type. The rules should keep
   * sync with what {@link RexBuilder#makeLiteral} defines.
   */
  public static SqlTypeName strictTypeName(RelDataType type) {
    final SqlTypeName typeName = type.getSqlTypeName();
    switch (typeName) {
    case INTEGER:
    case TINYINT:
    case SMALLINT:
      return SqlTypeName.DECIMAL;
    case REAL:
    case FLOAT:
      return SqlTypeName.DOUBLE;
    case VARBINARY:
      return SqlTypeName.BINARY;
    case VARCHAR:
      return SqlTypeName.CHAR;
    default:
      return typeName;
    }
  }

  // Renders value as "<value>" or "<value>:<type>" depending on includeType;
  // must not be called with OPTIONAL (resolve it first via digestIncludesType).
  private static String toJavaString(
      @Nullable Comparable value,
      SqlTypeName typeName, RelDataType type,
      RexDigestIncludeType includeType) {
    assert includeType != RexDigestIncludeType.OPTIONAL
        : "toJavaString must not be called with includeType=OPTIONAL";
    if (value == null) {
      return includeType == RexDigestIncludeType.NO_TYPE ? "null"
          : "null:" + type.getFullTypeString();
    }
    StringBuilder sb = new StringBuilder();
    appendAsJava(value, sb, typeName, type, false, includeType);
    if (includeType != RexDigestIncludeType.NO_TYPE) {
      sb.append(':');
      final String fullTypeString = type.getFullTypeString();
      if (!fullTypeString.endsWith(NON_NULLABLE_SUFFIX)) {
        sb.append(fullTypeString);
      } else {
        // Trim " NOT NULL". Apparently, the literal is not null, so we just print the data type.
        sb.append(fullTypeString, 0,
            fullTypeString.length() - NON_NULLABLE_SUFFIX.length());
      }
    }
    return sb.toString();
  }

  /**
   * Computes if data type can be omitted from the digest.
   *
   * <p>For instance, {@code 1:BIGINT} has to keep data type while {@code 1:INT}
   * should be represented as just {@code 1}.
   *
   * <p>Implementation assumption: this method should be fast. In fact might call
   * {@link NlsString#getValue()} which could decode the string, however we rely on the cache there.
   *
   * @see RexLiteral#computeDigest(RexDigestIncludeType)
   * @param value value of the literal
   * @param type type of the literal
   * @return NO_TYPE when type can be omitted, ALWAYS otherwise
   */
  private static RexDigestIncludeType shouldIncludeType(@Nullable Comparable value,
      RelDataType type) {
    if (type.isNullable()) {
      // This means "null literal", so we require a type for it
      // There might be exceptions like AND(null, true) which are handled by RexCall#computeDigest
      return RexDigestIncludeType.ALWAYS;
    }
    // The variable here simplifies debugging (one can set a breakpoint at return)
    // final ensures we set the value in all the branches, and it ensures the value is set just once
    final RexDigestIncludeType includeType;
    if (type.getSqlTypeName() == SqlTypeName.BOOLEAN
        || type.getSqlTypeName() == SqlTypeName.INTEGER
        || type.getSqlTypeName() == SqlTypeName.SYMBOL) {
      // We don't want false:BOOLEAN NOT NULL, so we don't print type information for
      // non-nullable BOOLEAN and INTEGER
      includeType = RexDigestIncludeType.NO_TYPE;
    } else if (type.getSqlTypeName() == SqlTypeName.CHAR
        && value instanceof NlsString) {
      NlsString nlsString = (NlsString) value;

      // Ignore type information for 'Bar':CHAR(3)
      if (((nlsString.getCharset() != null
          && Objects.equals(type.getCharset(), nlsString.getCharset()))
          || (nlsString.getCharset() == null
          && Objects.equals(SqlCollation.IMPLICIT.getCharset(), type.getCharset())))
          && Objects.equals(nlsString.getCollation(), type.getCollation())
          && ((NlsString) value).getValue().length() == type.getPrecision()) {
        includeType = RexDigestIncludeType.NO_TYPE;
      } else {
        includeType = RexDigestIncludeType.ALWAYS;
      }
    } else if (type.getPrecision() == 0 && (type.getSqlTypeName() == SqlTypeName.TIME
        || type.getSqlTypeName() == SqlTypeName.TIMESTAMP
        || type.getSqlTypeName() == SqlTypeName.DATE)) {
      // Ignore type information for '12:23:20':TIME(0)
      // Note that '12:23:20':TIME WITH LOCAL TIME ZONE
      includeType = RexDigestIncludeType.NO_TYPE;
    } else {
      includeType = RexDigestIncludeType.ALWAYS;
    }
    return includeType;
  }

  /** Returns whether a value is valid as a constant value, using the same
   * criteria as {@link #valueMatchesType}.
*/
  public static boolean validConstant(@Nullable Object o, Litmus litmus) {
    if (o == null
        || o instanceof BigDecimal
        || o instanceof NlsString
        || o instanceof ByteString
        || o instanceof Boolean) {
      return litmus.succeed();
    } else if (o instanceof List) {
      // Row/multiset constants: every element must itself be constant.
      List list = (List) o;
      for (Object o1 : list) {
        if (!validConstant(o1, litmus)) {
          return litmus.fail("not a constant: {}", o1);
        }
      }
      return litmus.succeed();
    } else if (o instanceof Map) {
      // Map constants: both keys and values must be constant.
      @SuppressWarnings("unchecked") final Map<Object, Object> map = (Map) o;
      for (Map.Entry entry : map.entrySet()) {
        if (!validConstant(entry.getKey(), litmus)) {
          return litmus.fail("not a constant: {}", entry.getKey());
        }
        if (!validConstant(entry.getValue(), litmus)) {
          return litmus.fail("not a constant: {}", entry.getValue());
        }
      }
      return litmus.succeed();
    } else {
      return litmus.fail("not a constant: {}", o);
    }
  }

  /** Returns a list of the time units covered by an interval type such
   * as HOUR TO SECOND. Adds MILLISECOND if the end is SECOND, to deal with
   * fractional seconds. */
  private static List<TimeUnit> getTimeUnits(SqlTypeName typeName) {
    final TimeUnit start = typeName.getStartUnit();
    final TimeUnit end = typeName.getEndUnit();
    final ImmutableList<TimeUnit> list =
        TIME_UNITS.subList(start.ordinal(), end.ordinal() + 1);
    if (end == TimeUnit.SECOND) {
      return CompositeList.of(list, ImmutableList.of(TimeUnit.MILLISECOND));
    }
    return list;
  }

  // Formats an interval value unit-by-unit (e.g. "4:03:02"), zero-padding
  // every field except the first, then strips trailing fractional zeros.
  private String intervalString(BigDecimal v) {
    final List<TimeUnit> timeUnits = getTimeUnits(type.getSqlTypeName());
    final StringBuilder b = new StringBuilder();
    for (TimeUnit timeUnit : timeUnits) {
      final BigDecimal[] result = v.divideAndRemainder(timeUnit.multiplier);
      if (b.length() > 0) {
        b.append(timeUnit.separator);
      }
      final int width = b.length() == 0 ? -1 : width(timeUnit); // don't pad 1st
      pad(b, result[0].toString(), width);
      v = result[1];
    }
    if (Util.last(timeUnits) == TimeUnit.MILLISECOND) {
      // Drop trailing zeros of the fractional second, and the '.' itself
      // when nothing fractional remains.
      while (b.toString().matches(".*\\.[0-9]*0")) {
        if (b.toString().endsWith(".0")) {
          b.setLength(b.length() - 2); // remove ".0"
        } else {
          b.setLength(b.length() - 1); // remove "0"
        }
      }
    }
    return b.toString();
  }

  // Left-pads s with '0' to the given width (no padding when width < 0).
  private static void pad(StringBuilder b, String s, int width) {
    if (width >= 0) {
      for (int i = s.length(); i < width; i++) {
        b.append('0');
      }
    }
    b.append(s);
  }

  // Zero-pad width of an interval field when it is not the leading field.
  private static int width(TimeUnit timeUnit) {
    switch (timeUnit) {
    case MILLISECOND:
      return 3;
    case HOUR:
    case MINUTE:
    case SECOND:
      return 2;
    default:
      return -1;
    }
  }

  /**
   * Prints the value this literal as a Java string constant.
   */
  public void printAsJava(PrintWriter pw) {
    Util.asStringBuilder(pw, sb ->
        appendAsJava(value, sb, typeName, type, true,
            RexDigestIncludeType.NO_TYPE));
  }

  /**
   * Appends the specified value in the provided destination as a Java string. The value must be
   * consistent with the type, as per {@link #valueMatchesType}.
*
   * <p>Typical return values:</p>
   *
   * <ul>
   * <li>true</li>
   * <li>null</li>
   * <li>"Hello, world!"</li>
   * <li>1.25</li>
   * <li>1234ABCD</li>
   * </ul>
   *
   * @param value Value to be appended to the provided destination as a Java string
   * @param sb Destination to which to append the specified value
   * @param typeName Type name to be used for the transformation of the value to a Java string
   * @param type Type to be used for the transformation of the value to a Java string
   * @param includeType Whether to include the data type in the Java representation
   */
  private static void appendAsJava(@Nullable Comparable value, StringBuilder sb,
      SqlTypeName typeName, RelDataType type, boolean java,
      RexDigestIncludeType includeType) {
    switch (typeName) {
    case CHAR:
      NlsString nlsString = (NlsString) castNonNull(value);
      if (java) {
        Util.printJavaString(
            sb,
            nlsString.getValue(),
            true);
      } else {
        // Only spell out the charset when it differs from the default.
        boolean includeCharset =
            (nlsString.getCharsetName() != null)
                && !nlsString.getCharsetName().equals(
                    CalciteSystemProperty.DEFAULT_CHARSET.value());
        sb.append(nlsString.asSql(includeCharset, false));
      }
      break;
    case BOOLEAN:
      assert value instanceof Boolean;
      sb.append(value.toString());
      break;
    case DECIMAL:
      assert value instanceof BigDecimal;
      sb.append(value.toString());
      break;
    case DOUBLE:
      assert value instanceof BigDecimal;
      sb.append(Util.toScientificNotation((BigDecimal) value));
      break;
    case BIGINT:
      assert value instanceof BigDecimal;
      long narrowLong = ((BigDecimal) value).longValue();
      sb.append(String.valueOf(narrowLong));
      sb.append('L');
      break;
    case BINARY:
      assert value instanceof ByteString;
      sb.append("X'");
      sb.append(((ByteString) value).toString(16));
      sb.append("'");
      break;
    case NULL:
      assert value == null;
      sb.append("null");
      break;
    case SARG:
      assert value instanceof Sarg;
      //noinspection unchecked,rawtypes
      Util.asStringBuilder(sb, sb2 -> printSarg(sb2, (Sarg) value, type));
      break;
    case SYMBOL:
      assert value instanceof Enum;
      sb.append("FLAG(");
      sb.append(value.toString());
      sb.append(")");
      break;
    case DATE:
      assert value instanceof DateString;
      sb.append(value.toString());
      break;
    case TIME:
    case TIME_WITH_LOCAL_TIME_ZONE:
      assert value instanceof TimeString;
      sb.append(value.toString());
      break;
    case TIMESTAMP:
    case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
      assert value instanceof TimestampString;
      sb.append(value.toString());
      break;
    case INTERVAL_YEAR:
    case INTERVAL_YEAR_MONTH:
    case INTERVAL_MONTH:
    case INTERVAL_DAY:
    case INTERVAL_DAY_HOUR:
    case INTERVAL_DAY_MINUTE:
    case INTERVAL_DAY_SECOND:
    case INTERVAL_HOUR:
    case INTERVAL_HOUR_MINUTE:
    case INTERVAL_HOUR_SECOND:
    case INTERVAL_MINUTE:
    case INTERVAL_MINUTE_SECOND:
    case INTERVAL_SECOND:
      assert value instanceof BigDecimal;
      sb.append(value.toString());
      break;
    case MULTISET:
    case ROW:
      assert value instanceof List : "value must implement List: " + value;
      @SuppressWarnings("unchecked") final List<RexLiteral> list =
          (List<RexLiteral>) castNonNull(value);
      // NOTE(review): printList is handed the outer 'sb', not the wrapped
      // 'sb2' — the wrapper's builder is unused here. Confirm this is the
      // intended upstream behavior before changing it.
      Util.asStringBuilder(sb, sb2 ->
          Util.printList(sb, list.size(), (sb3, i) ->
              sb3.append(list.get(i).computeDigest(includeType))));
      break;
    case GEOMETRY:
      final String wkt = GeoFunctions.ST_AsWKT((Geometries.Geom) castNonNull(value));
      sb.append(wkt);
      break;
    default:
      assert valueMatchesType(value, typeName, true);
      throw Util.needToImplement(typeName);
    }
  }

  // Renders a Sarg by converting each bound value to a temporary literal.
  private static <C extends Comparable<C>> void printSarg(StringBuilder sb,
      Sarg<C> sarg, RelDataType type) {
    sarg.printTo(sb, (sb2, value) -> sb2.append(toLiteral(type, value)));
  }

  /** Converts a value to a temporary literal, for the purposes of generating a
   * digest. Literals of type ROW and MULTISET require that their components are
   * also literals.
*/
  private static RexLiteral toLiteral(RelDataType type, Comparable<?> value) {
    final SqlTypeName typeName = strictTypeName(type);
    switch (typeName) {
    case ROW:
      // Recursively wrap each field value as a literal of the field's type.
      assert value instanceof List : "value must implement List: " + value;
      final List<Comparable<?>> fieldValues = (List) value;
      final List<RelDataTypeField> fields = type.getFieldList();
      final List<RexLiteral> fieldLiterals =
          FlatLists.of(
              Functions.generate(fieldValues.size(),
                  i -> toLiteral(fields.get(i).getType(), fieldValues.get(i))));
      return new RexLiteral((Comparable) fieldLiterals, type, typeName);
    case MULTISET:
      // Recursively wrap each element as a literal of the component type.
      assert value instanceof List : "value must implement List: " + value;
      final List<Comparable<?>> elementValues = (List) value;
      final List<RexLiteral> elementLiterals =
          FlatLists.of(
              Functions.generate(elementValues.size(),
                  i -> toLiteral(castNonNull(type.getComponentType()),
                      elementValues.get(i))));
      return new RexLiteral((Comparable) elementLiterals, type, typeName);
    default:
      return new RexLiteral(value, type, typeName);
    }
  }

  /**
   * Converts a Jdbc string into a RexLiteral. This method accepts a string,
   * as returned by the Jdbc method ResultSet.getString(), and restores the
   * string into an equivalent RexLiteral. It allows one to use Jdbc strings
   * as a common format for data.
   *
   * <p>Returns null if and only if {@code literal} is null.
*
   * @param type data type of literal to be read
   * @param typeName type family of literal
   * @param literal the (non-SQL encoded) string representation, as returned
   *   by the Jdbc call to return a column as a string
   * @return a typed RexLiteral, or null
   */
  public static @PolyNull RexLiteral fromJdbcString(
      RelDataType type,
      SqlTypeName typeName,
      @PolyNull String literal) {
    if (literal == null) {
      return null;
    }
    switch (typeName) {
    case CHAR:
      Charset charset = requireNonNull(type.getCharset(), () -> "charset for " + type);
      SqlCollation collation = type.getCollation();
      NlsString str = new NlsString(literal, charset.name(), collation);
      return new RexLiteral(str, type, typeName);
    case BOOLEAN:
      Boolean b = ConversionUtil.toBoolean(literal);
      return new RexLiteral(b, type, typeName);
    case DECIMAL:
    case DOUBLE:
      BigDecimal d = new BigDecimal(literal);
      return new RexLiteral(d, type, typeName);
    case BINARY:
      // Binary literals arrive as hexadecimal strings.
      byte[] bytes = ConversionUtil.toByteArrayFromString(literal, 16);
      return new RexLiteral(new ByteString(bytes), type, typeName);
    case NULL:
      return new RexLiteral(null, type, typeName);
    case INTERVAL_DAY:
    case INTERVAL_DAY_HOUR:
    case INTERVAL_DAY_MINUTE:
    case INTERVAL_DAY_SECOND:
    case INTERVAL_HOUR:
    case INTERVAL_HOUR_MINUTE:
    case INTERVAL_HOUR_SECOND:
    case INTERVAL_MINUTE:
    case INTERVAL_MINUTE_SECOND:
    case INTERVAL_SECOND:
      // Day-time intervals are stored as a millisecond count.
      long millis =
          SqlParserUtil.intervalToMillis(literal,
              castNonNull(type.getIntervalQualifier()));
      return new RexLiteral(BigDecimal.valueOf(millis), type, typeName);
    case INTERVAL_YEAR:
    case INTERVAL_YEAR_MONTH:
    case INTERVAL_MONTH:
      // Year-month intervals are stored as a month count.
      long months =
          SqlParserUtil.intervalToMonths(literal,
              castNonNull(type.getIntervalQualifier()));
      return new RexLiteral(BigDecimal.valueOf(months), type, typeName);
    case DATE:
    case TIME:
    case TIMESTAMP:
      String format = getCalendarFormat(typeName);
      TimeZone tz = DateTimeUtils.UTC_ZONE;
      final Comparable v;
      switch (typeName) {
      case DATE:
        final Calendar cal =
            DateTimeUtils.parseDateFormat(literal,
                new SimpleDateFormat(format, Locale.ROOT), tz);
        if (cal == null) {
          throw new AssertionError("fromJdbcString: invalid date/time value '"
              + literal + "'");
        }
        v = DateString.fromCalendarFields(cal);
        break;
      default:
        // Allow fractional seconds for times and timestamps
        assert format != null;
        final DateTimeUtils.PrecisionTime ts =
            DateTimeUtils.parsePrecisionDateTimeLiteral(literal,
                new SimpleDateFormat(format, Locale.ROOT), tz, -1);
        if (ts == null) {
          throw new AssertionError("fromJdbcString: invalid date/time value '"
              + literal + "'");
        }
        switch (typeName) {
        case TIMESTAMP:
          v = TimestampString.fromCalendarFields(ts.getCalendar())
              .withFraction(ts.getFraction());
          break;
        case TIME:
          v = TimeString.fromCalendarFields(ts.getCalendar())
              .withFraction(ts.getFraction());
          break;
        default:
          throw new AssertionError();
        }
      }
      return new RexLiteral(v, type, typeName);
    case SYMBOL:
      // Symbols are for internal use
    default:
      throw new AssertionError("fromJdbcString: unsupported type");
    }
  }

  /** Returns the parse format string for DATE, TIME or TIMESTAMP literals. */
  private static String getCalendarFormat(SqlTypeName typeName) {
    switch (typeName) {
    case DATE:
      return DateTimeUtils.DATE_FORMAT_STRING;
    case TIME:
      return DateTimeUtils.TIME_FORMAT_STRING;
    case TIMESTAMP:
      return DateTimeUtils.TIMESTAMP_FORMAT_STRING;
    default:
      throw new AssertionError("getCalendarFormat: unknown type");
    }
  }

  /** Returns the type family of this literal. */
  public SqlTypeName getTypeName() {
    return typeName;
  }

  @Override public RelDataType getType() {
    return type;
  }

  @Override public SqlKind getKind() {
    return SqlKind.LITERAL;
  }

  /**
   * Returns whether this literal's value is null.
   */
  public boolean isNull() {
    return value == null;
  }

  /**
   * Returns the value of this literal.
   *
   * <p>For backwards compatibility, returns DATE, TIME and TIMESTAMP as a
   * {@link Calendar} value in UTC time zone.
*/
  @Pure
  public @Nullable Comparable getValue() {
    assert valueMatchesType(value, typeName, true) : value;
    if (value == null) {
      return null;
    }
    switch (typeName) {
    case TIME:
    case DATE:
    case TIMESTAMP:
      // Legacy behavior: expose temporal values as Calendar (UTC).
      return getValueAs(Calendar.class);
    default:
      return value;
    }
  }

  /**
   * Returns the value of this literal, in the form that the calculator
   * program builder wants it.
   */
  public @Nullable Object getValue2() {
    if (value == null) {
      return null;
    }
    switch (typeName) {
    case CHAR:
      return getValueAs(String.class);
    case DECIMAL:
    case TIMESTAMP:
    case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
      return getValueAs(Long.class);
    case DATE:
    case TIME:
    case TIME_WITH_LOCAL_TIME_ZONE:
      return getValueAs(Integer.class);
    default:
      return value;
    }
  }

  /**
   * Returns the value of this literal, in the form that the rex-to-lix
   * translator wants it.
   */
  public @Nullable Object getValue3() {
    if (value == null) {
      return null;
    }
    switch (typeName) {
    case DECIMAL:
      // Unlike getValue2, keep decimals as BigDecimal for lix translation.
      assert value instanceof BigDecimal;
      return value;
    default:
      return getValue2();
    }
  }

  /**
   * Returns the value of this literal, in the form that {@link RexInterpreter}
   * wants it.
   */
  public @Nullable Comparable getValue4() {
    if (value == null) {
      return null;
    }
    switch (typeName) {
    case TIMESTAMP:
    case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
      return getValueAs(Long.class);
    case DATE:
    case TIME:
    case TIME_WITH_LOCAL_TIME_ZONE:
      return getValueAs(Integer.class);
    default:
      return value;
    }
  }

  /** Returns the value of this literal as an instance of the specified class.
*
   * <p>The following SQL types allow more than one form:
   *
   * <ul>
   * <li>CHAR as {@link NlsString} or {@link String}
   * <li>TIME as {@link TimeString},
   *   {@link Integer} (milliseconds since midnight),
   *   {@link Calendar} (in UTC)
   * <li>DATE as {@link DateString},
   *   {@link Integer} (days since 1970-01-01),
   *   {@link Calendar}
   * <li>TIMESTAMP as {@link TimestampString},
   *   {@link Long} (milliseconds since 1970-01-01 00:00:00),
   *   {@link Calendar}
   * <li>DECIMAL as {@link BigDecimal} or {@link Long}
   * </ul>
   *
   * <p>Called with {@code clazz} = {@link Comparable}, returns the value in
   * its native form.
   *
   * @param clazz Desired return type
   * @param <T> Return type
   * @return Value of this literal in the desired type
   */
  public <T> @Nullable T getValueAs(Class<T> clazz) {
    // Fast path: value is null or already of the requested class.
    if (value == null || clazz.isInstance(value)) {
      return clazz.cast(value);
    }
    switch (typeName) {
    case BINARY:
      if (clazz == byte[].class) {
        return clazz.cast(((ByteString) value).getBytes());
      }
      break;
    case CHAR:
      if (clazz == String.class) {
        return clazz.cast(((NlsString) value).getValue());
      } else if (clazz == Character.class) {
        return clazz.cast(((NlsString) value).getValue().charAt(0));
      }
      break;
    case VARCHAR:
      if (clazz == String.class) {
        return clazz.cast(((NlsString) value).getValue());
      }
      break;
    case DECIMAL:
      if (clazz == Long.class) {
        // Long form of a DECIMAL is its unscaled value.
        return clazz.cast(((BigDecimal) value).unscaledValue().longValue());
      }
      // fall through
    case BIGINT:
    case INTEGER:
    case SMALLINT:
    case TINYINT:
    case DOUBLE:
    case REAL:
    case FLOAT:
      if (clazz == Long.class) {
        return clazz.cast(((BigDecimal) value).longValue());
      } else if (clazz == Integer.class) {
        return clazz.cast(((BigDecimal) value).intValue());
      } else if (clazz == Short.class) {
        return clazz.cast(((BigDecimal) value).shortValue());
      } else if (clazz == Byte.class) {
        return clazz.cast(((BigDecimal) value).byteValue());
      } else if (clazz == Double.class) {
        return clazz.cast(((BigDecimal) value).doubleValue());
      } else if (clazz == Float.class) {
        return clazz.cast(((BigDecimal) value).floatValue());
      }
      break;
    case DATE:
      if (clazz == Integer.class) {
        return clazz.cast(((DateString) value).getDaysSinceEpoch());
      } else if (clazz == Calendar.class) {
        return clazz.cast(((DateString) value).toCalendar());
      }
      break;
    case TIME:
      if (clazz == Integer.class) {
        return clazz.cast(((TimeString) value).getMillisOfDay());
      } else if (clazz == Calendar.class) {
        // Note: Nanos are ignored
        return clazz.cast(((TimeString) value).toCalendar());
      }
      break;
    case TIME_WITH_LOCAL_TIME_ZONE:
      if (clazz == Integer.class) {
        // Milliseconds since midnight
        return clazz.cast(((TimeString) value).getMillisOfDay());
      }
      break;
    case TIMESTAMP:
      if (clazz == Long.class) {
        // Milliseconds since 1970-01-01 00:00:00
        return clazz.cast(((TimestampString) value).getMillisSinceEpoch());
      } else if (clazz == Calendar.class) {
        // Note: Nanos are ignored
        return clazz.cast(((TimestampString) value).toCalendar());
      }
      break;
    case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
      if (clazz == Long.class) {
        // Milliseconds since 1970-01-01 00:00:00
        return clazz.cast(((TimestampString) value).getMillisSinceEpoch());
      } else if (clazz == Calendar.class) {
        // Note: Nanos are ignored
        return clazz.cast(((TimestampString) value).toCalendar());
      }
      break;
    case INTERVAL_YEAR:
    case INTERVAL_YEAR_MONTH:
    case INTERVAL_MONTH:
    case INTERVAL_DAY:
    case INTERVAL_DAY_HOUR:
    case INTERVAL_DAY_MINUTE:
    case INTERVAL_DAY_SECOND:
    case INTERVAL_HOUR:
    case INTERVAL_HOUR_MINUTE:
    case INTERVAL_HOUR_SECOND:
    case INTERVAL_MINUTE:
    case INTERVAL_MINUTE_SECOND:
    case INTERVAL_SECOND:
      if (clazz == Integer.class) {
        return clazz.cast(((BigDecimal) value).intValue());
      } else if (clazz == Long.class) {
        return clazz.cast(((BigDecimal) value).longValue());
      } else if (clazz == String.class) {
        return clazz.cast(intervalString(castNonNull(getValueAs(BigDecimal.class)).abs()));
      } else if (clazz == Boolean.class) {
        // return whether negative
        return clazz.cast(castNonNull(getValueAs(BigDecimal.class)).signum() < 0);
      }
      break;
    default:
      break;
    }
    throw new AssertionError("cannot convert " + typeName + " literal to " + clazz);
  }

  /** Returns the boolean value of a literal node; the node must be a
   * non-null BOOLEAN {@code RexLiteral}. */
  public static boolean booleanValue(RexNode node) {
    return (Boolean) castNonNull(((RexLiteral) node).value);
  }

  @Override public boolean isAlwaysTrue() {
    if (typeName != SqlTypeName.BOOLEAN) {
      return false;
    }
    return booleanValue(this);
  }

  @Override public boolean isAlwaysFalse() {
    if (typeName != SqlTypeName.BOOLEAN) {
      return false;
    }
    return !booleanValue(this);
  }

  // Equality is defined by value and type; typeName is derived from type.
  @Override public boolean equals(@Nullable Object obj) {
    if (this == obj) {
      return true;
    }
    return (obj instanceof RexLiteral)
        && Objects.equals(((RexLiteral) obj).value, value)
        && Objects.equals(((RexLiteral) obj).type, type);
  }

  @Override public int hashCode() {
    return Objects.hash(value, type);
  }

  /** Returns the raw comparable value of a literal node, looking through
   * CAST and unary minus calls. */
  public static @Nullable Comparable value(RexNode node) {
    return findValue(node);
  }

  /** Returns the int value of a numeric literal node. */
  public static int intValue(RexNode node) {
    final Comparable value = castNonNull(findValue(node));
    return ((Number) value).intValue();
  }

  /** Returns the string value of a character literal node, or null if the
   * literal is null. */
  public static @Nullable String stringValue(RexNode node) {
    final Comparable value = findValue(node);
    return (value == null) ? null : ((NlsString) value).getValue();
  }

  private static @Nullable Comparable findValue(RexNode node) {
    if (node instanceof RexLiteral) {
      return ((RexLiteral) node).value;
    }
    if (node instanceof RexCall) {
      final RexCall call = (RexCall) node;
      final SqlOperator operator = call.getOperator();
      if (operator == SqlStdOperatorTable.CAST) {
        // Look through the cast to the underlying literal.
        return findValue(call.getOperands().get(0));
      }
      if (operator == SqlStdOperatorTable.UNARY_MINUS) {
        final BigDecimal value =
            (BigDecimal) findValue(call.getOperands().get(0));
        return requireNonNull(value, () -> "can't negate null in " + node).negate();
      }
    }
    throw new AssertionError("not a literal: " + node);
  }

  /** Returns whether a node is a literal whose value is SQL NULL. */
  public static boolean isNullLiteral(RexNode node) {
    return (node instanceof RexLiteral)
        && (((RexLiteral) node).value == null);
  }

  @Override public <R> R accept(RexVisitor<R> visitor) {
    return visitor.visitLiteral(this);
  }

  @Override public <R, P> R accept(RexBiVisitor<R, P> visitor, P arg) {
    return visitor.visitLiteral(this, arg);
  }
}
package com.dbg.manager; import java.util.List; import com.dbg.constants.IAppConstants; import com.dbg.constants.ICommonConstants; import com.dbg.samplegame.R; import com.google.android.gms.ads.AdListener; import com.google.android.gms.ads.AdRequest; import com.google.android.gms.ads.AdSize; import com.google.android.gms.ads.AdView; import com.google.android.gms.ads.InterstitialAd; import com.parse.FindCallback; import com.parse.LogInCallback; import com.parse.ParseException; import com.parse.ParseObject; import com.parse.ParseQuery; import com.parse.ParseUser; import com.parse.SaveCallback; import com.revmob.RevMob; import com.revmob.RevMobAdsListener; import com.revmob.ads.banner.RevMobBanner; import com.revmob.ads.interstitial.RevMobFullscreen; import android.R.bool; import android.app.Activity; import android.media.MediaPlayer; import android.media.MediaPlayer.OnCompletionListener; import android.net.Uri; import android.view.View; import android.view.View.OnClickListener; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.MediaController; import android.widget.RelativeLayout; import android.widget.VideoView; public class AdManager { public static String PARSE_APP_ID="Li0RlkopvS2f58KUqUcyfFtqro0sRpS0GpOF3CP7"; public static String PARSE_CLIENT_KEY="oFIGiYqXvQdU0jpASSnYZMzVdHWAuevOsaXvDflc"; public static String ADMOB_ID="ca-app-pub-8572551537746831/8190297904"; public static String ADMOB_VIDEO_ID="ca-app-pub-8572551537746831/1284846305"; public static String REVMOB_VIDEO_KEY="55db700f0b2cb12b75d8112c"; public static String ParseAdType="AdType"; public static String ParseClickCount="ClickCount"; public static String ParseDisplayCount="DisplayCount"; public static String ParseVideoDisplayCount="VideoDisplayCount"; public static String ParseUsername="Username"; public static String Parsepassword="password"; public static String ParseLoginTable="Login"; public static String ParseAdvertismentTable="Advertisment"; public static 
int AdMob_TYPE=0; public static int RevMob_TYPE=1; public static int DBGAd_TYPE=2; public static int BANNER=0; public static int VIDEO=1; Activity activity; public int adTypeValue = -1; RelativeLayout.LayoutParams lp1; private LinearLayout linContainer; private RevMobFullscreen video; VideoView videoHolder; RevMobBanner banner; RevMob revmob; private InterstitialAd interstitialAd; RelativeLayout rl; public static float total=0; public static float videoAdVal=0.50f; public static float adDisplayVal=0.10f; public static float adClickVal=0.25f; public AdManager(Activity activity) { revmob = RevMob.start(activity); videoHolder= new VideoView(activity); this.activity=activity; } public AdManager(Activity activity,LinearLayout linContainer,RelativeLayout rl) { this.activity=activity; this.linContainer=linContainer; revmob = RevMob.start(activity); videoHolder= new VideoView(activity); this.rl=rl; videoHolder.setLayoutParams(new LinearLayout.LayoutParams( LinearLayout.LayoutParams.WRAP_CONTENT, LinearLayout.LayoutParams.WRAP_CONTENT)); lp1 = new RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.WRAP_CONTENT); lp1.addRule(RelativeLayout.CENTER_HORIZONTAL); lp1.addRule(RelativeLayout.CENTER_VERTICAL); } private boolean IsAdShow=false; public void ShowAd(final boolean isVideoLoad) { IsAdShow=true; loadVideoAd(0); } public void parseLogin(final boolean isVideoLoad) { if(AppPreferenceManager.getAdSettingType(activity)==0){ try{ ParseUser.logInInBackground("dbg", "dbg", new LogInCallback() { @Override public void done(ParseUser parseUser, ParseException arg1) { if((parseUser!=null)&&(arg1==null)){ int adType = parseUser.getInt(ICommonConstants.ParseAdType); //loadAd(adType); if(isVideoLoad){ loadVideoAd(adType); } } else{ } } }); }catch(Exception e){ } } else{ AppPreferenceManager.increaseAdCount(activity); } } public interface ParseListener { void getAmountListener(float amount); } public static void getParseData(final 
ParseListener parseListener){ total=0; ParseQuery<ParseObject> query = ParseQuery.getQuery("Advertisment"); query.findInBackground(new FindCallback<ParseObject>() { @Override public void done(List<ParseObject> arg0, ParseException arg1) { if(arg0!=null && arg0.size()>0){ for (int i = 0; i < arg0.size(); i++) { total=total+(arg0.get(i).getInt(ICommonConstants.ParseDisplayCount) *adDisplayVal); System.out.println("VAL 1=="+total); total=total+(arg0.get(i).getInt(ICommonConstants.ParseClickCount) *adClickVal); System.out.println("VAL 2=="+total); total=total+(arg0.get(i).getInt(ICommonConstants.ParseVideoDisplayCount) *videoAdVal); System.out.println("VAL 3=="+total); } parseListener.getAmountListener(total); } } }); } public void updateParseCount(int type,final String col) { ParseQuery<ParseObject> advertisments = ParseQuery.getQuery(ICommonConstants.ParseAdvertismentTable); advertisments.whereEqualTo(ICommonConstants.ParseAdType, type); advertisments.findInBackground(new FindCallback<ParseObject>() { @Override public void done(List<ParseObject> arg0, ParseException arg1) { if ((arg0!=null)&&(arg1==null)) { ParseObject parseObject = arg0.get(0); int displayCount = parseObject.getInt(col); parseObject.put(col, (displayCount + 1)); parseObject.saveInBackground(new SaveCallback() { @Override public void done(ParseException arg0) { // TODO Auto-generated method stub } }); } else{ System.out.println("Err"+ arg1.getMessage().toString()); } } }); } public void loadAd(int adType){ if (adTypeValue != adType) { adTypeValue = adType; switch (adType) { case 0: loadAdMob(); break; case 1: loadRevMob(); break; case 2: loadCustomAd(); break; default: break; } } } public void loadAdMob() { System.out.println("Admob "); AdView mAdView = new AdView(activity); mAdView.setAdUnitId(IAppConstants.ADMOB_ID); mAdView.setAdSize(AdSize.BANNER); AdRequest adRequest = new AdRequest.Builder().build(); mAdView.setAdListener(new AdListener() { @Override public void onAdOpened() { 
super.onAdOpened(); updateParseCount(ICommonConstants.AdMob, ICommonConstants.ParseClickCount); } @Override public void onAdLoaded() { // TODO Auto-generated method stub super.onAdLoaded(); if(IsAdShow){ IsAdShow=false; AppPreferenceManager.decreaseAdCount(activity); } updateParseCount(ICommonConstants.AdMob, ICommonConstants.ParseDisplayCount); } }); mAdView.loadAd(adRequest); linContainer.removeAllViews(); linContainer.addView(mAdView); } private void loadRevMob() { banner = revmob.createBanner(activity, revmobListener); activity.runOnUiThread(new Runnable() { @Override public void run() { linContainer.removeAllViews(); linContainer.addView(banner); } }); } RevMobAdsListener revmobListener = new RevMobAdsListener(){ @Override public void onRevMobSessionIsStarted() { video = revmob.createVideo(activity, revmobListener);; } @Override public void onRevMobAdClicked() { updateParseCount(ICommonConstants.RevMob, ICommonConstants.ParseClickCount); } @Override public void onRevMobAdDisplayed() { // TODO Auto-generated method stub super.onRevMobAdDisplayed(); if(IsAdShow){ IsAdShow=false; AppPreferenceManager.decreaseAdCount(activity); } updateParseCount(ICommonConstants.RevMob, ICommonConstants.ParseDisplayCount); } @Override public void onRevMobAdDismissed() { // TODO Auto-generated method stub super.onRevMobAdDismissed(); } @Override public void onRevMobRewardedVideoLoaded() { // TODO Auto-generated method stub super.onRevMobRewardedVideoLoaded(); } public void onRevMobVideoLoaded(){ video.showVideo(); if(IsAdShow){ IsAdShow=false; AppPreferenceManager.decreaseAdCount(activity); } updateParseCount(ICommonConstants.RevMob, ICommonConstants.ParseVideoDisplayCount); } }; private void loadCustomAd() { linContainer.removeAllViews(); ImageView customAd=new ImageView(activity); customAd.setBackgroundResource(R.drawable.ad); customAd.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { updateParseCount(ICommonConstants.DBGAd, 
ICommonConstants.ParseClickCount); } }); linContainer.addView(customAd); if(IsAdShow){ IsAdShow=false; AppPreferenceManager.decreaseAdCount(activity); } updateParseCount(ICommonConstants.DBGAd, ICommonConstants.ParseDisplayCount); } private void loadVideoAd(int adType){ switch (adType) { case 0: loadAdMobVideo(); break; case 1: video = revmob.createVideo(activity, revmobListener);; break; case 2: if(IsAdShow){ IsAdShow=false; AppPreferenceManager.decreaseAdCount(activity); } updateParseCount(ICommonConstants.DBGAd, ICommonConstants.ParseVideoDisplayCount); if(videoHolder!=null){ rl.addView(videoHolder,lp1); videoHolder.setVisibility(View.VISIBLE); } MediaController controller=new MediaController(activity); Uri video = Uri.parse("android.resource://" + activity.getPackageName() + "/" + R.raw.sample); videoHolder.setVideoURI(video); videoHolder.start(); videoHolder.setOnCompletionListener(new OnCompletionListener() { @Override public void onCompletion(MediaPlayer arg0) { if(videoHolder!=null){ videoHolder.setVisibility(View.INVISIBLE); rl.removeView(videoHolder); } } }); break; default: break; } } public void loadAdMobVideo() { interstitialAd=new InterstitialAd(activity); interstitialAd.setAdUnitId(IAppConstants.ADMOB_VIDEO_ID); interstitialAd.setAdListener(new AdListener() { @Override public void onAdLoaded() { // TODO Auto-generated method stub super.onAdLoaded(); if(interstitialAd.isLoaded()){ interstitialAd.show(); } if(IsAdShow){ IsAdShow=false; AppPreferenceManager.decreaseAdCount(activity); } updateParseCount(ICommonConstants.AdMob, ICommonConstants.ParseVideoDisplayCount); } }); AdRequest adRequest = new AdRequest.Builder().build(); interstitialAd.loadAd(adRequest); } }
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.opentides.bean; import java.io.Serializable; import java.util.ArrayList; import java.util.Date; import java.util.List; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.EntityListeners; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.Lob; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; import org.opentides.bean.user.BaseUser; import org.opentides.persistence.listener.AuditLogListener; import org.opentides.util.CrudUtil; /** * This class is responsible for handling all audit functions needed to be * attached to the classes. * * @author allantan */ @Entity @EntityListeners({ AuditLogListener.class }) @Table(name = "HISTORY_LOG") public class AuditLog implements Serializable, Searchable { /** * Auto-generated class UID. */ private static final long serialVersionUID = 269168041517643087L; /** * Primary key. Annotation is transfered to getter method to allow * overridding from subclass. */ @Id @GeneratedValue(strategy = GenerationType.AUTO) @Column(name = "ID") private Long id; /** * Create date. 
*/ @Column(name = "CREATEDATE") @Temporal(TemporalType.TIMESTAMP) private Date createDate; /** * Last update date. */ @Column(name = "UPDATEDATE") @Temporal(TemporalType.TIMESTAMP) private Date updateDate; /** * Primary key of object being tracked. */ @Column(name = "ENTITY_ID", nullable = false, updatable = false) private Long entityId; /** * Class type of object being tracked. */ @SuppressWarnings({ "rawtypes" }) @Column(name = "ENTITY_CLASS", nullable = false, updatable = false) private Class entityClass; /** * Arbitrary reference to object being tracked. * Use this attribute to store single reference string to different * classes that are interrelated. */ @Column(name = "REFERENCE") private String reference; /** * Message about the actions done. */ @Lob @Column(name = "MESSAGE", nullable = false, updatable = false) private String message; @Column(name = "FRIENDLY_MESSAGE", nullable = false, updatable = false) private String friendlyMessage; /** * User who performed the change. */ @Column(name = "USER_ID", nullable = false, updatable = false) private Long userId; /** * Owner of this object. */ @Column(name = "OWNER") private String owner; /** * Office that owns this object. * In most cases, this is office of the owner. */ @Column(name = "OWNER_OFFICE") private String ownerOffice; /** * Temporary reference to object being tracked. * Used by AuditLogListener when loading audit log object. */ @Transient private transient Object object; /** * Temporary reference to used who made the change. * Used by AuditLogListener when loading audit log object. */ @Transient private transient BaseUser user; @Transient private transient Date startDate; @Transient private transient Date endDate; @Transient private transient String logAction; /** * Default constructor. */ public AuditLog(){ } /** * Standard constructor. * * @param message message to log. If blank, message is automatically generated. * @param entityId id of object being tracked. 
* @param entityClass class name of object being tracked. * @param reference reference for group query. * @param userId user id of who made the change. * @param owner username of who made the change. * @param ownerOffice group of user who made the change. */ @SuppressWarnings({ "rawtypes" }) public AuditLog(final String message, final Long entityId, final Class entityClass, final String reference, final Long userId, final String owner, final String ownerOffice) { this.message = message; this.entityId = entityId; this.entityClass = entityClass; this.reference = reference; this.userId = userId; this.setCreateDate(new Date()); this.setOwner(owner); this.setOwnerOffice(ownerOffice); } /** * * @param friendlyMessage * @param message * @param entityId * @param entityClass * @param reference * @param userId * @param owner * @param ownerOffice */ @SuppressWarnings("rawtypes") public AuditLog(final String friendlyMessage, final String message, final Long entityId, final Class entityClass, final String reference, final Long userId, final String owner, final String ownerOffice){ this.friendlyMessage = friendlyMessage; this.message = message; this.entityId = entityId; this.entityClass = entityClass; this.reference = reference; this.userId = userId; this.setCreateDate(new Date()); this.setOwner(owner); this.setOwnerOffice(ownerOffice); } public List<String> getSearchProperties() { List<String> fields = new ArrayList<String>(); fields.add("userId"); fields.add("entityClass"); fields.add("entityId"); fields.add("reference"); fields.add("ownerOffice"); fields.add("updateDate"); return fields; } /** * Getter method for id. * * @return the id */ public final Long getId() { return id; } /** * Setter method for id. * * @param id the id to set */ public final void setId(Long id) { this.id = id; } /** * Getter method for createDate. * * @return the createDate */ public final Date getCreateDate() { return createDate; } /** * Setter method for createDate. 
* * @param createDate the createDate to set */ public final void setCreateDate(Date createDate) { this.createDate = createDate; } /** * Getter method for updateDate. * * @return the updateDate */ public final Date getUpdateDate() { return updateDate; } /** * Setter method for updateDate. * * @param updateDate the updateDate to set */ public final void setUpdateDate(Date updateDate) { this.updateDate = updateDate; } /** * Getter method for entityId. * * @return the entityId */ public final Long getEntityId() { return entityId; } /** * Setter method for entityId. * * @param entityId the entityId to set */ public final void setEntityId(Long entityId) { this.entityId = entityId; } /** * Getter to retrieve name of entity class. * @return */ public final String getEntityName() { return CrudUtil.getReadableName(entityClass); } /** * Getter method for entityClass. * * @return the entityClass */ @SuppressWarnings("rawtypes") public final Class getEntityClass() { return entityClass; } /** * Setter method for entityClass. * * @param entityClass the entityClass to set */ @SuppressWarnings("rawtypes") public final void setEntityClass(Class entityClass) { this.entityClass = entityClass; } /** * Getter method for reference. * * @return the reference */ public final String getReference() { return reference; } /** * Setter method for reference. * * @param reference the reference to set */ public final void setReference(String reference) { this.reference = reference; } /** * Getter method for message. * * @return the message */ public final String getMessage() { return message; } /** * Setter method for message. * * @param message the message to set */ public final void setMessage(String message) { this.message = message; } /** * Getter method for friendlyMessage. * * @return the friendlyMessage */ public final String getFriendlyMessage() { return friendlyMessage; } /** * Setter method for friendlyMessage. 
* * @param friendlyMessage the friendlyMessage to set */ public final void setFriendlyMessage(String friendlyMessage) { this.friendlyMessage = friendlyMessage; } /** * Getter method for userId. * * @return the userId */ public final Long getUserId() { return userId; } /** * Setter method for userId. * * @param userId the userId to set */ public final void setUserId(Long userId) { this.userId = userId; } /** * Getter method for owner. * * @return the owner */ public final String getOwner() { return owner; } /** * Setter method for owner. * * @param owner the owner to set */ public final void setOwner(String owner) { this.owner = owner; } /** * Getter method for ownerOffice. * * @return the ownerOffice */ public final String getOwnerOffice() { return ownerOffice; } /** * Setter method for ownerOffice. * * @param ownerOffice the ownerOffice to set */ public final void setOwnerOffice(String ownerOffice) { this.ownerOffice = ownerOffice; } /** * Getter method for object. * * @return the object */ public final Object getObject() { return object; } /** * Setter method for object. * * @param object the object to set */ public final void setObject(Object object) { this.object = object; } /** * Getter method for user. * * @return the user */ public final BaseUser getUser() { return user; } /** * Setter method for user. * * @param user the user to set */ public final void setUser(BaseUser user) { this.user = user; } /** * Getter method for startDate. * * @return the startDate */ public final Date getStartDate() { return startDate; } /** * Setter method for startDate. * * @param startDate the startDate to set */ public final void setStartDate(Date startDate) { this.startDate = startDate; } /** * Getter method for endDate. * * @return the endDate */ public final Date getEndDate() { return endDate; } /** * Setter method for endDate. * * @param endDate the endDate to set */ public final void setEndDate(Date endDate) { this.endDate = endDate; } /** * Getter method for logAction. 
* * @return the logAction */ public final String getLogAction() { return logAction; } /** * Setter method for logAction. * * @param logAction the logAction to set */ public final void setLogAction(String logAction) { this.logAction = logAction; } }
/**
 * Copyright 2011-2017 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.dmdl.directio.text;

import static com.asakusafw.dmdl.directio.text.TextFormatConstants.*;

import java.nio.charset.Charset;
import java.util.Map;
import java.util.Optional;

import com.asakusafw.dmdl.directio.util.ClassName;
import com.asakusafw.dmdl.directio.util.CodecNames;
import com.asakusafw.dmdl.directio.util.Value;
import com.asakusafw.dmdl.model.AstAttribute;
import com.asakusafw.dmdl.model.AstAttributeElement;
import com.asakusafw.dmdl.semantics.DmdlSemantics;
import com.asakusafw.runtime.io.text.LineSeparator;
import com.asakusafw.runtime.io.text.driver.ErrorAction;
import com.asakusafw.runtime.io.text.driver.HeaderType;

/**
 * Settings of formatted text file.
 * <p>
 * Each setting is a {@link Value} that stays {@link Value#undefined() undefined}
 * until the corresponding attribute element is consumed via
 * {@link #consume(DmdlSemantics, AstAttribute, Map)}.
 * </p>
 * @since 0.9.1
 */
public class TextFormatSettings {

    private Value<Charset> charset = Value.undefined();

    private Value<HeaderType> headerType = Value.undefined();

    private Value<ClassName> compressionType = Value.undefined();

    private Value<LineSeparator> lineSeparator = Value.undefined();

    private Value<Character> fieldSeparator = Value.undefined();

    private Value<ErrorAction> lessInputAction = Value.undefined();

    private Value<ErrorAction> moreInputAction = Value.undefined();

    private Value<ClassName> inputTransformerClass = Value.undefined();

    private Value<ClassName> outputTransformerClass = Value.undefined();

    /**
     * Returns the charset name.
     * @return the charset name
     */
    public Value<Charset> getCharset() {
        return charset;
    }

    /**
     * Returns the header type.
     * @return the header type
     */
    public Value<HeaderType> getHeaderType() {
        return headerType;
    }

    /**
     * Returns the compression type.
     * @return the compression type
     */
    public Value<ClassName> getCompressionType() {
        return compressionType;
    }

    /**
     * Returns the line separator.
     * @return the line separator
     */
    public Value<LineSeparator> getLineSeparator() {
        return lineSeparator;
    }

    /**
     * Returns the field separator.
     * @return the field separator
     */
    public Value<Character> getFieldSeparator() {
        return fieldSeparator;
    }

    /**
     * Returns the error action type for less input fields.
     * @return the error action type for less input fields
     */
    public Value<ErrorAction> getLessInputAction() {
        return lessInputAction;
    }

    /**
     * Returns the error action type for extra input fields.
     * @return the error action type for extra input fields
     */
    public Value<ErrorAction> getMoreInputAction() {
        return moreInputAction;
    }

    /**
     * Returns the input transformer class.
     * @return the input transformer class
     */
    public Value<ClassName> getInputTransformerClass() {
        return inputTransformerClass;
    }

    /**
     * Returns the output transformer class.
     * @return the output transformer class
     */
    public Value<ClassName> getOutputTransformerClass() {
        return outputTransformerClass;
    }

    /**
     * Consumes attribute elements about text format settings, and returns the
     * corresponding {@link TextFormatSettings}.
     * <p>
     * Each recognized element is removed from {@code elements}; unrecognized
     * elements are left in the map for the caller to report.
     * </p>
     * @param environment the current environment
     * @param attribute the attribute
     * @param elements the element map to be consumed
     * @return corresponded {@link TextFormatSettings}.
     */
    public static TextFormatSettings consume(
            DmdlSemantics environment, AstAttribute attribute,
            Map<String, AstAttributeElement> elements) {
        AttributeAnalyzer analyzer = new AttributeAnalyzer(environment, attribute);
        TextFormatSettings settings = new TextFormatSettings();
        consumeCharset(settings, analyzer, elements.remove(ELEMENT_CHARSET_NAME));
        consumeHeaderType(settings, analyzer, elements.remove(ELEMENT_HEADER_TYPE));
        consumeCompressionType(settings, analyzer, elements.remove(ELEMENT_COMPRESSION_TYPE));
        consumeLineSeparator(settings, analyzer, elements.remove(ELEMENT_LINE_SEPARATOR));
        consumeFieldSeparator(settings, analyzer, elements.remove(ELEMENT_FIELD_SEPARATOR));
        consumeLessInputAction(settings, analyzer, elements.remove(ELEMENT_LESS_INPUT_ACTION));
        consumeMoreInputAction(settings, analyzer, elements.remove(ELEMENT_MORE_INPUT_ACTION));
        consumeInputTransformerClass(settings, analyzer, elements.remove(ELEMENT_INPUT_TRANSFORMER));
        consumeOutputTransformerClass(settings, analyzer, elements.remove(ELEMENT_OUTPUT_TRANSFORMER));
        return settings;
    }

    // Each consume* helper is a no-op when the element is absent (null), so
    // the corresponding setting keeps its undefined default.
    private static void consumeCharset(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            settings.charset = analyzer.toCharset(element);
        }
    }

    private static void consumeHeaderType(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            settings.headerType = analyzer.toEnumConstant(element, HeaderType.class);
        }
    }

    private static void consumeCompressionType(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            // short codec aliases are resolved to fully qualified class names
            settings.compressionType = analyzer.toClassName(
                    element,
                    s -> Optional.of(CodecNames.resolveCodecName(s)));
        }
    }

    private static void consumeLineSeparator(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            settings.lineSeparator = analyzer.toEnumConstant(element, LineSeparator.class);
        }
    }

    private static void consumeFieldSeparator(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            settings.fieldSeparator = analyzer.toCharacter(element);
        }
    }

    private static void consumeLessInputAction(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            settings.lessInputAction = analyzer.toEnumConstant(element, ErrorAction.class);
        }
    }

    private static void consumeMoreInputAction(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            settings.moreInputAction = analyzer.toEnumConstant(element, ErrorAction.class);
        }
    }

    private static void consumeInputTransformerClass(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            settings.inputTransformerClass = analyzer.toClassName(element);
        }
    }

    private static void consumeOutputTransformerClass(
            TextFormatSettings settings, AttributeAnalyzer analyzer, AstAttributeElement element) {
        if (element != null) {
            settings.outputTransformerClass = analyzer.toClassName(element);
        }
    }

    /**
     * Verifies this settings.
     * @param environment the current environment
     * @param attribute the original attribute
     * @return {@code true} if the settings seems valid, otherwise {@code false}
     */
    public boolean verify(DmdlSemantics environment, AstAttribute attribute) {
        AttributeAnalyzer analyzer = new AttributeAnalyzer(environment, attribute);
        if (fieldSeparator.isPresent()) {
            char c = fieldSeparator.getEntity();
            // a CR/LF field separator would be indistinguishable from a record boundary
            if (c == '\r' || c == '\n') {
                analyzer.error(fieldSeparator.getDeclaration(),
                        Messages.getString("TextFormatSettings.diagnosticConflictLineSeparator")); //$NON-NLS-1$
            }
        }
        return !analyzer.hasError();
    }
}
package fr.adrienbrault.idea.symfony2plugin.profiler.widget;

import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.impl.SimpleDataContext;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.FileEditorManagerEvent;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.ListPopup;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.openapi.wm.StatusBarWidget;
import com.intellij.openapi.wm.impl.status.EditorBasedWidget;
import com.intellij.ui.popup.PopupFactoryImpl;
import com.intellij.util.Consumer;
import fr.adrienbrault.idea.symfony2plugin.profiler.ProfilerIndexInterface;
import fr.adrienbrault.idea.symfony2plugin.profiler.collector.DefaultDataCollectorInterface;
import fr.adrienbrault.idea.symfony2plugin.profiler.dict.ProfilerRequestInterface;
import fr.adrienbrault.idea.symfony2plugin.profiler.factory.ProfilerFactoryUtil;
import fr.adrienbrault.idea.symfony2plugin.profiler.widget.action.SymfonyProfilerWidgetActions;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.awt.event.MouseEvent;
import java.util.*;

/**
 * Status-bar widget that lists recent Symfony profiler requests and offers
 * navigation actions (URL, route, controller, template) in a popup.
 *
 * @author Daniel Espendiller <daniel@espendiller.net>
 */
public class SymfonyProfilerWidget extends EditorBasedWidget implements StatusBarWidget.MultipleTextValuesPresentation, StatusBarWidget.Multiframe {

    public static String ID = "symfony2.profiler";

    public SymfonyProfilerWidget(@NotNull Project project) {
        super(project);
    }

    @Override
    public StatusBarWidget copy() {
        return new SymfonyProfilerWidget(getProject());
    }

    /** Kind of navigation target a collected profiler value points at. */
    private enum ProfilerTarget {
        TEMPLATE, ROUTE, CONTROLLER
    }

    /**
     * Constructs the actions for the widget popup.
     * Groups are appended in fixed order; empty groups are omitted.
     */
    public DefaultActionGroup getActions() {
        DefaultActionGroup actionGroup = new DefaultActionGroup(null, false);

        ProfilerIndexInterface index = ProfilerFactoryUtil.createIndex(getProject());
        if (index == null) {
            return actionGroup;
        }

        List<ProfilerRequestInterface> requests = index.getRequests();

        Collection<AnAction> templateActions = new ArrayList<>();
        Map<String, Integer> templateActionsMap = new HashMap<>();

        Collection<AnAction> routeActions = new ArrayList<>();
        Map<String, Integer> routeActionsMap = new HashMap<>();

        Collection<AnAction> controllerActions = new ArrayList<>();
        Map<String, Integer> controllerActionsMap = new HashMap<>();

        Collection<AnAction> urlActions = new ArrayList<>();
        Collection<AnAction> mailActions = new ArrayList<>();

        for (ProfilerRequestInterface profilerRequest : requests) {
            urlActions.add(new SymfonyProfilerWidgetActions.UrlAction(index, profilerRequest));

            DefaultDataCollectorInterface collector = profilerRequest.getCollector(DefaultDataCollectorInterface.class);
            if (collector != null) {
                attachProfileItem(templateActions, templateActionsMap, collector.getTemplate(), ProfilerTarget.TEMPLATE);
                attachProfileItem(routeActions, routeActionsMap, collector.getRoute(), ProfilerTarget.ROUTE);
                attachProfileItem(controllerActions, controllerActionsMap, collector.getController(), ProfilerTarget.CONTROLLER);
            }

            // @TODO: use collector
            //String content = profilerRequest.getContent();
            //if(content != null && content.contains("Swift_Mime_Headers_MailboxHeader")) {
            //    mailActions.add(new SymfonyProfilerWidgetActions.UrlAction(getProject(), profilerRequest, statusCode).withPanel("swiftmailer").withIcon(Symfony2Icons.MAIL));
            //}
        }

        // routes
        if (!urlActions.isEmpty()) {
            actionGroup.addSeparator("Debug-Url");
            actionGroup.addAll(urlActions);
        }

        // mails send by request
        if (!mailActions.isEmpty()) {
            actionGroup.addSeparator("E-Mail");
            actionGroup.addAll(mailActions);
        }

        // routes
        if (!routeActions.isEmpty()) {
            actionGroup.addSeparator("Routes");
            actionGroup.addAll(routeActions);
        }

        // controller methods
        if (!controllerActions.isEmpty()) {
            actionGroup.addSeparator("Controller");
            actionGroup.addAll(controllerActions);
        }

        // template should be most use case; so keep it in cursor range
        if (!templateActions.isEmpty()) {
            actionGroup.addSeparator("Template");
            actionGroup.addAll(templateActions);
        }

        return actionGroup;
    }

    /**
     * Registers one navigation action per distinct collected value and counts
     * how often each value was seen across the profiler requests.
     *
     * @param actions       the action group receiving a new action on first occurrence
     * @param occurrences   occurrence counter per collected value (also the de-dup set)
     * @param collectString the collected value; ignored when null
     * @param profilerTarget which kind of action to create for a new value
     */
    private void attachProfileItem(Collection<AnAction> actions, Map<String, Integer> occurrences, @Nullable String collectString, ProfilerTarget profilerTarget) {
        if (collectString == null) {
            return;
        }

        Integer seen = occurrences.get(collectString);
        if (seen != null) {
            // BUG FIX: the previous code re-stored the unchanged value
            // ("put(key, get(key))"), so the counter never advanced.
            occurrences.put(collectString, seen + 1);
            return;
        }

        occurrences.put(collectString, 0);
        switch (profilerTarget) {
            case CONTROLLER:
                actions.add(new SymfonyProfilerWidgetActions.MethodAction(getProject(), collectString));
                break;
            case ROUTE:
                actions.add(new SymfonyProfilerWidgetActions.RouteAction(getProject(), collectString));
                break;
            case TEMPLATE:
                actions.add(new SymfonyProfilerWidgetActions.TemplateAction(getProject(), collectString));
                break;
        }
    }

    @Nullable
    @Override
    public ListPopup getPopupStep() {
        if (isDisposed()) {
            return null;
        }

        ActionGroup popupGroup = getActions();
        DataContext dataContext = SimpleDataContext.builder()
            .add(CommonDataKeys.PROJECT, getProject())
            .add(PlatformDataKeys.CONTEXT_COMPONENT, IdeFocusManager.getInstance(getProject()).getFocusOwner())
            .build();

        return new PopupFactoryImpl.ActionGroupPopup(
            "Symfony Profiler",
            popupGroup,
            dataContext,
            false,
            false,
            false,
            true,
            null,
            -1,
            null,
            null
        );
    }

    @Nullable
    @Override
    public String getSelectedValue() {
        return "Symfony";
    }

    @NotNull
    @Override
    public String getMaxValue() {
        return "";
    }

    @NotNull
    @Override
    public String ID() {
        return ID;
    }

    @Nullable
    @Override
    public WidgetPresentation getPresentation(@NotNull PlatformType platformType) {
        return this;
    }

    @Nullable
    @Override
    public String getTooltipText() {
        return "Symfony Profiler";
    }

    @Nullable
    @Override
    public Consumer<MouseEvent> getClickConsumer() {
        return null;
    }

    @Override
    public void selectionChanged(@NotNull FileEditorManagerEvent event) {
        update(event.getManager().getProject());
    }

    @Override
    public void fileOpened(@NotNull FileEditorManager source, @NotNull VirtualFile file) {
        update(source.getProject());
    }

    @Override
    public void fileClosed(@NotNull FileEditorManager source, @NotNull VirtualFile file) {
        update(source.getProject());
    }

    /**
     * Schedules a status-bar refresh on the EDT.
     * NOTE(review): the {@code project} parameter is unused; the widget's own
     * project is consulted instead — kept for signature compatibility.
     */
    public void update(final Project project) {
        ApplicationManager.getApplication().invokeLater(() -> {
            if ((getProject() == null) || getProject().isDisposed()) {
                return;
            }

            if (!isDisposed() && myStatusBar != null) {
                myStatusBar.updateWidget(ID());
            }
        });
    }
}
/* Generated SBE (Simple Binary Encoding) message codec */
package uk.co.real_logic.sbe.ir.generated;

import org.agrona.MutableDirectBuffer;
import org.agrona.DirectBuffer;

/**
 * Encoder for the SBE {@code FrameCodec} message: a 12-byte fixed block
 * (irId, irVersion, schemaVersion — all little-endian int32) followed by
 * three var-length UTF-8 fields (packageName, namespaceName, semanticVersion),
 * each prefixed by a little-endian uint16 length.
 */
@javax.annotation.Generated(value = { "uk.co.real_logic.sbe.generation.java.JavaGenerator" })
@SuppressWarnings("all")
public class FrameCodecEncoder
{
    public static final int BLOCK_LENGTH = 12;
    public static final int TEMPLATE_ID = 1;
    public static final int SCHEMA_ID = 1;
    public static final int SCHEMA_VERSION = 0;

    private final FrameCodecEncoder parentMessage = this;
    private MutableDirectBuffer buffer;
    protected int offset;
    protected int limit;

    public int sbeBlockLength()
    {
        return BLOCK_LENGTH;
    }

    public int sbeTemplateId()
    {
        return TEMPLATE_ID;
    }

    public int sbeSchemaId()
    {
        return SCHEMA_ID;
    }

    public int sbeSchemaVersion()
    {
        return SCHEMA_VERSION;
    }

    public String sbeSemanticType()
    {
        return "";
    }

    public MutableDirectBuffer buffer()
    {
        return buffer;
    }

    public int offset()
    {
        return offset;
    }

    public FrameCodecEncoder wrap(final MutableDirectBuffer buffer, final int offset)
    {
        this.buffer = buffer;
        this.offset = offset;
        limit(offset + BLOCK_LENGTH);

        return this;
    }

    public FrameCodecEncoder wrapAndApplyHeader(
        final MutableDirectBuffer buffer, final int offset, final MessageHeaderEncoder headerEncoder)
    {
        headerEncoder
            .wrap(buffer, offset)
            .blockLength(BLOCK_LENGTH)
            .templateId(TEMPLATE_ID)
            .schemaId(SCHEMA_ID)
            .version(SCHEMA_VERSION);

        return wrap(buffer, offset + MessageHeaderEncoder.ENCODED_LENGTH);
    }

    public int encodedLength()
    {
        return limit - offset;
    }

    public int limit()
    {
        return limit;
    }

    public void limit(final int limit)
    {
        this.limit = limit;
    }

    public static int irIdEncodingOffset()
    {
        return 0;
    }

    public static int irIdEncodingLength()
    {
        return 4;
    }

    public static int irIdNullValue()
    {
        return -2147483648;
    }

    public static int irIdMinValue()
    {
        return -2147483647;
    }

    public static int irIdMaxValue()
    {
        return 2147483647;
    }

    public FrameCodecEncoder irId(final int value)
    {
        buffer.putInt(offset + 0, value, java.nio.ByteOrder.LITTLE_ENDIAN);
        return this;
    }

    public static int irVersionEncodingOffset()
    {
        return 4;
    }

    public static int irVersionEncodingLength()
    {
        return 4;
    }

    public static int irVersionNullValue()
    {
        return -2147483648;
    }

    public static int irVersionMinValue()
    {
        return -2147483647;
    }

    public static int irVersionMaxValue()
    {
        return 2147483647;
    }

    public FrameCodecEncoder irVersion(final int value)
    {
        buffer.putInt(offset + 4, value, java.nio.ByteOrder.LITTLE_ENDIAN);
        return this;
    }

    public static int schemaVersionEncodingOffset()
    {
        return 8;
    }

    public static int schemaVersionEncodingLength()
    {
        return 4;
    }

    public static int schemaVersionNullValue()
    {
        return -2147483648;
    }

    public static int schemaVersionMinValue()
    {
        return -2147483647;
    }

    public static int schemaVersionMaxValue()
    {
        return 2147483647;
    }

    public FrameCodecEncoder schemaVersion(final int value)
    {
        buffer.putInt(offset + 8, value, java.nio.ByteOrder.LITTLE_ENDIAN);
        return this;
    }

    // ----- shared var-length-data plumbing (was duplicated per field) -----

    /** Rejects lengths that do not fit the uint16 length prefix. */
    private static void checkVarDataLength(final int length)
    {
        if (length > 65534)
        {
            throw new IllegalArgumentException("length > max value for type: " + length);
        }
    }

    /** Appends a uint16 length prefix followed by {@code length} bytes from {@code src}. */
    private FrameCodecEncoder putVarData(final DirectBuffer src, final int srcOffset, final int length)
    {
        checkVarDataLength(length);

        final int headerLength = 2;
        final int limit = parentMessage.limit();
        parentMessage.limit(limit + headerLength + length);
        buffer.putShort(limit, (short)length, java.nio.ByteOrder.LITTLE_ENDIAN);
        buffer.putBytes(limit + headerLength, src, srcOffset, length);

        return this;
    }

    /** Appends a uint16 length prefix followed by {@code length} bytes from {@code src}. */
    private FrameCodecEncoder putVarData(final byte[] src, final int srcOffset, final int length)
    {
        checkVarDataLength(length);

        final int headerLength = 2;
        final int limit = parentMessage.limit();
        parentMessage.limit(limit + headerLength + length);
        buffer.putShort(limit, (short)length, java.nio.ByteOrder.LITTLE_ENDIAN);
        buffer.putBytes(limit + headerLength, src, srcOffset, length);

        return this;
    }

    /**
     * Appends the UTF-8 bytes of {@code value} with a uint16 length prefix.
     * Uses StandardCharsets.UTF_8 instead of getBytes("UTF-8"), removing the
     * dead UnsupportedEncodingException handling (UTF-8 is always supported).
     */
    private FrameCodecEncoder putVarData(final String value)
    {
        final byte[] bytes = value.getBytes(java.nio.charset.StandardCharsets.UTF_8);
        return putVarData(bytes, 0, bytes.length);
    }

    public static int packageNameId()
    {
        return 4;
    }

    public static String packageNameCharacterEncoding()
    {
        return "UTF-8";
    }

    public static String packageNameMetaAttribute(final MetaAttribute metaAttribute)
    {
        switch (metaAttribute)
        {
            case EPOCH: return "unix";
            case TIME_UNIT: return "nanosecond";
            case SEMANTIC_TYPE: return "";
        }

        return "";
    }

    public static int packageNameHeaderLength()
    {
        return 2;
    }

    public FrameCodecEncoder putPackageName(final DirectBuffer src, final int srcOffset, final int length)
    {
        return putVarData(src, srcOffset, length);
    }

    public FrameCodecEncoder putPackageName(final byte[] src, final int srcOffset, final int length)
    {
        return putVarData(src, srcOffset, length);
    }

    public FrameCodecEncoder packageName(final String value)
    {
        return putVarData(value);
    }

    public static int namespaceNameId()
    {
        return 5;
    }

    public static String namespaceNameCharacterEncoding()
    {
        return "UTF-8";
    }

    public static String namespaceNameMetaAttribute(final MetaAttribute metaAttribute)
    {
        switch (metaAttribute)
        {
            case EPOCH: return "unix";
            case TIME_UNIT: return "nanosecond";
            case SEMANTIC_TYPE: return "";
        }

        return "";
    }

    public static int namespaceNameHeaderLength()
    {
        return 2;
    }

    public FrameCodecEncoder putNamespaceName(final DirectBuffer src, final int srcOffset, final int length)
    {
        return putVarData(src, srcOffset, length);
    }

    public FrameCodecEncoder putNamespaceName(final byte[] src, final int srcOffset, final int length)
    {
        return putVarData(src, srcOffset, length);
    }

    public FrameCodecEncoder namespaceName(final String value)
    {
        return putVarData(value);
    }

    public static int semanticVersionId()
    {
        return 6;
    }

    public static String semanticVersionCharacterEncoding()
    {
        return "UTF-8";
    }

    public static String semanticVersionMetaAttribute(final MetaAttribute metaAttribute)
    {
        switch (metaAttribute)
        {
            case EPOCH: return "unix";
            case TIME_UNIT: return "nanosecond";
            case SEMANTIC_TYPE: return "";
        }

        return "";
    }

    public static int semanticVersionHeaderLength()
    {
        return 2;
    }

    public FrameCodecEncoder putSemanticVersion(final DirectBuffer src, final int srcOffset, final int length)
    {
        return putVarData(src, srcOffset, length);
    }

    public FrameCodecEncoder putSemanticVersion(final byte[] src, final int srcOffset, final int length)
    {
        return putVarData(src, srcOffset, length);
    }

    public FrameCodecEncoder semanticVersion(final String value)
    {
        return putVarData(value);
    }

    public String toString()
    {
        return appendTo(new StringBuilder(100)).toString();
    }

    public StringBuilder appendTo(final StringBuilder builder)
    {
        FrameCodecDecoder writer = new FrameCodecDecoder();
        writer.wrap(buffer, offset, BLOCK_LENGTH, SCHEMA_VERSION);

        return writer.appendTo(builder);
    }
}
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.master.historicaltimeseries; import java.io.Serializable; import java.util.Map; import java.util.Set; import java.util.TreeSet; import org.joda.beans.Bean; import org.joda.beans.BeanBuilder; import org.joda.beans.BeanDefinition; import org.joda.beans.JodaBeanUtils; import org.joda.beans.MetaProperty; import org.joda.beans.Property; import org.joda.beans.PropertyDefinition; import org.joda.beans.impl.direct.DirectBean; import org.joda.beans.impl.direct.DirectBeanBuilder; import org.joda.beans.impl.direct.DirectMetaBean; import org.joda.beans.impl.direct.DirectMetaProperty; import org.joda.beans.impl.direct.DirectMetaPropertyMap; import com.opengamma.core.historicaltimeseries.HistoricalTimeSeriesInfo; import com.opengamma.id.ExternalIdBundleWithDates; import com.opengamma.id.MutableUniqueIdentifiable; import com.opengamma.id.ObjectId; import com.opengamma.id.UniqueId; import com.opengamma.util.PublicSPI; import com.opengamma.util.auth.Permissionable; /** * The information about a historical time-series. * <p> * This is used to hold the information about a time-series in the master. The actual time-series is held separately. * <p> * This class is mutable and not thread-safe. */ @PublicSPI @BeanDefinition public class ManageableHistoricalTimeSeriesInfo extends DirectBean implements HistoricalTimeSeriesInfo, MutableUniqueIdentifiable, Serializable, Permissionable { /** Serialization version. */ private static final long serialVersionUID = 1L; /** * The historical time-series unique identifier. This field is managed by the master but must be set for updates. */ @PropertyDefinition private UniqueId _uniqueId; /** * The external identifier bundle with valid dates if available. The key of the specific series, such as the equity identifiers. 
*/ @PropertyDefinition private ExternalIdBundleWithDates _externalIdBundle; /** * The name of the historical time-series intended for display purposes. This field must not be null for the object to be valid. */ @PropertyDefinition private String _name; /** * The data field. This defines the type of data that the series represents. */ @PropertyDefinition private String _dataField; /** * The data source. The source of the data, typically a major financial data supplier. */ @PropertyDefinition private String _dataSource; /** * The data provider. The underlying data provider, such as an individual exchange. */ @PropertyDefinition private String _dataProvider; /** * The descriptive observation time key. This defines, textually, the time of day, such as LONDON_CLOSE. */ @PropertyDefinition private String _observationTime; /** * The object identifier of the historical time-series data points. This must be separately looked up using the master. */ @PropertyDefinition private ObjectId _timeSeriesObjectId; /** * The set of required permissions. * This is a set of permissions that a user needs to be able to view a time-series. */ @PropertyDefinition(validate = "notNull") private final Set<String> _requiredPermissions = new TreeSet<>(); /** * Creates an instance. */ public ManageableHistoricalTimeSeriesInfo() { } //------------------------- AUTOGENERATED START ------------------------- ///CLOVER:OFF /** * The meta-bean for {@code ManageableHistoricalTimeSeriesInfo}. * @return the meta-bean, not null */ public static ManageableHistoricalTimeSeriesInfo.Meta meta() { return ManageableHistoricalTimeSeriesInfo.Meta.INSTANCE; } static { JodaBeanUtils.registerMetaBean(ManageableHistoricalTimeSeriesInfo.Meta.INSTANCE); } @Override public ManageableHistoricalTimeSeriesInfo.Meta metaBean() { return ManageableHistoricalTimeSeriesInfo.Meta.INSTANCE; } //----------------------------------------------------------------------- /** * Gets the historical time-series unique identifier. 
This field is managed by the master but must be set for updates. * @return the value of the property */ public UniqueId getUniqueId() { return _uniqueId; } /** * Sets the historical time-series unique identifier. This field is managed by the master but must be set for updates. * @param uniqueId the new value of the property */ public void setUniqueId(UniqueId uniqueId) { this._uniqueId = uniqueId; } /** * Gets the the {@code uniqueId} property. * @return the property, not null */ public final Property<UniqueId> uniqueId() { return metaBean().uniqueId().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the external identifier bundle with valid dates if available. The key of the specific series, such as the equity identifiers. * @return the value of the property */ public ExternalIdBundleWithDates getExternalIdBundle() { return _externalIdBundle; } /** * Sets the external identifier bundle with valid dates if available. The key of the specific series, such as the equity identifiers. * @param externalIdBundle the new value of the property */ public void setExternalIdBundle(ExternalIdBundleWithDates externalIdBundle) { this._externalIdBundle = externalIdBundle; } /** * Gets the the {@code externalIdBundle} property. * @return the property, not null */ public final Property<ExternalIdBundleWithDates> externalIdBundle() { return metaBean().externalIdBundle().createProperty(this); } //----------------------------------------------------------------------- /** * Gets the name of the historical time-series intended for display purposes. This field must not be null for the object to be valid. * @return the value of the property */ public String getName() { return _name; } /** * Sets the name of the historical time-series intended for display purposes. This field must not be null for the object to be valid. 
* @param name  the new value of the property
*/
public void setName(String name) {
    this._name = name;
}

//-----------------------------------------------------------------------
// NOTE(review): everything from here down is Joda-Beans generated code (see
// the "AUTOGENERATED END" marker at the bottom of the class). Prefer
// regenerating with the Joda-Beans code generator over hand-editing.
//-----------------------------------------------------------------------

/**
 * Gets the {@code name} property.
 * @return the property, not null
 */
public final Property<String> name() {
    return metaBean().name().createProperty(this);
}

//-----------------------------------------------------------------------
/**
 * Gets the data field. This defines the type of data that the series represents.
 * @return the value of the property
 */
public String getDataField() {
    return _dataField;
}

/**
 * Sets the data field. This defines the type of data that the series represents.
 * @param dataField  the new value of the property
 */
public void setDataField(String dataField) {
    this._dataField = dataField;
}

/**
 * Gets the {@code dataField} property.
 * @return the property, not null
 */
public final Property<String> dataField() {
    return metaBean().dataField().createProperty(this);
}

//-----------------------------------------------------------------------
/**
 * Gets the data source. The source of the data, typically a major financial data supplier.
 * @return the value of the property
 */
public String getDataSource() {
    return _dataSource;
}

/**
 * Sets the data source. The source of the data, typically a major financial data supplier.
 * @param dataSource  the new value of the property
 */
public void setDataSource(String dataSource) {
    this._dataSource = dataSource;
}

/**
 * Gets the {@code dataSource} property.
 * @return the property, not null
 */
public final Property<String> dataSource() {
    return metaBean().dataSource().createProperty(this);
}

//-----------------------------------------------------------------------
/**
 * Gets the data provider. The underlying data provider, such as an individual exchange.
 * @return the value of the property
 */
public String getDataProvider() {
    return _dataProvider;
}

/**
 * Sets the data provider. The underlying data provider, such as an individual exchange.
 * @param dataProvider  the new value of the property
 */
public void setDataProvider(String dataProvider) {
    this._dataProvider = dataProvider;
}

/**
 * Gets the {@code dataProvider} property.
 * @return the property, not null
 */
public final Property<String> dataProvider() {
    return metaBean().dataProvider().createProperty(this);
}

//-----------------------------------------------------------------------
/**
 * Gets the descriptive observation time key. This defines, textually, the time of day, such as LONDON_CLOSE.
 * @return the value of the property
 */
public String getObservationTime() {
    return _observationTime;
}

/**
 * Sets the descriptive observation time key. This defines, textually, the time of day, such as LONDON_CLOSE.
 * @param observationTime  the new value of the property
 */
public void setObservationTime(String observationTime) {
    this._observationTime = observationTime;
}

/**
 * Gets the {@code observationTime} property.
 * @return the property, not null
 */
public final Property<String> observationTime() {
    return metaBean().observationTime().createProperty(this);
}

//-----------------------------------------------------------------------
/**
 * Gets the object identifier of the historical time-series data points. This must be separately looked up using the master.
 * @return the value of the property
 */
public ObjectId getTimeSeriesObjectId() {
    return _timeSeriesObjectId;
}

/**
 * Sets the object identifier of the historical time-series data points. This must be separately looked up using the master.
 * @param timeSeriesObjectId  the new value of the property
 */
public void setTimeSeriesObjectId(ObjectId timeSeriesObjectId) {
    this._timeSeriesObjectId = timeSeriesObjectId;
}

/**
 * Gets the {@code timeSeriesObjectId} property.
 * @return the property, not null
 */
public final Property<ObjectId> timeSeriesObjectId() {
    return metaBean().timeSeriesObjectId().createProperty(this);
}

//-----------------------------------------------------------------------
/**
 * Gets the set of required permissions.
 * This is a set of permissions that a user needs to be able to view a time-series.
 * @return the value of the property, not null
 */
public Set<String> getRequiredPermissions() {
    return _requiredPermissions;
}

/**
 * Sets the set of required permissions.
 * This is a set of permissions that a user needs to be able to view a time-series.
 * @param requiredPermissions  the new value of the property, not null
 */
public void setRequiredPermissions(Set<String> requiredPermissions) {
    JodaBeanUtils.notNull(requiredPermissions, "requiredPermissions");
    // replace contents in place rather than swapping the collection reference
    this._requiredPermissions.clear();
    this._requiredPermissions.addAll(requiredPermissions);
}

/**
 * Gets the {@code requiredPermissions} property.
 * This is a set of permissions that a user needs to be able to view a time-series.
 * @return the property, not null
 */
public final Property<Set<String>> requiredPermissions() {
    return metaBean().requiredPermissions().createProperty(this);
}

//-----------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeriesInfo clone() {
    return JodaBeanUtils.cloneAlways(this);
}

@Override
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
        ManageableHistoricalTimeSeriesInfo other = (ManageableHistoricalTimeSeriesInfo) obj;
        return JodaBeanUtils.equal(getUniqueId(), other.getUniqueId()) &&
                JodaBeanUtils.equal(getExternalIdBundle(), other.getExternalIdBundle()) &&
                JodaBeanUtils.equal(getName(), other.getName()) &&
                JodaBeanUtils.equal(getDataField(), other.getDataField()) &&
                JodaBeanUtils.equal(getDataSource(), other.getDataSource()) &&
                JodaBeanUtils.equal(getDataProvider(), other.getDataProvider()) &&
                JodaBeanUtils.equal(getObservationTime(), other.getObservationTime()) &&
                JodaBeanUtils.equal(getTimeSeriesObjectId(), other.getTimeSeriesObjectId()) &&
                JodaBeanUtils.equal(getRequiredPermissions(), other.getRequiredPermissions());
    }
    return false;
}

@Override
public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(getUniqueId());
    hash = hash * 31 + JodaBeanUtils.hashCode(getExternalIdBundle());
    hash = hash * 31 + JodaBeanUtils.hashCode(getName());
    hash = hash * 31 + JodaBeanUtils.hashCode(getDataField());
    hash = hash * 31 + JodaBeanUtils.hashCode(getDataSource());
    hash = hash * 31 + JodaBeanUtils.hashCode(getDataProvider());
    hash = hash * 31 + JodaBeanUtils.hashCode(getObservationTime());
    hash = hash * 31 + JodaBeanUtils.hashCode(getTimeSeriesObjectId());
    hash = hash * 31 + JodaBeanUtils.hashCode(getRequiredPermissions());
    return hash;
}

@Override
public String toString() {
    StringBuilder buf = new StringBuilder(320);
    buf.append("ManageableHistoricalTimeSeriesInfo{");
    int len = buf.length();
    toString(buf);
    if (buf.length() > len) {
        // trim the trailing ", " appended after the last property
        buf.setLength(buf.length() - 2);
    }
    buf.append('}');
    return buf.toString();
}

protected void toString(StringBuilder buf) {
    buf.append("uniqueId").append('=').append(JodaBeanUtils.toString(getUniqueId())).append(',').append(' ');
    buf.append("externalIdBundle").append('=').append(JodaBeanUtils.toString(getExternalIdBundle())).append(',').append(' ');
    buf.append("name").append('=').append(JodaBeanUtils.toString(getName())).append(',').append(' ');
    buf.append("dataField").append('=').append(JodaBeanUtils.toString(getDataField())).append(',').append(' ');
    buf.append("dataSource").append('=').append(JodaBeanUtils.toString(getDataSource())).append(',').append(' ');
    buf.append("dataProvider").append('=').append(JodaBeanUtils.toString(getDataProvider())).append(',').append(' ');
    buf.append("observationTime").append('=').append(JodaBeanUtils.toString(getObservationTime())).append(',').append(' ');
    buf.append("timeSeriesObjectId").append('=').append(JodaBeanUtils.toString(getTimeSeriesObjectId())).append(',').append(' ');
    buf.append("requiredPermissions").append('=').append(JodaBeanUtils.toString(getRequiredPermissions())).append(',').append(' ');
}

//-----------------------------------------------------------------------
/**
 * The meta-bean for {@code ManageableHistoricalTimeSeriesInfo}.
 */
public static class Meta extends DirectMetaBean {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();

    /**
     * The meta-property for the {@code uniqueId} property.
     */
    private final MetaProperty<UniqueId> _uniqueId = DirectMetaProperty.ofReadWrite(
            this, "uniqueId", ManageableHistoricalTimeSeriesInfo.class, UniqueId.class);
    /**
     * The meta-property for the {@code externalIdBundle} property.
     */
    private final MetaProperty<ExternalIdBundleWithDates> _externalIdBundle = DirectMetaProperty.ofReadWrite(
            this, "externalIdBundle", ManageableHistoricalTimeSeriesInfo.class, ExternalIdBundleWithDates.class);
    /**
     * The meta-property for the {@code name} property.
     */
    private final MetaProperty<String> _name = DirectMetaProperty.ofReadWrite(
            this, "name", ManageableHistoricalTimeSeriesInfo.class, String.class);
    /**
     * The meta-property for the {@code dataField} property.
     */
    private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(
            this, "dataField", ManageableHistoricalTimeSeriesInfo.class, String.class);
    /**
     * The meta-property for the {@code dataSource} property.
     */
    private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(
            this, "dataSource", ManageableHistoricalTimeSeriesInfo.class, String.class);
    /**
     * The meta-property for the {@code dataProvider} property.
     */
    private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(
            this, "dataProvider", ManageableHistoricalTimeSeriesInfo.class, String.class);
    /**
     * The meta-property for the {@code observationTime} property.
     */
    private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(
            this, "observationTime", ManageableHistoricalTimeSeriesInfo.class, String.class);
    /**
     * The meta-property for the {@code timeSeriesObjectId} property.
     */
    private final MetaProperty<ObjectId> _timeSeriesObjectId = DirectMetaProperty.ofReadWrite(
            this, "timeSeriesObjectId", ManageableHistoricalTimeSeriesInfo.class, ObjectId.class);
    /**
     * The meta-property for the {@code requiredPermissions} property.
     */
    @SuppressWarnings({"unchecked", "rawtypes" })
    private final MetaProperty<Set<String>> _requiredPermissions = DirectMetaProperty.ofReadWrite(
            this, "requiredPermissions", ManageableHistoricalTimeSeriesInfo.class, (Class) Set.class);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
            this, null,
            "uniqueId",
            "externalIdBundle",
            "name",
            "dataField",
            "dataSource",
            "dataProvider",
            "observationTime",
            "timeSeriesObjectId",
            "requiredPermissions");

    /**
     * Restricted constructor.
     */
    protected Meta() {
    }

    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
        // dispatch on the precomputed hash of the property name (generator-supplied constants)
        switch (propertyName.hashCode()) {
            case -294460212:  // uniqueId
                return _uniqueId;
            case -736922008:  // externalIdBundle
                return _externalIdBundle;
            case 3373707:  // name
                return _name;
            case -386794640:  // dataField
                return _dataField;
            case 1272470629:  // dataSource
                return _dataSource;
            case 339742651:  // dataProvider
                return _dataProvider;
            case 951232793:  // observationTime
                return _observationTime;
            case 2129430654:  // timeSeriesObjectId
                return _timeSeriesObjectId;
            case 132663141:  // requiredPermissions
                return _requiredPermissions;
        }
        return super.metaPropertyGet(propertyName);
    }

    @Override
    public BeanBuilder<? extends ManageableHistoricalTimeSeriesInfo> builder() {
        return new DirectBeanBuilder<ManageableHistoricalTimeSeriesInfo>(new ManageableHistoricalTimeSeriesInfo());
    }

    @Override
    public Class<? extends ManageableHistoricalTimeSeriesInfo> beanType() {
        return ManageableHistoricalTimeSeriesInfo.class;
    }

    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
        return _metaPropertyMap$;
    }

    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code uniqueId} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<UniqueId> uniqueId() {
        return _uniqueId;
    }

    /**
     * The meta-property for the {@code externalIdBundle} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<ExternalIdBundleWithDates> externalIdBundle() {
        return _externalIdBundle;
    }

    /**
     * The meta-property for the {@code name} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> name() {
        return _name;
    }

    /**
     * The meta-property for the {@code dataField} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> dataField() {
        return _dataField;
    }

    /**
     * The meta-property for the {@code dataSource} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> dataSource() {
        return _dataSource;
    }

    /**
     * The meta-property for the {@code dataProvider} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> dataProvider() {
        return _dataProvider;
    }

    /**
     * The meta-property for the {@code observationTime} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> observationTime() {
        return _observationTime;
    }

    /**
     * The meta-property for the {@code timeSeriesObjectId} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<ObjectId> timeSeriesObjectId() {
        return _timeSeriesObjectId;
    }

    /**
     * The meta-property for the {@code requiredPermissions} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<Set<String>> requiredPermissions() {
        return _requiredPermissions;
    }

    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
        switch (propertyName.hashCode()) {
            case -294460212:  // uniqueId
                return ((ManageableHistoricalTimeSeriesInfo) bean).getUniqueId();
            case -736922008:  // externalIdBundle
                return ((ManageableHistoricalTimeSeriesInfo) bean).getExternalIdBundle();
            case 3373707:  // name
                return ((ManageableHistoricalTimeSeriesInfo) bean).getName();
            case -386794640:  // dataField
                return ((ManageableHistoricalTimeSeriesInfo) bean).getDataField();
            case 1272470629:  // dataSource
                return ((ManageableHistoricalTimeSeriesInfo) bean).getDataSource();
            case 339742651:  // dataProvider
                return ((ManageableHistoricalTimeSeriesInfo) bean).getDataProvider();
            case 951232793:  // observationTime
                return ((ManageableHistoricalTimeSeriesInfo) bean).getObservationTime();
            case 2129430654:  // timeSeriesObjectId
                return ((ManageableHistoricalTimeSeriesInfo) bean).getTimeSeriesObjectId();
            case 132663141:  // requiredPermissions
                return ((ManageableHistoricalTimeSeriesInfo) bean).getRequiredPermissions();
        }
        return super.propertyGet(bean, propertyName, quiet);
    }

    @SuppressWarnings("unchecked")
    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
        switch (propertyName.hashCode()) {
            case -294460212:  // uniqueId
                ((ManageableHistoricalTimeSeriesInfo) bean).setUniqueId((UniqueId) newValue);
                return;
            case -736922008:  // externalIdBundle
                ((ManageableHistoricalTimeSeriesInfo) bean).setExternalIdBundle((ExternalIdBundleWithDates) newValue);
                return;
            case 3373707:  // name
                ((ManageableHistoricalTimeSeriesInfo) bean).setName((String) newValue);
                return;
            case -386794640:  // dataField
                ((ManageableHistoricalTimeSeriesInfo) bean).setDataField((String) newValue);
                return;
            case 1272470629:  // dataSource
                ((ManageableHistoricalTimeSeriesInfo) bean).setDataSource((String) newValue);
                return;
            case 339742651:  // dataProvider
                ((ManageableHistoricalTimeSeriesInfo) bean).setDataProvider((String) newValue);
                return;
            case 951232793:  // observationTime
                ((ManageableHistoricalTimeSeriesInfo) bean).setObservationTime((String) newValue);
                return;
            case 2129430654:  // timeSeriesObjectId
                ((ManageableHistoricalTimeSeriesInfo) bean).setTimeSeriesObjectId((ObjectId) newValue);
                return;
            case 132663141:  // requiredPermissions
                ((ManageableHistoricalTimeSeriesInfo) bean).setRequiredPermissions((Set<String>) newValue);
                return;
        }
        super.propertySet(bean, propertyName, newValue, quiet);
    }

    @Override
    protected void validate(Bean bean) {
        // requiredPermissions is the only property declared not-null
        JodaBeanUtils.notNull(((ManageableHistoricalTimeSeriesInfo) bean)._requiredPermissions, "requiredPermissions");
    }

}

///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
package org.apache.continuum.release.config;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.maven.model.Scm;
import org.apache.maven.shared.release.config.PropertiesReleaseDescriptorStore;
import org.apache.maven.shared.release.config.ReleaseDescriptor;
import org.apache.maven.shared.release.config.ReleaseDescriptorStoreException;
import org.apache.maven.shared.release.config.ReleaseUtils;
import org.codehaus.plexus.util.IOUtil;
import org.eclipse.jetty.util.security.Password;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;

/**
 * Persists a {@link ContinuumReleaseDescriptor} to and from a Java properties file.
 * <p>
 * Extends the standard Maven {@link PropertiesReleaseDescriptorStore} to additionally
 * persist Continuum-specific state (build environments, executable, release-by) and to
 * store the SCM password obfuscated with Jetty's {@code OBF:} scheme rather than in
 * clear text. Boolean descriptor properties are deliberately not persisted because the
 * value supplied by the caller is always used.
 */
public class ContinuumPropertiesReleaseDescriptorStore
    extends PropertiesReleaseDescriptorStore
{
    /** Prefix used for resolved snapshot dependency version properties. */
    private static final String DEPENDENCY_PREFIX = "dependency.";

    /**
     * Reads the release configuration from the given properties file.
     *
     * @param mergeDescriptor descriptor whose values take precedence over the file's, may be null
     * @param file            the properties file to read; a missing file yields an empty descriptor
     * @return the populated descriptor, never null
     * @throws ReleaseDescriptorStoreException if the file exists but cannot be read
     */
    public ReleaseDescriptor read( ReleaseDescriptor mergeDescriptor, File file )
        throws ReleaseDescriptorStoreException
    {
        Properties properties = loadProperties( file );

        ContinuumReleaseDescriptor releaseDescriptor = new ContinuumReleaseDescriptor();
        releaseDescriptor.setCompletedPhase( properties.getProperty( "completedPhase" ) );
        releaseDescriptor.setScmSourceUrl( properties.getProperty( "scm.url" ) );
        releaseDescriptor.setScmUsername( properties.getProperty( "scm.username" ) );

        // write() stores passwords obfuscated ("OBF:" prefix); accept plain values too
        // for compatibility with files written before obfuscation was introduced.
        String password = properties.getProperty( "scm.password" );
        if ( password != null && password.startsWith( "OBF:" ) )
        {
            releaseDescriptor.setScmPassword( Password.deobfuscate( password ) );
        }
        else
        {
            releaseDescriptor.setScmPassword( password );
        }

        releaseDescriptor.setScmPrivateKey( properties.getProperty( "scm.privateKey" ) );
        releaseDescriptor.setScmPrivateKeyPassPhrase( properties.getProperty( "scm.passphrase" ) );
        releaseDescriptor.setScmTagBase( properties.getProperty( "scm.tagBase" ) );
        releaseDescriptor.setScmReleaseLabel( properties.getProperty( "scm.tag" ) );
        releaseDescriptor.setScmCommentPrefix( properties.getProperty( "scm.commentPrefix" ) );
        releaseDescriptor.setAdditionalArguments( properties.getProperty( "exec.additionalArguments" ) );
        releaseDescriptor.setPomFileName( properties.getProperty( "exec.pomFileName" ) );
        releaseDescriptor.setPreparationGoals( properties.getProperty( "preparationGoals" ) );
        releaseDescriptor.setExecutable( properties.getProperty( "build.executable" ) );
        releaseDescriptor.setReleaseBy( properties.getProperty( "release.by" ) );

        loadResolvedDependencies( properties, releaseDescriptor );

        // boolean properties are not written to the properties file because the value
        // from the caller is always used
        for ( Object o : properties.keySet() )
        {
            String property = (String) o;
            if ( property.startsWith( "project.rel." ) )
            {
                releaseDescriptor.mapReleaseVersion( property.substring( "project.rel.".length() ),
                                                     properties.getProperty( property ) );
            }
            else if ( property.startsWith( "project.dev." ) )
            {
                releaseDescriptor.mapDevelopmentVersion( property.substring( "project.dev.".length() ),
                                                         properties.getProperty( property ) );
            }
            else if ( property.startsWith( "project.scm." ) )
            {
                loadOriginalScmInfo( properties, releaseDescriptor, property );
            }
            else if ( property.startsWith( "build.env." ) )
            {
                releaseDescriptor.mapEnvironments( property.substring( "build.env.".length() ),
                                                   properties.getProperty( property ) );
            }
        }

        if ( mergeDescriptor != null )
        {
            releaseDescriptor = (ContinuumReleaseDescriptor) ReleaseUtils.merge( releaseDescriptor, mergeDescriptor );
            // ReleaseUtils.merge does not know about the Continuum-specific environment
            // map, so carry it over from the merge descriptor explicitly.
            releaseDescriptor.setEnvironments( ( (ContinuumReleaseDescriptor) mergeDescriptor ).getEnvironments() );
        }

        return releaseDescriptor;
    }

    /**
     * Writes the given descriptor to the properties file.
     *
     * @param configFile the descriptor to persist; must be a {@link ContinuumReleaseDescriptor}
     * @param file       destination properties file, overwritten if present
     * @throws ReleaseDescriptorStoreException if the file cannot be written
     */
    public void write( ReleaseDescriptor configFile, File file )
        throws ReleaseDescriptorStoreException
    {
        ContinuumReleaseDescriptor config = (ContinuumReleaseDescriptor) configFile;
        Properties properties = new Properties();
        properties.setProperty( "completedPhase", config.getCompletedPhase() );
        properties.setProperty( "scm.url", config.getScmSourceUrl() );
        setOptionalProperty( properties, "scm.username", config.getScmUsername() );
        if ( config.getScmPassword() != null )
        {
            // obfuscate the password so it is not stored in clear text
            properties.setProperty( "scm.password", Password.obfuscate( config.getScmPassword() ) );
        }
        setOptionalProperty( properties, "scm.privateKey", config.getScmPrivateKey() );
        setOptionalProperty( properties, "scm.passphrase", config.getScmPrivateKeyPassPhrase() );
        setOptionalProperty( properties, "scm.tagBase", config.getScmTagBase() );
        setOptionalProperty( properties, "scm.tag", config.getScmReleaseLabel() );
        setOptionalProperty( properties, "scm.commentPrefix", config.getScmCommentPrefix() );
        setOptionalProperty( properties, "exec.additionalArguments", config.getAdditionalArguments() );
        setOptionalProperty( properties, "exec.pomFileName", config.getPomFileName() );
        setOptionalProperty( properties, "preparationGoals", config.getPreparationGoals() );

        // boolean properties are not written to the properties file because the value
        // from the caller is always used
        for ( Object o : config.getReleaseVersions().entrySet() )
        {
            Entry entry = (Entry) o;
            properties.setProperty( "project.rel." + entry.getKey(), (String) entry.getValue() );
        }
        for ( Object o : config.getDevelopmentVersions().entrySet() )
        {
            Entry entry = (Entry) o;
            properties.setProperty( "project.dev." + entry.getKey(), (String) entry.getValue() );
        }
        for ( Object o : config.getOriginalScmInfo().entrySet() )
        {
            Entry entry = (Entry) o;
            writeOriginalScmInfo( properties, "project.scm." + entry.getKey(), (Scm) entry.getValue() );
        }
        for ( Object o : config.getEnvironments().entrySet() )
        {
            Entry entry = (Entry) o;
            properties.setProperty( "build.env." + entry.getKey(), (String) entry.getValue() );
        }

        if ( ( config.getResolvedSnapshotDependencies() != null )
            && ( config.getResolvedSnapshotDependencies().size() > 0 ) )
        {
            processResolvedDependencies( properties, config.getResolvedSnapshotDependencies() );
        }

        // executables
        setOptionalProperty( properties, "build.executable", config.getExecutable() );
        // release by
        setOptionalProperty( properties, "release.by", config.getReleaseBy() );

        OutputStream outStream = null;
        try
        {
            outStream = new FileOutputStream( file );
            properties.store( outStream, "release configuration" );
        }
        catch ( IOException e )
        {
            throw new ReleaseDescriptorStoreException(
                "Error writing properties file '" + file.getName() + "': " + e.getMessage(), e );
        }
        finally
        {
            IOUtil.close( outStream );
        }
    }

    /**
     * Loads the file into a {@link Properties} object; a missing file yields empty
     * properties (logged at debug level) rather than an error.
     */
    private Properties loadProperties( File file )
        throws ReleaseDescriptorStoreException
    {
        Properties properties = new Properties();
        InputStream inStream = null;
        try
        {
            inStream = new FileInputStream( file );
            properties.load( inStream );
        }
        catch ( FileNotFoundException e )
        {
            getLogger().debug( file.getName() + " not found - using empty properties" );
        }
        catch ( IOException e )
        {
            throw new ReleaseDescriptorStoreException(
                "Error reading properties file '" + file.getName() + "': " + e.getMessage(), e );
        }
        finally
        {
            IOUtil.close( inStream );
        }
        return properties;
    }

    /**
     * Restores one project's original SCM section from its {@code project.scm.<key>.*}
     * properties. Only the first property seen for a key triggers the load; later
     * properties for the same key find the entry already mapped and are skipped.
     */
    private void loadOriginalScmInfo( Properties properties, ContinuumReleaseDescriptor releaseDescriptor,
                                      String property )
    {
        int index = property.lastIndexOf( '.' );
        if ( index <= "project.scm.".length() )
        {
            // no trailing ".connection"/".url"/... segment - not a property we understand
            return;
        }
        String key = property.substring( "project.scm.".length(), index );
        if ( releaseDescriptor.getOriginalScmInfo().containsKey( key ) )
        {
            return;
        }
        if ( properties.getProperty( "project.scm." + key + ".empty" ) != null )
        {
            // marker written for projects that had no <scm> section
            releaseDescriptor.mapOriginalScmInfo( key, null );
        }
        else
        {
            Scm scm = new Scm();
            scm.setConnection( properties.getProperty( "project.scm." + key + ".connection" ) );
            scm.setDeveloperConnection( properties.getProperty( "project.scm." + key + ".developerConnection" ) );
            scm.setUrl( properties.getProperty( "project.scm." + key + ".url" ) );
            scm.setTag( properties.getProperty( "project.scm." + key + ".tag" ) );
            releaseDescriptor.mapOriginalScmInfo( key, scm );
        }
    }

    /** Writes one project's original SCM section, or an {@code .empty} marker when it had none. */
    private static void writeOriginalScmInfo( Properties properties, String prefix, Scm scm )
    {
        if ( scm == null )
        {
            properties.setProperty( prefix + ".empty", "true" );
            return;
        }
        setOptionalProperty( properties, prefix + ".connection", scm.getConnection() );
        setOptionalProperty( properties, prefix + ".developerConnection", scm.getDeveloperConnection() );
        setOptionalProperty( properties, prefix + ".url", scm.getUrl() );
        setOptionalProperty( properties, prefix + ".tag", scm.getTag() );
    }

    /** Sets a property only when the value is non-null ({@link Properties} rejects null values). */
    private static void setOptionalProperty( Properties properties, String key, String value )
    {
        if ( value != null )
        {
            properties.setProperty( key, value );
        }
    }

    /**
     * Writes the resolved snapshot dependency versions as
     * {@code dependency.<key>.release} / {@code dependency.<key>.development} properties.
     */
    private void processResolvedDependencies( Properties prop, Map resolvedDependencies )
    {
        for ( Object o : resolvedDependencies.entrySet() )
        {
            Entry currentEntry = (Entry) o;
            Map versionMap = (Map) currentEntry.getValue();
            prop.setProperty( DEPENDENCY_PREFIX + currentEntry.getKey() + ".release",
                              (String) versionMap.get( ReleaseDescriptor.RELEASE_KEY ) );
            prop.setProperty( DEPENDENCY_PREFIX + currentEntry.getKey() + ".development",
                              (String) versionMap.get( ReleaseDescriptor.DEVELOPMENT_KEY ) );
        }
    }

    /**
     * Rebuilds the resolved snapshot dependency map from
     * {@code dependency.<key>.release} / {@code dependency.<key>.development} properties.
     */
    private void loadResolvedDependencies( Properties prop, ReleaseDescriptor descriptor )
    {
        Map<String, Map<String, Object>> resolvedDependencies = new HashMap<String, Map<String, Object>>();

        for ( Object o : prop.entrySet() )
        {
            Entry currentEntry = (Entry) o;
            String propertyName = (String) currentEntry.getKey();
            if ( !propertyName.startsWith( DEPENDENCY_PREFIX ) )
            {
                continue;
            }

            int endIndex;
            String versionType;
            if ( propertyName.indexOf( ".development" ) != -1 )
            {
                endIndex = propertyName.indexOf( ".development" );
                versionType = ReleaseDescriptor.DEVELOPMENT_KEY;
            }
            else
            {
                endIndex = propertyName.indexOf( ".release" );
                versionType = ReleaseDescriptor.RELEASE_KEY;
            }

            // BUG FIX: the key previously retained the "dependency." prefix because the
            // start index pointed at the start of the prefix rather than just past it.
            // Since processResolvedDependencies() prepends "dependency." when writing,
            // a read/write round trip used to produce "dependency.dependency.<key>.*"
            // entries. Using lastIndexOf keeps reads of such doubled keys working.
            int startIndex = propertyName.lastIndexOf( DEPENDENCY_PREFIX ) + DEPENDENCY_PREFIX.length();
            if ( endIndex < startIndex )
            {
                // no ".release"/".development" suffix after the key - not ours, and the
                // old code would have thrown StringIndexOutOfBoundsException here
                continue;
            }
            String artifactVersionlessKey = propertyName.substring( startIndex, endIndex );

            Map<String, Object> versionMap = resolvedDependencies.get( artifactVersionlessKey );
            if ( versionMap == null )
            {
                versionMap = new HashMap<String, Object>();
                resolvedDependencies.put( artifactVersionlessKey, versionMap );
            }
            versionMap.put( versionType, currentEntry.getValue() );
        }

        descriptor.setResolvedSnapshotDependencies( resolvedDependencies );
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.core.rollup.job;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;

import java.io.IOException;
import java.time.ZoneId;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomDateHistogramGroupConfig;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Serialization round-trip and mapping-validation tests for
 * {@link DateHistogramGroupConfig}, covering the legacy single-interval form, the
 * fixed/calendar interval subclasses, and backwards compatibility of the wire format.
 */
public class DateHistogramGroupConfigSerializingTests extends AbstractSerializingTestCase<DateHistogramGroupConfig> {

    // The three wire/parse flavours a date-histogram group config can take.
    private enum DateHistoType { LEGACY, FIXED, CALENDAR }

    // Records which flavour the last createTestInstance() produced so that
    // instanceReader() hands back the matching deserializer.
    // NOTE(review): static mutable state shared across the two framework callbacks —
    // assumes createTestInstance() always runs before instanceReader(); confirm
    // before refactoring this away.
    private static DateHistoType type;

    @Override
    protected DateHistogramGroupConfig doParseInstance(final XContentParser parser) throws IOException {
        return DateHistogramGroupConfig.fromXContent(parser);
    }

    @Override
    protected Writeable.Reader<DateHistogramGroupConfig> instanceReader() {
        // Choose the reader matching the flavour produced by createTestInstance().
        if (type.equals(DateHistoType.FIXED)) {
            return DateHistogramGroupConfig.FixedInterval::new;
        } else if (type.equals(DateHistoType.CALENDAR)) {
            return DateHistogramGroupConfig.CalendarInterval::new;
        }
        return DateHistogramGroupConfig::new;
    }

    @Override
    protected DateHistogramGroupConfig createTestInstance() {
        DateHistogramGroupConfig config = randomDateHistogramGroupConfig(random());
        // Remember the concrete flavour for instanceReader().
        if (config.getClass().equals(DateHistogramGroupConfig.FixedInterval.class)) {
            type = DateHistoType.FIXED;
        } else if (config.getClass().equals(DateHistogramGroupConfig.CalendarInterval.class)) {
            type = DateHistoType.CALENDAR;
        } else {
            type = DateHistoType.LEGACY;
        }
        return config;
    }

    // Validation must fail when the field capabilities response has no entry at all.
    public void testValidateNoMapping() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

        DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
            new DateHistogramInterval("1d"), null, null);
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("Could not find one of [date,date_nanos] fields with name [my_field] in " +
            "any of the indices matching the index pattern."));
    }

    // Validation must fail when only unrelated fields are present in the response.
    public void testValidateNomatchingField() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

        // Have to mock fieldcaps because the ctor's aren't public...
        FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
        responseMap.put("some_other_field", Collections.singletonMap("date", fieldCaps));

        DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
            new DateHistogramInterval("1d"), null, null);
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("Could not find one of [date,date_nanos] fields with name [my_field] in " +
            "any of the indices matching the index pattern."));
    }

    // Validation must fail when the field exists but is not a date type.
    public void testValidateFieldWrongType() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

        // Have to mock fieldcaps because the ctor's aren't public...
        FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
        responseMap.put("my_field", Collections.singletonMap("keyword", fieldCaps));

        DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
            new DateHistogramInterval("1d"), null, null);
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("The field referenced by a date_histo group must be one of type " +
            "[date,date_nanos] across all indices in the index pattern. Found: [keyword] for field [my_field]"));
    }

    // Validation must fail when the field maps to date in some indices but not others.
    public void testValidateFieldMixtureTypes() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

        // Have to mock fieldcaps because the ctor's aren't public...
        FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
        Map<String, FieldCapabilities> types = new HashMap<>(2);
        types.put("date", fieldCaps);
        types.put("keyword", fieldCaps);
        responseMap.put("my_field", types);

        DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
            new DateHistogramInterval("1d"), null, null);
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("The field referenced by a date_histo group must be one of type " +
            "[date,date_nanos] across all indices in the index pattern. Found: [date, keyword] for field [my_field]"));
    }

    // Validation must fail when the field is a date but not aggregatable.
    public void testValidateFieldMatchingNotAggregatable() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

        // Have to mock fieldcaps because the ctor's aren't public...
        FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
        when(fieldCaps.isAggregatable()).thenReturn(false);
        responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));

        DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
            new DateHistogramInterval("1d"), null, null);
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not."));
    }

    // Happy path: an aggregatable date field validates without errors.
    public void testValidateMatchingField() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

        // Have to mock fieldcaps because the ctor's aren't public...
        FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
        when(fieldCaps.isAggregatable()).thenReturn(true);
        responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));

        DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
            new DateHistogramInterval("1d"), null, null);
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().size(), equalTo(0));
    }

    // A calendar interval of one week ("1w") is accepted by validation.
    public void testValidateWeek() {
        ActionRequestValidationException e = new ActionRequestValidationException();
        Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

        // Have to mock fieldcaps because the ctor's aren't public...
        FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
        when(fieldCaps.isAggregatable()).thenReturn(true);
        responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));

        DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
            new DateHistogramInterval("1w"), null, null);
        config.validateMappings(responseMap, e);
        assertThat(e.validationErrors().size(), equalTo(0));
    }

    /**
     * Tests that a DateHistogramGroupConfig can be serialized/deserialized correctly after
     * the timezone was changed from DateTimeZone to String.
     */
    public void testBwcSerialization() throws IOException {
        // New writer -> old reader: fields must come out in the historical wire order.
        for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
            final DateHistogramGroupConfig reference = ConfigTestHelpers.randomLegacyDateHistogramGroupConfig(random());

            final BytesStreamOutput out = new BytesStreamOutput();
            reference.writeTo(out);

            // previous way to deserialize a DateHistogramGroupConfig
            final StreamInput in = out.bytes().streamInput();
            DateHistogramInterval interval = new DateHistogramInterval(in);
            String field = in.readString();
            DateHistogramInterval delay = in.readOptionalWriteable(DateHistogramInterval::new);
            ZoneId timeZone = in.readZoneId();

            assertEqualInstances(reference, new DateHistogramGroupConfig(field, interval, delay, timeZone.getId()));
        }

        // Old writer -> new reader: hand-written old-format bytes must parse correctly.
        for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
            final String field = ConfigTestHelpers.randomField(random());
            final DateHistogramInterval interval = ConfigTestHelpers.randomInterval();
            final DateHistogramInterval delay = randomBoolean() ? ConfigTestHelpers.randomInterval() : null;
            final ZoneId timezone = randomZone();

            // previous way to serialize a DateHistogramGroupConfig
            final BytesStreamOutput out = new BytesStreamOutput();
            interval.writeTo(out);
            out.writeString(field);
            out.writeOptionalWriteable(delay);
            out.writeZoneId(timezone);

            final StreamInput in = out.bytes().streamInput();
            DateHistogramGroupConfig deserialized = new DateHistogramGroupConfig(in);

            assertEqualInstances(new DateHistogramGroupConfig(field, interval, delay, timezone.getId()), deserialized);
        }
    }

    /**
     * Tests that old DateHistogramGroupConfigs can be serialized/deserialized
     * into the specialized Fixed/Calendar versions
     */
    public void testLegacyConfigBWC() throws IOException {
        for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
            // Serialize the old format
            final DateHistogramGroupConfig reference = ConfigTestHelpers.randomLegacyDateHistogramGroupConfig(random());

            final BytesStreamOutput out = new BytesStreamOutput();
            reference.writeTo(out);
            final StreamInput in = out.bytes().streamInput();

            // Deserialize the new format
            DateHistogramGroupConfig test = DateHistogramGroupConfig.fromUnknownTimeUnit(in);

            assertThat(reference.getInterval(), equalTo(test.getInterval()));
            assertThat(reference.getField(), equalTo(test.getField()));
            assertThat(reference.getTimeZone(), equalTo(test.getTimeZone()));
            assertThat(reference.getDelay(), equalTo(test.getDelay()));
        }

        for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
            // Serialize the new format
            final DateHistogramGroupConfig reference = ConfigTestHelpers.randomDateHistogramGroupConfig(random());

            final BytesStreamOutput out = new BytesStreamOutput();
            reference.writeTo(out);
            final StreamInput in = out.bytes().streamInput();

            // Deserialize the old format
            DateHistogramGroupConfig test = new DateHistogramGroupConfig(in);

            assertThat(reference.getInterval(), equalTo(test.getInterval()));
            assertThat(reference.getField(), equalTo(test.getField()));
            assertThat(reference.getTimeZone(), equalTo(test.getTimeZone()));
            assertThat(reference.getDelay(), equalTo(test.getDelay()));
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.server.rest; import java.lang.management.ManagementFactory; import java.lang.management.MemoryMXBean; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.tools.ValidationException; import org.apache.drill.common.config.DrillConfig; import org.apache.drill.common.exceptions.UserException; import org.apache.drill.common.exceptions.UserRemoteException; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.proto.UserBitShared; import org.apache.drill.exec.proto.UserBitShared.QueryId; import org.apache.drill.exec.proto.UserBitShared.QueryType; import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState; import org.apache.drill.exec.proto.UserProtos.QueryResultsMode; import org.apache.drill.exec.proto.UserProtos.RunQuery; import org.apache.drill.exec.proto.helper.QueryIdHelper; import org.apache.drill.exec.rpc.user.InboundImpersonationManager; import org.apache.drill.exec.server.options.SessionOptionManager; import org.apache.drill.exec.store.SchemaTreeProvider; import org.apache.drill.exec.util.ImpersonationUtil; 
import org.apache.drill.exec.work.WorkManager; import org.apache.parquet.Strings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RestQueryRunner { private static final Logger logger = LoggerFactory.getLogger(QueryWrapper.class); private static final MemoryMXBean memMXBean = ManagementFactory.getMemoryMXBean(); private final QueryWrapper query; private final WorkManager workManager; private final WebUserConnection webUserConnection; private final SessionOptionManager options; public RestQueryRunner(final QueryWrapper query, final WorkManager workManager, final WebUserConnection webUserConnection) { this.query = query; this.workManager = workManager; this.webUserConnection = webUserConnection; this.options = webUserConnection.getSession().getOptions(); } public RestQueryRunner.QueryResult run() throws Exception { applyUserName(); applyOptions(); applyDefaultSchema(); int maxRows = applyRowLimit(); return submitQuery(maxRows); } private void applyUserName() { String userName = query.getUserName(); if (!Strings.isNullOrEmpty(userName)) { DrillConfig config = workManager.getContext().getConfig(); if (!config.getBoolean(ExecConstants.IMPERSONATION_ENABLED)) { throw UserException.permissionError() .message("User impersonation is not enabled") .build(logger); } InboundImpersonationManager inboundImpersonationManager = new InboundImpersonationManager(); boolean isAdmin = !config.getBoolean(ExecConstants.USER_AUTHENTICATION_ENABLED) || ImpersonationUtil.hasAdminPrivileges( webUserConnection.getSession().getCredentials().getUserName(), ExecConstants.ADMIN_USERS_VALIDATOR.getAdminUsers(options), ExecConstants.ADMIN_USER_GROUPS_VALIDATOR.getAdminUserGroups(options)); if (isAdmin) { // Admin user can impersonate any user they want to (when authentication is disabled, all users are admin) webUserConnection.getSession().replaceUserCredentials( inboundImpersonationManager, UserBitShared.UserCredentials.newBuilder().setUserName(userName).build()); } else { // 
Check configured impersonation rules to see if this user is allowed to impersonate the given user inboundImpersonationManager.replaceUserOnSession(userName, webUserConnection.getSession()); } } } private void applyOptions() { Map<String, String> options = query.getOptions(); if (options != null) { SessionOptionManager sessionOptionManager = webUserConnection.getSession().getOptions(); for (Map.Entry<String, String> entry : options.entrySet()) { sessionOptionManager.setLocalOption(entry.getKey(), entry.getValue()); } } } private void applyDefaultSchema() throws ValidationException { String defaultSchema = query.getDefaultSchema(); if (!Strings.isNullOrEmpty(defaultSchema)) { SessionOptionManager options = webUserConnection.getSession().getOptions(); @SuppressWarnings("resource") SchemaTreeProvider schemaTreeProvider = new SchemaTreeProvider(workManager.getContext()); SchemaPlus rootSchema = schemaTreeProvider.createRootSchema(options); webUserConnection.getSession().setDefaultSchemaPath(defaultSchema, rootSchema); } } private int applyRowLimit() { int defaultMaxRows = webUserConnection.getSession().getOptions().getInt(ExecConstants.QUERY_MAX_ROWS); int maxRows; int limit = query.getAutoLimitRowCount(); if (limit > 0 && defaultMaxRows > 0) { maxRows = Math.min(limit, defaultMaxRows); } else { maxRows = Math.max(limit, defaultMaxRows); } webUserConnection.setAutoLimitRowCount(maxRows); return maxRows; } public RestQueryRunner.QueryResult submitQuery(int maxRows) { final RunQuery runQuery = RunQuery.newBuilder() .setType(QueryType.valueOf(query.getQueryType())) .setPlan(query.getQuery()) .setResultsMode(QueryResultsMode.STREAM_FULL) .setAutolimitRowcount(maxRows) .build(); // Heap usage threshold/trigger to provide resiliency on web server for queries submitted via HTTP double memoryFailureThreshold = workManager.getContext().getConfig().getDouble(ExecConstants.HTTP_MEMORY_HEAP_FAILURE_THRESHOLD); // Submit user query to Drillbit work queue. 
final QueryId queryId = workManager.getUserWorker().submitWork(webUserConnection, runQuery); boolean isComplete = false; boolean nearlyOutOfHeapSpace = false; float usagePercent = getHeapUsage(); // Wait until the query execution is complete or there is error submitting the query logger.debug("Wait until the query execution is complete or there is error submitting the query"); do { try { isComplete = webUserConnection.await(TimeUnit.SECONDS.toMillis(1)); //periodically timeout 1 sec to check heap } catch (InterruptedException e) {} usagePercent = getHeapUsage(); if (memoryFailureThreshold > 0 && usagePercent > memoryFailureThreshold) { nearlyOutOfHeapSpace = true; } } while (!isComplete && !nearlyOutOfHeapSpace); //Fail if nearly out of heap space if (nearlyOutOfHeapSpace) { UserException almostOutOfHeapException = UserException.resourceError() .message("There is not enough heap memory to run this query using the web interface. ") .addContext("Please try a query with fewer columns or with a filter or limit condition to limit the data returned. ") .addContext("You can also try an ODBC/JDBC client. ") .build(logger); //Add event workManager.getBee().getForemanForQueryId(queryId) .addToEventQueue(QueryState.FAILED, almostOutOfHeapException); //Return NearlyOutOfHeap exception throw almostOutOfHeapException; } logger.trace("Query {} is completed ", queryId); if (webUserConnection.getError() != null) { throw new UserRemoteException(webUserConnection.getError()); } // Return the QueryResult. 
return new QueryResult(queryId, webUserConnection, webUserConnection.results); } //Detect possible excess heap private float getHeapUsage() { return (float) memMXBean.getHeapMemoryUsage().getUsed() / memMXBean.getHeapMemoryUsage().getMax(); } public static class QueryResult { private final String queryId; public final Collection<String> columns; public final List<Map<String, String>> rows; public final List<String> metadata; public final String queryState; public final int attemptedAutoLimit; //DRILL-6847: Modified the constructor so that the method has access to all the properties in webUserConnection public QueryResult(QueryId queryId, WebUserConnection webUserConnection, List<Map<String, String>> rows) { this.queryId = QueryIdHelper.getQueryId(queryId); this.columns = webUserConnection.columns; this.metadata = webUserConnection.metadata; this.queryState = webUserConnection.getQueryState(); this.rows = rows; this.attemptedAutoLimit = webUserConnection.getAutoLimitRowCount(); } public String getQueryId() { return queryId; } } }
package com.kaissersoft.simpler; import android.util.Log; /** * @author eefret (eefretsoul@gmail.com) * Created by Christopher T. Herrera (eefret) on 4/22/2014 [12:41 AM] * Wrapper class for android Logging utility will select a tag automatically from class, method and line number executed. */ public class Simpler { //TODO Create a detail Enum to define the log detail level. //TODO Create a method that halt every log possible based on the development mode Ex: (PRODUCTION, DEVELOPMENT, DEBUG) that can manage what can be and can't be logged //TODO Create a Time based extra in the tags (include the execution time at log) //====================================================================================== //ENUMS //====================================================================================== /** * <strong>LoggerDepth Enum</strong> <br/> * <ul> <li>ACTUAL_METHOD(4) </li> <li>LOGGER_METHOD(3) </li> <li>STACK_TRACE_METHOD(1)</li> <li>JVM_METHOD(0)</li> <ul/> */ public enum LOGGER_DEPTH{ ACTUAL_METHOD(4), LOGGER_METHOD(3), STACK_TRACE_METHOD(1), JVM_METHOD(0); private final int value; private LOGGER_DEPTH(final int newValue){ value = newValue; } public int getValue(){ return value; } } //====================================================================================== //CONSTANTS //====================================================================================== private static final String personalTAG = "Simpler"; //====================================================================================== //FIELDS //====================================================================================== private StringBuilder sb; //====================================================================================== //CONSTRUCTORS //====================================================================================== /** * private Constructor * The Perfect Singleton Pattern as Joshua Bosch Explained at his Effective Java Reloaded talk at Google I/O 2008 
*/ private Simpler(){ if(LoggerLoader.instance != null){ Log.e(personalTAG,"Error: Simpler already instantiated"); throw new IllegalStateException("Already Instantiated"); }else{ this.sb = new StringBuilder(255); } } //====================================================================================== //METHODS //====================================================================================== /** * getLogger Method * The Perfect Singleton Pattern as Joshua Bosch Explained at his Effective Java Reloaded talk at Google I/O 2008 * @return Simpler (This instance) */ public static Simpler getLogger(){ return LoggerLoader.instance; } /** * Method that creates the tag automatically * @param depth (Defines the depth of the Logging) * @return */ private String getTag(LOGGER_DEPTH depth){ try{ String className = Thread.currentThread().getStackTrace()[depth.getValue()].getClassName(); sb.append(className.substring(className.lastIndexOf(".")+1)); sb.append("["); sb.append(Thread.currentThread().getStackTrace()[depth.getValue()].getMethodName()); sb.append("] - "); sb.append(Thread.currentThread().getStackTrace()[depth.getValue()].getLineNumber()); return sb.toString(); }catch (Exception ex){ ex.printStackTrace(); Log.d(personalTAG, ex.getMessage()); }finally{ sb.setLength(0); } return null; } /** * Simple d Method will log in default depth ACTUAL_METHOD * @param msg */ public void d(String msg){ try { Log.d(getTag(LOGGER_DEPTH.ACTUAL_METHOD), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * d Method that takes LOGGER_DEPTH as the second parameter, will log custom depth level * @see Simpler.LOGGER_DEPTH * @param msg * @param depth */ public void d(String msg, LOGGER_DEPTH depth){ try { Log.d(getTag(depth), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * d Method with Throwable that 
takes LOGGER_DEPTH as the third parameter, will log custom depth level * @param msg * @param t * @param depth */ public void d(String msg, Throwable t, LOGGER_DEPTH depth){ try{ Log.d(getTag(depth),msg,t); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * Simple e Method will log in default depth ACTUAL_METHOD * @param msg */ public void e(String msg){ try{ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD),msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * e Method that takes LOGGER_DEPTH as the second parameter, will log custom depth level * @see Simpler.LOGGER_DEPTH * @param msg * @param depth */ public void e(String msg, LOGGER_DEPTH depth){ try{ Log.e(getTag(depth),msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * e Method with Throwable that takes LOGGER_DEPTH as the third parameter, will log custom depth level * @param msg * @param t * @param depth */ public void e(String msg, Throwable t, LOGGER_DEPTH depth){ try{ Log.e(getTag(depth),msg,t); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * Simple w Method will log in default depth ACTUAL_METHOD * @param msg */ public void w(String msg){ try { Log.w(getTag(LOGGER_DEPTH.ACTUAL_METHOD), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * w Method that takes LOGGER_DEPTH as the second parameter, will log custom depth level * @see Simpler.LOGGER_DEPTH * @param msg * @param depth */ public void w(String msg, LOGGER_DEPTH depth){ try{ Log.w(getTag(depth), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: 
"+exception.getMessage()); } } /** * w Method with Throwable that takes LOGGER_DEPTH as the third parameter, will log custom depth level * @param msg * @param t * @param depth */ public void w(String msg, Throwable t, LOGGER_DEPTH depth){ try{ Log.w(getTag(depth), msg, t); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * Simple v Method will log in default depth ACTUAL_METHOD * @param msg */ public void v(String msg){ try{ Log.v(getTag(LOGGER_DEPTH.ACTUAL_METHOD), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * v Method that takes LOGGER_DEPTH as the second parameter, will log custom depth level * @see Simpler.LOGGER_DEPTH * @param msg * @param depth */ public void v(String msg, LOGGER_DEPTH depth){ try{ Log.v(getTag(depth), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * v Method with Throwable that takes LOGGER_DEPTH as the third parameter, will log custom depth level * @param msg * @param t * @param depth */ public void v(String msg, Throwable t, LOGGER_DEPTH depth){ try{ Log.v(getTag(depth), msg, t); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * Simple i Method will log in default depth ACTUAL_METHOD * @param msg */ public void i(String msg){ try{ Log.i(getTag(LOGGER_DEPTH.ACTUAL_METHOD), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * i Method that takes LOGGER_DEPTH as the second parameter, will log custom depth level * @see Simpler.LOGGER_DEPTH * @param msg * @param depth */ public void i(String msg, LOGGER_DEPTH depth){ try{ Log.i(getTag(depth), msg); }catch (Exception exception){ 
Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * i Method with Throwable that takes LOGGER_DEPTH as the third parameter, will log custom depth level * @param msg * @param t * @param depth */ public void i(String msg, Throwable t, LOGGER_DEPTH depth){ try { Log.i(getTag(depth), msg, t); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * Simple wtf Method will log in default depth ACTUAL_METHOD * @param msg */ public void wtf(String msg){ try{ Log.wtf(getTag(LOGGER_DEPTH.ACTUAL_METHOD), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * wtf Method that takes LOGGER_DEPTH as the second parameter, will log custom depth level * @see Simpler.LOGGER_DEPTH * @param msg * @param depth */ public void wtf(String msg, LOGGER_DEPTH depth){ try{ Log.wtf(getTag(depth), msg); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } /** * wtf Method with Throwable that takes LOGGER_DEPTH as the third parameter, will log custom depth level * @param msg * @param t * @param depth */ public void wtf(String msg, Throwable t, LOGGER_DEPTH depth){ try{ Log.wtf(getTag(depth), msg, t); }catch (Exception exception){ Log.e(getTag(LOGGER_DEPTH.ACTUAL_METHOD), "Simpler failed, exception: "+exception.getMessage()); } } //====================================================================================== //INNER CLASSES //====================================================================================== /** * Simpler Loader Class * The Perfect Singleton Pattern as Joshua Bosch Explained at his Effective Java Reloaded talk at Google I/O 2008 */ private static class LoggerLoader { private static final Simpler instance = new Simpler(); } }
/** * <copyright> * </copyright> * * $Id$ */ package net.opengis.citygml.cityobjectgroup.presentation; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.EventObject; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.IResourceChangeEvent; import org.eclipse.core.resources.IResourceChangeListener; import org.eclipse.core.resources.IResourceDelta; import org.eclipse.core.resources.IResourceDeltaVisitor; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IPath; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.jface.action.IMenuListener; import org.eclipse.jface.action.IMenuManager; import org.eclipse.jface.action.IStatusLineManager; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.action.MenuManager; import org.eclipse.jface.action.Separator; import org.eclipse.jface.dialogs.MessageDialog; import org.eclipse.jface.dialogs.ProgressMonitorDialog; import org.eclipse.jface.viewers.ColumnWeightData; import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.ISelectionProvider; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.ListViewer; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.StructuredSelection; import org.eclipse.jface.viewers.StructuredViewer; import org.eclipse.jface.viewers.TableLayout; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.jface.viewers.TreeViewer; import 
org.eclipse.jface.viewers.Viewer; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.CTabFolder; import org.eclipse.swt.dnd.DND; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.ControlAdapter; import org.eclipse.swt.events.ControlEvent; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Menu; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.TableColumn; import org.eclipse.swt.widgets.Tree; import org.eclipse.swt.widgets.TreeColumn; import org.eclipse.ui.IActionBars; import org.eclipse.ui.IEditorInput; import org.eclipse.ui.IEditorPart; import org.eclipse.ui.IEditorSite; import org.eclipse.ui.IPartListener; import org.eclipse.ui.IWorkbenchPart; import org.eclipse.ui.PartInitException; import org.eclipse.ui.dialogs.SaveAsDialog; import org.eclipse.ui.ide.IGotoMarker; import org.eclipse.ui.part.FileEditorInput; import org.eclipse.ui.part.MultiPageEditorPart; import org.eclipse.ui.views.contentoutline.ContentOutline; import org.eclipse.ui.views.contentoutline.ContentOutlinePage; import org.eclipse.ui.views.contentoutline.IContentOutlinePage; import org.eclipse.ui.views.properties.IPropertySheetPage; import org.eclipse.ui.views.properties.PropertySheet; import org.eclipse.ui.views.properties.PropertySheetPage; import org.eclipse.emf.common.command.BasicCommandStack; import org.eclipse.emf.common.command.Command; import org.eclipse.emf.common.command.CommandStack; import org.eclipse.emf.common.command.CommandStackListener; import org.eclipse.emf.common.notify.AdapterFactory; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.ui.MarkerHelper; import org.eclipse.emf.common.ui.ViewerPane; import org.eclipse.emf.common.ui.editor.ProblemEditorPart; import org.eclipse.emf.common.ui.viewer.IViewerProvider; import org.eclipse.emf.common.util.BasicDiagnostic; import 
org.eclipse.emf.common.util.Diagnostic; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.EValidator; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.emf.ecore.util.EContentAdapter; import org.eclipse.emf.ecore.util.EcoreUtil; import org.eclipse.emf.edit.domain.AdapterFactoryEditingDomain; import org.eclipse.emf.edit.domain.EditingDomain; import org.eclipse.emf.edit.domain.IEditingDomainProvider; import org.eclipse.emf.edit.provider.AdapterFactoryItemDelegator; import org.eclipse.emf.edit.provider.ComposedAdapterFactory; import org.eclipse.emf.edit.provider.ReflectiveItemProviderAdapterFactory; import org.eclipse.emf.edit.provider.resource.ResourceItemProviderAdapterFactory; import org.eclipse.emf.edit.ui.action.EditingDomainActionBarContributor; import org.eclipse.emf.edit.ui.celleditor.AdapterFactoryTreeEditor; import org.eclipse.emf.edit.ui.dnd.EditingDomainViewerDropAdapter; import org.eclipse.emf.edit.ui.dnd.LocalTransfer; import org.eclipse.emf.edit.ui.dnd.ViewerDragAdapter; import org.eclipse.emf.edit.ui.provider.AdapterFactoryContentProvider; import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider; import org.eclipse.emf.edit.ui.provider.UnwrappingSelectionProvider; import org.eclipse.emf.edit.ui.util.EditUIMarkerHelper; import org.eclipse.emf.edit.ui.util.EditUIUtil; import org.eclipse.emf.edit.ui.view.ExtendedPropertySheetPage; import net.opengis.citygml.cityobjectgroup.provider.CityobjectgroupItemProviderAdapterFactory; import net.opengis.citygml.appearance.provider.AppearanceItemProviderAdapterFactory; import net.opengis.citygml.building.presentation.CityGMLEditorPlugin; import net.opengis.citygml.building.provider.BuildingItemProviderAdapterFactory; import net.opengis.citygml.cityfurniture.provider.CityfurnitureItemProviderAdapterFactory; import 
net.opengis.citygml.generics.provider.GenericsItemProviderAdapterFactory; import net.opengis.citygml.landuse.provider.LanduseItemProviderAdapterFactory; import net.opengis.citygml.provider.CitygmlItemProviderAdapterFactory; import net.opengis.citygml.relief.provider.ReliefItemProviderAdapterFactory; import net.opengis.citygml.texturedsurface.provider.TexturedsurfaceItemProviderAdapterFactory; import net.opengis.citygml.transportation.provider.TransportationItemProviderAdapterFactory; import net.opengis.citygml.vegetation.provider.VegetationItemProviderAdapterFactory; import net.opengis.citygml.waterbody.provider.WaterbodyItemProviderAdapterFactory; import net.opengis.gml.provider.GmlItemProviderAdapterFactory; import org.eclipse.ui.actions.WorkspaceModifyOperation; import org.oasis.xAL.provider.XALItemProviderAdapterFactory; import org.w3._1999.xlink.provider.XlinkItemProviderAdapterFactory; import org.w3._2001.smil20.language.provider.LanguageItemProviderAdapterFactory; import org.w3._2001.smil20.provider.Smil20ItemProviderAdapterFactory; import org.w3.xml._1998.namespace.provider.NamespaceItemProviderAdapterFactory; /** * This is an example of a Cityobjectgroup model editor. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public class CityobjectgroupEditor extends MultiPageEditorPart implements IEditingDomainProvider, ISelectionProvider, IMenuListener, IViewerProvider, IGotoMarker { /** * This keeps track of the editing domain that is used to track all changes to the model. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected AdapterFactoryEditingDomain editingDomain; /** * This is the one adapter factory used for providing views of the model. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ComposedAdapterFactory adapterFactory; /** * This is the content outline page. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected IContentOutlinePage contentOutlinePage; /** * This is a kludge... * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected IStatusLineManager contentOutlineStatusLineManager; /** * This is the content outline page's viewer. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TreeViewer contentOutlineViewer; /** * This is the property sheet page. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected PropertySheetPage propertySheetPage; /** * This is the viewer that shadows the selection in the content outline. * The parent relation must be correctly defined for this to work. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TreeViewer selectionViewer; /** * This inverts the roll of parent and child in the content provider and show parents as a tree. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TreeViewer parentViewer; /** * This shows how a tree view works. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TreeViewer treeViewer; /** * This shows how a list view works. * A list viewer doesn't support icons. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ListViewer listViewer; /** * This shows how a table view works. * A table can be used as a list with icons. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TableViewer tableViewer; /** * This shows how a tree view with columns works. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TreeViewer treeViewerWithColumns; /** * This keeps track of the active viewer pane, in the book. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ViewerPane currentViewerPane; /** * This keeps track of the active content viewer, which may be either one of the viewers in the pages or the content outline viewer. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Viewer currentViewer; /** * This listens to which ever viewer is active. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ISelectionChangedListener selectionChangedListener; /** * This keeps track of all the {@link org.eclipse.jface.viewers.ISelectionChangedListener}s that are listening to this editor. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Collection<ISelectionChangedListener> selectionChangedListeners = new ArrayList<ISelectionChangedListener>(); /** * This keeps track of the selection of the editor as a whole. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ISelection editorSelection = StructuredSelection.EMPTY; /** * The MarkerHelper is responsible for creating workspace resource markers presented * in Eclipse's Problems View. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected MarkerHelper markerHelper = new EditUIMarkerHelper(); /** * This listens for when the outline becomes active * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected IPartListener partListener = new IPartListener() { public void partActivated(IWorkbenchPart p) { if (p instanceof ContentOutline) { if (((ContentOutline)p).getCurrentPage() == contentOutlinePage) { getActionBarContributor().setActiveEditor(CityobjectgroupEditor.this); setCurrentViewer(contentOutlineViewer); } } else if (p instanceof PropertySheet) { if (((PropertySheet)p).getCurrentPage() == propertySheetPage) { getActionBarContributor().setActiveEditor(CityobjectgroupEditor.this); handleActivate(); } } else if (p == CityobjectgroupEditor.this) { handleActivate(); } } public void partBroughtToTop(IWorkbenchPart p) { // Ignore. } public void partClosed(IWorkbenchPart p) { // Ignore. } public void partDeactivated(IWorkbenchPart p) { // Ignore. } public void partOpened(IWorkbenchPart p) { // Ignore. 
} }; /** * Resources that have been removed since last activation. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Collection<Resource> removedResources = new ArrayList<Resource>(); /** * Resources that have been changed since last activation. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Collection<Resource> changedResources = new ArrayList<Resource>(); /** * Resources that have been saved. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Collection<Resource> savedResources = new ArrayList<Resource>(); /** * Map to store the diagnostic associated with a resource. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected Map<Resource, Diagnostic> resourceToDiagnosticMap = new LinkedHashMap<Resource, Diagnostic>(); /** * Controls whether the problem indication should be updated. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected boolean updateProblemIndication = true; /** * Adapter used to update the problem indication when resources are demanded loaded. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected EContentAdapter problemIndicationAdapter = new EContentAdapter() { @Override public void notifyChanged(Notification notification) { if (notification.getNotifier() instanceof Resource) { switch (notification.getFeatureID(Resource.class)) { case Resource.RESOURCE__IS_LOADED: case Resource.RESOURCE__ERRORS: case Resource.RESOURCE__WARNINGS: { Resource resource = (Resource)notification.getNotifier(); Diagnostic diagnostic = analyzeResourceProblems(resource, null); if (diagnostic.getSeverity() != Diagnostic.OK) { resourceToDiagnosticMap.put(resource, diagnostic); } else { resourceToDiagnosticMap.remove(resource); } if (updateProblemIndication) { getSite().getShell().getDisplay().asyncExec (new Runnable() { public void run() { updateProblemIndication(); } }); } break; } } } else { super.notifyChanged(notification); } } @Override protected void setTarget(Resource target) { basicSetTarget(target); } @Override protected void unsetTarget(Resource target) { basicUnsetTarget(target); } }; /** * This listens for workspace changes. 
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected IResourceChangeListener resourceChangeListener =
		new IResourceChangeListener() {
			public void resourceChanged(IResourceChangeEvent event) {
				IResourceDelta delta = event.getDelta();
				try {
					// Collects the loaded resources that correspond to files removed or
					// changed in the workspace, skipping marker-only deltas and our own saves.
					class ResourceDeltaVisitor implements IResourceDeltaVisitor {
						protected ResourceSet resourceSet = editingDomain.getResourceSet();
						protected Collection<Resource> changedResources = new ArrayList<Resource>();
						protected Collection<Resource> removedResources = new ArrayList<Resource>();

						public boolean visit(IResourceDelta delta) {
							if (delta.getResource().getType() == IResource.FILE) {
								if (delta.getKind() == IResourceDelta.REMOVED ||
										delta.getKind() == IResourceDelta.CHANGED && delta.getFlags() != IResourceDelta.MARKERS) {
									// Look the file up in the resource set without demand-loading it.
									Resource resource = resourceSet.getResource(URI.createPlatformResourceURI(delta.getFullPath().toString(), true), false);
									if (resource != null) {
										if (delta.getKind() == IResourceDelta.REMOVED) {
											removedResources.add(resource);
										}
										// If the resource is in savedResources the change was our own
										// save; consume the entry instead of flagging a change.
										else if (!savedResources.remove(resource)) {
											changedResources.add(resource);
										}
									}
								}
							}

							return true;
						}

						public Collection<Resource> getChangedResources() {
							return changedResources;
						}

						public Collection<Resource> getRemovedResources() {
							return removedResources;
						}
					}

					final ResourceDeltaVisitor visitor = new ResourceDeltaVisitor();
					delta.accept(visitor);

					if (!visitor.getRemovedResources().isEmpty()) {
						// UI work must run on the display thread.
						getSite().getShell().getDisplay().asyncExec
							(new Runnable() {
								 public void run() {
									 removedResources.addAll(visitor.getRemovedResources());
									 if (!isDirty()) {
										 getSite().getPage().closeEditor(CityobjectgroupEditor.this, false);
									 }
								 }
							 });
					}

					if (!visitor.getChangedResources().isEmpty()) {
						getSite().getShell().getDisplay().asyncExec
							(new Runnable() {
								 public void run() {
									 changedResources.addAll(visitor.getChangedResources());
									 if (getSite().getPage().getActiveEditor() == CityobjectgroupEditor.this) {
										 handleActivate();
									 }
								 }
							 });
					}
				}
				catch (CoreException exception) {
					CityGMLEditorPlugin.INSTANCE.log(exception);
				}
			}
		};

	/**
	 * Handles activation of the editor or its associated views.
	 * Closes the editor (after a confirmation dialog) when its resources were
	 * removed, or reloads resources that changed externally.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void handleActivate() {
		// Recompute the read only state.
		//
		if (editingDomain.getResourceToReadOnlyMap() != null) {
			editingDomain.getResourceToReadOnlyMap().clear();

			// Refresh any actions that may become enabled or disabled.
			//
			setSelection(getSelection());
		}

		if (!removedResources.isEmpty()) {
			if (handleDirtyConflict()) {
				getSite().getPage().closeEditor(CityobjectgroupEditor.this, false);
			}
			else {
				removedResources.clear();
				changedResources.clear();
				savedResources.clear();
			}
		}
		else if (!changedResources.isEmpty()) {
			changedResources.removeAll(savedResources);
			handleChangedResources();
			changedResources.clear();
			savedResources.clear();
		}
	}

	/**
	 * Handles what to do with changed resources on activation:
	 * flushes the command stack and reloads each changed resource,
	 * recording load failures in resourceToDiagnosticMap.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void handleChangedResources() {
		if (!changedResources.isEmpty() && (!isDirty() || handleDirtyConflict())) {
			if (isDirty()) {
				changedResources.addAll(editingDomain.getResourceSet().getResources());
			}
			editingDomain.getCommandStack().flush();

			// Suppress problem-indication updates while resources are unloaded/reloaded.
			updateProblemIndication = false;
			for (Resource resource : changedResources) {
				if (resource.isLoaded()) {
					resource.unload();
					try {
						resource.load(Collections.EMPTY_MAP);
					}
					catch (IOException exception) {
						if (!resourceToDiagnosticMap.containsKey(resource)) {
							resourceToDiagnosticMap.put(resource, analyzeResourceProblems(resource, exception));
						}
					}
				}
			}

			if (AdapterFactoryEditingDomain.isStale(editorSelection)) {
				setSelection(StructuredSelection.EMPTY);
			}

			updateProblemIndication = true;
			updateProblemIndication();
		}
	}

	/**
	 * Updates the problems indication with the information described in the specified diagnostic.
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected void updateProblemIndication() { if (updateProblemIndication) { BasicDiagnostic diagnostic = new BasicDiagnostic (Diagnostic.OK, "de.hub.citygml.emf.ecore.editor", 0, null, new Object [] { editingDomain.getResourceSet() }); for (Diagnostic childDiagnostic : resourceToDiagnosticMap.values()) { if (childDiagnostic.getSeverity() != Diagnostic.OK) { diagnostic.add(childDiagnostic); } } int lastEditorPage = getPageCount() - 1; if (lastEditorPage >= 0 && getEditor(lastEditorPage) instanceof ProblemEditorPart) { ((ProblemEditorPart)getEditor(lastEditorPage)).setDiagnostic(diagnostic); if (diagnostic.getSeverity() != Diagnostic.OK) { setActivePage(lastEditorPage); } } else if (diagnostic.getSeverity() != Diagnostic.OK) { ProblemEditorPart problemEditorPart = new ProblemEditorPart(); problemEditorPart.setDiagnostic(diagnostic); problemEditorPart.setMarkerHelper(markerHelper); try { addPage(++lastEditorPage, problemEditorPart, getEditorInput()); setPageText(lastEditorPage, problemEditorPart.getPartName()); setActivePage(lastEditorPage); showTabs(); } catch (PartInitException exception) { CityGMLEditorPlugin.INSTANCE.log(exception); } } if (markerHelper.hasMarkers(editingDomain.getResourceSet())) { markerHelper.deleteMarkers(editingDomain.getResourceSet()); if (diagnostic.getSeverity() != Diagnostic.OK) { try { markerHelper.createMarkers(diagnostic); } catch (CoreException exception) { CityGMLEditorPlugin.INSTANCE.log(exception); } } } } } /** * Shows a dialog that asks if conflicting changes should be discarded. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected boolean handleDirtyConflict() { return MessageDialog.openQuestion (getSite().getShell(), getString("_UI_FileConflict_label"), getString("_WARN_FileConflict")); } /** * This creates a model editor. 
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CityobjectgroupEditor() {
		super();
		initializeEditingDomain();
	}

	/**
	 * This sets up the editing domain for the model editor:
	 * a composed adapter factory covering every involved CityGML package,
	 * and a command stack that refreshes selection and the property sheet
	 * after each executed command.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void initializeEditingDomain() {
		// Create an adapter factory that yields item providers.
		//
		adapterFactory = new ComposedAdapterFactory(ComposedAdapterFactory.Descriptor.Registry.INSTANCE);

		adapterFactory.addAdapterFactory(new ResourceItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new BuildingItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new CitygmlItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new GmlItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new XlinkItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new XALItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new TexturedsurfaceItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new AppearanceItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new TransportationItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new ReliefItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new CityfurnitureItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new CityobjectgroupItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new LanduseItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new VegetationItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new WaterbodyItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new GenericsItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new Smil20ItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new LanguageItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new NamespaceItemProviderAdapterFactory());
		adapterFactory.addAdapterFactory(new ReflectiveItemProviderAdapterFactory());

		// Create the command stack that will notify this editor as commands are executed.
		//
		BasicCommandStack commandStack = new BasicCommandStack();

		// Add a listener to set the most recent command's affected objects to be the selection of the viewer with focus.
		//
		commandStack.addCommandStackListener
			(new CommandStackListener() {
				 public void commandStackChanged(final EventObject event) {
					 // Command execution may happen off the UI thread; marshal back via asyncExec.
					 getContainer().getDisplay().asyncExec
						 (new Runnable() {
							  public void run() {
								  firePropertyChange(IEditorPart.PROP_DIRTY);

								  // Try to select the affected objects.
								  //
								  Command mostRecentCommand = ((CommandStack)event.getSource()).getMostRecentCommand();
								  if (mostRecentCommand != null) {
									  setSelectionToViewer(mostRecentCommand.getAffectedObjects());
								  }
								  if (propertySheetPage != null && !propertySheetPage.getControl().isDisposed()) {
									  propertySheetPage.refresh();
								  }
							  }
						  });
				 }
			 });

		// Create the editing domain with a special command stack.
		//
		editingDomain = new AdapterFactoryEditingDomain(adapterFactory, commandStack, new HashMap<Resource, Boolean>());
	}

	/**
	 * This is here for the listener to be able to call it.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void firePropertyChange(int action) {
		super.firePropertyChange(action);
	}

	/**
	 * This sets the selection into whichever viewer is active.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setSelectionToViewer(Collection<?> collection) {
		final Collection<?> theSelection = collection;
		// Make sure it's okay.
		//
		if (theSelection != null && !theSelection.isEmpty()) {
			Runnable runnable =
				new Runnable() {
					public void run() {
						// Try to select the items in the current content viewer of the editor.
						//
						if (currentViewer != null) {
							currentViewer.setSelection(new StructuredSelection(theSelection.toArray()), true);
						}
					}
				};
			getSite().getShell().getDisplay().asyncExec(runnable);
		}
	}

	/**
	 * This returns the editing domain as required by the {@link IEditingDomainProvider} interface.
* This is important for implementing the static methods of {@link AdapterFactoryEditingDomain} * and for supporting {@link org.eclipse.emf.edit.ui.action.CommandAction}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EditingDomain getEditingDomain() { return editingDomain; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public class ReverseAdapterFactoryContentProvider extends AdapterFactoryContentProvider { /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ReverseAdapterFactoryContentProvider(AdapterFactory adapterFactory) { super(adapterFactory); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object [] getElements(Object object) { Object parent = super.getParent(object); return (parent == null ? Collections.EMPTY_SET : Collections.singleton(parent)).toArray(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object [] getChildren(Object object) { Object parent = super.getParent(object); return (parent == null ? Collections.EMPTY_SET : Collections.singleton(parent)).toArray(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean hasChildren(Object object) { Object parent = super.getParent(object); return parent != null; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object getParent(Object object) { return null; } } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setCurrentViewerPane(ViewerPane viewerPane) { if (currentViewerPane != viewerPane) { if (currentViewerPane != null) { currentViewerPane.showFocus(false); } currentViewerPane = viewerPane; } setCurrentViewer(currentViewerPane.getViewer()); } /** * This makes sure that one content viewer, either for the current page or the outline view, if it has focus, * is the current one. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setCurrentViewer(Viewer viewer) { // If it is changing... // if (currentViewer != viewer) { if (selectionChangedListener == null) { // Create the listener on demand. // selectionChangedListener = new ISelectionChangedListener() { // This just notifies those things that are affected by the section. // public void selectionChanged(SelectionChangedEvent selectionChangedEvent) { setSelection(selectionChangedEvent.getSelection()); } }; } // Stop listening to the old one. // if (currentViewer != null) { currentViewer.removeSelectionChangedListener(selectionChangedListener); } // Start listening to the new one. // if (viewer != null) { viewer.addSelectionChangedListener(selectionChangedListener); } // Remember it. // currentViewer = viewer; // Set the editors selection based on the current viewer's selection. // setSelection(currentViewer == null ? StructuredSelection.EMPTY : currentViewer.getSelection()); } } /** * This returns the viewer as required by the {@link IViewerProvider} interface. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Viewer getViewer() { return currentViewer; } /** * This creates a context menu for the viewer and adds a listener as well registering the menu for extension. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected void createContextMenuFor(StructuredViewer viewer) { MenuManager contextMenu = new MenuManager("#PopUp"); contextMenu.add(new Separator("additions")); contextMenu.setRemoveAllWhenShown(true); contextMenu.addMenuListener(this); Menu menu= contextMenu.createContextMenu(viewer.getControl()); viewer.getControl().setMenu(menu); getSite().registerContextMenu(contextMenu, new UnwrappingSelectionProvider(viewer)); int dndOperations = DND.DROP_COPY | DND.DROP_MOVE | DND.DROP_LINK; Transfer[] transfers = new Transfer[] { LocalTransfer.getInstance() }; viewer.addDragSupport(dndOperations, transfers, new ViewerDragAdapter(viewer)); viewer.addDropSupport(dndOperations, transfers, new EditingDomainViewerDropAdapter(editingDomain, viewer)); } /** * This is the method called to load a resource into the editing domain's resource set based on the editor's input. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void createModel() { URI resourceURI = EditUIUtil.getURI(getEditorInput()); Exception exception = null; Resource resource = null; try { // Load the resource through the editing domain. // resource = editingDomain.getResourceSet().getResource(resourceURI, true); } catch (Exception e) { exception = e; resource = editingDomain.getResourceSet().getResource(resourceURI, false); } Diagnostic diagnostic = analyzeResourceProblems(resource, exception); if (diagnostic.getSeverity() != Diagnostic.OK) { resourceToDiagnosticMap.put(resource, analyzeResourceProblems(resource, exception)); } editingDomain.getResourceSet().eAdapters().add(problemIndicationAdapter); } /** * Returns a diagnostic describing the errors and warnings listed in the resource * and the specified exception (if any). 
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public Diagnostic analyzeResourceProblems(Resource resource, Exception exception) {
		if (!resource.getErrors().isEmpty() || !resource.getWarnings().isEmpty()) {
			BasicDiagnostic basicDiagnostic =
				new BasicDiagnostic
					(Diagnostic.ERROR,
					 "de.hub.citygml.emf.ecore.editor",
					 0,
					 getString("_UI_CreateModelError_message", resource.getURI()),
					 new Object [] { exception == null ? (Object)resource : exception });
			basicDiagnostic.merge(EcoreUtil.computeDiagnostic(resource, true));
			return basicDiagnostic;
		}
		else if (exception != null) {
			return
				new BasicDiagnostic
					(Diagnostic.ERROR,
					 "de.hub.citygml.emf.ecore.editor",
					 0,
					 getString("_UI_CreateModelError_message", resource.getURI()),
					 new Object[] { exception });
		}
		else {
			return Diagnostic.OK_INSTANCE;
		}
	}

	/**
	 * This is the method used by the framework to install your own controls.
	 * Builds six pages — selection tree, parent tree, list, tree, table,
	 * and tree-with-columns — all backed by the same editing domain.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void createPages() {
		// Creates the model from the editor input
		//
		createModel();

		// Only creates the other pages if there is something that can be edited
		//
		if (!getEditingDomain().getResourceSet().getResources().isEmpty()) {
			// Create a page for the selection tree view.
			//
			{
				ViewerPane viewerPane =
					new ViewerPane(getSite().getPage(), CityobjectgroupEditor.this) {
						@Override
						public Viewer createViewer(Composite composite) {
							Tree tree = new Tree(composite, SWT.MULTI);
							TreeViewer newTreeViewer = new TreeViewer(tree);
							return newTreeViewer;
						}
						@Override
						public void requestActivation() {
							super.requestActivation();
							setCurrentViewerPane(this);
						}
					};
				viewerPane.createControl(getContainer());

				selectionViewer = (TreeViewer)viewerPane.getViewer();
				selectionViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));

				selectionViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));
				selectionViewer.setInput(editingDomain.getResourceSet());
				selectionViewer.setSelection(new StructuredSelection(editingDomain.getResourceSet().getResources().get(0)), true);
				viewerPane.setTitle(editingDomain.getResourceSet());

				new AdapterFactoryTreeEditor(selectionViewer.getTree(), adapterFactory);

				createContextMenuFor(selectionViewer);
				int pageIndex = addPage(viewerPane.getControl());
				setPageText(pageIndex, getString("_UI_SelectionPage_label"));
			}

			// Create a page for the parent tree view.
			//
			{
				ViewerPane viewerPane =
					new ViewerPane(getSite().getPage(), CityobjectgroupEditor.this) {
						@Override
						public Viewer createViewer(Composite composite) {
							Tree tree = new Tree(composite, SWT.MULTI);
							TreeViewer newTreeViewer = new TreeViewer(tree);
							return newTreeViewer;
						}
						@Override
						public void requestActivation() {
							super.requestActivation();
							setCurrentViewerPane(this);
						}
					};
				viewerPane.createControl(getContainer());

				parentViewer = (TreeViewer)viewerPane.getViewer();
				parentViewer.setAutoExpandLevel(30);
				// The reverse provider shows ancestors instead of children.
				parentViewer.setContentProvider(new ReverseAdapterFactoryContentProvider(adapterFactory));
				parentViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

				createContextMenuFor(parentViewer);
				int pageIndex = addPage(viewerPane.getControl());
				setPageText(pageIndex, getString("_UI_ParentPage_label"));
			}

			// This is the page for the list viewer
			//
			{
				ViewerPane viewerPane =
					new ViewerPane(getSite().getPage(), CityobjectgroupEditor.this) {
						@Override
						public Viewer createViewer(Composite composite) {
							return new ListViewer(composite);
						}
						@Override
						public void requestActivation() {
							super.requestActivation();
							setCurrentViewerPane(this);
						}
					};
				viewerPane.createControl(getContainer());

				listViewer = (ListViewer)viewerPane.getViewer();
				listViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
				listViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

				createContextMenuFor(listViewer);
				int pageIndex = addPage(viewerPane.getControl());
				setPageText(pageIndex, getString("_UI_ListPage_label"));
			}

			// This is the page for the tree viewer
			//
			{
				ViewerPane viewerPane =
					new ViewerPane(getSite().getPage(), CityobjectgroupEditor.this) {
						@Override
						public Viewer createViewer(Composite composite) {
							return new TreeViewer(composite);
						}
						@Override
						public void requestActivation() {
							super.requestActivation();
							setCurrentViewerPane(this);
						}
					};
				viewerPane.createControl(getContainer());

				treeViewer = (TreeViewer)viewerPane.getViewer();
				treeViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
				treeViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

				new AdapterFactoryTreeEditor(treeViewer.getTree(), adapterFactory);

				createContextMenuFor(treeViewer);
				int pageIndex = addPage(viewerPane.getControl());
				setPageText(pageIndex, getString("_UI_TreePage_label"));
			}

			// This is the page for the table viewer.
			//
			{
				ViewerPane viewerPane =
					new ViewerPane(getSite().getPage(), CityobjectgroupEditor.this) {
						@Override
						public Viewer createViewer(Composite composite) {
							return new TableViewer(composite);
						}
						@Override
						public void requestActivation() {
							super.requestActivation();
							setCurrentViewerPane(this);
						}
					};
				viewerPane.createControl(getContainer());

				tableViewer = (TableViewer)viewerPane.getViewer();

				Table table = tableViewer.getTable();
				TableLayout layout = new TableLayout();
				table.setLayout(layout);
				table.setHeaderVisible(true);
				table.setLinesVisible(true);

				TableColumn objectColumn = new TableColumn(table, SWT.NONE);
				layout.addColumnData(new ColumnWeightData(3, 100, true));
				objectColumn.setText(getString("_UI_ObjectColumn_label"));
				objectColumn.setResizable(true);

				TableColumn selfColumn = new TableColumn(table, SWT.NONE);
				layout.addColumnData(new ColumnWeightData(2, 100, true));
				selfColumn.setText(getString("_UI_SelfColumn_label"));
				selfColumn.setResizable(true);

				tableViewer.setColumnProperties(new String [] {"a", "b"});
				tableViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
				tableViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

				createContextMenuFor(tableViewer);
				int pageIndex = addPage(viewerPane.getControl());
				setPageText(pageIndex, getString("_UI_TablePage_label"));
			}

			// This is the page for the table tree viewer.
			//
			{
				ViewerPane viewerPane =
					new ViewerPane(getSite().getPage(), CityobjectgroupEditor.this) {
						@Override
						public Viewer createViewer(Composite composite) {
							return new TreeViewer(composite);
						}
						@Override
						public void requestActivation() {
							super.requestActivation();
							setCurrentViewerPane(this);
						}
					};
				viewerPane.createControl(getContainer());

				treeViewerWithColumns = (TreeViewer)viewerPane.getViewer();

				Tree tree = treeViewerWithColumns.getTree();
				// NOTE(review): FillLayout is a Layout, not layout data — this call looks
				// like a long-standing quirk of the generated code; confirm before relying on it.
				tree.setLayoutData(new FillLayout());
				tree.setHeaderVisible(true);
				tree.setLinesVisible(true);

				TreeColumn objectColumn = new TreeColumn(tree, SWT.NONE);
				objectColumn.setText(getString("_UI_ObjectColumn_label"));
				objectColumn.setResizable(true);
				objectColumn.setWidth(250);

				TreeColumn selfColumn = new TreeColumn(tree, SWT.NONE);
				selfColumn.setText(getString("_UI_SelfColumn_label"));
				selfColumn.setResizable(true);
				selfColumn.setWidth(200);

				treeViewerWithColumns.setColumnProperties(new String [] {"a", "b"});
				treeViewerWithColumns.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
				treeViewerWithColumns.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));

				createContextMenuFor(treeViewerWithColumns);
				int pageIndex = addPage(viewerPane.getControl());
				setPageText(pageIndex, getString("_UI_TreeWithColumnsPage_label"));
			}

			getSite().getShell().getDisplay().asyncExec
				(new Runnable() {
					 public void run() {
						 setActivePage(0);
					 }
				 });
		}

		// Ensures that this editor will only display the page's tab
		// area if there are more than one page
		//
		getContainer().addControlListener
			(new ControlAdapter() {
				// Guard against re-entry: hideTabs() resizes the container,
				// which fires controlResized again.
				boolean guard = false;
				@Override
				public void controlResized(ControlEvent event) {
					if (!guard) {
						guard = true;
						hideTabs();
						guard = false;
					}
				}
			 });

		getSite().getShell().getDisplay().asyncExec
			(new Runnable() {
				 public void run() {
					 updateProblemIndication();
				 }
			 });
	}

	/**
	 * If there is just one page in the multi-page editor part,
	 * this hides the single tab at the bottom.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void hideTabs() {
		if (getPageCount() <= 1) {
			setPageText(0, "");
			if (getContainer() instanceof CTabFolder) {
				((CTabFolder)getContainer()).setTabHeight(1);
				// Grow the container by the space the tab strip no longer uses.
				Point point = getContainer().getSize();
				getContainer().setSize(point.x, point.y + 6);
			}
		}
	}

	/**
	 * If there is more than one page in the multi-page editor part,
	 * this shows the tabs at the bottom.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void showTabs() {
		if (getPageCount() > 1) {
			setPageText(0, getString("_UI_SelectionPage_label"));
			if (getContainer() instanceof CTabFolder) {
				((CTabFolder)getContainer()).setTabHeight(SWT.DEFAULT);
				Point point = getContainer().getSize();
				getContainer().setSize(point.x, point.y - 6);
			}
		}
	}

	/**
	 * This is used to track the active viewer.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void pageChange(int pageIndex) {
		super.pageChange(pageIndex);

		if (contentOutlinePage != null) {
			handleContentOutlineSelection(contentOutlinePage.getSelection());
		}
	}

	/**
	 * This is how the framework determines which interfaces we implement.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("rawtypes")
	@Override
	public Object getAdapter(Class key) {
		if (key.equals(IContentOutlinePage.class)) {
			return showOutlineView() ? getContentOutlinePage() : null;
		}
		else if (key.equals(IPropertySheetPage.class)) {
			return getPropertySheetPage();
		}
		else if (key.equals(IGotoMarker.class)) {
			return this;
		}
		else {
			return super.getAdapter(key);
		}
	}

	/**
	 * This accesses a cached version of the content outliner.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public IContentOutlinePage getContentOutlinePage() {
		if (contentOutlinePage == null) {
			// The content outline is just a tree.
			//
			class MyContentOutlinePage extends ContentOutlinePage {
				@Override
				public void createControl(Composite parent) {
					super.createControl(parent);
					contentOutlineViewer = getTreeViewer();
					contentOutlineViewer.addSelectionChangedListener(this);

					// Set up the tree viewer.
					//
					contentOutlineViewer.setContentProvider(new AdapterFactoryContentProvider(adapterFactory));
					contentOutlineViewer.setLabelProvider(new AdapterFactoryLabelProvider(adapterFactory));
					contentOutlineViewer.setInput(editingDomain.getResourceSet());

					// Make sure our popups work.
					//
					createContextMenuFor(contentOutlineViewer);

					if (!editingDomain.getResourceSet().getResources().isEmpty()) {
						// Select the root object in the view.
						//
						contentOutlineViewer.setSelection(new StructuredSelection(editingDomain.getResourceSet().getResources().get(0)), true);
					}
				}

				@Override
				public void makeContributions(IMenuManager menuManager, IToolBarManager toolBarManager, IStatusLineManager statusLineManager) {
					super.makeContributions(menuManager, toolBarManager, statusLineManager);
					contentOutlineStatusLineManager = statusLineManager;
				}

				@Override
				public void setActionBars(IActionBars actionBars) {
					super.setActionBars(actionBars);
					getActionBarContributor().shareGlobalActions(this, actionBars);
				}
			}

			contentOutlinePage = new MyContentOutlinePage();

			// Listen to selection so that we can handle it in a special way.
			//
			contentOutlinePage.addSelectionChangedListener
				(new ISelectionChangedListener() {
					 // This ensures that we handle selections correctly.
					 //
					 public void selectionChanged(SelectionChangedEvent event) {
						 handleContentOutlineSelection(event.getSelection());
					 }
				 });
		}

		return contentOutlinePage;
	}

	/**
	 * This accesses a cached version of the property sheet.
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public IPropertySheetPage getPropertySheetPage() { if (propertySheetPage == null) { propertySheetPage = new ExtendedPropertySheetPage(editingDomain) { @Override public void setSelectionToViewer(List<?> selection) { CityobjectgroupEditor.this.setSelectionToViewer(selection); CityobjectgroupEditor.this.setFocus(); } @Override public void setActionBars(IActionBars actionBars) { super.setActionBars(actionBars); getActionBarContributor().shareGlobalActions(this, actionBars); } }; propertySheetPage.setPropertySourceProvider(new AdapterFactoryContentProvider(adapterFactory)); } return propertySheetPage; } /** * This deals with how we want selection in the outliner to affect the other views. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void handleContentOutlineSelection(ISelection selection) { if (currentViewerPane != null && !selection.isEmpty() && selection instanceof IStructuredSelection) { Iterator<?> selectedElements = ((IStructuredSelection)selection).iterator(); if (selectedElements.hasNext()) { // Get the first selected element. // Object selectedElement = selectedElements.next(); // If it's the selection viewer, then we want it to select the same selection as this selection. // if (currentViewerPane.getViewer() == selectionViewer) { ArrayList<Object> selectionList = new ArrayList<Object>(); selectionList.add(selectedElement); while (selectedElements.hasNext()) { selectionList.add(selectedElements.next()); } // Set the selection to the widget. // selectionViewer.setSelection(new StructuredSelection(selectionList)); } else { // Set the input to the widget. // if (currentViewerPane.getViewer().getInput() != selectedElement) { currentViewerPane.getViewer().setInput(selectedElement); currentViewerPane.setTitle(selectedElement); } } } } } /** * This is for implementing {@link IEditorPart} and simply tests the command stack. 
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean isDirty() {
	// Dirty iff the command stack has unsaved changes.
	return ((BasicCommandStack)editingDomain.getCommandStack()).isSaveNeeded();
}

/**
 * This is for implementing {@link IEditorPart} and simply saves the model file.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void doSave(IProgressMonitor progressMonitor) {
	// Save only resources that have actually changed.
	// (EMF compares against an in-memory buffer of the last-saved content.)
	//
	final Map<Object, Object> saveOptions = new HashMap<Object, Object>();
	saveOptions.put(Resource.OPTION_SAVE_ONLY_IF_CHANGED, Resource.OPTION_SAVE_ONLY_IF_CHANGED_MEMORY_BUFFER);

	// Do the work within an operation because this is a long running activity that modifies the workbench.
	//
	WorkspaceModifyOperation operation =
		new WorkspaceModifyOperation() {
			// This is the method that gets invoked when the operation runs.
			//
			@Override
			public void execute(IProgressMonitor monitor) {
				// Save the resources to the file system.
				// The first resource is always saved; others only if non-empty or already persisted.
				//
				boolean first = true;
				for (Resource resource : editingDomain.getResourceSet().getResources()) {
					if ((first || !resource.getContents().isEmpty() || isPersisted(resource)) && !editingDomain.isReadOnly(resource)) {
						try {
							long timeStamp = resource.getTimeStamp();
							resource.save(saveOptions);
							// A changed timestamp means the file was actually (re)written.
							if (resource.getTimeStamp() != timeStamp) {
								savedResources.add(resource);
							}
						}
						catch (Exception exception) {
							resourceToDiagnosticMap.put(resource, analyzeResourceProblems(resource, exception));
						}
						first = false;
					}
				}
			}
		};

	updateProblemIndication = false;
	try {
		// This runs the operation, and shows progress.
		//
		new ProgressMonitorDialog(getSite().getShell()).run(true, false, operation);

		// Refresh the necessary state.
		//
		((BasicCommandStack)editingDomain.getCommandStack()).saveIsDone();
		firePropertyChange(IEditorPart.PROP_DIRTY);
	}
	catch (Exception exception) {
		// Something went wrong that shouldn't.
		//
		CityGMLEditorPlugin.INSTANCE.log(exception);
	}
	updateProblemIndication = true;
	updateProblemIndication();
}

/**
 * This returns whether something has been persisted to the URI of the specified resource.
 * The implementation uses the URI converter from the editor's resource set to try to open an input stream.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
protected boolean isPersisted(Resource resource) {
	boolean result = false;
	try {
		InputStream stream = editingDomain.getResourceSet().getURIConverter().createInputStream(resource.getURI());
		if (stream != null) {
			result = true;
			stream.close();
		}
	}
	catch (IOException e) {
		// Ignore: an unopenable URI simply means "not persisted".
	}
	return result;
}

/**
 * This always returns true because it is not currently supported.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean isSaveAsAllowed() {
	return true;
}

/**
 * This also changes the editor's input.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void doSaveAs() {
	SaveAsDialog saveAsDialog = new SaveAsDialog(getSite().getShell());
	saveAsDialog.open();
	IPath path = saveAsDialog.getResult();
	if (path != null) {
		IFile file = ResourcesPlugin.getWorkspace().getRoot().getFile(path);
		if (file != null) {
			doSaveAs(URI.createPlatformResourceURI(file.getFullPath().toString(), true), new FileEditorInput(file));
		}
	}
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Retargets the primary resource to the new URI, then delegates to doSave().
protected void doSaveAs(URI uri, IEditorInput editorInput) {
	(editingDomain.getResourceSet().getResources().get(0)).setURI(uri);
	setInputWithNotify(editorInput);
	setPartName(editorInput.getName());
	IProgressMonitor progressMonitor =
		getActionBars().getStatusLineManager() != null ?
			getActionBars().getStatusLineManager().getProgressMonitor() :
			new NullProgressMonitor();
	doSave(progressMonitor);
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Navigates to the model object referenced by an EMF validation marker.
public void gotoMarker(IMarker marker) {
	try {
		if (marker.getType().equals(EValidator.MARKER)) {
			String uriAttribute = marker.getAttribute(EValidator.URI_ATTRIBUTE, null);
			if (uriAttribute != null) {
				URI uri = URI.createURI(uriAttribute);
				EObject eObject = editingDomain.getResourceSet().getEObject(uri, true);
				if (eObject != null) {
					setSelectionToViewer(Collections.singleton(editingDomain.getWrapper(eObject)));
				}
			}
		}
	}
	catch (CoreException exception) {
		CityGMLEditorPlugin.INSTANCE.log(exception);
	}
}

/**
 * This is called during startup.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void init(IEditorSite site, IEditorInput editorInput) {
	setSite(site);
	setInputWithNotify(editorInput);
	setPartName(editorInput.getName());
	site.setSelectionProvider(this);
	site.getPage().addPartListener(partListener);
	// Watch the workspace so external edits/removals of the model file are detected.
	ResourcesPlugin.getWorkspace().addResourceChangeListener(resourceChangeListener, IResourceChangeEvent.POST_CHANGE);
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void setFocus() {
	if (currentViewerPane != null) {
		currentViewerPane.setFocus();
	}
	else {
		getControl(getActivePage()).setFocus();
	}
}

/**
 * This implements {@link org.eclipse.jface.viewers.ISelectionProvider}.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void addSelectionChangedListener(ISelectionChangedListener listener) {
	selectionChangedListeners.add(listener);
}

/**
 * This implements {@link org.eclipse.jface.viewers.ISelectionProvider}.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void removeSelectionChangedListener(ISelectionChangedListener listener) {
	selectionChangedListeners.remove(listener);
}

/**
 * This implements {@link org.eclipse.jface.viewers.ISelectionProvider} to return this editor's overall selection.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public ISelection getSelection() {
	return editorSelection;
}

/**
 * This implements {@link org.eclipse.jface.viewers.ISelectionProvider} to set this editor's overall selection.
 * Calling this result will notify the listeners.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void setSelection(ISelection selection) {
	editorSelection = selection;

	// Notify every registered listener, then refresh the status line.
	for (ISelectionChangedListener listener : selectionChangedListeners) {
		listener.selectionChanged(new SelectionChangedEvent(this, selection));
	}
	setStatusLineManager(selection);
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Shows a summary of the current selection (none / one / many) in the status line.
public void setStatusLineManager(ISelection selection) {
	// Use the outline's status line when the outline viewer is the active one.
	IStatusLineManager statusLineManager = currentViewer != null && currentViewer == contentOutlineViewer ?
		contentOutlineStatusLineManager : getActionBars().getStatusLineManager();

	if (statusLineManager != null) {
		if (selection instanceof IStructuredSelection) {
			Collection<?> collection = ((IStructuredSelection)selection).toList();
			switch (collection.size()) {
				case 0: {
					statusLineManager.setMessage(getString("_UI_NoObjectSelected"));
					break;
				}
				case 1: {
					String text = new AdapterFactoryItemDelegator(adapterFactory).getText(collection.iterator().next());
					statusLineManager.setMessage(getString("_UI_SingleObjectSelected", text));
					break;
				}
				default: {
					statusLineManager.setMessage(getString("_UI_MultiObjectSelected", Integer.toString(collection.size())));
					break;
				}
			}
		}
		else {
			statusLineManager.setMessage("");
		}
	}
}

/**
 * This looks up a string in the plugin's plugin.properties file.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private static String getString(String key) {
	return CityGMLEditorPlugin.INSTANCE.getString(key);
}

/**
 * This looks up a string in plugin.properties, making a substitution.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
private static String getString(String key, Object s1) {
	return CityGMLEditorPlugin.INSTANCE.getString(key, new Object [] { s1 });
}

/**
 * This implements {@link org.eclipse.jface.action.IMenuListener} to help fill the context menus with contributions from the Edit menu.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public void menuAboutToShow(IMenuManager menuManager) {
	((IMenuListener)getEditorSite().getActionBarContributor()).menuAboutToShow(menuManager);
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public EditingDomainActionBarContributor getActionBarContributor() {
	return (EditingDomainActionBarContributor)getEditorSite().getActionBarContributor();
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public IActionBars getActionBars() {
	return getActionBarContributor().getActionBars();
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public AdapterFactory getAdapterFactory() {
	return adapterFactory;
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
// Unhooks all workbench listeners and releases pages/factories created by this editor.
@Override
public void dispose() {
	updateProblemIndication = false;

	ResourcesPlugin.getWorkspace().removeResourceChangeListener(resourceChangeListener);

	getSite().getPage().removePartListener(partListener);

	adapterFactory.dispose();

	if (getActionBarContributor().getActiveEditor() == this) {
		getActionBarContributor().setActiveEditor(null);
	}

	if (propertySheetPage != null) {
		propertySheetPage.dispose();
	}

	if (contentOutlinePage != null) {
		contentOutlinePage.dispose();
	}

	super.dispose();
}

/**
 * Returns whether the outline view should be presented to the user.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
// Always offer the Outline view for this editor.
protected boolean showOutlineView() {
	return true;
}

}
/*
 * Copyright 2000-2008 JetBrains s.r.o.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.plugins.scala.lang.lexer;

import com.intellij.lexer.Lexer;
import com.intellij.lexer.LexerPosition;
import com.intellij.lexer.LexerState;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.xml.IXmlLeafElementType;
import com.intellij.psi.xml.XmlTokenType;
import com.intellij.util.containers.Stack;
import gnu.trove.TIntStack;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import static org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypesEx.*;

/**
 * Lexer for Scala sources with embedded XML literals.
 *
 * Delegates to two underlying lexers — a plain Scala lexer and a patched XML
 * lexer — and switches between them in {@link #locateToken()} whenever an XML
 * literal starts/ends or a {@code {...}} Scala injection appears inside XML.
 * Nesting is tracked with a brace-count stack ({@code myBraceStack}) and a
 * stack of per-XML-layer open-tag stacks ({@code myLayeredTagStack}).
 *
 * @author ilyas
 */
public class ScalaLexer extends Lexer {
  protected final Lexer myScalaPlainLexer;
  private final Lexer myXmlLexer = new ScalaXmlTokenTypes.PatchedXmlLexer();
  // Whichever of the two delegates is currently producing tokens.
  protected Lexer myCurrentLexer;

  // start() unpacks initialState as: low 6 bits = scala state, bits 6..11 = xml state.
  protected static final int MASK = 0x3F;
  protected static final int XML_SHIFT = 6;
  // Depth counters for '{'/'}' inside XML→Scala injections; each entry is one injection.
  protected TIntStack myBraceStack = new TIntStack();
  // One inner stack of open tags per nested XML literal.
  protected Stack<Stack<MyOpenXmlTag>> myLayeredTagStack = new Stack<Stack<MyOpenXmlTag>>();

  protected int myBufferEnd;
  protected int myBufferStart;
  protected CharSequence myBuffer;
  protected int myXmlState;
  private int myTokenStart;
  private int myTokenEnd;
  // True while between XML_CDATA_START and XML_CDATA_END; suppresses '{' injection handling.
  private boolean inCdata = false;
  protected IElementType myTokenType;
  public final String XML_BEGIN_PATTERN = "<\\w";
  // Remaining XML-lexer steps before falling back to the Scala lexer; -1 = no countdown active.
  private int xmlSteps = -1;

  /* We need to store it as in some cases (e.g. when we have uninterrupted xml elements sequence like '<a></a>')
   * when the last xml element was located in 'locateToken()' there is no way to determine from xml state/xml tags stack
   * that the lexer was inside xml. That means the lexer isn't able to patch state => the state can be == 0 =>
   * any incremental stuff (highlighting etc) can start lexical analysis from this position with 0 state => we get wrong token sequence
   */
  private IElementType previousToken = null;

  public ScalaLexer() {
    this(false);
  }

  public ScalaLexer(boolean treatDocCommentAsBlockComment) {
    myScalaPlainLexer = new ScalaLayeredPlainLexer(treatDocCommentAsBlockComment);
    myCurrentLexer = myScalaPlainLexer;
  }

  // Resets all XML/injection bookkeeping and restarts the plain Scala delegate.
  public void start(@NotNull CharSequence buffer, int startOffset, int endOffset, int initialState) {
    myCurrentLexer = myScalaPlainLexer;
    myCurrentLexer.start(buffer, startOffset, endOffset, initialState & MASK);
    myBraceStack.clear();
    myLayeredTagStack.clear();
    myXmlState = (initialState >> XML_SHIFT) & MASK;
    inCdata = false;
    xmlSteps = -1;
    myBuffer = buffer;
    myBufferStart = startOffset;
    myBufferEnd = endOffset;
    myTokenType = null;
  }

  // Reports a single "dirty" marker state (239) whenever any XML/Scala sub-state is
  // active, so incremental relexing never restarts from inside an XML literal with state 0.
  public int getState() {
    locateToken();
    int state = 0;
    if (myLayeredTagStack.size() > 0) state = 239;
    if (myXmlState != 0 || isXmlTokenType(previousToken)) state = 239;
    int scalaState = myScalaPlainLexer.getState();
    if (scalaState != 0) state = 239;
    // work-around for the strange advance()-related assumption / behavior in locateToken()
    if (myTokenStart == 0) return 0;
    return state;
  }

  @Nullable
  public IElementType getTokenType() {
    locateToken();
    return myTokenType;
  }

  // Core dispatch: computes the next token, switching delegates on XML/injection boundaries.
  // Leaves myTokenType/myTokenStart/myTokenEnd describing the located token.
  private void locateToken() {
    previousToken = myTokenType;
    if (myTokenType == null) {
      IElementType type = myCurrentLexer.getTokenType();
      int start = myCurrentLexer.getTokenStart();
      String tokenText = myCurrentLexer.getBufferSequence().subSequence(start, myCurrentLexer.getTokenEnd()).toString();

      // Countdown expired: the probed XML fragment ends here, return to the Scala lexer.
      if (myCurrentLexer == myXmlLexer && xmlSteps == 0) {
        myCurrentLexer = myScalaPlainLexer;
        myCurrentLexer.start(getBufferSequence(), start, myXmlLexer.getBufferEnd(), 0);
      }
      --xmlSteps;
      if (type == SCALA_XML_CONTENT_START) {
        // Entering an XML literal: pre-validate the tag; on failure only lex
        // 'step' tokens as XML before falling back.
        final XmlTagValidator xmlTagValidator = new XmlTagValidator(myCurrentLexer);
        if (!xmlTagValidator.validate()) {
          xmlSteps = xmlTagValidator.step;
        }
        myCurrentLexer = myXmlLexer;
        myXmlState = 0;
        myCurrentLexer.start(getBufferSequence(), start, myBufferEnd, 0);
        myLayeredTagStack.push(new Stack<MyOpenXmlTag>());
        myLayeredTagStack.peek().push(new MyOpenXmlTag());
        myTokenType = myCurrentLexer.getTokenType();
        locateTextRange();
      } else if ((/*type == XML_ATTRIBUTE_VALUE_TOKEN || */type == ScalaXmlTokenTypes.XML_DATA_CHARACTERS()) && //todo: Dafuq???
          tokenText.startsWith("{") && !tokenText.startsWith("{{") && !inCdata) {
        // '{' (not the escaped '{{') inside XML content opens a Scala injection.
        myXmlState = myCurrentLexer.getState();
        (myCurrentLexer = myScalaPlainLexer).start(getBufferSequence(), start, myBufferEnd, 0);
        locateTextRange();
        myBraceStack.push(1);
        myTokenType = SCALA_IN_XML_INJECTION_START;
      } else if (type == ScalaTokenTypes.tRBRACE && myBraceStack.size() > 0) {
        // '}' inside an injection: close it when it balances the opening '{',
        // otherwise just decrement the nesting depth.
        int currentLayer = myBraceStack.pop();
        if (currentLayer == 1) {
          locateTextRange();
          (myCurrentLexer = myXmlLexer).start(getBufferSequence(), start + 1, myBufferEnd, myXmlState);
          myTokenType = SCALA_IN_XML_INJECTION_END;
        } else {
          myBraceStack.push(--currentLayer);
        }
      } else if (type == ScalaTokenTypes.tLBRACE && myBraceStack.size() > 0) {
        // Nested '{' inside an injection: deepen the brace count.
        int currentLayer = myBraceStack.pop();
        myBraceStack.push(++currentLayer);
      } else if ((ScalaXmlTokenTypes.XML_START_TAG_START() == type || ScalaXmlTokenTypes.XML_COMMENT_START() == type
          || ScalaXmlTokenTypes.XML_CDATA_START() == type || ScalaXmlTokenTypes.XML_PI_START() == type) && !myLayeredTagStack.isEmpty()) {
        // Any opening construct inside the current XML layer pushes an open tag.
        if (type == ScalaXmlTokenTypes.XML_CDATA_START()) {
          inCdata = true;
        }
        myLayeredTagStack.peek().push(new MyOpenXmlTag());
      } else if (ScalaXmlTokenTypes.XML_EMPTY_ELEMENT_END() == type && !myLayeredTagStack.isEmpty() &&
          !myLayeredTagStack.peek().isEmpty() && myLayeredTagStack.peek().peek().state == TAG_STATE.UNDEFINED) {
        // '/>' closes the current tag; if the layer empties and no new XML follows,
        // the whole literal ends and we resume the Scala lexer after the 2-char token.
        myLayeredTagStack.peek().pop();
        if (myLayeredTagStack.peek().isEmpty() && checkNotNextXmlBegin(myCurrentLexer)) {
          myLayeredTagStack.pop();
          locateTextRange();
          startScalaPlainLexer(start + 2);
          myTokenType = ScalaXmlTokenTypes.XML_EMPTY_ELEMENT_END();
          return;
        }
      } else if (ScalaXmlTokenTypes.XML_TAG_END() == type && !myLayeredTagStack.isEmpty() && !myLayeredTagStack.peek().isEmpty()) {
        // '>' either finishes the start tag (UNDEFINED -> NONEMPTY) or, on the
        // matching end tag, pops the open tag.
        MyOpenXmlTag tag = myLayeredTagStack.peek().peek();
        if (tag.state == TAG_STATE.UNDEFINED) {
          tag.state = TAG_STATE.NONEMPTY;
        } else if (tag.state == TAG_STATE.NONEMPTY) {
          myLayeredTagStack.peek().pop();
        }
        if (myLayeredTagStack.peek().isEmpty() && checkNotNextXmlBegin(myCurrentLexer)) {
          myLayeredTagStack.pop();
          locateTextRange();
          startScalaPlainLexer(start + 1);
          myTokenType = ScalaXmlTokenTypes.XML_TAG_END();
          return;
        }
      } else if (ScalaXmlTokenTypes.XML_PI_END() == type && !myLayeredTagStack.isEmpty() &&
          !myLayeredTagStack.peek().isEmpty() && myLayeredTagStack.peek().peek().state == TAG_STATE.UNDEFINED) {
        // '?>' ends a processing instruction (2-char token).
        myLayeredTagStack.peek().pop();
        if (myLayeredTagStack.peek().isEmpty() && checkNotNextXmlBegin(myCurrentLexer)) {
          myLayeredTagStack.pop();
          locateTextRange();
          startScalaPlainLexer(start + 2);
          myTokenType = ScalaXmlTokenTypes.XML_PI_END();
          return;
        }
      } else if (ScalaXmlTokenTypes.XML_COMMENT_END() == type && !myLayeredTagStack.isEmpty() &&
          !myLayeredTagStack.peek().isEmpty() && myLayeredTagStack.peek().peek().state == TAG_STATE.UNDEFINED) {
        // '-->' ends a comment (3-char token).
        myLayeredTagStack.peek().pop();
        if (myLayeredTagStack.peek().isEmpty() && checkNotNextXmlBegin(myCurrentLexer)) {
          myLayeredTagStack.pop();
          locateTextRange();
          startScalaPlainLexer(start + 3);
          myTokenType = ScalaXmlTokenTypes.XML_COMMENT_END();
          return;
        }
      } else if (ScalaXmlTokenTypes.XML_CDATA_END() == type && !myLayeredTagStack.isEmpty() &&
          !myLayeredTagStack.peek().isEmpty() && myLayeredTagStack.peek().peek().state == TAG_STATE.UNDEFINED) {
        // ']]>' ends a CDATA section (3-char token) and re-enables injections.
        inCdata = false;
        myLayeredTagStack.peek().pop();
        if (myLayeredTagStack.peek().isEmpty() && checkNotNextXmlBegin(myCurrentLexer)) {
          myLayeredTagStack.pop();
          locateTextRange();
          startScalaPlainLexer(start + 3);
          myTokenType = ScalaXmlTokenTypes.XML_CDATA_END();
          return;
        }
      } else if (type == ScalaXmlTokenTypes.XML_DATA_CHARACTERS() && tokenText.indexOf('{') != -1 && !inCdata) {
        // A '{' in the middle of character data: split the token so the part up to
        // the first unescaped '{' stays XML_DATA_CHARACTERS; skip doubled '{{' escapes.
        int scalaToken = tokenText.indexOf('{');
        while (scalaToken != -1 && scalaToken + 1 < tokenText.length() && tokenText.charAt(scalaToken + 1) == '{')
          scalaToken = tokenText.indexOf('{', scalaToken + 2);
        if (scalaToken != -1) {
          myTokenType = ScalaXmlTokenTypes.XML_DATA_CHARACTERS();
          myTokenStart = myCurrentLexer.getTokenStart();
          myTokenEnd = myTokenStart + scalaToken;
          myCurrentLexer.start(getBufferSequence(), myTokenEnd, myBufferEnd, myCurrentLexer.getState());
        }
      } else if ((type == XmlTokenType.XML_REAL_WHITE_SPACE || type == XmlTokenType.XML_WHITE_SPACE ||
          type == XmlTokenType.TAG_WHITE_SPACE) && tokenText.matches("\\s*\n(\n|\\s)*")) {
        // Whitespace containing a newline is normalized to the Scala whitespace token.
        type = ScalaTokenTypes.tWHITE_SPACE_IN_LINE;
      } else if (type == null || !(type instanceof IXmlLeafElementType) && !ScalaXmlTokenTypes.isSubstituted(type.toString())) {
        // Non-XML token while a countdown is running: give the probe one more step.
        ++xmlSteps;
      }

      if (myTokenType == null) {
        myTokenType = type;
        if (myTokenType == null) return;
        locateTextRange();
      }
      //we have to advance current lexer only if we didn't start scala plain lexer on this iteration
      //because of wrong behaviour of the latter ScalaPlainLexer
      myCurrentLexer.advance();
    }
  }

  // Switches to the plain Scala delegate starting at the given buffer offset.
  private void startScalaPlainLexer(int start) {
    (myCurrentLexer = myScalaPlainLexer).start(getBufferSequence(), start, myBufferEnd);
  }

  // Copies the delegate's current token range into myTokenStart/myTokenEnd.
  private void locateTextRange() {
    myTokenStart = myCurrentLexer.getTokenStart();
    myTokenEnd = myCurrentLexer.getTokenEnd();
  }

  // Returns true when the text after the delegate's current token does NOT begin
  // another XML element ("<" followed by a word character).
  private boolean checkNotNextXmlBegin(Lexer lexer) {
    String text = lexer.getBufferSequence().toString();
    int beginIndex = lexer.getTokenEnd();
    if (beginIndex < text.length()) {
      text = text.substring(beginIndex).trim();
      if (text.length() > 2) {
        text = text.substring(0, 2);
      }
      return !text.matches(XML_BEGIN_PATTERN);
    }
    return true;
  }

  public int getTokenStart() {
    locateToken();
    if (myTokenType == null) return myTokenEnd;
    return myTokenStart;
  }

  public int getTokenEnd() {
    locateToken();
    return myTokenEnd;
  }

  // Lazy advance: the next locateToken() call recomputes the token.
  public void advance() {
    myTokenType = null;
  }

  @NotNull
  public LexerPosition getCurrentPosition() {
    return new MyPosition(myTokenStart, myTokenEnd, new MyState(myXmlState, 0, myBraceStack, myCurrentLexer, myLayeredTagStack));
  }

  // NOTE(review): restores the stacks/lexer by reference from the saved position;
  // assumes the saved MyState was not mutated since getCurrentPosition().
  public void restore(@NotNull LexerPosition position) {
    MyPosition pos = (MyPosition) position;
    myBraceStack = pos.state.braceStack;
    myCurrentLexer = pos.state.currentLexer;
    myTokenStart = pos.getOffset();
    myTokenEnd = pos.end;
    myLayeredTagStack = pos.state.tagStack;
    myCurrentLexer.start(myCurrentLexer.getBufferSequence(), myTokenStart, myBufferEnd,
        myCurrentLexer instanceof ScalaXmlTokenTypes.PatchedXmlLexer ? pos.state.xmlState : 0);
  }

  @NotNull
  public CharSequence getBufferSequence() {
    return myBuffer;
  }

  public int getBufferEnd() {
    return myBufferEnd;
  }

  private boolean isXmlTokenType(IElementType tpe) {
    return tpe != null && ScalaXmlTokenTypes.XML_ELEMENTS().contains(tpe);
  }

  // Snapshot of the composite lexer state used by getCurrentPosition()/restore().
  private static class MyState implements LexerState {
    public TIntStack braceStack;
    public Stack<Stack<MyOpenXmlTag>> tagStack;
    public Lexer currentLexer;
    public int xmlState;
    public int scalaState;

    public MyState(final int xmlState, final int scalaState, TIntStack braceStack, Lexer lexer, Stack<Stack<MyOpenXmlTag>> tagStack) {
      this.braceStack = braceStack;
      this.tagStack = tagStack;
      this.currentLexer = lexer;
      this.xmlState = xmlState;
      this.scalaState = scalaState;
    }

    public short intern() {
      return 0;
    }
  }

  // Token range plus saved state, as returned by getCurrentPosition().
  private static class MyPosition implements LexerPosition {
    public int start;
    public int end;
    public MyState state;

    public MyPosition(final int start, final int end, final MyState state) {
      this.start = start;
      this.end = end;
      this.state = state;
    }

    public int getOffset() {
      return start;
    }

    public int getState() {
      return state.currentLexer.getState();
    }
  }

  // UNDEFINED = start tag not finished; NONEMPTY = '>' seen, awaiting end tag.
  protected enum TAG_STATE {
    UNDEFINED, EMPTY, NONEMPTY
  }

  private static class MyOpenXmlTag {
    public TAG_STATE state = TAG_STATE.UNDEFINED;
  }

  // Look-ahead check that a '<' really begins a well-formed XML tag before the
  // main lexer commits to XML mode; counts the tokens it consumed in 'step'.
  private static class XmlTagValidator {
    final private static List<IElementType> allStopTokens =
        Arrays.<IElementType>asList(ScalaXmlTokenTypes.XML_TAG_END(), ScalaXmlTokenTypes.XML_EMPTY_ELEMENT_END(),
            ScalaXmlTokenTypes.XML_PI_END(), ScalaXmlTokenTypes.XML_COMMENT_END(), null, ScalaXmlTokenTypes.XML_START_TAG_START());
    final private static List<IElementType> validStopTokens =
        Arrays.<IElementType>asList(ScalaXmlTokenTypes.XML_TAG_END(), ScalaXmlTokenTypes.XML_EMPTY_ELEMENT_END(),
            ScalaXmlTokenTypes.XML_PI_END(), ScalaXmlTokenTypes.XML_COMMENT_END());

    final private Lexer lexer;
    // Tokens consumed so far by the validation pass, oldest first.
    final private LinkedList<IElementType> xmlTokens;
    final private ScalaXmlTokenTypes.PatchedXmlLexer valLexer;
    private int step = 0;

    private XmlTagValidator(Lexer lexer) {
      this.lexer = lexer;
      xmlTokens = new LinkedList<IElementType>();
      valLexer = new ScalaXmlTokenTypes.PatchedXmlLexer();
    }

    // Returns true when the fragment parses as a valid tag/comment/PI/CDATA opening.
    private boolean validate() {
      valLexer.start(lexer.getBufferSequence(), lexer.getTokenStart(), lexer.getBufferEnd(), 0);
      step = 0;
      boolean isCdata = valLexer.getTokenType() == ScalaXmlTokenTypes.XML_CDATA_START();
      advanceLexer();
      step = 1;
      if (!isCdata) {
        while (canProcess()) {
          if (valLexer.getTokenType() == ScalaXmlTokenTypes.XML_DATA_CHARACTERS()) {
            // Only a Scala injection ('{' not escaped as '{{') is allowed as an attribute value.
            return xmlTokens.peekLast() == ScalaXmlTokenTypes.XML_EQ() && valLexer.getTokenText().startsWith("{") &&
                !valLexer.getTokenText().startsWith("{{");
          } else {
            advanceLexer();
          }
        }
        return validStopTokens.contains(valLexer.getTokenType());
      }
      if (valLexer.getTokenType() == ScalaXmlTokenTypes.XML_CDATA_END()) return true;
      advanceLexer();
      return valLexer.getTokenType() == ScalaXmlTokenTypes.XML_CDATA_END();
    }

    private void advanceLexer() {
      xmlTokens.addLast(valLexer.getTokenType());
      valLexer.advance();
      ++step;
    }

    // True while the current token may legally continue the tag being validated.
    private boolean canProcess() {
      if (allStopTokens.contains(valLexer.getTokenType())) return false;
      if (valLexer.getTokenType() == ScalaXmlTokenTypes.XML_NAME()) {
        // A name is valid as the tag name (first token) or as an attribute name
        // preceded by whitespace after the tag name / a closed attribute value.
        if (xmlTokens.size() == 1) return true;
        if (xmlTokens.peekLast() != XmlTokenType.XML_WHITE_SPACE) return false;
        xmlTokens.pollLast();
        if (xmlTokens.peekLast() == ScalaXmlTokenTypes.XML_NAME()) {
          return xmlTokens.size() == 2;
        }
        return xmlTokens.peekLast() == ScalaXmlTokenTypes.XML_ATTRIBUTE_VALUE_END_DELIMITER();
      } else {
        return true;
      }
    }
  }
}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2007-2015 Broad Institute
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package org.broad.igv.sam;

import htsjdk.samtools.*;
import htsjdk.samtools.util.CloseableIterator;
import org.broad.igv.feature.Range;
import org.broad.igv.sam.reader.AlignmentReader;
import org.broad.igv.sam.reader.AlignmentReaderFactory;
import org.broad.igv.ui.panel.ReferenceFrame;
import org.broad.igv.util.ResourceLocator;
import org.broad.igv.util.StringUtils;
import org.broad.igv.util.Utilities;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.*;

/**
 * Write SAM/BAM Alignments to a file or stream
 * <p/>
 *
 * @author jacob
 * @since 2012/05/04
 */
public class SAMWriter {

    private static final String SAM_FIELD_SEPARATOR = "\t";

    /** Header written ahead of every alignment record. */
    private SAMFileHeader header;

    public SAMWriter(SAMFileHeader header) {
        this.header = header;
    }

    /**
     * Writes the given alignments to {@code outFile} as SAM or BAM
     * (format chosen by htsjdk from the file extension).
     *
     * @param outFile     destination file
     * @param alignments  records to write, in the desired output order
     * @param createIndex whether to create a .bai index alongside a BAM output
     * @return number of records written
     */
    public int writeToFile(File outFile, Iterator<PicardAlignment> alignments, boolean createIndex) {
        SAMFileWriterFactory factory = new SAMFileWriterFactory();
        factory.setCreateIndex(createIndex);
        // 'true' = records are presorted per the header's sort order.
        SAMFileWriter writer = factory.makeSAMOrBAMWriter(header, true, outFile);
        return writeAlignments(writer, alignments);
    }

    /**
     * Writes the given alignments to {@code stream} as SAM text.
     *
     * @param bam ignored: BAM output to a raw stream is not supported, text SAM
     *            is always written. Kept for interface compatibility.
     * @return number of records written
     */
    public int writeToStream(OutputStream stream, Iterator<PicardAlignment> alignments, boolean bam) {
        SAMFileWriterImpl writer;
//        if (bam) {
//            return 0; // Don't know how to output bams
//            //writer = new BAMFileWriter(stream, null);
//        } else {
        writer = new SAMTextWriter(stream);
        writer.setHeader(header);
        return writeAlignments(writer, alignments);
//        }
    }

    /**
     * Drains {@code alignments} into {@code writer} and closes the writer.
     *
     * @return number of records written
     */
    private int writeAlignments(SAMFileWriter writer, Iterator<PicardAlignment> alignments) {
        int count = 0;
        while (alignments.hasNext()) {
            PicardAlignment al = alignments.next();
            writer.addAlignment(al.getRecord());
            count++;
        }
        writer.close();
        return count;
    }

    /**
     * Reconstructs the SAM FLAG field from the alignment's boolean properties.
     */
    private static int getFlags(Alignment alignment) {
        int result = alignment.isPaired() ? 0x1 : 0;
        ReadMate mate = alignment.getMate();
        if (mate != null) {
            result += !mate.isMapped() ? 0x8 : 0;
            result += mate.isNegativeStrand() ? 0x20 : 0;
        }
        result += alignment.isProperPair() ? 0x2 : 0;
        result += !alignment.isMapped() ? 0x4 : 0;
        result += alignment.isNegativeStrand() ? 0x10 : 0;
        result += alignment.isFirstOfPair() ? 0x40 : 0;
        result += alignment.isSecondOfPair() ? 0x80 : 0;
        //TODO Not really clear on the meaning of this flag : it seems like we
        //can do without it though (0x100 = secondary alignment)
        //result += false ? 0x100 : 0;
        result += alignment.isVendorFailedRead() ? 0x200 : 0;
        result += alignment.isDuplicate() ? 0x400 : 0;
        return result;
    }

    /**
     * Create SAM string from alignment. Work in progress.
     * Currently ignores the quality string and any optional attributes,
     * but should otherwise be correct.
     */
    public static String getSAMString(Alignment alignment) {
        String refName = alignment.getChr();
        List<String> tokens = new ArrayList<String>(11);

        tokens.add(alignment.getReadName());
        tokens.add(Integer.toString(getFlags(alignment)));
        tokens.add(refName);
        tokens.add(Integer.toString(alignment.getAlignmentStart()));
        tokens.add(Integer.toString(alignment.getMappingQuality()));
        tokens.add(alignment.getCigarString());

        // Per the SAM spec, RNEXT is "=" when the mate maps to the same reference.
        ReadMate mate = alignment.getMate();
        String mateRefName = mate != null ? mate.getChr() : null;
        if (refName.equals(mateRefName) &&
                !SAMRecord.NO_ALIGNMENT_REFERENCE_NAME.equals(mateRefName)) {
            tokens.add("=");
        } else {
            tokens.add(mateRefName);
        }

        int mateStart = mate != null ? mate.getStart() : 0;
        tokens.add(Integer.toString(mateStart));
        tokens.add(Integer.toString(alignment.getInferredInsertSize()));
        tokens.add(alignment.getReadSequence());
        //TODO Implement quality
        tokens.add("*");
        //tokens.add(SAMUtils.phredToFastq(alignment.getQualityArray()));

        //We add a newline to be consistent with samtools
        String out = StringUtils.join(tokens, SAM_FIELD_SEPARATOR) + "\n";
        return out;

        //TODO Most of our alignment implementations don't have these attributes
//        SAMBinaryTagAndValue attribute = alignment.getBinaryAttributes();
//        while (attribute != null) {
//            out.write(FIELD_SEPARATOR);
//            final String encodedTag;
//            if (attribute.isUnsignedArray()) {
//                encodedTag = tagCodec.encodeUnsignedArray(tagUtil.makeStringTag(attribute.tag), attribute.value);
//            } else {
//                encodedTag = tagCodec.encode(tagUtil.makeStringTag(attribute.tag), attribute.value);
//            }
//            out.write(encodedTag);
//            attribute = attribute.getNext();
//        }
    }

    /**
     * Takes an iterator of Alignments, and returns an iterable/iterator
     * consisting only of the SamAlignments contained therein.
     * Can also be used to filter by position
     */
    public static class SamAlignmentIterable implements Iterable<PicardAlignment>, Iterator<PicardAlignment> {

        private Iterator<Alignment> alignments;
        private PicardAlignment nextAlignment;
        private String chr = null;
        private int start = -1;
        private int end = -1;

        public SamAlignmentIterable(Iterator<Alignment> alignments, String chr, int start, int end) {
            this.alignments = alignments;
            this.chr = chr;
            this.start = start;
            this.end = end;
            advance();
        }

        /** Moves {@code nextAlignment} to the next PicardAlignment passing the filter, or null. */
        private void advance() {
            Alignment next;
            nextAlignment = null;
            while (alignments.hasNext() && nextAlignment == null) {
                next = alignments.next();
                if (next instanceof PicardAlignment && passLocFilter(next)) {
                    nextAlignment = (PicardAlignment) next;
                }
            }
        }

        @Override
        public boolean hasNext() {
            return nextAlignment != null;
        }

        @Override
        public PicardAlignment next() {
            if (!hasNext()) throw new NoSuchElementException("No more SamAlignments");
            PicardAlignment next = nextAlignment;
            advance();
            return next;
        }

        @Override
        public void remove() {
            // Intentionally a no-op: removal is not meaningful for this read-only view.
        }

        @Override
        public Iterator<PicardAlignment> iterator() {
            return this;
        }

        // NOTE(review): a null chr rejects everything; callers always pass a sequence name.
        private boolean passLocFilter(Alignment al) {
            return this.chr != null && this.overlaps(al.getChr(), al.getStart(), al.getEnd());
        }

        /**
         * Determine whether there is any overlap between this interval and the specified interval
         */
        private boolean overlaps(String chr, int start, int end) {
            return Utilities.objectEqual(this.chr, chr) && this.start <= end && this.end >= start;
        }
    }

    /**
     * Use Picard to write alignments which are already stored in memory
     *
     * @param dataManager
     * @param outFile
     * @param frame
     * @param sequence
     * @param start
     * @param end
     * @return number of alignments written (0 if no interval is loaded)
     * @throws IOException
     */
    public static int writeAlignmentFilePicard(AlignmentDataManager dataManager, File outFile,
                                               ReferenceFrame frame, String sequence,
                                               int start, int end) throws IOException {

        ResourceLocator inlocator = dataManager.getLocator();
        checkExportableAlignmentFile(inlocator.getTypeString());

        final SAMFileHeader fileHeader = dataManager.getReader().getFileHeader();
        //IGV can only load files sorted in coordinate order, but they aren't always
        //labelled as such.
        fileHeader.setSortOrder(SAMFileHeader.SortOrder.coordinate);

        Range range = new Range(sequence, start, end);
        AlignmentInterval interval = dataManager.getLoadedInterval(frame);
        if (interval != null) {
            // Typed copy (was a raw ArrayList) so we can sort without unchecked warnings.
            List<Alignment> alignments = new ArrayList<>(interval.getAlignments());

            // We need to sort if soft-clipping is on, so just sort always.  Its cheap.
            // comparingInt avoids the integer-overflow pitfall of subtraction-based comparators.
            alignments.sort(Comparator.comparingInt(Alignment::getAlignmentStart));

            Iterator<PicardAlignment> samIter = new SamAlignmentIterable(alignments.iterator(), sequence, start, end);
            SAMWriter writer = new SAMWriter(fileHeader);
            return writer.writeToFile(outFile, samIter, true);
        } else {
            return 0;
        }
    }

    /**
     * Use Picard to write alignment subset, as read from a file
     *
     * @param inlocator
     * @param outPath
     * @param sequence
     * @param start
     * @param end
     * @return number of alignments written
     */
    public static int writeAlignmentFilePicard(ResourceLocator inlocator, String outPath,
                                               String sequence, int start, int end) throws IOException {

        checkExportableAlignmentFile(inlocator.getTypeString());

        AlignmentReader reader = AlignmentReaderFactory.getReader(inlocator);
        CloseableIterator<PicardAlignment> iter = reader.query(sequence, start, end, false);
        final SAMFileHeader fileHeader = reader.getFileHeader();

        SAMWriter writer = new SAMWriter(fileHeader);
        int count = writer.writeToFile(new File(outPath), iter, true);
        iter.close();
        return count;
    }

    /**
     * Rejects input types that cannot be round-tripped through the SAM/BAM writer.
     *
     * @throws IllegalArgumentException if {@code typeString} has no exportable extension
     */
    private static void checkExportableAlignmentFile(String typeString) {
        String[] validExts = new String[]{".cram", ".bam", ".sam", ".bam.list", ".sam.list"};
        boolean isValidExt = false;
        for (String validExt : validExts) {
            isValidExt |= typeString.endsWith(validExt);
        }
        if (!isValidExt) {
            // Fixed garbled message (was "Input alignment valid not valid for export").
            throw new IllegalArgumentException("Input alignment file not valid for export");
        }
    }
}
/**
 *
 */
package wblut.hemesh;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Iterator;

import org.eclipse.collections.impl.map.mutable.primitive.IntObjectHashMap;
import org.eclipse.collections.impl.map.mutable.primitive.LongIntHashMap;
import org.eclipse.collections.impl.map.mutable.primitive.LongObjectHashMap;

import wblut.geom.WB_Coord;
import wblut.geom.WB_GeometryOp;
import wblut.geom.WB_Point;
import wblut.math.WB_Epsilon;

/**
 * Creator that extrudes a layered scalar grid ("substrate") from a quad/tri
 * mesh along its vertex normals and polygonises the isolevel crossing into a
 * closed skin mesh, marching-cubes style (ternary variant: each corner is
 * below / on / above the isolevel).
 *
 * @author FVH
 */
public class HEC_IsoSkin extends HEC_Creator {
	// Layered grid built from the source mesh; null until a substrate is set.
	private Substrate substrate;
	// Iso-surface threshold used during polygonisation.
	private double isolevel;
	// Snap tolerance in [0, 0.5): iso-vertices closer than gamma (as a fraction
	// of an edge) to a grid vertex are snapped onto that vertex.
	private double gamma;

	/**
	 * Default constructor: no substrate, isolevel 0.5, no vertex snapping.
	 */
	public HEC_IsoSkin() {
		override = true;
		substrate = null;
		isolevel = 0.5;
		gamma = 0.0;
	}

	/**
	 * Create a skin creator over an explicit set of layer offsets.
	 *
	 * @param mesh   source mesh (faces must be triangles or quads)
	 * @param layers offsets of each grid layer along the vertex normals
	 */
	public HEC_IsoSkin(final HE_Mesh mesh, final double[] layers) {
		this();
		// NOTE(review): this constructor does not call checkMesh, unlike the
		// (mesh, int, double, double) constructor — confirm whether intended.
		substrate = new Substrate(mesh, layers);
		isolevel = 0.5;
	}

	/**
	 * Create a skin creator with equally spaced layers.
	 *
	 * @param mesh   source mesh (faces must be triangles or quads)
	 * @param layers number of cell layers (grid gets layers+1 vertex sheets)
	 * @param offset offset of the first sheet along the vertex normals
	 * @param d      spacing between consecutive sheets
	 */
	public HEC_IsoSkin(final HE_Mesh mesh, final int layers, final double offset, final double d) {
		this();
		double[] l = new double[layers + 1];
		for (int i = 0; i <= layers; i++) {
			l[i] = offset + d * i;
		}
		if (checkMesh(mesh)) {
			substrate = new Substrate(mesh, l);
		}
		isolevel = 0.5;
	}

	/**
	 * Replace the substrate with explicit layer offsets.
	 */
	public HEC_IsoSkin setSubstrate(final HE_Mesh mesh, final double[] layers) {
		if (checkMesh(mesh)) {
			substrate = new Substrate(mesh, layers);
		}
		return this;
	}

	/**
	 * Replace the substrate with equally spaced layers.
	 * NOTE(review): allocates double[layers] here but double[layers + 1] in the
	 * matching constructor — one sheet fewer; confirm which is intended.
	 */
	public HEC_IsoSkin setSubstrate(final HE_Mesh mesh, final int layers, final double offset, final double d) {
		double[] l = new double[layers];
		for (int i = 0; i < layers; i++) {
			l[i] = offset + d * i;
		}
		if (checkMesh(mesh)) {
			substrate = new Substrate(mesh, l);
		}
		isolevel = 0.5;
		return this;
	}

	/**
	 * A substrate mesh is only usable if every face is a triangle or a quad.
	 *
	 * @return true if all face degrees are 3 or 4
	 */
	private boolean checkMesh(final HE_Mesh mesh) {
		HE_FaceIterator fItr = mesh.fItr();
		HE_Face f;
		int fd;
		while (fItr.hasNext()) {
			f = fItr.next();
			fd = f.getFaceDegree();
			if (fd != 3 && fd != 4) {
				return false;
			}
		}
		return true;
	}

	/**
	 * Set the scalar value for each grid vertex; values[i][j] is layer i,
	 * substrate vertex j.
	 */
	public HEC_IsoSkin setValues(final double[][] values) {
		substrate.setValues(values);
		return this;
	}

	/** Preset: skin hugging the two outermost cell layers. */
	public HEC_IsoSkin setValuesBilayer() {
		substrate.setValuesBilayer();
		return this;
	}

	/** Preset: solid skin filling all interior layers. */
	public HEC_IsoSkin setValuesSolid() {
		substrate.setValuesSolid();
		return this;
	}

	/** Preset: alternating positive/negative layers (onion-like shells). */
	public HEC_IsoSkin setValuesLayers() {
		substrate.setValuesLayers();
		return this;
	}

	/** Set the iso-surface threshold. */
	public HEC_IsoSkin setIsolevel(final double isolevel) {
		this.isolevel = isolevel;
		return this;
	}

	/** Set the vertex snap tolerance (0 disables snapping). */
	public HEC_IsoSkin setGamma(final double gamma) {
		this.gamma = gamma;
		return this;
	}

	/** @return number of cell layers in the substrate */
	public int getNumberOfLayers() {
		return substrate.getNumberOfLayers();
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see wblut.hemesh.HEC_Creator#createBase()
	 */
	@Override
	protected HE_Mesh createBase() {
		// Bail out with an empty mesh when the substrate/values are missing or
		// not shaped (numberOfLayers+1) x numberOfVertices.
		if (substrate == null || substrate.values == null || substrate.values.length != substrate.numberOfLayers + 1
				|| substrate.values[0].length != substrate.numberOfVertices) {
			return new HE_Mesh();
		}
		return substrate.getSkin(isolevel, gamma);
	}

	/**
	 * @return grid vertex positions, indexed [layer][vertex]
	 */
	public WB_Coord[][] getGridpositions() {
		return substrate.getGridpositions();
	}

	/**
	 * @return the 3D cells of the substrate, indexed [layer][face]
	 */
	public Cell[][] getCells() {
		return substrate.getCells3D();
	}

	/**
	 * Layered scalar grid over the source mesh plus the polygonisation logic.
	 */
	private static class Substrate {
		/**
		 * Bookkeeping for snapping an iso-vertex onto the closest grid vertex
		 * when it falls within gamma of it.
		 */
		private static class VertexRemap {
			int i, layeri;          // grid vertex (vertex index, layer)
			double closestd;        // distance of the closest iso-crossing so far
			WB_Point p;             // interpolated position to snap to
			double originalvalue;   // value to restore after polygonisation
			HE_Vertex snapvertex;   // mesh vertex being moved
		}

		// Iso-vertex types and ternary digit codes.
		final static int ONVERTEX = 0;
		final static int ONEDGE = 1;
		final static int EQUAL = 1;
		final static int POSITIVE = 2;
		private IntObjectHashMap<VertexRemap> vertexremaps;
		private int numberOfLayers;
		// Scalar field, [numberOfLayers+1][numberOfVertices].
		private double[][] values;
		private Cell[][] cells3D;
		// Boundary cap cells along inner boundary halfedges.
		private Cell[][] cells2D;
		private WB_Coord[][] gridpositions;
		// Cache of iso-vertices created on grid edges, keyed by edgeindex().
		private LongObjectHashMap<HE_Vertex> edges;
		private HE_Mesh substrate;
		private int numberOfVertices;
		private int totalNumberOfVertices;
		// NOTE(review): written by classifyCell3D but never read — appears to
		// be leftover debugging state.
		private int[] digits;
		// Maps substrate vertex keys to column index j; nulled after createGrid.
		private LongIntHashMap keysToIndex;
		// Iso-vertex table for 2D (cap) cells: 4 corner vertices, 4 edges.
		final static int[][] isovertices2D = new int[][] { { ONVERTEX, 0 }, { ONVERTEX, 1 }, { ONVERTEX, 2 },
				{ ONVERTEX, 3 }, { ONEDGE, 0 }, { ONEDGE, 1 }, { ONEDGE, 2 }, { ONEDGE, 3 } };
		// ISOVERTICES3D: 20
		// type=ONVERTEX iso vertex on vertex, index in vertex list
		// type=ONEDGE iso vertex on edge, index in edge list
		final static int[][] isovertices3D = new int[][] { { ONVERTEX, 0 }, { ONVERTEX, 1 }, { ONVERTEX, 2 },
				{ ONVERTEX, 3 }, { ONVERTEX, 4 }, { ONVERTEX, 5 }, { ONVERTEX, 6 }, { ONVERTEX, 7 }, { ONEDGE, 0 },
				{ ONEDGE, 1 }, { ONEDGE, 2 }, { ONEDGE, 3 }, { ONEDGE, 4 }, { ONEDGE, 5 }, { ONEDGE, 6 },
				{ ONEDGE, 7 }, { ONEDGE, 8 }, { ONEDGE, 9 }, { ONEDGE, 10 }, { ONEDGE, 11 } };
		// Face table for the 81 (3^4) ternary configurations of a 2D cap cell:
		// each row is { numFaces, (faceSize, isoVertexIndices...)* }.
		final static int[][] entries2D = new int[][] { { 0 }, { 0 }, { 1, 3, 0, 4, 5 }, { 0 }, { 0 },
				{ 1, 3, 0, 1, 5 }, { 1, 3, 1, 6, 4 }, { 1, 3, 1, 6, 0 }, { 1, 4, 0, 1, 6, 5 }, { 0 }, { 0 },
				{ 1, 3, 0, 4, 2 }, { 0 }, { 1, 3, 0, 1, 2 }, { 1, 3, 0, 1, 2 }, { 2, 3, 1, 6, 2, 3, 1, 2, 4 },
				{ 2, 3, 0, 6, 2, 3, 6, 0, 1 }, { 2, 3, 0, 6, 2, 3, 6, 0, 1 }, { 1, 3, 2, 5, 7 }, { 1, 3, 2, 0, 7 },
				{ 1, 4, 0, 4, 7, 2 }, { 2, 3, 5, 1, 2, 3, 1, 7, 2 }, { 2, 3, 1, 7, 2, 3, 1, 2, 0 },
				{ 2, 3, 1, 7, 2, 3, 1, 2, 0 }, { 4, 3, 5, 4, 1, 3, 5, 1, 2, 3, 6, 7, 2, 3, 6, 2, 1 },
				{ 3, 3, 0, 6, 7, 3, 6, 0, 1, 3, 0, 7, 2 }, { 3, 3, 0, 6, 7, 3, 0, 7, 2, 3, 6, 0, 1 }, { 0 }, { 0 },
				{ 2, 3, 0, 4, 3, 3, 0, 3, 5 }, { 0 }, { 1, 3, 3, 0, 1 }, { 2, 3, 1, 3, 5, 3, 1, 5, 0 },
				{ 1, 3, 1, 3, 4 }, { 1, 3, 1, 3, 0 }, { 2, 3, 3, 5, 1, 3, 1, 5, 0 }, { 0 }, { 1, 3, 2, 0, 3 },
				{ 2, 3, 4, 3, 2, 3, 4, 2, 0 }, { 1, 3, 2, 1, 3 }, { 1, 4, 0, 1, 3, 2 }, { 1, 4, 0, 1, 3, 2 },
				{ 2, 3, 2, 4, 3, 3, 3, 4, 1 }, { 1, 4, 0, 1, 3, 2 }, { 1, 4, 0, 1, 3, 2 }, { 1, 3, 5, 3, 2 },
				{ 1, 3, 0, 3, 2 }, { 2, 3, 4, 3, 0, 3, 0, 3, 2 }, { 2, 3, 5, 1, 3, 3, 5, 3, 2 },
				{ 1, 4, 0, 1, 3, 2 }, { 1, 4, 0, 1, 3, 2 }, { 3, 3, 5, 4, 3, 3, 3, 4, 1, 3, 5, 3, 2 },
				{ 1, 4, 0, 1, 3, 2 }, { 1, 4, 0, 1, 3, 2 }, { 1, 3, 3, 7, 6 }, { 2, 3, 0, 6, 3, 3, 0, 3, 7 },
				{ 4, 3, 4, 6, 0, 3, 0, 6, 3, 3, 7, 5, 0, 3, 7, 0, 3 }, { 1, 3, 3, 7, 1 },
				{ 2, 3, 1, 7, 0, 3, 7, 1, 3 }, { 3, 3, 7, 5, 1, 3, 1, 5, 0, 3, 7, 1, 3 }, { 1, 4, 7, 4, 1, 3 },
				{ 2, 3, 7, 0, 1, 3, 7, 1, 3 }, { 3, 3, 7, 5, 1, 3, 1, 5, 0, 3, 7, 1, 3 }, { 1, 3, 2, 6, 3 },
				{ 2, 3, 0, 6, 2, 3, 2, 6, 3 }, { 3, 3, 4, 6, 2, 3, 2, 6, 3, 3, 4, 2, 0 }, { 1, 3, 2, 1, 3 },
				{ 1, 4, 0, 1, 3, 2 }, { 1, 4, 0, 1, 3, 2 }, { 2, 3, 2, 4, 3, 3, 3, 4, 1 }, { 1, 4, 0, 1, 3, 2 },
				{ 1, 4, 0, 1, 3, 2 }, { 1, 4, 5, 6, 3, 2 }, { 2, 3, 0, 6, 2, 3, 2, 6, 3 },
				{ 3, 3, 2, 4, 6, 3, 4, 2, 0, 3, 2, 6, 3 }, { 2, 3, 5, 1, 2, 3, 2, 1, 3 }, { 1, 4, 0, 1, 3, 2 },
				{ 1, 4, 0, 1, 3, 2 }, { 3, 3, 3, 5, 4, 3, 5, 3, 2, 3, 3, 4, 1 }, { 1, 4, 0, 1, 3, 2 },
				{ 1, 4, 0, 1, 3, 2 } };
		// Triangle table for the 6561 (3^8) ternary 3D cell configurations,
		// loaded from resources/isonepcube3D.txt in the constructor.
		int[][] entries3D;
		// Cache of iso-vertices created on grid vertices, keyed by vertexindex().
		private IntObjectHashMap<HE_Vertex> vertices;
		private double gamma;

		/**
		 * Build the grid over mesh with vertex sheets at offsets d[0..n].
		 *
		 * @param mesh source mesh
		 * @param d    layer offsets; d.length-1 becomes the cell-layer count
		 */
		Substrate(final HE_Mesh mesh, final double[] d) {
			this.substrate = mesh;
			this.numberOfLayers = d.length - 1;
			numberOfVertices = mesh.getNumberOfVertices();
			totalNumberOfVertices = numberOfVertices * (numberOfLayers + 1);
			setGridPositions(d);
			createGrid();
			// keysToIndex is only needed while building the grid; free it.
			keysToIndex = null;
			String line = "";
			final String cvsSplitBy = " ";
			BufferedReader br = null;
			InputStream is = null;
			InputStreamReader isr = null;
			// Load the 3^8-entry triangle table from the bundled resource.
			entries3D = new int[6561][];
			try {
				is = this.getClass().getClassLoader().getResourceAsStream("resources/isonepcube3D.txt");
				isr = new InputStreamReader(is);
				br = new BufferedReader(isr);
				int i = 0;
				while ((line = br.readLine()) != null) {
					final String[] cell = line.split(cvsSplitBy);
					final int[] indices = new int[cell.length];
					for (int j = 0; j < cell.length; j++) {
						indices[j] = Integer.parseInt(cell[j]);
					}
					entries3D[i] = indices;
					i++;
				}
			} catch (final FileNotFoundException e) {
				e.printStackTrace();
			} catch (final IOException e) {
				e.printStackTrace();
			} finally {
				if (br != null) {
					try {
						br.close();
						isr.close();
						is.close();
					} catch (final IOException e) {
						e.printStackTrace();
					}
				}
			}
			gamma = 0.0;
		}

		/**
		 * Place every substrate vertex at each layer offset along its vertex
		 * normal, initialise all values to -1 and record the key->index map.
		 */
		void setGridPositions(final double d[]) {
			gridpositions = new WB_Coord[numberOfLayers + 1][numberOfVertices];
			values = new double[numberOfLayers + 1][numberOfVertices];
			keysToIndex = new LongIntHashMap();
			HE_VertexIterator vitr = substrate.vItr();
			HE_Vertex v;
			WB_Coord n;
			int j = 0;
			while (vitr.hasNext()) {
				v = vitr.next();
				n = v.getVertexNormal();
				keysToIndex.put(v.getKey(), j);
				for (int i = 0; i < numberOfLayers + 1; i++) {
					gridpositions[i][j] = v.getPosition().addMul(d[i], n);
					values[i][j] = -1;
				}
				j++;
			}
		}

		/**
		 * Copy user-supplied values into the grid.
		 * NOTE(review): loops i < numberOfLayers although this.values has
		 * numberOfLayers+1 rows — the last sheet is never copied and stays at
		 * -1. Possible off-by-one; confirm against intended usage.
		 */
		void setValues(final double[][] values) {
			for (int i = 0; i < numberOfLayers; i++) {
				for (int j = 0; j < numberOfVertices; j++) {
					this.values[i][j] = values[i][j];
				}
			}
		}

		/**
		 * Preset: positive on the first and last interior sheets only.
		 * NOTE(review): the iterator is advanced but the vertex is unused;
		 * j effectively counts vertices.
		 */
		void setValuesBilayer() {
			HE_VertexIterator vitr = substrate.vItr();
			int j = 0;
			while (vitr.hasNext()) {
				vitr.next();
				for (int i = 1; i < numberOfLayers; i++) {
					values[i][j] = i == 1 || i == numberOfLayers - 1 ? 1 : -1;
				}
				j++;
			}
		}

		/** Preset: positive on all interior sheets (solid slab). */
		void setValuesSolid() {
			HE_VertexIterator vitr = substrate.vItr();
			int j = 0;
			while (vitr.hasNext()) {
				vitr.next();
				for (int i = 1; i < numberOfLayers; i++) {
					values[i][j] = 1;
				}
				j++;
			}
		}

		/** Preset: alternating -1/+1 interior sheets. */
		void setValuesLayers() {
			HE_VertexIterator vitr = substrate.vItr();
			int j = 0;
			while (vitr.hasNext()) {
				vitr.next();
				for (int i = 1; i < numberOfLayers; i++) {
					values[i][j] = 2 * (i % 2) - 1;
				}
				j++;
			}
		}

		/**
		 * Build one 3D cell per (layer, face) and one 2D cap cell per
		 * (layer, inner-boundary halfedge).
		 */
		private void createGrid() {
			int nof = substrate.getNumberOfFaces();
			setCells3D(new Cell[numberOfLayers][nof]);
			HE_Halfedge he;
			int j = 0;
			int[] cornerIndices;
			HE_FaceIterator fitr = substrate.fItr();
			HE_Face f;
			while (fitr.hasNext()) {
				f = fitr.next();
				cornerIndices = new int[4];
				he = f.getHalfedge();
				// Corners 2 and 3 are deliberately swapped relative to the
				// halfedge walk; classifyCell3D mirrors this ordering.
				cornerIndices[0] = keysToIndex.get(he.getVertex().getKey());
				he = he.getNextInFace();
				cornerIndices[1] = keysToIndex.get(he.getVertex().getKey());
				he = he.getNextInFace();
				cornerIndices[3] = keysToIndex.get(he.getVertex().getKey());
				he = he.getNextInFace();
				cornerIndices[2] = keysToIndex.get(he.getVertex().getKey());
				for (int i = 0; i < numberOfLayers; i++) {
					getCells3D()[i][j] = new Cell();
					getCells3D()[i][j].setCornerIndices(cornerIndices);
					getCells3D()[i][j].layer = i;
				}
				j++;
			}
			// Boundary caps
			int nobe = 0;
			Iterator<HE_Halfedge> heitr = substrate.heItr();
			while (heitr.hasNext()) {
				he = heitr.next();
				if (he.isInnerBoundary()) {
					nobe++;
				}
			}
			cells2D = new Cell[numberOfLayers][nobe];
			heitr = substrate.heItr();
			j = 0;
			while (heitr.hasNext()) {
				he = heitr.next();
				if (he.isInnerBoundary()) {
					cornerIndices = new int[2];
					cornerIndices[0] = keysToIndex.get(he.getVertex().getKey());
					cornerIndices[1] = keysToIndex.get(he.getEndVertex().getKey());
					for (int i = 0; i < numberOfLayers; i++) {
						cells2D[i][j] = new Cell();
						cells2D[i][j].setCornerIndices(cornerIndices);
						cells2D[i][j].layer = i;
					}
					j++;
				}
			}
		}

		/**
		 * Polygonise the grid into a new mesh. Runs a dummy pass first to
		 * collect gamma-snap candidates, pins their values to the isolevel,
		 * polygonises for real, snaps the vertices and restores the values.
		 */
		HE_Mesh getSkin(final double isolevel, final double gamma) {
			this.gamma = gamma;
			HE_Mesh patch = new HE_Mesh();
			edges = new LongObjectHashMap<HE_Vertex>();
			vertices = new IntObjectHashMap<HE_Vertex>();
			vertexremaps = new IntObjectHashMap<VertexRemap>();
			polygonise(isolevel, true, patch);
			setvalues(isolevel);
			polygonise(isolevel, false, patch);
			snapvertices();
			resetvalues();
			patch.clean();
			HET_Fixer.fixNonManifoldVertices(patch);
			return patch;
		}

		/** @return number of cell layers */
		int getNumberOfLayers() {
			return numberOfLayers;
		}

		/** Walk every 3D cell and every 2D cap cell once. */
		private void polygonise(final double isolevel, final boolean dummyrun, final HE_Mesh mesh) {
			for (int i = 0; i < numberOfLayers; i++) {
				for (int j = 0; j < getCells3D()[0].length; j++) {
					getPolygons3D(getCells3D()[i][j], isolevel, dummyrun, mesh);
				}
				for (int j = 0; j < cells2D[0].length; j++) {
					getPolygons2D(cells2D[i][j], isolevel, dummyrun, mesh);
				}
			}
		}

		/**
		 * Emit the faces (triangles/quads) for one 2D cap cell according to
		 * the entries2D table.
		 */
		private void getPolygons2D(final Cell cell, final double isolevel, final boolean dummyrun, final HE_Mesh mesh) {
			int squareindex = classifyCell2D(cell, isolevel);
			final int[] indices = entries2D[squareindex];
			final int numfaces = indices[0];
			int currentindex = 1;
			for (int t = 0; t < numfaces; t++) {
				int n = indices[currentindex++];
				final HE_Face f = new HE_Face();
				final HE_Vertex v1 = getIsoVertex2D(indices[currentindex++], cell, isolevel, dummyrun, mesh);
				final HE_Vertex v2 = getIsoVertex2D(indices[currentindex++], cell, isolevel, dummyrun, mesh);
				final HE_Vertex v3 = getIsoVertex2D(indices[currentindex++], cell, isolevel, dummyrun, mesh);
				HE_Vertex v4 = n == 4 ? getIsoVertex2D(indices[currentindex++], cell, isolevel, dummyrun, mesh) : null;
				if (!dummyrun) {
					if (n == 3) {
						final HE_Halfedge he1 = new HE_Halfedge();
						final HE_Halfedge he2 = new HE_Halfedge();
						final HE_Halfedge he3 = new HE_Halfedge();
						mesh.setNext(he2, he1);
						mesh.setNext(he1, he3);
						mesh.setNext(he3, he2);
						mesh.setFace(he1, f);
						mesh.setFace(he2, f);
						mesh.setFace(he3, f);
						mesh.setVertex(he1, v1);
						mesh.setHalfedge(v1, he1);
						mesh.setVertex(he2, v2);
						mesh.setHalfedge(v2, he2);
						mesh.setVertex(he3, v3);
						mesh.setHalfedge(v3, he3);
						mesh.setHalfedge(f, he1);
						// NOTE(review): the n==4 branch labels the FACE
						// (f.setInternalLabel) but this branch labels the
						// MESH — likely meant f.setInternalLabel; confirm.
						mesh.setInternalLabel(squareindex);
						if (f.getFaceArea() > 0) {
							mesh.add(f);
							mesh.add(he1);
							mesh.add(he2);
							mesh.add(he3);
						}
					} else if (n == 4) {
						final HE_Halfedge he1 = new HE_Halfedge();
						final HE_Halfedge he2 = new HE_Halfedge();
						final HE_Halfedge he3 = new HE_Halfedge();
						final HE_Halfedge he4 = new HE_Halfedge();
						mesh.setNext(he1, he2);
						mesh.setNext(he2, he3);
						mesh.setNext(he3, he4);
						mesh.setNext(he4, he1);
						mesh.setFace(he1, f);
						mesh.setFace(he2, f);
						mesh.setFace(he3, f);
						mesh.setFace(he4, f);
						mesh.setVertex(he1, v1);
						mesh.setHalfedge(v1, he1);
						mesh.setVertex(he2, v2);
						mesh.setHalfedge(v2, he2);
						mesh.setVertex(he3, v3);
						mesh.setHalfedge(v3, he3);
						mesh.setVertex(he4, v4);
						mesh.setHalfedge(v4, he4);
						mesh.setHalfedge(f, he1);
						f.setInternalLabel(squareindex);
						if (f.getFaceArea() > 0) {
							mesh.add(f);
							mesh.add(he1);
							mesh.add(he2);
							mesh.add(he3);
							mesh.add(he4);
						}
					}
				}
			}
		}

		/**
		 * Emit the triangles for one 3D cell according to the entries3D table.
		 */
		private void getPolygons3D(final Cell cell, final double isolevel, final boolean dummyrun, final HE_Mesh mesh) {
			int cubeindex = classifyCell3D(cell, isolevel);
			final int[] indices = entries3D[cubeindex];
			final int numtris = indices[0];
			int currentindex = 1;
			for (int t = 0; t < numtris; t++) {
				final HE_Face f = new HE_Face();
				final HE_Vertex v1 = getIsoVertex3D(indices[currentindex++], cell, isolevel, dummyrun, mesh);
				final HE_Vertex v2 = getIsoVertex3D(indices[currentindex++], cell, isolevel, dummyrun, mesh);
				final HE_Vertex v3 = getIsoVertex3D(indices[currentindex++], cell, isolevel, dummyrun, mesh);
				if (!dummyrun) {
					final HE_Halfedge he1 = new HE_Halfedge();
					final HE_Halfedge he2 = new HE_Halfedge();
					final HE_Halfedge he3 = new HE_Halfedge();
					mesh.setNext(he3, he2);
					mesh.setNext(he2, he1);
					mesh.setNext(he1, he3);
					mesh.setFace(he1, f);
					mesh.setFace(he2, f);
					mesh.setFace(he3, f);
					mesh.setVertex(he1, v1);
					mesh.setHalfedge(v1, he1);
					mesh.setVertex(he2, v2);
					mesh.setHalfedge(v2, he2);
					mesh.setVertex(he3, v3);
					mesh.setHalfedge(v3, he3);
					mesh.setHalfedge(f, he1);
					if (f.getFaceArea() > 0) {
						mesh.add(f);
						mesh.add(he1);
						mesh.add(he2);
						mesh.add(he3);
					}
				}
			}
		}

		/**
		 * Ternary classification of a 2D cap cell: each of the 4 corners
		 * contributes a base-3 digit (0 below, 1 on, 2 above the isolevel).
		 */
		private int classifyCell2D(final Cell cell, final double isolevel) {
			int layer = cell.layer;
			int squareindex = 0;
			int offset = 1;
			if (values[layer][cell.getCornerIndices()[0]] > isolevel) {
				squareindex += 2 * offset;
			} else if (values[layer][cell.getCornerIndices()[0]] == isolevel) {
				squareindex += offset;
			}
			offset *= 3;
			if (values[layer][cell.getCornerIndices()[1]] > isolevel) {
				squareindex += 2 * offset;
			} else if (values[layer][cell.getCornerIndices()[1]] == isolevel) {
				squareindex += offset;
			}
			offset *= 3;
			if (values[layer + 1][cell.getCornerIndices()[0]] > isolevel) {
				squareindex += 2 * offset;
			} else if (values[layer + 1][cell.getCornerIndices()[0]] == isolevel) {
				squareindex += offset;
			}
			offset *= 3;
			if (values[layer + 1][cell.getCornerIndices()[1]] > isolevel) {
				squareindex += 2 * offset;
			} else if (values[layer + 1][cell.getCornerIndices()[1]] == isolevel) {
				squareindex += offset;
			}
			return squareindex;
		}

		/**
		 * Ternary classification of a 3D cell: 8 corners, base-3 digits in the
		 * order corner 0,1,2,3 of layer, then layer+1. The digits[] writes use
		 * the swapped 2/3 ordering from createGrid; digits itself is never
		 * read afterwards (see field note).
		 */
		private int classifyCell3D(final Cell cell, final double isolevel) {
			int layer = cell.layer;
			int cubeindex = 0;
			int offset = 1;
			digits = new int[8];
			if (values[layer][cell.getCornerIndices()[0]] > isolevel) {
				cubeindex += 2 * offset;
				digits[0] = POSITIVE;
			} else if (values[layer][cell.getCornerIndices()[0]] == isolevel) {
				cubeindex += offset;
				digits[0] = EQUAL;
			}
			offset *= 3;
			if (values[layer][cell.getCornerIndices()[1]] > isolevel) {
				cubeindex += 2 * offset;
				digits[1] = POSITIVE;
			} else if (values[layer][cell.getCornerIndices()[1]] == isolevel) {
				cubeindex += offset;
				digits[1] = EQUAL;
			}
			offset *= 3;
			if (values[layer][cell.getCornerIndices()[2]] > isolevel) {
				cubeindex += 2 * offset;
				digits[3] = POSITIVE;
			} else if (values[layer][cell.getCornerIndices()[2]] == isolevel) {
				cubeindex += offset;
				digits[3] = EQUAL;
			}
			offset *= 3;
			if (values[layer][cell.getCornerIndices()[3]] > isolevel) {
				cubeindex += 2 * offset;
				digits[2] = POSITIVE;
			} else if (values[layer][cell.getCornerIndices()[3]] == isolevel) {
				cubeindex += offset;
				digits[2] = EQUAL;
			}
			offset *= 3;
			if (values[layer + 1][cell.getCornerIndices()[0]] > isolevel) {
				cubeindex += 2 * offset;
				digits[0] = POSITIVE;
			} else if (values[layer + 1][cell.getCornerIndices()[0]] == isolevel) {
				cubeindex += offset;
				digits[0] = EQUAL;
			}
			offset *= 3;
			if (values[layer + 1][cell.getCornerIndices()[1]] > isolevel) {
				cubeindex += 2 * offset;
				digits[1] = POSITIVE;
			} else if (values[layer + 1][cell.getCornerIndices()[1]] == isolevel) {
				cubeindex += offset;
				digits[1] = EQUAL;
			}
			offset *= 3;
			if (values[layer + 1][cell.getCornerIndices()[2]] > isolevel) {
				cubeindex += 2 * offset;
				digits[3] = POSITIVE;
			} else if (values[layer + 1][cell.getCornerIndices()[2]] == isolevel) {
				cubeindex += offset;
				digits[3] = EQUAL;
			}
			offset *= 3;
			if (values[layer + 1][cell.getCornerIndices()[3]] > isolevel) {
				cubeindex += 2 * offset;
				digits[2] = POSITIVE;
			} else if (values[layer + 1][cell.getCornerIndices()[3]] == isolevel) {
				cubeindex += offset;
				digits[2] = EQUAL;
			}
			return cubeindex;
		}

		/**
		 * Resolve an isovertices2D entry to a mesh vertex: either a grid
		 * corner vertex or an interpolated vertex on one of the 4 cell edges.
		 */
		private HE_Vertex getIsoVertex2D(final int isopointindex, final Cell cell, final double isolevel,
				final boolean dummyrun, final HE_Mesh mesh) {
			int layer = cell.layer;
			int[] indices = cell.getCornerIndices();
			if (isovertices2D[isopointindex][0] == ONVERTEX) {
				switch (isovertices2D[isopointindex][1]) {
				case 0:
					return vertex(layer, indices[0], mesh);
				case 1:
					return vertex(layer, indices[1], mesh);
				case 2:
					return vertex(layer + 1, indices[0], mesh);
				case 3:
					return vertex(layer + 1, indices[1], mesh);
				default:
					return null;
				}
			} else if (isovertices2D[isopointindex][0] == ONEDGE) {
				switch (isovertices2D[isopointindex][1]) {
				case 0:
					return edge(isolevel, indices[0], layer, indices[1], layer, dummyrun, mesh);
				case 1:
					return edge(isolevel, indices[0], layer, indices[0], layer + 1, dummyrun, mesh);
				case 2:
					return edge(isolevel, indices[1], layer, indices[1], layer + 1, dummyrun, mesh);
				case 3:
					return edge(isolevel, indices[0], layer + 1, indices[1], layer + 1, dummyrun, mesh);
				default:
					return null;
				}
			}
			return null;
		}

		/**
		 * Resolve an isovertices3D entry to a mesh vertex: one of the 8 cell
		 * corners or one of the 12 cell edges.
		 */
		private HE_Vertex getIsoVertex3D(final int isopointindex, final Cell cell, final double isolevel,
				final boolean dummyrun, final HE_Mesh mesh) {
			int layer = cell.layer;
			int[] indices = cell.getCornerIndices();
			if (isovertices3D[isopointindex][0] == ONVERTEX) {
				switch (isovertices3D[isopointindex][1]) {
				case 0:
					return vertex(layer, indices[0], mesh);
				case 1:
					return vertex(layer, indices[1], mesh);
				case 2:
					return vertex(layer, indices[2], mesh);
				case 3:
					return vertex(layer, indices[3], mesh);
				case 4:
					return vertex(layer + 1, indices[0], mesh);
				case 5:
					return vertex(layer + 1, indices[1], mesh);
				case 6:
					return vertex(layer + 1, indices[2], mesh);
				case 7:
					return vertex(layer + 1, indices[3], mesh);
				default:
					return null;
				}
			} else if (isovertices3D[isopointindex][0] == ONEDGE) {
				switch (isovertices3D[isopointindex][1]) {
				case 0:
					return edge(isolevel, indices[0], layer, indices[1], layer, dummyrun, mesh);
				case 1:
					return edge(isolevel, indices[0], layer, indices[2], layer, dummyrun, mesh);
				case 2:
					return edge(isolevel, indices[1], layer, indices[3], layer, dummyrun, mesh);
				case 3:
					return edge(isolevel, indices[2], layer, indices[3], layer, dummyrun, mesh);
				case 4:
					return edge(isolevel, indices[0], layer, indices[0], layer + 1, dummyrun, mesh);
				case 5:
					return edge(isolevel, indices[1], layer, indices[1], layer + 1, dummyrun, mesh);
				case 6:
					return edge(isolevel, indices[2], layer, indices[2], layer + 1, dummyrun, mesh);
				case 7:
					return edge(isolevel, indices[3], layer, indices[3], layer + 1, dummyrun, mesh);
				case 8:
					return edge(isolevel, indices[0], layer + 1, indices[1], layer + 1, dummyrun, mesh);
				case 9:
					return edge(isolevel, indices[0], layer + 1, indices[2], layer + 1, dummyrun, mesh);
				case 10:
					return edge(isolevel, indices[1], layer + 1, indices[3], layer + 1, dummyrun, mesh);
				case 11:
					return edge(isolevel, indices[2], layer + 1, indices[3], layer + 1, dummyrun, mesh);
				default:
					return null;
				}
			}
			return null;
		}

		/**
		 * Get (or lazily create) the interpolated iso-vertex on a grid edge.
		 * During the dummy run it only records gamma-snap candidates in
		 * vertexremaps and always returns null; during the real run it caches
		 * the created vertex in the edges map.
		 */
		private HE_Vertex edge(final double isolevel, final int i, final int layeri, final int j, final int layerj,
				final boolean dummyrun, final HE_Mesh mesh) {
			long index = edgeindex(layeri, i, layerj, j);
			HE_Vertex edge = edges.get(index);
			if (edge != null) {
				return edge;
			}
			final WB_Coord pi = gridpositions[layeri][i];
			final WB_Coord pj = gridpositions[layerj][j];
			final double vali = values[layeri][i];
			final double valj = values[layerj][j];
			double mu;
			if (dummyrun) {
				mu = (isolevel - vali) / (valj - vali);
				if (mu < gamma) {
					// Crossing close to endpoint i: candidate for snapping i.
					VertexRemap vr = vertexremaps.get(vertexindex(layeri, i));
					if (vr == null) {
						vr = new VertexRemap();
						vr.closestd = mu * WB_GeometryOp.getDistance3D(pi, pj);
						vr.i = i;
						vr.layeri = layeri;
						vr.originalvalue = vali;
						vr.p = interp(isolevel, pi, pj, vali, valj);
						vr.snapvertex = vertex(layeri, i, mesh);
						vertexremaps.put(vertexindex(layeri, i), vr);
					} else {
						double d = mu * WB_GeometryOp.getDistance3D(pi, pj);
						if (vr.closestd > d) {
							vr.closestd = d;
							vr.i = i;
							vr.layeri = layeri;
							vr.originalvalue = vali;
							vr.p = interp(isolevel, pi, pj, vali, valj);
							vr.snapvertex = vertex(layeri, i, mesh);
						}
					}
				} else if (mu > 1 - gamma) {
					// Crossing close to endpoint j: candidate for snapping j.
					VertexRemap vr = vertexremaps.get(vertexindex(layerj, j));
					if (vr == null) {
						vr = new VertexRemap();
						vr.closestd = (1 - mu) * WB_GeometryOp.getDistance3D(pi, pj);
						vr.i = j;
						vr.layeri = layerj;
						vr.originalvalue = valj;
						vr.p = interp(isolevel, pi, pj, vali, valj);
						vr.snapvertex = vertex(layerj, j, mesh);
						vertexremaps.put(vertexindex(layerj, j), vr);
					} else {
						double d = (1 - mu) * WB_GeometryOp.getDistance3D(pi, pj);
						if (vr.closestd > d) {
							vr.closestd = d;
							vr.layeri = layerj;
							vr.i = j;
							vr.originalvalue = valj;
							vr.p = interp(isolevel, pi, pj, vali, valj);
							vr.snapvertex = vertex(layerj, j, mesh);
						}
					}
				}
				return null;
			}
			edge = new HE_Vertex(interp(isolevel, pi, pj, vali, valj));
			mesh.add(edge);
			edges.put(index, edge);
			return edge;
		}

		/** Get (or lazily create) the mesh vertex at a grid corner. */
		private HE_Vertex vertex(final int layer, final int i, final HE_Mesh mesh) {
			HE_Vertex vertex = vertices.get(vertexindex(layer, i));
			if (vertex != null) {
				return vertex;
			}
			vertex = new HE_Vertex(gridpositions[layer][i]);
			mesh.add(vertex);
			vertices.put(vertexindex(layer, i), vertex);
			return vertex;
		}

		/**
		 * Linear interpolation of the isolevel crossing between p1 and p2;
		 * degenerate cases (endpoint on the isolevel, equal values) collapse
		 * to an endpoint within WB_Epsilon tolerance.
		 */
		private WB_Point interp(final double isolevel, final WB_Coord p1, final WB_Coord p2, final double valp1,
				final double valp2) {
			double mu;
			if (WB_Epsilon.isEqualAbs(isolevel, valp1)) {
				return new WB_Point(p1);
			}
			if (WB_Epsilon.isEqualAbs(isolevel, valp2)) {
				return new WB_Point(p2);
			}
			if (WB_Epsilon.isEqualAbs(valp1, valp2)) {
				return new WB_Point(p1);
			}
			mu = (isolevel - valp1) / (valp2 - valp1);
			return new WB_Point(p1.xd() + mu * (p2.xd() - p1.xd()), p1.yd() + mu * (p2.yd() - p1.yd()),
					p1.zd() + mu * (p2.zd() - p1.zd()));
		}

		/**
		 * Unique long key for the ordered grid edge (layeri,i)->(layerj,j);
		 * callers always pass endpoints in a fixed corner order so the key is
		 * consistent.
		 */
		private long edgeindex(final int layeri, final int i, final int layerj, final int j) {
			return (long) (layeri * numberOfVertices + i) * totalNumberOfVertices + layerj * numberOfVertices + j;
		}

		/** Flat index of grid vertex (layeri, i); -1 when the layer is out of range. */
		private int vertexindex(final int layeri, final int i) {
			if (layeri < 0 || layeri > numberOfLayers) {
				return -1;
			}
			return layeri * numberOfVertices + i;
		}

		/**
		 * Pin every snap candidate's grid value to the isolevel (and move its
		 * vertex) before the real polygonisation pass.
		 */
		private void setvalues(final double isolevel) {
			VertexRemap vr;
			for (final Object o : vertexremaps.values()) {
				vr = (VertexRemap) o;
				vr.snapvertex.set(vr.p);
				values[vr.layeri][vr.i] = isolevel;
			}
		}

		/** Move each snap candidate's mesh vertex onto its recorded position. */
		private void snapvertices() {
			VertexRemap vr;
			for (final Object o : vertexremaps.values()) {
				vr = (VertexRemap) o;
				vr.snapvertex.set(vr.p);
			}
		}

		/** Restore the grid values overwritten by setvalues(). */
		private void resetvalues() {
			VertexRemap vr;
			for (final Object o : vertexremaps.values()) {
				vr = (VertexRemap) o;
				values[vr.layeri][vr.i] = vr.originalvalue;
			}
		}

		/**
		 * @return the cells3D
		 */
		Cell[][] getCells3D() {
			return cells3D;
		}

		/**
		 * @param cells3d
		 *            the cells3D to set
		 */
		private void setCells3D(final Cell[][] cells3d) {
			cells3D = cells3d;
		}

		/**
		 * @return the gridpositions
		 */
		WB_Coord[][] getGridpositions() {
			return gridpositions;
		}
	}

	/**
	 * One grid cell: its layer and the substrate-vertex indices of its corners
	 * (4 for a 3D cell, 2 for a boundary cap).
	 */
	public static class Cell {
		private int layer;

		/**
		 * @return the layer
		 */
		int getLayer() {
			return layer;
		}

		/**
		 * @param layer
		 *            the layer to set
		 */
		void setLayer(final int layer) {
			this.layer = layer;
		}

		private int[] cornerIndices;

		/**
		 * @return the cornerIndices
		 */
		public int[] getCornerIndices() {
			return cornerIndices;
		}

		/**
		 * @param cornerIndices
		 *            the cornerIndices to set
		 */
		void setCornerIndices(final int[] cornerIndices) {
			this.cornerIndices = cornerIndices;
		}
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapred.JobQueueJobInProgressListener.JobSchedulingInfo;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

/**
 * Leaf-level queue of the capacity scheduler: holds the waiting and running
 * jobs of one configured queue, ordered by priority (if enabled) or start
 * time, and keeps the queue's scheduling statistics up to date.
 */
class JobQueue extends AbstractQueue {

  static final Log LOG = LogFactory.getLog(JobQueue.class);

  /**
   * @param parent parent container queue
   * @param qsc    scheduling context (capacities, per-user stats) of this queue
   */
  public JobQueue(AbstractQueue parent, QueueSchedulingContext qsc) {
    super(parent, qsc);
    if (qsc.supportsPriorities()) {
      // use the default priority-aware comparator
      comparator = JobQueueJobInProgressListener.FIFO_JOB_QUEUE_COMPARATOR;
    }
    else {
      comparator = STARTTIME_JOB_COMPARATOR;
    }
    waitingJobs =
      new TreeMap<JobSchedulingInfo, JobInProgress>(
        comparator);
    runningJobs =
      new TreeMap<JobSchedulingInfo, JobInProgress>(
        comparator);
  }

  /*
   * If a queue supports priorities, jobs must be
   * sorted on priorities, and then on their start times (technically,
   * their insertion time.
   * If a queue doesn't support priorities, jobs are
   * sorted based on their start time.
   */
  static final Comparator<JobSchedulingInfo> STARTTIME_JOB_COMPARATOR;

  static {
    STARTTIME_JOB_COMPARATOR =
      new Comparator<JobSchedulingInfo>() {
        // comparator for jobs in queues that don't support priorities
        public int compare(
          JobSchedulingInfo o1, JobSchedulingInfo o2) {
          // the job that started earlier wins; ties broken by job id so the
          // ordering is total (required for the TreeMap key).
          if (o1.getStartTime() < o2.getStartTime()) {
            return -1;
          } else {
            return (o1.getStartTime() == o2.getStartTime()
              ? o1.getJobID().compareTo(o2.getJobID())
              : 1);
          }
        }
      };
  }

  /**
   * This involves updating each qC structure.
   *
   * @param mapClusterCapacity
   * @param reduceClusterCapacity
   */
  @Override
  public void update(int mapClusterCapacity, int reduceClusterCapacity) {
    super.update(mapClusterCapacity, reduceClusterCapacity);
    // Fold each running job's task/slot usage into this queue's stats.
    for (JobInProgress j : this.getRunningJobs()) {
      updateStatsOnRunningJob(qsc, j);
    }
  }

  /**
   * Accumulate one RUNNING job's task counts, occupied slots and reserved
   * slots into the queue's map/reduce TaskSchedulingContexts and per-user
   * slot tallies; also refreshes the job's human-readable scheduling info.
   */
  private void updateStatsOnRunningJob(
    QueueSchedulingContext qC, JobInProgress j) {
    if (j.getStatus().getRunState() != JobStatus.RUNNING) {
      return;
    }

    TaskSchedulingContext mapTSI = qC.getMapTSC();
    TaskSchedulingContext reduceTSI = qC.getReduceTSC();

    int numMapsRunningForThisJob = j.runningMaps();
    int numReducesRunningForThisJob = j.runningReduces();
    TaskDataView mapScheduler = TaskDataView.getTaskDataView(TaskType.MAP);
    TaskDataView reduceScheduler =
      TaskDataView.getTaskDataView(TaskType.REDUCE);
    int numRunningMapSlots =
      numMapsRunningForThisJob * mapScheduler.getSlotsPerTask(j);
    int numRunningReduceSlots =
      numReducesRunningForThisJob * reduceScheduler.getSlotsPerTask(j);
    int numMapSlotsForThisJob = mapScheduler.getSlotsOccupied(j);
    int numReduceSlotsForThisJob = reduceScheduler.getSlotsOccupied(j);
    int numReservedMapSlotsForThisJob =
      (mapScheduler.getNumReservedTaskTrackers(j) *
        mapScheduler.getSlotsPerTask(j));
    int numReservedReduceSlotsForThisJob =
      (reduceScheduler.getNumReservedTaskTrackers(j) *
        reduceScheduler.getSlotsPerTask(j));

    j.setSchedulingInfo(
      String.format(
        TaskSchedulingContext.JOB_SCHEDULING_INFO_FORMAT_STRING,
        numMapsRunningForThisJob,
        numRunningMapSlots,
        numReservedMapSlotsForThisJob,
        numReducesRunningForThisJob,
        numRunningReduceSlots,
        numReservedReduceSlotsForThisJob));

    mapTSI.setNumRunningTasks(
      mapTSI.getNumRunningTasks() + numMapsRunningForThisJob);
    reduceTSI.setNumRunningTasks(
      reduceTSI.getNumRunningTasks() + numReducesRunningForThisJob);
    mapTSI.setNumSlotsOccupied(
      mapTSI.getNumSlotsOccupied() + numMapSlotsForThisJob);
    reduceTSI.setNumSlotsOccupied(
      reduceTSI.getNumSlotsOccupied() + numReduceSlotsForThisJob);
    // Per-user tallies; jobAdded() seeds the user's entry with 0, so the get
    // is expected to be non-null for any running job's user.
    Integer i =
      mapTSI.getNumSlotsOccupiedByUser().get(
        j.getProfile().getUser());
    mapTSI.getNumSlotsOccupiedByUser().put(
      j.getProfile().getUser(),
      i.intValue() + numMapSlotsForThisJob);
    i = reduceTSI.getNumSlotsOccupiedByUser().get(
      j.getProfile().getUser());
    reduceTSI.getNumSlotsOccupiedByUser().put(
      j.getProfile().getUser(),
      i.intValue() + numReduceSlotsForThisJob);
    if (LOG.isDebugEnabled()) {
      synchronized (j) {
        LOG.debug(String.format("updateQSI: job %s: run(m)=%d, "
          + "occupied(m)=%d, run(r)=%d, occupied(r)=%d, finished(m)=%d,"
          + " finished(r)=%d, failed(m)=%d, failed(r)=%d, "
          + "spec(m)=%d, spec(r)=%d, total(m)=%d, total(r)=%d", j.getJobID()
          .toString(), numMapsRunningForThisJob, numMapSlotsForThisJob,
          numReducesRunningForThisJob, numReduceSlotsForThisJob, j
          .finishedMaps(), j.finishedReduces(), j.failedMapTasks,
          j.failedReduceTasks, j.speculativeMapTasks,
          j.speculativeReduceTasks, j.numMapTasks, j.numReduceTasks));
      }
    }

    /*
     * it's fine walking down the entire list of running jobs - there
     * probably will not be many, plus, we may need to go through the
     * list to compute numSlotsOccupiedByUser. If this is expensive, we
     * can keep a list of running jobs per user. Then we only need to
     * consider the first few jobs per user.
     */
  }

  Map<JobSchedulingInfo, JobInProgress>
    waitingJobs; // for waiting jobs
  Map<JobSchedulingInfo, JobInProgress>
    runningJobs; // for running jobs

  // NOTE(review): public mutable field; nothing in this class reassigns it
  // after construction — consider narrowing access if callers permit.
  public Comparator<JobSchedulingInfo> comparator;

  /** @return snapshot copy of the waiting jobs, in queue order. */
  Collection<JobInProgress> getWaitingJobs() {
    synchronized (waitingJobs) {
      return Collections.unmodifiableCollection(
        new LinkedList<JobInProgress>(waitingJobs.values()));
    }
  }

  /** @return snapshot copy of the running jobs, in queue order. */
  Collection<JobInProgress> getRunningJobs() {
    synchronized (runningJobs) {
      return Collections.unmodifiableCollection(
        new LinkedList<JobInProgress>(runningJobs.values()));
    }
  }

  private void addRunningJob(JobInProgress job) {
    synchronized (runningJobs) {
      runningJobs.put(
        new JobSchedulingInfo(
          job), job);
    }
  }

  private JobInProgress removeRunningJob(
    JobSchedulingInfo jobInfo) {
    synchronized (runningJobs) {
      return runningJobs.remove(jobInfo);
    }
  }

  JobInProgress removeWaitingJob(
    JobSchedulingInfo schedInfo) {
    synchronized (waitingJobs) {
      JobInProgress jip = waitingJobs.remove(schedInfo);
      this.qsc.setNumOfWaitingJobs(waitingJobs.size());
      return jip;
    }
  }

  private void addWaitingJob(JobInProgress job) {
    synchronized (waitingJobs) {
      waitingJobs.put(
        new JobSchedulingInfo(
          job), job);
      this.qsc.setNumOfWaitingJobs(waitingJobs.size());
    }
  }

  int getWaitingJobCount() {
    synchronized (waitingJobs) {
      return waitingJobs.size();
    }
  }

  // called when a job is added
  synchronized void jobAdded(JobInProgress job) throws IOException {
    // add job to waiting queue. It will end up in the right place,
    // based on priority.
    addWaitingJob(job);
    // update user-specific info
    Integer i = qsc.getNumJobsByUser().get(job.getProfile().getUser());
    if (null == i) {
      i = 1;
      // set the count for running tasks to 0
      qsc.getMapTSC().getNumSlotsOccupiedByUser().put(
        job.getProfile().getUser(),
        0);
      qsc.getReduceTSC().getNumSlotsOccupiedByUser().
        put(
          job.getProfile().getUser(),
          0);
    } else {
      i++;
    }
    qsc.getNumJobsByUser().put(job.getProfile().getUser(), i);

    // setup scheduler specific job information
    preInitializeJob(job);

    LOG.debug(
      "Job " + job.getJobID().toString() + " is added under user "
        + job.getProfile().getUser() + ", user now has " + i + " jobs");
  }

  /**
   * Setup {@link CapacityTaskScheduler} specific information prior to
   * job initialization.
   * <p/>
   * TO DO: Currently this method uses , CapacityTaskScheduler based variables
   * need to shift those.
   */
  void preInitializeJob(JobInProgress job) {
    JobConf jobConf = job.getJobConf();
    // Compute number of slots required to run a single map/reduce task
    int slotsPerMap = 1;
    int slotsPerReduce = 1;
    if (MemoryMatcher.isSchedulingBasedOnMemEnabled()) {
      slotsPerMap = jobConf.computeNumSlotsPerMap(
        MemoryMatcher.getMemSizeForMapSlot());
      slotsPerReduce =
        jobConf.computeNumSlotsPerReduce(
          MemoryMatcher.getMemSizeForReduceSlot());
    }
    job.setNumSlotsPerMap(slotsPerMap);
    job.setNumSlotsPerReduce(slotsPerReduce);
  }

  // called when a job completes
  synchronized void jobCompleted(JobInProgress job) {
    LOG.debug("Job to be removed for user " + job.getProfile().getUser());
    // NOTE(review): throws NPE if the user has no entry (i null); jobAdded
    // is expected to have seeded it — confirm the invariant holds.
    Integer i = qsc.getNumJobsByUser().get(job.getProfile().getUser());
    i--;
    if (0 == i.intValue()) {
      qsc.getNumJobsByUser().remove(job.getProfile().getUser());
      // remove job footprint from our TSIs
      qsc.getMapTSC().getNumSlotsOccupiedByUser().remove(
        job.getProfile().getUser());
      qsc.getReduceTSC().getNumSlotsOccupiedByUser().remove(
        job.getProfile().getUser());
      LOG.debug(
        "No more jobs for user, number of users = "
          + qsc.getNumJobsByUser().size());
    } else {
      qsc.getNumJobsByUser().put(job.getProfile().getUser(), i);
      LOG.debug(
        "User still has " + i + " jobs, number of users = "
          + qsc.getNumJobsByUser().size());
    }
  }

  // This is used to reposition a job in the queue. A job can get repositioned
  // because of the change in the job priority or job start-time.
  private void reorderJobs(
    JobInProgress job, JobSchedulingInfo oldInfo
  ) {
    // Re-key the job in whichever queue(s) it is present: remove under the
    // old scheduling info, re-insert so the new priority/start time applies.
    if (removeWaitingJob(oldInfo) != null) {
      addWaitingJob(job);
    }
    if (removeRunningJob(oldInfo) != null) {
      addRunningJob(job);
    }
  }

  /**
   * @return this leaf queue as its own single descendant job queue
   */
  @Override
  List<AbstractQueue> getDescendentJobQueues() {
    List<AbstractQueue> l = new ArrayList<AbstractQueue>();
    l.add(this);
    return l;
  }

  @Override
  List<AbstractQueue> getDescendantContainerQueues() {
    // A leaf queue has no container-queue descendants.
    return new ArrayList<AbstractQueue>();
  }

  /** Dispatch job change events; only status changes matter here. */
  public void jobUpdated(JobChangeEvent event) {
    // Check if this is the status change
    if (event instanceof JobStatusChangeEvent) {
      jobStateChanged((JobStatusChangeEvent) event);
    }
  }

  /**
   * @return null — a leaf queue has no children
   */
  @Override
  List<AbstractQueue> getChildren() {
    return null;
  }

  /**
   * Dont do anything in sort , this is leaf level queue.
   *
   * @param queueComparator
   */
  @Override
  public void sort(Comparator queueComparator) {
    return;
  }

  // Update the scheduler as job's state has changed
  private void jobStateChanged(JobStatusChangeEvent event) {
    JobInProgress job = event.getJobInProgress();
    JobSchedulingInfo oldJobStateInfo =
      new JobSchedulingInfo(event.getOldStatus());
    // Check if the ordering of the job has changed
    // For now priority and start-time can change the job ordering
    if (event.getEventType() ==
      JobStatusChangeEvent.EventType.PRIORITY_CHANGED
      || event.getEventType() ==
      JobStatusChangeEvent.EventType.START_TIME_CHANGED) {
      // Make a priority change
      reorderJobs(job, oldJobStateInfo);
    } else if (event.getEventType() ==
      JobStatusChangeEvent.EventType.RUN_STATE_CHANGED) {
      // Check if the job is complete
      int runState = job.getStatus().getRunState();
      if (runState == JobStatus.SUCCEEDED
        || runState == JobStatus.FAILED
        || runState == JobStatus.KILLED) {
        jobCompleted(job, oldJobStateInfo);
      } else if (runState == JobStatus.RUNNING) {
        // Removing of the job from job list is responsibility of the
        // initialization poller.
        // Add the job to the running queue
        addRunningJob(job);
      }
    }
  }

  /*
   * Method removes the jobs from both running and waiting job queue in
   * job queue manager.
   */
  private void jobCompleted(
    JobInProgress job, JobSchedulingInfo oldInfo
  ) {
    LOG.info(
      "Job " + job.getJobID().toString() + " submitted to queue "
        + job.getProfile().getQueueName() + " has completed");
    //remove jobs from both queue's a job can be in
    //running and waiting queue at the same time.
    removeRunningJob(oldInfo);
    removeWaitingJob(oldInfo);
    // let scheduler know
    jobCompleted(job);
  }

  @Override
  public void addChild(AbstractQueue queue) {
    throw new UnsupportedOperationException(
      "addChildren is not allowed for "
        + "" + getName());
  }

  /** Leaf queues have no unconfigured capacity to distribute. */
  @Override
  void distributeUnConfiguredCapacity() {
    return;
  }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.segment.incremental;

import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Splitter;
import com.google.common.base.Throwables;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.metamx.common.ISE;
import com.metamx.common.parsers.ParseException;
import io.druid.data.input.InputRow;
import io.druid.data.input.Row;
import io.druid.data.input.impl.SpatialDimensionSchema;
import org.joda.time.DateTime;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Rewrites incoming rows so that each configured spatial dimension is exposed
 * as a single comma-joined coordinate dimension, replacing the individual
 * partial coordinate dimensions.
 *
 * We throw away all invalid spatial dimensions: a spatial dimension whose
 * value does not parse as a list of floats is silently dropped from the row.
 */
public class SpatialDimensionRowTransformer implements Function<InputRow, InputRow>
{
  // Spatial coordinates are stored/transported as "x,y[,z...]" strings.
  private static final Joiner JOINER = Joiner.on(",");
  private static final Splitter SPLITTER = Splitter.on(",");

  // Spatial dimension name -> its schema (which lists the partial dims).
  private final Map<String, SpatialDimensionSchema> spatialDimensionMap;
  // Union of all partial coordinate dimension names across all schemas;
  // these are stripped from the transformed row's dimension list.
  private final Set<String> spatialPartialDimNames;

  /**
   * @param spatialDimensions schemas for the spatial dimensions; names must
   *                          be unique (duplicates raise {@link ISE})
   */
  public SpatialDimensionRowTransformer(List<SpatialDimensionSchema> spatialDimensions)
  {
    this.spatialDimensionMap = Maps.newHashMap();
    for (SpatialDimensionSchema spatialDimension : spatialDimensions) {
      // put() returns the previous value, so a non-null result means a
      // duplicate dimension name was configured.
      if (this.spatialDimensionMap.put(spatialDimension.getDimName(), spatialDimension) != null) {
        throw new ISE("Duplicate spatial dimension names found! Check your schema yo!");
      }
    }
    this.spatialPartialDimNames = Sets.newHashSet(
        Iterables.concat(
            Lists.transform(
                spatialDimensions,
                new Function<SpatialDimensionSchema, List<String>>()
                {
                  @Override
                  public List<String> apply(SpatialDimensionSchema input)
                  {
                    return input.getDims();
                  }
                }
            )
        )
    );
  }

  /**
   * Wraps {@code row} in a view whose spatial dimensions resolve to the
   * joined coordinate values; everything else delegates to the original row.
   */
  @Override
  public InputRow apply(final InputRow row)
  {
    // Spatial dimension name -> joined coordinate value(s). NOTE: this map
    // (and finalDims below) is populated AFTER retVal is constructed; the
    // anonymous InputRow intentionally closes over them and reads them lazily.
    final Map<String, List<String>> spatialLookup = Maps.newHashMap();

    // remove all spatial dimensions (both the spatial dim names themselves
    // and all partial coordinate dims) from the row's dimension list
    final List<String> finalDims = Lists.newArrayList(
        Iterables.filter(
            row.getDimensions(),
            new Predicate<String>()
            {
              @Override
              public boolean apply(String input)
              {
                return !spatialDimensionMap.containsKey(input) && !spatialPartialDimNames.contains(input);
              }
            }
        )
    );

    InputRow retVal = new InputRow()
    {
      @Override
      public List<String> getDimensions()
      {
        return finalDims;
      }

      @Override
      public long getTimestampFromEpoch()
      {
        return row.getTimestampFromEpoch();
      }

      @Override
      public DateTime getTimestamp()
      {
        return row.getTimestamp();
      }

      @Override
      public List<String> getDimension(String dimension)
      {
        // Spatial dimensions are served from the lookup built below;
        // anything else falls through to the wrapped row.
        List<String> retVal = spatialLookup.get(dimension);
        return (retVal == null) ? row.getDimension(dimension) : retVal;
      }

      @Override
      public Object getRaw(String dimension)
      {
        return row.getRaw(dimension);
      }

      @Override
      public long getLongMetric(String metric)
      {
        try {
          return row.getLongMetric(metric);
        }
        catch (ParseException e) {
          throw Throwables.propagate(e);
        }
      }

      @Override
      public float getFloatMetric(String metric)
      {
        try {
          return row.getFloatMetric(metric);
        }
        catch (ParseException e) {
          throw Throwables.propagate(e);
        }
      }

      @Override
      public String toString()
      {
        return row.toString();
      }

      @Override
      public int compareTo(Row o)
      {
        return getTimestamp().compareTo(o.getTimestamp());
      }
    };

    for (Map.Entry<String, SpatialDimensionSchema> entry : spatialDimensionMap.entrySet()) {
      final String spatialDimName = entry.getKey();
      final SpatialDimensionSchema spatialDim = entry.getValue();

      List<String> dimVals = row.getDimension(spatialDimName);
      if (dimVals != null && !dimVals.isEmpty()) {
        // Case 1: the row already carries the joined "x,y" value directly.
        if (dimVals.size() != 1) {
          throw new ISE("Spatial dimension value must be in an array!");
        }
        // Invalid values are dropped silently (see class doc).
        if (isJoinedSpatialDimValValid(dimVals.get(0))) {
          spatialLookup.put(spatialDimName, dimVals);
          finalDims.add(spatialDimName);
        }
      } else {
        // Case 2: assemble the value from the partial coordinate dims,
        // in the order the schema declares them.
        List<String> spatialDimVals = Lists.newArrayList();
        for (String dim : spatialDim.getDims()) {
          List<String> partialDimVals = row.getDimension(dim);
          if (isSpatialDimValsValid(partialDimVals)) {
            spatialDimVals.addAll(partialDimVals);
          }
        }

        // NOTE(review): this size check assumes each partial dim contributes
        // exactly one value; a multi-valued partial dim combined with a
        // missing one could coincidentally match — confirm upstream contract.
        if (spatialDimVals.size() == spatialDim.getDims().size()) {
          spatialLookup.put(spatialDimName, Arrays.asList(JOINER.join(spatialDimVals)));
          finalDims.add(spatialDimName);
        }
      }
    }

    return retVal;
  }

  /** True iff non-empty and every element parses as a float. */
  private boolean isSpatialDimValsValid(List<String> dimVals)
  {
    if (dimVals == null || dimVals.isEmpty()) {
      return false;
    }
    for (String dimVal : dimVals) {
      if (tryParseFloat(dimVal) == null) {
        return false;
      }
    }
    return true;
  }

  /** True iff non-empty and every comma-separated component parses as a float. */
  private boolean isJoinedSpatialDimValValid(String dimVal)
  {
    if (dimVal == null || dimVal.isEmpty()) {
      return false;
    }
    Iterable<String> dimVals = SPLITTER.split(dimVal);
    for (String val : dimVals) {
      if (tryParseFloat(val) == null) {
        return false;
      }
    }
    return true;
  }

  /**
   * @return the parsed float, or null if {@code val} is null or unparseable.
   *         (parseFloat(null) throws NPE, hence the NPE catch.)
   */
  private static Float tryParseFloat(String val)
  {
    try {
      return Float.parseFloat(val);
    }
    catch (NullPointerException | NumberFormatException e) {
      return null;
    }
  }
}
package com.oregonscientific.meep.browser.ui.fragment;

import java.io.FileOutputStream;
import java.io.FileWriter;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnCancelListener;
import android.graphics.Bitmap;
import android.net.http.SslError;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.webkit.SslErrorHandler;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;

import com.oregonscientific.meep.browser.BrowserUtility;
import com.oregonscientific.meep.browser.Consts;
import com.oregonscientific.meep.browser.R;
import com.oregonscientific.meep.browser.WebBrowserActivity;
import com.oregonscientific.meep.browser.database.Bookmark;
import com.oregonscientific.meep.browser.database.History;
import com.oregonscientific.meep.customdialog.CommonPopup;

/**
 * Fragment hosting the browser's {@link WebView}. Handles page loading,
 * URL filtering (via {@link WebBrowserActivity#isAccessiableUrl}),
 * history/bookmark snapshots, and the loading dialog shown while a
 * page is in flight.
 */
public class BrowserFragment extends Fragment {

	WebView webview;
	private String currentURL;
	// When true, URL filtering is bypassed (recommended/whitelisted content).
	private boolean isRecommended = false;

	public static final String TAG = "BrowserFragment";

	/** Sets the URL to load once the view hierarchy is created. */
	public void init(String url) {
		currentURL = url;
	}

	/** Same as {@link #init(String)} but also marks the URL as recommended. */
	public void init(String url, boolean isRecommended) {
		this.isRecommended = isRecommended;
		init(url);
	}

	@Override
	public View onCreateView(LayoutInflater inflater, ViewGroup container,
			Bundle savedInstanceState) {
		Log.d(TAG, "onCreateView");

		View view = inflater.inflate(R.layout.browser_fragment, container, false);
		webview = (WebView) view.findViewById(R.id.webview);
		webview.setWebViewClient(new MyWebViewClient());
		webview.setWebChromeClient(new MyWebChromeClient());
		// Plugins are required for the Flash fallback in onPageStarted.
		webview.getSettings().setPluginsEnabled(true);
		webview.getSettings().setBuiltInZoomControls(true);
		webview.getSettings().setSupportZoom(true);
		webview.getSettings().setJavaScriptCanOpenWindowsAutomatically(true);
		webview.getSettings().setAllowFileAccess(true);

		// for html5
		webview.getSettings().setDomStorageEnabled(true);
		webview.getSettings().setJavaScriptEnabled(true);
		webview.getSettings().setLayoutAlgorithm(WebSettings.LayoutAlgorithm.NARROW_COLUMNS);
		webview.getSettings().setUseWideViewPort(true);
		webview.getSettings().setLoadWithOverviewMode(true);
		webview.getSettings().setSavePassword(true);
		webview.getSettings().setSaveFormData(true);
		webview.getSettings().setGeolocationEnabled(true);
		webview.getSettings().setGeolocationDatabasePath(
				"/data/data/com.oregonscientific.meep.browser/databases/");

		if (currentURL != null) {
			webview.loadUrl(currentURL);
		}
		return view;
	}

	public static BrowserFragment newInstance() {
		Log.d(TAG, "new Instance");
		// create a new content fragment
		BrowserFragment f = new BrowserFragment();
		return f;
	}

	/**
	 * Loads {@code url} into the existing WebView. Must be called after the
	 * fragment's view has been created; otherwise the URL is only recorded.
	 */
	public void updateUrl(String url) {
		Log.d(TAG, "Update URL [" + url + "] - View [" + getView() + "]");
		currentURL = url;
		if (getView() == null) {
			// View not created yet; currentURL will be loaded in onCreateView.
			// (Previously this dereferenced getView() and crashed with an NPE.)
			Log.w(TAG, "updateUrl called before view creation");
			return;
		}
		webview = (WebView) getView().findViewById(R.id.webview);
		webview.loadUrl(url);
	}

	/** Same as {@link #updateUrl(String)} but also marks the URL as recommended. */
	public void updateUrl(String url, boolean isRecommended) {
		this.isRecommended = isRecommended;
		updateUrl(url);
	}

	private class MyWebChromeClient extends WebChromeClient {
		@Override
		public void onReceivedIcon(WebView view, Bitmap icon) {
			super.onReceivedIcon(view, icon);
		}
	}

	/**
	 * WebViewClient that enforces URL filtering, swaps the Flash download
	 * page for a direct APK link, and drives the loading dialog.
	 */
	public class MyWebViewClient extends WebViewClient {

		private int webViewPreviousState;
		private final int PAGE_STARTED = 0x1;
		private final int PAGE_REDIRECTED = 0x2;

		@Override
		public boolean shouldOverrideUrlLoading(WebView view, String urlNewString) {
			webViewPreviousState = PAGE_REDIRECTED;
			// Recommended URLs bypass the accessibility filter.
			if (((WebBrowserActivity) getActivity()).isAccessiableUrl(urlNewString)
					|| isRecommended) {
				return super.shouldOverrideUrlLoading(view, urlNewString);
			}
			view.stopLoading();
			BrowserUtility.alertMessage(getActivity(),
					R.string.browser_title_blocked, R.string.cannot_access_website);
			((WebBrowserActivity) getActivity()).resetSearchBox();
			return false;
		}

		@Override
		public void onPageStarted(final WebView view, String url, Bitmap favicon) {
			super.onPageStarted(view, url, favicon);

			// Redirect Adobe's Flash landing page straight to the ICS APK.
			if (url.contains("http://get.adobe.com/flashplayer/mobile")) {
				url = "http://download.macromedia.com/pub/flashplayer/installers/archive/android/11.1.115.20/install_flash_player_ics.apk";
			}
			webViewPreviousState = PAGE_STARTED;
			BrowserUtility.printLogcatMessageWithTimeStamp("onPageStart");
			showLoadingDialog();
		}

		@Override
		public void onReceivedError(WebView view, int errorCode,
				String description, String failingUrl) {
			super.onReceivedError(view, errorCode, description, failingUrl);
			dismissLoadingDialog();
			BrowserUtility.printLogcatMessageWithTimeStamp("onReceivedError");
			CommonPopup popup = new CommonPopup(getActivity(),
					R.string.browser_title_blocked,
					getActivity().getResources().getString(R.string.load_website_fail)
							+ " " + description);
			popup.show();
		}

		@Override
		public void onReceivedSslError(WebView view, SslErrorHandler handler,
				SslError error) {
			super.onReceivedSslError(view, handler, error);
			// this will ignore the Ssl error and go forward to the site
			handler.proceed();
		}

		@Override
		public void onPageFinished(WebView view, String url) {
			Log.v(TAG, "load:" + url);
			try {
				// Only treat this as a real page load (not a redirect hop).
				if (webViewPreviousState == PAGE_STARTED) {
					dismissLoadingDialog();
					if (getActivity() instanceof WebBrowserActivity) {
						((WebBrowserActivity) getActivity())
								.updateSearchBox(url, webview.getFavicon());
						((WebBrowserActivity) getActivity())
								.recordHistoryItem(getHistoryObject());
						Log.d(TAG, "really load:" + url);
						getScreenshot(view, url);
					}
					BrowserUtility.printLogcatMessageWithTimeStamp(
							"onPageFinished -- stop loading");
					BrowserUtility.printLogcatMessageWithTimeStamp("EOF");
				}
			} catch (Exception e) {
				// Best effort: a failure to record history/screenshot must
				// not break browsing.
				Log.w(TAG, "onPageFinished post-processing failed", e);
			}
		}
	}

	private void animateInRight(final WebView view) {
		Animation anim = AnimationUtils.loadAnimation(getActivity(), R.anim.slide_in_right);
		view.startAnimation(anim);
	}

	private void animateOutRight(final WebView view) {
		Animation anim = AnimationUtils.loadAnimation(getActivity(), R.anim.slide_out_right);
		view.startAnimation(anim);
	}

	private void animateInLeft(final WebView view) {
		Animation anim = AnimationUtils.loadAnimation(getActivity(), R.anim.slide_in_left);
		view.startAnimation(anim);
	}

	private void animateOutLeft(final WebView view) {
		Animation anim = AnimationUtils.loadAnimation(getActivity(), R.anim.slide_out_left);
		view.startAnimation(anim);
	}

	/** @return true if the WebView exists and has back history. */
	public boolean canGoBack() {
		return webview != null && webview.canGoBack();
	}

	public void goBack() {
		if (webview != null && webview.canGoBack()) {
			webview.goBack();
		}
	}

	public void goForwards() {
		if (webview != null && webview.canGoForward()) {
			webview.goForward();
		}
	}

	public void reload() {
		if (webview != null) {
			webview.reload();
		}
	}

	/** @return the currently loaded URL, or null if no WebView exists. */
	public String getCurrentUrl() {
		if (webview != null) {
			return webview.getUrl();
		}
		return null;
	}

	/** Builds a Bookmark from the current page (title, URL, favicon). */
	public Bookmark getBookmarkObject() {
		String name = webview.getTitle();
		String url = webview.getUrl();
		Bitmap favicon = webview.getFavicon();
		Bookmark bookmark = new Bookmark(name, url);
		if (favicon != null)
			bookmark.setFavicon(Bookmark.getByteBitmap(favicon));
		return bookmark;
	}

	/** Builds a History entry from the current page (title, URL, favicon). */
	public History getHistoryObject() {
		String name = webview.getTitle();
		String url = webview.getUrl();
		Bitmap favicon = webview.getFavicon();
		History item = new History(name, url);
		if (favicon != null)
			item.setFavicon(Bookmark.getByteBitmap(favicon));
		return item;
	}

	@Override
	public void onDestroyView() {
		super.onDestroyView();
		if (getActivity() instanceof WebBrowserActivity) {
			((WebBrowserActivity) getActivity()).resetSearchBox();
		}
	}

	/**
	 * Captures the page as a JPEG plus its URL for the "recently viewed"
	 * feature. Best effort: failures are logged, never propagated. Streams
	 * are closed in finally blocks (previously they leaked when
	 * compress/write threw).
	 */
	public void getScreenshot(WebView view, String url) {
		// Snapshot via the drawing cache.
		// NOTE(review): enables the cache on 'view' but reads it from
		// 'webview' — presumably the same object; confirm callers.
		view.setDrawingCacheEnabled(true);
		Bitmap b = Bitmap.createBitmap(webview.getDrawingCache());
		view.setDrawingCacheEnabled(false);

		FileOutputStream fos = null;
		try {
			fos = new FileOutputStream(getActivity().getApplicationContext().getFilesDir()
					+ Consts.PATH_RECENTLY_PICTURE);
			b.compress(Bitmap.CompressFormat.JPEG, 100, fos);
		} catch (Exception e) {
			Log.w(TAG, "Failed to save screenshot", e);
		} finally {
			if (fos != null) {
				try {
					fos.close();
				} catch (Exception ignored) {
					// best effort
				}
			}
		}

		// url
		FileWriter fw = null;
		try {
			fw = new FileWriter(getActivity().getApplicationContext().getFilesDir()
					+ Consts.PATH_RECENTLY_TEXT, false);
			fw.write(url);
		} catch (Exception e) {
			Log.w(TAG, "Failed to save screenshot URL", e);
		} finally {
			if (fw != null) {
				try {
					fw.close();
				} catch (Exception ignored) {
					// best effort
				}
			}
		}
	}

	private void showLoadingDialog() {
		if (getActivity() instanceof WebBrowserActivity) {
			((WebBrowserActivity) getActivity()).showLoadingDialog();
		}
	}

	private void dismissLoadingDialog() {
		if (getActivity() instanceof WebBrowserActivity) {
			((WebBrowserActivity) getActivity()).stopLoadingDialog();
		}
	}

	public void stopLoading() {
		if (webview != null) {
			webview.stopLoading();
		}
	}
}
package org.apache.lucene.index.memory; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.AtomicReader; import org.apache.lucene.index.AtomicReaderContext; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.Norm; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocsAndPositionsEnum; import org.apache.lucene.index.DocsEnum; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.index.Fields; import org.apache.lucene.index.OrdTermState; import org.apache.lucene.index.StoredFieldVisitor; import 
org.apache.lucene.index.TermState; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.memory.MemoryIndexNormDocValues.SingleValueSource; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.RAMDirectory; // for javadocs import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; // for javadocs import org.elasticsearch.common.io.FastStringReader; /** * High-performance single-document main memory Apache Lucene fulltext search index. * * <h4>Overview</h4> * * This class is a replacement/substitute for a large subset of * {@link RAMDirectory} functionality. It is designed to * enable maximum efficiency for on-the-fly matchmaking combining structured and * fuzzy fulltext search in realtime streaming applications such as Nux XQuery based XML * message queues, publish-subscribe systems for Blogs/newsfeeds, text chat, data acquisition and * distribution systems, application level routers, firewalls, classifiers, etc. * Rather than targeting fulltext search of infrequent queries over huge persistent * data archives (historic search), this class targets fulltext search of huge * numbers of queries over comparatively small transient realtime data (prospective * search). * For example as in * <pre class="prettyprint"> * float score = search(String text, Query query) * </pre> * <p> * Each instance can hold at most one Lucene "document", with a document containing * zero or more "fields", each field having a name and a fulltext value. 
The * fulltext value is tokenized (split and transformed) into zero or more index terms * (aka words) on <code>addField()</code>, according to the policy implemented by an * Analyzer. For example, Lucene analyzers can split on whitespace, normalize to lower case * for case insensitivity, ignore common terms with little discriminatory value such as "he", "in", "and" (stop * words), reduce the terms to their natural linguistic root form such as "fishing" * being reduced to "fish" (stemming), resolve synonyms/inflexions/thesauri * (upon indexing and/or querying), etc. For details, see * <a target="_blank" href="http://today.java.net/pub/a/today/2003/07/30/LuceneIntro.html">Lucene Analyzer Intro</a>. * <p> * Arbitrary Lucene queries can be run against this class - see <a target="_blank" * href="{@docRoot}/../queryparser/org/apache/lucene/queryparser/classic/package-summary.html#package_description"> * Lucene Query Syntax</a> * as well as <a target="_blank" * href="http://today.java.net/pub/a/today/2003/11/07/QueryParserRules.html">Query Parser Rules</a>. * Note that a Lucene query selects on the field names and associated (indexed) * tokenized terms, not on the original fulltext(s) - the latter are not stored * but rather thrown away immediately after tokenization. * <p> * For some interesting background information on search technology, see Bob Wyman's * <a target="_blank" * href="http://bobwyman.pubsub.com/main/2005/05/mary_hodder_poi.html">Prospective Search</a>, * Jim Gray's * <a target="_blank" href="http://www.acmqueue.org/modules.php?name=Content&pa=showpage&pid=293&page=4"> * A Call to Arms - Custom subscriptions</a>, and Tim Bray's * <a target="_blank" * href="http://www.tbray.org/ongoing/When/200x/2003/07/30/OnSearchTOC">On Search, the Series</a>. 
* * * <h4>Example Usage</h4> * * <pre class="prettyprint"> * Analyzer analyzer = new SimpleAnalyzer(version); * MemoryIndex index = new MemoryIndex(); * index.addField("content", "Readings about Salmons and other select Alaska fishing Manuals", analyzer); * index.addField("author", "Tales of James", analyzer); * QueryParser parser = new QueryParser(version, "content", analyzer); * float score = index.search(parser.parse("+author:james +salmon~ +fish* manual~")); * if (score &gt; 0.0f) { * System.out.println("it's a match"); * } else { * System.out.println("no match found"); * } * System.out.println("indexData=" + index.toString()); * </pre> * * * <h4>Example XQuery Usage</h4> * * <pre class="prettyprint"> * (: An XQuery that finds all books authored by James that have something to do with "salmon fishing manuals", sorted by relevance :) * declare namespace lucene = "java:nux.xom.pool.FullTextUtil"; * declare variable $query := "+salmon~ +fish* manual~"; (: any arbitrary Lucene query can go here :) * * for $book in /books/book[author="James" and lucene:match(abstract, $query) > 0.0] * let $score := lucene:match($book/abstract, $query) * order by $score descending * return $book * </pre> * * * <h4>No thread safety guarantees</h4> * * An instance can be queried multiple times with the same or different queries, * but an instance is not thread-safe. If desired use idioms such as: * <pre class="prettyprint"> * MemoryIndex index = ... * synchronized (index) { * // read and/or write index (i.e. add fields and/or query) * } * </pre> * * * <h4>Performance Notes</h4> * * Internally there's a new data structure geared towards efficient indexing * and searching, plus the necessary support code to seamlessly plug into the Lucene * framework. * <p> * This class performs very well for very small texts (e.g. 10 chars) * as well as for large texts (e.g. 10 MB) and everything in between. * Typically, it is about 10-100 times faster than <code>RAMDirectory</code>. 
* Note that <code>RAMDirectory</code> has particularly * large efficiency overheads for small to medium sized texts, both in time and space. * Indexing a field with N tokens takes O(N) in the best case, and O(N logN) in the worst * case. Memory consumption is probably larger than for <code>RAMDirectory</code>. * <p> * Example throughput of many simple term queries over a single MemoryIndex: * ~500000 queries/sec on a MacBook Pro, jdk 1.5.0_06, server VM. * As always, your mileage may vary. * <p> * If you're curious about * the whereabouts of bottlenecks, run java 1.5 with the non-perturbing '-server * -agentlib:hprof=cpu=samples,depth=10' flags, then study the trace log and * correlate its hotspot trailer with its call stack headers (see <a * target="_blank" * href="http://java.sun.com/developer/technicalArticles/Programming/HPROF.html"> * hprof tracing </a>). * */ // LUCENE MONITOR - Support adding same field several times // -- Added pos to Info // -- Use current info of existing field public class CustomMemoryIndex { /** info for each field: Map<String fieldName, Info field> */ private final HashMap<String,Info> fields = new HashMap<String,Info>(); /** fields sorted ascending by fieldName; lazily computed on demand */ private transient Map.Entry<String,Info>[] sortedFields; /** pos: positions[3*i], startOffset: positions[3*i +1], endOffset: positions[3*i +2] */ private final int stride; /** Could be made configurable; */ private static final float docBoost = 1.0f; private static final boolean DEBUG = false; private HashMap<String,FieldInfo> fieldInfos = new HashMap<String,FieldInfo>(); /** * Sorts term entries into ascending order; also works for * Arrays.binarySearch() and Arrays.sort() */ private static final Comparator<Object> termComparator = new Comparator<Object>() { @SuppressWarnings({"unchecked","rawtypes"}) public int compare(Object o1, Object o2) { if (o1 instanceof Map.Entry<?,?>) o1 = ((Map.Entry<?,?>) o1).getKey(); if (o2 instanceof Map.Entry<?,?>) 
o2 = ((Map.Entry<?,?>) o2).getKey(); if (o1 == o2) return 0; return ((Comparable) o1).compareTo((Comparable) o2); } }; /** * Constructs an empty instance. */ public CustomMemoryIndex() { this(false); } /** * Constructs an empty instance that can optionally store the start and end * character offset of each token term in the text. This can be useful for * highlighting of hit locations with the Lucene highlighter package. * Protected until the highlighter package matures, so that this can actually * be meaningfully integrated. * * @param storeOffsets * whether or not to store the start and end character offset of * each token term in the text */ protected CustomMemoryIndex(boolean storeOffsets) { this.stride = storeOffsets ? 3 : 1; } /** * Convenience method; Tokenizes the given field text and adds the resulting * terms to the index; Equivalent to adding an indexed non-keyword Lucene * {@link org.apache.lucene.document.Field} that is tokenized, not stored, * termVectorStored with positions (or termVectorStored with positions and offsets), * * @param fieldName * a name to be associated with the text * @param text * the text to tokenize and index. * @param analyzer * the analyzer to use for tokenization */ public void addField(String fieldName, String text, Analyzer analyzer) { if (fieldName == null) throw new IllegalArgumentException("fieldName must not be null"); if (text == null) throw new IllegalArgumentException("text must not be null"); if (analyzer == null) throw new IllegalArgumentException("analyzer must not be null"); TokenStream stream; try { stream = analyzer.tokenStream(fieldName, new FastStringReader(text)); } catch (IOException ex) { throw new RuntimeException(ex); } addField(fieldName, stream); } /** * Convenience method; Creates and returns a token stream that generates a * token for each keyword in the given collection, "as is", without any * transforming text analysis. 
The resulting token stream can be fed into * {@link #addField(String, TokenStream)}, perhaps wrapped into another * {@link org.apache.lucene.analysis.TokenFilter}, as desired. * * @param keywords * the keywords to generate tokens for * @return the corresponding token stream */ public <T> TokenStream keywordTokenStream(final Collection<T> keywords) { // TODO: deprecate & move this method into AnalyzerUtil? if (keywords == null) throw new IllegalArgumentException("keywords must not be null"); return new TokenStream() { private Iterator<T> iter = keywords.iterator(); private int start = 0; private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); @Override public boolean incrementToken() { if (!iter.hasNext()) return false; T obj = iter.next(); if (obj == null) throw new IllegalArgumentException("keyword must not be null"); String term = obj.toString(); clearAttributes(); termAtt.setEmpty().append(term); offsetAtt.setOffset(start, start+termAtt.length()); start += term.length() + 1; // separate words by 1 (blank) character return true; } }; } /** * Equivalent to <code>addField(fieldName, stream, 1.0f)</code>. * * @param fieldName * a name to be associated with the text * @param stream * the token stream to retrieve tokens from */ public void addField(String fieldName, TokenStream stream) { addField(fieldName, stream, 1.0f); } /** * Iterates over the given token stream and adds the resulting terms to the index; * Equivalent to adding a tokenized, indexed, termVectorStored, unstored, * Lucene {@link org.apache.lucene.document.Field}. * Finally closes the token stream. Note that untokenized keywords can be added with this method via * {@link #keywordTokenStream(Collection)}, the Lucene <code>KeywordTokenizer</code> or similar utilities. * * @param fieldName * a name to be associated with the text * @param stream * the token stream to retrieve tokens from. 
* @param boost * the boost factor for hits for this field * @see org.apache.lucene.document.Field#setBoost(float) */ public void addField(String fieldName, TokenStream stream, float boost) { try { if (fieldName == null) throw new IllegalArgumentException("fieldName must not be null"); if (stream == null) throw new IllegalArgumentException("token stream must not be null"); if (boost <= 0.0f) throw new IllegalArgumentException("boost factor must be greater than 0.0"); HashMap<BytesRef,ArrayIntList> terms = new HashMap<BytesRef,ArrayIntList>(); int numTokens = 0; int numOverlapTokens = 0; int pos = -1; if (!fieldInfos.containsKey(fieldName)) { fieldInfos.put(fieldName, new FieldInfo(fieldName, true, fieldInfos.size(), false, false, false, IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, null, null, null)); } // CHANGE if (fields.get(fieldName) != null) { Info info = fields.get(fieldName); terms = info.terms; numTokens = info.numTokens; numOverlapTokens = info.numOverlapTokens; pos = info.pos; } else { terms = new HashMap<BytesRef, ArrayIntList>(); } TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); PositionIncrementAttribute posIncrAttribute = stream.addAttribute(PositionIncrementAttribute.class); OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class); BytesRef ref = termAtt.getBytesRef(); stream.reset(); while (stream.incrementToken()) { termAtt.fillBytesRef(); if (ref.length == 0) continue; // nothing to do // if (DEBUG) System.err.println("token='" + term + "'"); numTokens++; final int posIncr = posIncrAttribute.getPositionIncrement(); if (posIncr == 0) numOverlapTokens++; pos += posIncr; ArrayIntList positions = terms.get(ref); if (positions == null) { // term not seen before positions = new ArrayIntList(stride); terms.put(BytesRef.deepCopyOf(ref), positions); } if (stride == 1) { positions.add(pos); } else { positions.add(pos, offsetAtt.startOffset(), offsetAtt.endOffset()); } } stream.end(); // ensure infos.numTokens 
> 0 invariant; needed for correct operation of terms() if (numTokens > 0) { boost = boost * docBoost; // see DocumentWriter.addDocument(...) fields.put(fieldName, new Info(terms, numTokens, numOverlapTokens, boost, pos)); sortedFields = null; // invalidate sorted view, if any } } catch (IOException e) { // can never happen throw new RuntimeException(e); } finally { try { if (stream != null) stream.close(); } catch (IOException e2) { throw new RuntimeException(e2); } } } /** * Creates and returns a searcher that can be used to execute arbitrary * Lucene queries and to collect the resulting query results as hits. * * @return a searcher */ public IndexSearcher createSearcher() { MemoryIndexReader reader = new MemoryIndexReader(); IndexSearcher searcher = new IndexSearcher(reader); // ensures no auto-close !! reader.setSearcher(searcher); // to later get hold of searcher.getSimilarity() return searcher; } /** * Convenience method that efficiently returns the relevance score by * matching this index against the given Lucene query expression. * * @param query * an arbitrary Lucene query to run against this index * @return the relevance score of the matchmaking; A number in the range * [0.0 .. 1.0], with 0.0 indicating no match. The higher the number * the better the match. 
 *
 */
public float search(Query query) {
  if (query == null)
    throw new IllegalArgumentException("query must not be null");

  IndexSearcher searcher = createSearcher();
  try {
    // Single-element array lets the anonymous Collector write the score of
    // the single virtual document (docID 0) back to this enclosing scope.
    final float[] scores = new float[1]; // inits to 0.0f (no match)
    searcher.search(query, new Collector() {
      private Scorer scorer;

      @Override
      public void collect(int doc) throws IOException {
        scores[0] = scorer.score();
      }

      @Override
      public void setScorer(Scorer scorer) {
        this.scorer = scorer;
      }

      @Override
      public boolean acceptsDocsOutOfOrder() {
        return true;
      }

      @Override
      public void setNextReader(AtomicReaderContext context) { }
    });
    float score = scores[0];
    return score;
  } catch (IOException e) { // can never happen (RAMDirectory)
    throw new RuntimeException(e);
  } finally {
    // searcher.close();
    /*
     * Note that it is harmless and important for good performance to
     * NOT close the index reader!!! This avoids all sorts of
     * unnecessary baggage and locking in the Lucene IndexReader
     * superclass, all of which is completely unnecessary for this main
     * memory index data structure without thread-safety claims.
     *
     * Wishing IndexReader would be an interface...
     *
     * Actually with the new tight createSearcher() API auto-closing is now
     * made impossible, hence searcher.close() would be harmless and also
     * would not degrade performance...
     */
  }
}

/**
 * Returns a reasonable approximation of the main memory [bytes] consumed by
 * this instance. Useful for smart memory sensitive caches/pools. Assumes
 * fieldNames are interned, whereas tokenized terms are memory-overlaid.
 *
 * @return the main memory consumption
 */
public int getMemorySize() {
  // for example usage in a smart cache see nux.xom.pool.Pool
  int PTR = VM.PTR;
  int INT = VM.INT;
  int size = 0;
  size += VM.sizeOfObject(2 * PTR + INT); // memory index
  if (sortedFields != null) size += VM.sizeOfObjectArray(sortedFields.length);

  size += VM.sizeOfHashMap(fields.size());
  for (Map.Entry<String, Info> entry : fields.entrySet()) { // for each Field Info
    Info info = entry.getValue();
    size += VM.sizeOfObject(2 * INT + 3 * PTR); // Info instance vars
    if (info.sortedTerms != null) size += VM.sizeOfObjectArray(info.sortedTerms.length);

    int len = info.terms.size();
    size += VM.sizeOfHashMap(len);
    Iterator<Map.Entry<BytesRef, ArrayIntList>> iter2 = info.terms.entrySet().iterator();
    while (--len >= 0) { // for each term
      Map.Entry<BytesRef, ArrayIntList> e = iter2.next();
      size += VM.sizeOfObject(PTR + 3 * INT); // assumes substring() memory overlay
      // size += STR + 2 * ((String) e.getKey()).length();
      ArrayIntList positions = e.getValue();
      size += VM.sizeOfArrayIntList(positions.size());
    }
  }
  return size;
}

// Number of logical occurrences stored for a term: the backing int list packs
// 'stride' ints per occurrence (position only, or position+startOffset+endOffset).
private int numPositions(ArrayIntList positions) {
  return positions.size() / stride;
}

/** sorts into ascending order (on demand), reusing memory along the way */
private void sortFields() {
  if (sortedFields == null) sortedFields = sort(fields);
}

/** returns a view of the given map's entries, sorted ascending by key */
private static <K,V> Map.Entry<K,V>[] sort(HashMap<K,V> map) {
  int size = map.size();
  @SuppressWarnings("unchecked")
  Map.Entry<K,V>[] entries = new Map.Entry[size];

  Iterator<Map.Entry<K,V>> iter = map.entrySet().iterator();
  for (int i=0; i < size; i++) {
    entries[i] = iter.next();
  }

  if (size > 1) ArrayUtil.quickSort(entries, termComparator);
  return entries;
}

/**
 * Returns a String representation of the index data for debugging purposes.
 *
 * @return the string representation
 */
@Override
public String toString() {
  StringBuilder result = new StringBuilder(256);
  sortFields();
  int sumChars = 0;
  int sumPositions = 0;
  int sumTerms = 0;

  for (int i=0; i < sortedFields.length; i++) {
    Map.Entry<String,Info> entry = sortedFields[i];
    String fieldName = entry.getKey();
    Info info = entry.getValue();
    info.sortTerms();
    result.append(fieldName + ":\n");

    int numChars = 0;
    int numPositions = 0;
    for (int j=0; j < info.sortedTerms.length; j++) {
      Map.Entry<BytesRef,ArrayIntList> e = info.sortedTerms[j];
      BytesRef term = e.getKey();
      ArrayIntList positions = e.getValue();
      result.append("\t'" + term + "':" + numPositions(positions) + ":");
      result.append(positions.toString(stride)); // ignore offsets
      result.append("\n");
      numPositions += numPositions(positions);
      numChars += term.length;
    }

    // per-field summary line
    result.append("\tterms=" + info.sortedTerms.length);
    result.append(", positions=" + numPositions);
    result.append(", Kchars=" + (numChars / 1000.0f));
    result.append("\n");
    sumPositions += numPositions;
    sumChars += numChars;
    sumTerms += info.sortedTerms.length;
  }

  // whole-index summary line
  result.append("\nfields=" + sortedFields.length);
  result.append(", terms=" + sumTerms);
  result.append(", positions=" + sumPositions);
  result.append(", Kchars=" + (sumChars / 1000.0f));
  return result.toString();
}

/**
 * Index data structure for a field; Contains the tokenized term texts and
 * their positions.
 */
private static final class Info {

  /**
   * Term strings and their positions for this field: Map <String
   * termText, ArrayIntList positions>
   */
  private final HashMap<BytesRef,ArrayIntList> terms;

  /** Terms sorted ascending by term text; computed on demand */
  private transient Map.Entry<BytesRef,ArrayIntList>[] sortedTerms;

  /** Number of added tokens for this field */
  private final int numTokens;

  /** Number of overlapping tokens for this field */
  private final int numOverlapTokens;

  /** Boost factor for hits for this field */
  private final float boost;

  // Position of the last token added for this field; carried over so a later
  // addField() call for the same field name resumes position accounting.
  private final int pos;

  // Sum of the raw int-list sizes over all terms.
  // NOTE(review): this sums positions.size(), which packs 'stride' ints per
  // occurrence — with stride == 3 it is 3x the occurrence count, while
  // MemoryDocsAndPositionsEnum.freq() divides by stride. Possible
  // inconsistency; confirm against the Lucene Terms.getSumTotalTermFreq contract.
  private final long sumTotalTermFreq;

  public Info(HashMap<BytesRef, ArrayIntList> terms, int numTokens, int numOverlapTokens, float boost, int pos) {
    this.terms = terms;
    this.numTokens = numTokens;
    this.numOverlapTokens = numOverlapTokens;
    this.boost = boost;
    this.pos = pos;
    long sum = 0;
    for(Map.Entry<BytesRef,ArrayIntList> ent : terms.entrySet()) {
      sum += ent.getValue().size();
    }
    sumTotalTermFreq = sum;
  }

  public long getSumTotalTermFreq() {
    return sumTotalTermFreq;
  }

  /**
   * Sorts hashed terms into ascending order, reusing memory along the
   * way. Note that sorting is lazily delayed until required (often it's
   * not required at all). If a sorted view is required then hashing +
   * sort + binary search is still faster and smaller than TreeMap usage
   * (which would be an alternative and somewhat more elegant approach,
   * apart from more sophisticated Tries / prefix trees).
   */
  public void sortTerms() {
    if (sortedTerms == null) sortedTerms = sort(terms);
  }

  public float getBoost() {
    return boost;
  }
}

///////////////////////////////////////////////////////////////////////////////
// Nested classes:
///////////////////////////////////////////////////////////////////////////////
/**
 * Efficient resizable auto-expanding list holding <code>int</code> elements;
 * implemented with arrays.
*/ private static final class ArrayIntList { private int[] elements; private int size = 0; private static final long serialVersionUID = 2282195016849084649L; public ArrayIntList() { this(10); } public ArrayIntList(int initialCapacity) { elements = new int[initialCapacity]; } public void add(int elem) { if (size == elements.length) ensureCapacity(size + 1); elements[size++] = elem; } public void add(int pos, int start, int end) { if (size + 3 > elements.length) ensureCapacity(size + 3); elements[size] = pos; elements[size+1] = start; elements[size+2] = end; size += 3; } public int get(int index) { if (index >= size) throwIndex(index); return elements[index]; } public int size() { return size; } private void ensureCapacity(int minCapacity) { int newCapacity = Math.max(minCapacity, (elements.length * 3) / 2 + 1); int[] newElements = new int[newCapacity]; System.arraycopy(elements, 0, newElements, 0, size); elements = newElements; } private void throwIndex(int index) { throw new IndexOutOfBoundsException("index: " + index + ", size: " + size); } /** returns the first few positions (without offsets); debug only */ public String toString(int stride) { int s = size() / stride; int len = Math.min(10, s); // avoid printing huge lists StringBuilder buf = new StringBuilder(4*len); buf.append("["); for (int i = 0; i < len; i++) { buf.append(get(i*stride)); if (i < len-1) buf.append(", "); } if (len != s) buf.append(", ..."); // and some more... buf.append("]"); return buf.toString(); } } /////////////////////////////////////////////////////////////////////////////// // Nested classes: /////////////////////////////////////////////////////////////////////////////// /** * Search support for Lucene framework integration; implements all methods * required by the Lucene IndexReader contracts. 
 */
final class MemoryIndexReader extends AtomicReader {

  private IndexSearcher searcher; // needed to find searcher.getSimilarity()

  private MemoryIndexReader() {
    super(); // avoid as much superclass baggage as possible
  }

  private Info getInfo(String fieldName) {
    return fields.get(fieldName);
  }

  private Info getInfo(int pos) {
    return sortedFields[pos].getValue();
  }

  @Override
  public Bits getLiveDocs() {
    // the single virtual document is never deleted
    return null;
  }

  @Override
  public FieldInfos getFieldInfos() {
    return new FieldInfos(fieldInfos.values().toArray(new FieldInfo[fieldInfos.size()]));
  }

  /** Fields view over the (lazily sorted) per-field Info entries. */
  private class MemoryFields extends Fields {
    @Override
    public Iterator<String> iterator() {
      return new Iterator<String>() {
        int upto = -1;

        @Override
        public String next() {
          upto++;
          if (upto >= sortedFields.length) {
            throw new NoSuchElementException();
          }
          return sortedFields[upto].getKey();
        }

        @Override
        public boolean hasNext() {
          return upto+1 < sortedFields.length;
        }

        @Override
        public void remove() {
          throw new UnsupportedOperationException();
        }
      };
    }

    @Override
    public Terms terms(final String field) {
      int i = Arrays.binarySearch(sortedFields, field, termComparator);
      if (i < 0) {
        return null;
      } else {
        final Info info = getInfo(i);
        info.sortTerms();

        return new Terms() {
          @Override
          public TermsEnum iterator(TermsEnum reuse) {
            return new MemoryTermsEnum(info);
          }

          @Override
          public Comparator<BytesRef> getComparator() {
            return BytesRef.getUTF8SortedAsUnicodeComparator();
          }

          @Override
          public long size() {
            return info.sortedTerms.length;
          }

          @Override
          public long getSumTotalTermFreq() {
            return info.getSumTotalTermFreq();
          }

          @Override
          public long getSumDocFreq() {
            // each term has df=1
            return info.sortedTerms.length;
          }

          @Override
          public int getDocCount() {
            return info.sortedTerms.length > 0 ? 1 : 0;
          }

          @Override
          public boolean hasOffsets() {
            return stride == 3;
          }

          @Override
          public boolean hasPositions() {
            return true;
          }

          @Override
          public boolean hasPayloads() {
            return false;
          }
        };
      }
    }

    @Override
    public int size() {
      return sortedFields.length;
    }
  }

  @Override
  public Fields fields() {
    sortFields();
    return new MemoryFields();
  }

  /** TermsEnum over one field's sorted term array; br holds the current term bytes. */
  private class MemoryTermsEnum extends TermsEnum {
    private final Info info;
    private final BytesRef br = new BytesRef();
    int termUpto = -1;

    public MemoryTermsEnum(Info info) {
      this.info = info;
      info.sortTerms();
    }

    @Override
    public boolean seekExact(BytesRef text, boolean useCache) {
      termUpto = Arrays.binarySearch(info.sortedTerms, text, termComparator);
      if (termUpto >= 0) {
        br.copyBytes(info.sortedTerms[termUpto].getKey());
        return true;
      } else {
        return false;
      }
    }

    @Override
    public SeekStatus seekCeil(BytesRef text, boolean useCache) {
      termUpto = Arrays.binarySearch(info.sortedTerms, text, termComparator);
      if (termUpto < 0) { // not found; choose successor
        termUpto = -termUpto -1;
        if (termUpto >= info.sortedTerms.length) {
          return SeekStatus.END;
        } else {
          br.copyBytes(info.sortedTerms[termUpto].getKey());
          return SeekStatus.NOT_FOUND;
        }
      } else {
        br.copyBytes(info.sortedTerms[termUpto].getKey());
        return SeekStatus.FOUND;
      }
    }

    @Override
    public void seekExact(long ord) {
      assert ord < info.sortedTerms.length;
      termUpto = (int) ord;
    }

    @Override
    public BytesRef next() {
      termUpto++;
      if (termUpto >= info.sortedTerms.length) {
        return null;
      } else {
        br.copyBytes(info.sortedTerms[termUpto].getKey());
        return br;
      }
    }

    @Override
    public BytesRef term() {
      return br;
    }

    @Override
    public long ord() {
      return termUpto;
    }

    @Override
    public int docFreq() {
      // there is only one (virtual) document
      return 1;
    }

    @Override
    public long totalTermFreq() {
      return info.sortedTerms[termUpto].getValue().size();
    }

    @Override
    public DocsEnum docs(Bits liveDocs, DocsEnum reuse, int flags) {
      if (reuse == null || !(reuse instanceof MemoryDocsEnum)) {
        reuse = new MemoryDocsEnum();
      }
      return ((MemoryDocsEnum) reuse).reset(liveDocs, info.sortedTerms[termUpto].getValue());
    }

    @Override
    public DocsAndPositionsEnum docsAndPositions(Bits liveDocs, DocsAndPositionsEnum reuse, int flags) {
      if (reuse == null || !(reuse instanceof MemoryDocsAndPositionsEnum)) {
        reuse = new MemoryDocsAndPositionsEnum();
      }
      return ((MemoryDocsAndPositionsEnum) reuse).reset(liveDocs, info.sortedTerms[termUpto].getValue());
    }

    @Override
    public Comparator<BytesRef> getComparator() {
      return BytesRef.getUTF8SortedAsUnicodeComparator();
    }

    @Override
    public void seekExact(BytesRef term, TermState state) throws IOException {
      assert state != null;
      this.seekExact(((OrdTermState)state).ord);
    }

    @Override
    public TermState termState() throws IOException {
      OrdTermState ts = new OrdTermState();
      ts.ord = termUpto;
      return ts;
    }
  }

  /** DocsEnum over the single virtual document (docID 0). */
  private class MemoryDocsEnum extends DocsEnum {
    private ArrayIntList positions;
    private boolean hasNext;
    private Bits liveDocs;
    private int doc = -1;

    public DocsEnum reset(Bits liveDocs, ArrayIntList positions) {
      this.liveDocs = liveDocs;
      this.positions = positions;
      hasNext = true;
      doc = -1;
      return this;
    }

    @Override
    public int docID() {
      return doc;
    }

    @Override
    public int nextDoc() {
      if (hasNext && (liveDocs == null || liveDocs.get(0))) {
        hasNext = false;
        return doc = 0;
      } else {
        return doc = NO_MORE_DOCS;
      }
    }

    @Override
    public int advance(int target) {
      return nextDoc();
    }

    @Override
    public int freq() throws IOException {
      return positions.size();
    }
  }

  /** DocsAndPositionsEnum over the single virtual document; unpacks stride-packed ints. */
  private class MemoryDocsAndPositionsEnum extends DocsAndPositionsEnum {
    private ArrayIntList positions;
    private int posUpto;
    private boolean hasNext;
    private Bits liveDocs;
    private int doc = -1;

    public DocsAndPositionsEnum reset(Bits liveDocs, ArrayIntList positions) {
      this.liveDocs = liveDocs;
      this.positions = positions;
      posUpto = 0;
      hasNext = true;
      doc = -1;
      return this;
    }

    @Override
    public int docID() {
      return doc;
    }

    @Override
    public int nextDoc() {
      if (hasNext && (liveDocs == null || liveDocs.get(0))) {
        hasNext = false;
        return doc = 0;
      } else {
        return doc = NO_MORE_DOCS;
      }
    }

    @Override
    public int advance(int target) {
      return nextDoc();
    }

    @Override
    public int freq() throws IOException {
      return positions.size() / stride;
    }

    @Override
    public int nextPosition() {
      return positions.get(posUpto++ * stride);
    }

    @Override
    public int startOffset() {
      // offsets only stored when stride == 3
      return stride == 1 ? -1 : positions.get((posUpto - 1) * stride + 1);
    }

    @Override
    public int endOffset() {
      return stride == 1 ? -1 : positions.get((posUpto - 1) * stride + 2);
    }

    @Override
    public BytesRef getPayload() {
      return null;
    }
  }

  @Override
  public Fields getTermVectors(int docID) {
    if (docID == 0) {
      return fields();
    } else {
      return null;
    }
  }

  private Similarity getSimilarity() {
    if (searcher != null) return searcher.getSimilarity();
    return IndexSearcher.getDefaultSimilarity();
  }

  private void setSearcher(IndexSearcher searcher) {
    this.searcher = searcher;
  }

  @Override
  public int numDocs() {
    if (DEBUG) System.err.println("MemoryIndexReader.numDocs");
    return fields.size() > 0 ? 1 : 0;
  }

  @Override
  public int maxDoc() {
    if (DEBUG) System.err.println("MemoryIndexReader.maxDoc");
    return 1;
  }

  @Override
  public void document(int docID, StoredFieldVisitor visitor) {
    if (DEBUG) System.err.println("MemoryIndexReader.document");
    // no-op: there are no stored fields
  }

  @Override
  public boolean hasDeletions() {
    if (DEBUG) System.err.println("MemoryIndexReader.hasDeletions");
    return false;
  }

  @Override
  protected void doClose() {
    if (DEBUG) System.err.println("MemoryIndexReader.doClose");
  }

  @Override
  public DocValues docValues(String field) {
    return null;
  }

  /** performance hack: cache norms to avoid repeated expensive calculations */
  private DocValues cachedNormValues;
  private String cachedFieldName;
  private Similarity cachedSimilarity;

  @Override
  public DocValues normValues(String field) {
    DocValues norms = cachedNormValues;
    Similarity sim = getSimilarity();
    if (!field.equals(cachedFieldName) || sim != cachedSimilarity) { // not cached?
      Info info = getInfo(field);
      // unknown field: compute the norm for an empty field (boost 1.0)
      int numTokens = info != null ? info.numTokens : 0;
      int numOverlapTokens = info != null ? info.numOverlapTokens : 0;
      float boost = info != null ? info.getBoost() : 1.0f;
      FieldInvertState invertState = new FieldInvertState(field, 0, numTokens, numOverlapTokens, 0, boost);
      Norm norm = new Norm();
      sim.computeNorm(invertState, norm);
      SingleValueSource singleByteSource = new SingleValueSource(norm);
      norms = new MemoryIndexNormDocValues(singleByteSource);
      // cache it for future reuse
      cachedNormValues = norms;
      cachedFieldName = field;
      cachedSimilarity = sim;
      if (DEBUG) System.err.println("MemoryIndexReader.norms: " + field + ":" + norm + ":" + numTokens);
    }
    return norms;
  }
}

///////////////////////////////////////////////////////////////////////////////
// Nested classes:
///////////////////////////////////////////////////////////////////////////////
/** Rough per-object heap size estimates for the current VM (32 vs 64 bit). */
private static final class VM {

  public static final int PTR = Constants.JRE_IS_64BIT ? 8 : 4;

  // bytes occupied by primitive data types
  public static final int BOOLEAN = 1;
  public static final int BYTE = 1;
  public static final int CHAR = 2;
  public static final int SHORT = 2;
  public static final int INT = 4;
  public static final int LONG = 8;
  public static final int FLOAT = 4;
  public static final int DOUBLE = 8;

  private static final int LOG_PTR = (int) Math.round(log2(PTR));

  /**
   * Object header of any heap allocated Java object.
   * ptr to class, info for monitor, gc, hash, etc.
   */
  private static final int OBJECT_HEADER = 2 * PTR;

  private VM() { } // not instantiable

  // assumes n > 0
  // 64 bit VM:
  //   0     --> 0*PTR
  //   1..8  --> 1*PTR
  //   9..16 --> 2*PTR
  private static int sizeOf(int n) {
    return (((n - 1) >> LOG_PTR) + 1) << LOG_PTR;
  }

  public static int sizeOfObject(int n) {
    return sizeOf(OBJECT_HEADER + n);
  }

  public static int sizeOfObjectArray(int len) {
    return sizeOfObject(INT + PTR * len);
  }

  public static int sizeOfCharArray(int len) {
    return sizeOfObject(INT + CHAR * len);
  }

  public static int sizeOfIntArray(int len) {
    return sizeOfObject(INT + INT * len);
  }

  public static int sizeOfString(int len) {
    return sizeOfObject(3 * INT + PTR) + sizeOfCharArray(len);
  }

  public static int sizeOfHashMap(int len) {
    return sizeOfObject(4 * PTR + 4 * INT) + sizeOfObjectArray(len)
        + len * sizeOfObject(3 * PTR + INT); // entries
  }

  // note: does not include referenced objects
  public static int sizeOfArrayList(int len) {
    return sizeOfObject(PTR + 2 * INT) + sizeOfObjectArray(len);
  }

  public static int sizeOfArrayIntList(int len) {
    return sizeOfObject(PTR + INT) + sizeOfIntArray(len);
  }

  /**
   * logarithm to the base 2. Example: log2(4) == 2, log2(8) == 3
   */
  private static double log2(double value) {
    return Math.log(value) / Math.log(2);
  }
}
}
package org.hl7.fhir.instance.model; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
// Generated on Tue, Jul 21, 2015 10:37-0400 for FHIR v0.5.0

import java.util.*;

import org.hl7.fhir.instance.model.Enumerations.*;
import org.hl7.fhir.instance.model.annotations.ResourceDef;
import org.hl7.fhir.instance.model.annotations.SearchParamDefinition;
import org.hl7.fhir.instance.model.annotations.Child;
import org.hl7.fhir.instance.model.annotations.Description;
import org.hl7.fhir.instance.model.annotations.Block;
import org.hl7.fhir.instance.model.api.*;

/**
 * This resource provides the status of the payment for goods and services rendered, and the request and response resource references.
 */
@ResourceDef(name="PaymentNotice", profile="http://hl7.org/fhir/Profile/PaymentNotice")
public class PaymentNotice extends DomainResource {

    /**
     * The Response Business Identifier.
     */
    @Child(name = "identifier", type = {Identifier.class}, order=0, min=0, max=Child.MAX_UNLIMITED)
    @Description(shortDefinition="Business Identifier", formalDefinition="The Response Business Identifier." )
    protected List<Identifier> identifier;

    /**
     * The version of the style of resource contents. This should be mapped to the allowable profiles for this and supporting resources.
     */
    @Child(name = "ruleset", type = {Coding.class}, order=1, min=0, max=1)
    @Description(shortDefinition="Resource version", formalDefinition="The version of the style of resource contents. This should be mapped to the allowable profiles for this and supporting resources." )
    protected Coding ruleset;

    /**
     * The style (standard) and version of the original material which was converted into this resource.
     */
    @Child(name = "originalRuleset", type = {Coding.class}, order=2, min=0, max=1)
    @Description(shortDefinition="Original version", formalDefinition="The style (standard) and version of the original material which was converted into this resource." )
    protected Coding originalRuleset;

    /**
     * The date when this resource was created.
     */
    @Child(name = "created", type = {DateTimeType.class}, order=3, min=0, max=1)
    @Description(shortDefinition="Creation date", formalDefinition="The date when this resource was created." )
    protected DateTimeType created;

    /**
     * The Insurer who is target of the request.
     */
    @Child(name = "target", type = {Organization.class}, order=4, min=0, max=1)
    @Description(shortDefinition="Insurer or Regulatory body", formalDefinition="The Insurer who is target of the request." )
    protected Reference target;

    /**
     * The actual object that is the target of the reference (The Insurer who is target of the request.)
     */
    protected Organization targetTarget;

    /**
     * The practitioner who is responsible for the services rendered to the patient.
     */
    @Child(name = "provider", type = {Practitioner.class}, order=5, min=0, max=1)
    @Description(shortDefinition="Responsible practitioner", formalDefinition="The practitioner who is responsible for the services rendered to the patient." )
    protected Reference provider;

    /**
     * The actual object that is the target of the reference (The practitioner who is responsible for the services rendered to the patient.)
     */
    protected Practitioner providerTarget;

    /**
     * The organization which is responsible for the services rendered to the patient.
     */
    @Child(name = "organization", type = {Organization.class}, order=6, min=0, max=1)
    @Description(shortDefinition="Responsible organization", formalDefinition="The organization which is responsible for the services rendered to the patient." )
    protected Reference organization;

    /**
     * The actual object that is the target of the reference (The organization which is responsible for the services rendered to the patient.)
     */
    protected Organization organizationTarget;

    /**
     * Reference of resource to reverse.
     */
    @Child(name = "request", type = {}, order=7, min=0, max=1)
    @Description(shortDefinition="Request reference", formalDefinition="Reference of resource to reverse." )
    protected Reference request;

    /**
     * The actual object that is the target of the reference (Reference of resource to reverse.)
     */
    protected Resource requestTarget;

    /**
     * Reference of response to resource to reverse.
     */
    @Child(name = "response", type = {}, order=8, min=0, max=1)
    @Description(shortDefinition="Response reference", formalDefinition="Reference of response to resource to reverse." )
    protected Reference response;

    /**
     * The actual object that is the target of the reference (Reference of response to resource to reverse.)
     */
    protected Resource responseTarget;

    /**
     * The payment status, typically paid: payment sent, cleared: payment received.
     */
    @Child(name = "paymentStatus", type = {Coding.class}, order=9, min=1, max=1)
    @Description(shortDefinition="Status of the payment", formalDefinition="The payment status, typically paid: payment sent, cleared: payment received." )
    protected Coding paymentStatus;

    private static final long serialVersionUID = -394826458L;

  /*
   * Constructor
   */
    public PaymentNotice() {
      super();
    }

  /*
   * Constructor (paymentStatus is the only mandatory element: min=1 above)
   */
    public PaymentNotice(Coding paymentStatus) {
      super();
      this.paymentStatus = paymentStatus;
    }

    /**
     * @return {@link #identifier} (The Response Business Identifier.)
     */
    public List<Identifier> getIdentifier() { 
      if (this.identifier == null)
        this.identifier = new ArrayList<Identifier>();
      return this.identifier;
    }

    // true only if at least one non-empty identifier is present
    public boolean hasIdentifier() { 
      if (this.identifier == null)
        return false;
      for (Identifier item : this.identifier)
        if (!item.isEmpty())
          return true;
      return false;
    }

    /**
     * @return {@link #identifier} (The Response Business Identifier.)
     */
    // syntactic sugar
    public Identifier addIdentifier() { //3
      Identifier t = new Identifier();
      if (this.identifier == null)
        this.identifier = new ArrayList<Identifier>();
      this.identifier.add(t);
      return t;
    }

    // syntactic sugar
    public PaymentNotice addIdentifier(Identifier t) { //3
      if (t == null)
        return this;
      if (this.identifier == null)
        this.identifier = new ArrayList<Identifier>();
      this.identifier.add(t);
      return this;
    }

    /**
     * @return {@link #ruleset} (The version of the style of resource contents. This should be mapped to the allowable profiles for this and supporting resources.)
     */
    public Coding getRuleset() { 
      // dangling else binds to the inner if: auto-create only when no error configured
      if (this.ruleset == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.ruleset");
        else if (Configuration.doAutoCreate())
          this.ruleset = new Coding(); // cc
      return this.ruleset;
    }

    public boolean hasRuleset() { 
      return this.ruleset != null && !this.ruleset.isEmpty();
    }

    /**
     * @param value {@link #ruleset} (The version of the style of resource contents. This should be mapped to the allowable profiles for this and supporting resources.)
     */
    public PaymentNotice setRuleset(Coding value) { 
      this.ruleset = value;
      return this;
    }

    /**
     * @return {@link #originalRuleset} (The style (standard) and version of the original material which was converted into this resource.)
     */
    public Coding getOriginalRuleset() { 
      if (this.originalRuleset == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.originalRuleset");
        else if (Configuration.doAutoCreate())
          this.originalRuleset = new Coding(); // cc
      return this.originalRuleset;
    }

    public boolean hasOriginalRuleset() { 
      return this.originalRuleset != null && !this.originalRuleset.isEmpty();
    }

    /**
     * @param value {@link #originalRuleset} (The style (standard) and version of the original material which was converted into this resource.)
     */
    public PaymentNotice setOriginalRuleset(Coding value) { 
      this.originalRuleset = value;
      return this;
    }

    /**
     * @return {@link #created} (The date when this resource was created.). This is the underlying object with id, value and extensions. The accessor "getCreated" gives direct access to the value
     */
    public DateTimeType getCreatedElement() { 
      if (this.created == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.created");
        else if (Configuration.doAutoCreate())
          this.created = new DateTimeType(); // bb
      return this.created;
    }

    public boolean hasCreatedElement() { 
      return this.created != null && !this.created.isEmpty();
    }

    public boolean hasCreated() { 
      return this.created != null && !this.created.isEmpty();
    }

    /**
     * @param value {@link #created} (The date when this resource was created.). This is the underlying object with id, value and extensions. The accessor "getCreated" gives direct access to the value
     */
    public PaymentNotice setCreatedElement(DateTimeType value) { 
      this.created = value;
      return this;
    }

    /**
     * @return The date when this resource was created.
     */
    public Date getCreated() { 
      return this.created == null ? null : this.created.getValue();
    }

    /**
     * @param value The date when this resource was created.
     */
    public PaymentNotice setCreated(Date value) { 
      if (value == null)
        this.created = null;
      else {
        if (this.created == null)
          this.created = new DateTimeType();
        this.created.setValue(value);
      }
      return this;
    }

    /**
     * @return {@link #target} (The Insurer who is target of the request.)
     */
    public Reference getTarget() { 
      if (this.target == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.target");
        else if (Configuration.doAutoCreate())
          this.target = new Reference(); // cc
      return this.target;
    }

    public boolean hasTarget() { 
      return this.target != null && !this.target.isEmpty();
    }

    /**
     * @param value {@link #target} (The Insurer who is target of the request.)
*/ public PaymentNotice setTarget(Reference value) { this.target = value; return this; } /** * @return {@link #target} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The Insurer who is target of the request.) */ public Organization getTargetTarget() { if (this.targetTarget == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create PaymentNotice.target"); else if (Configuration.doAutoCreate()) this.targetTarget = new Organization(); // aa return this.targetTarget; } /** * @param value {@link #target} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The Insurer who is target of the request.) */ public PaymentNotice setTargetTarget(Organization value) { this.targetTarget = value; return this; } /** * @return {@link #provider} (The practitioner who is responsible for the services rendered to the patient.) */ public Reference getProvider() { if (this.provider == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create PaymentNotice.provider"); else if (Configuration.doAutoCreate()) this.provider = new Reference(); // cc return this.provider; } public boolean hasProvider() { return this.provider != null && !this.provider.isEmpty(); } /** * @param value {@link #provider} (The practitioner who is responsible for the services rendered to the patient.) */ public PaymentNotice setProvider(Reference value) { this.provider = value; return this; } /** * @return {@link #provider} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The practitioner who is responsible for the services rendered to the patient.) 
*/
    public Practitioner getProviderTarget() {
      if (this.providerTarget == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.provider");
        else if (Configuration.doAutoCreate())
          this.providerTarget = new Practitioner(); // aa (generator marker: resolved-resource cache)
      return this.providerTarget;
    }

    /**
     * @param value {@link #provider} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The practitioner who is responsible for the services rendered to the patient.)
     */
    public PaymentNotice setProviderTarget(Practitioner value) {
      this.providerTarget = value;
      return this;
    }

    /**
     * @return {@link #organization} (The organization which is responsible for the services rendered to the patient.)
     */
    public Reference getOrganization() {
      if (this.organization == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.organization");
        else if (Configuration.doAutoCreate())
          this.organization = new Reference(); // cc (generator marker: complex element)
      return this.organization;
    }

    public boolean hasOrganization() {
      return this.organization != null && !this.organization.isEmpty();
    }

    /**
     * @param value {@link #organization} (The organization which is responsible for the services rendered to the patient.)
     */
    public PaymentNotice setOrganization(Reference value) {
      this.organization = value;
      return this;
    }

    /**
     * @return {@link #organization} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The organization which is responsible for the services rendered to the patient.)
*/
    public Organization getOrganizationTarget() {
      if (this.organizationTarget == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.organization");
        else if (Configuration.doAutoCreate())
          this.organizationTarget = new Organization(); // aa (generator marker: resolved-resource cache)
      return this.organizationTarget;
    }

    /**
     * @param value {@link #organization} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The organization which is responsible for the services rendered to the patient.)
     */
    public PaymentNotice setOrganizationTarget(Organization value) {
      this.organizationTarget = value;
      return this;
    }

    /**
     * @return {@link #request} (Reference of resource to reverse.)
     */
    public Reference getRequest() {
      if (this.request == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.request");
        else if (Configuration.doAutoCreate())
          this.request = new Reference(); // cc (generator marker: complex element)
      return this.request;
    }

    public boolean hasRequest() {
      return this.request != null && !this.request.isEmpty();
    }

    /**
     * @param value {@link #request} (Reference of resource to reverse.)
     */
    public PaymentNotice setRequest(Reference value) {
      this.request = value;
      return this;
    }

    /**
     * @return {@link #request} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (Reference of resource to reverse.)
     */
    public Resource getRequestTarget() {
      // Reference(Any): the target may be any resource type, so no auto-create here.
      return this.requestTarget;
    }

    /**
     * @param value {@link #request} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (Reference of resource to reverse.)
*/
    public PaymentNotice setRequestTarget(Resource value) {
      this.requestTarget = value;
      return this;
    }

    /**
     * @return {@link #response} (Reference of response to resource to reverse.)
     */
    public Reference getResponse() {
      if (this.response == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.response");
        else if (Configuration.doAutoCreate())
          this.response = new Reference(); // cc (generator marker: complex element)
      return this.response;
    }

    public boolean hasResponse() {
      return this.response != null && !this.response.isEmpty();
    }

    /**
     * @param value {@link #response} (Reference of response to resource to reverse.)
     */
    public PaymentNotice setResponse(Reference value) {
      this.response = value;
      return this;
    }

    /**
     * @return {@link #response} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (Reference of response to resource to reverse.)
     */
    public Resource getResponseTarget() {
      // Reference(Any): the target may be any resource type, so no auto-create here.
      return this.responseTarget;
    }

    /**
     * @param value {@link #response} The actual object that is the target of the reference. The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (Reference of response to resource to reverse.)
     */
    public PaymentNotice setResponseTarget(Resource value) {
      this.responseTarget = value;
      return this;
    }

    /**
     * @return {@link #paymentStatus} (The payment status, typically paid: payment sent, cleared: payment received.)
*/
    public Coding getPaymentStatus() {
      if (this.paymentStatus == null)
        if (Configuration.errorOnAutoCreate())
          throw new Error("Attempt to auto-create PaymentNotice.paymentStatus");
        else if (Configuration.doAutoCreate())
          this.paymentStatus = new Coding(); // cc (generator marker: complex element)
      return this.paymentStatus;
    }

    public boolean hasPaymentStatus() {
      return this.paymentStatus != null && !this.paymentStatus.isEmpty();
    }

    /**
     * @param value {@link #paymentStatus} (The payment status, typically paid: payment sent, cleared: payment received.)
     */
    public PaymentNotice setPaymentStatus(Coding value) {
      this.paymentStatus = value;
      return this;
    }

    // Describes every child element of this resource (name, FHIR type, cardinality)
    // for the reflective property machinery; order mirrors the resource definition.
    protected void listChildren(List<Property> childrenList) {
      super.listChildren(childrenList);
      childrenList.add(new Property("identifier", "Identifier", "The Response Business Identifier.", 0, java.lang.Integer.MAX_VALUE, identifier));
      childrenList.add(new Property("ruleset", "Coding", "The version of the style of resource contents. This should be mapped to the allowable profiles for this and supporting resources.", 0, java.lang.Integer.MAX_VALUE, ruleset));
      childrenList.add(new Property("originalRuleset", "Coding", "The style (standard) and version of the original material which was converted into this resource.", 0, java.lang.Integer.MAX_VALUE, originalRuleset));
      childrenList.add(new Property("created", "dateTime", "The date when this resource was created.", 0, java.lang.Integer.MAX_VALUE, created));
      childrenList.add(new Property("target", "Reference(Organization)", "The Insurer who is target of the request.", 0, java.lang.Integer.MAX_VALUE, target));
      childrenList.add(new Property("provider", "Reference(Practitioner)", "The practitioner who is responsible for the services rendered to the patient.", 0, java.lang.Integer.MAX_VALUE, provider));
      childrenList.add(new Property("organization", "Reference(Organization)", "The organization which is responsible for the services rendered to the patient.", 0, java.lang.Integer.MAX_VALUE, organization));
      childrenList.add(new Property("request", "Reference(Any)", "Reference of resource to reverse.", 0, java.lang.Integer.MAX_VALUE, request));
      childrenList.add(new Property("response", "Reference(Any)", "Reference of response to resource to reverse.", 0, java.lang.Integer.MAX_VALUE, response));
      childrenList.add(new Property("paymentStatus", "Coding", "The payment status, typically paid: payment sent, cleared: payment received.", 0, java.lang.Integer.MAX_VALUE, paymentStatus));
    }

    // Deep copy: element values are copied; resolved-resource caches (xxxTarget
    // fields) are intentionally NOT copied, matching the generator's convention.
    public PaymentNotice copy() {
      PaymentNotice dst = new PaymentNotice();
      copyValues(dst);
      if (identifier != null) {
        dst.identifier = new ArrayList<Identifier>();
        for (Identifier i : identifier)
          dst.identifier.add(i.copy());
      };
      dst.ruleset = ruleset == null ? null : ruleset.copy();
      dst.originalRuleset = originalRuleset == null ? null : originalRuleset.copy();
      dst.created = created == null ? null : created.copy();
      dst.target = target == null ? null : target.copy();
      dst.provider = provider == null ? null : provider.copy();
      dst.organization = organization == null ? null : organization.copy();
      dst.request = request == null ? null : request.copy();
      dst.response = response == null ? null : response.copy();
      dst.paymentStatus = paymentStatus == null ? null : paymentStatus.copy();
      return dst;
    }

    protected PaymentNotice typedCopy() {
      return copy();
    }

    // Structural equality over all child elements (recursive).
    @Override
    public boolean equalsDeep(Base other) {
      if (!super.equalsDeep(other))
        return false;
      if (!(other instanceof PaymentNotice))
        return false;
      PaymentNotice o = (PaymentNotice) other;
      return compareDeep(identifier, o.identifier, true) && compareDeep(ruleset, o.ruleset, true) && compareDeep(originalRuleset, o.originalRuleset, true)
         && compareDeep(created, o.created, true) && compareDeep(target, o.target, true) && compareDeep(provider, o.provider, true)
         && compareDeep(organization, o.organization, true) && compareDeep(request, o.request, true) && compareDeep(response, o.response, true)
         && compareDeep(paymentStatus, o.paymentStatus, true);
    }

    // Shallow equality: only compares primitive values owned directly by this
    // resource ("created" is the only primitive-typed element here).
    @Override
    public boolean equalsShallow(Base other) {
      if (!super.equalsShallow(other))
        return false;
      if (!(other instanceof PaymentNotice))
        return false;
      PaymentNotice o = (PaymentNotice) other;
      return compareValues(created, o.created, true);
    }

    public boolean isEmpty() {
      return super.isEmpty() && (identifier == null || identifier.isEmpty()) && (ruleset == null || ruleset.isEmpty())
         && (originalRuleset == null || originalRuleset.isEmpty()) && (created == null || created.isEmpty())
         && (target == null || target.isEmpty()) && (provider == null || provider.isEmpty()) && (organization == null || organization.isEmpty())
         && (request == null || request.isEmpty()) && (response == null || response.isEmpty()) && (paymentStatus == null || paymentStatus.isEmpty())
          ;
    }

    @Override
    public ResourceType getResourceType() {
      return ResourceType.PaymentNotice;
    }

    // NOTE(review): the description text says "the Eligibility" although this is the
    // PaymentNotice resource — looks like a copy-paste in the upstream generator
    // template. The string is annotation metadata, so it is left untouched here;
    // confirm against the published FHIR search-parameter definitions before changing.
    @SearchParamDefinition(name="identifier", path="PaymentNotice.identifier", description="The business identifier of the Eligibility", type="token" )
    public static final String SP_IDENTIFIER = "identifier";

}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.index.engine; import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.codecs.Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LazySoftDeletesDirectoryReaderWrapper; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LiveIndexWriterConfig; import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.Weight; import 
org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import 
org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.seqno.LocalCheckpointTracker; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.index.seqno.RetentionLeases; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.SearcherHelper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.index.translog.TranslogDeletionPolicy; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.Charset; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Supplier; import java.util.function.ToLongBiFunction; import 
java.util.stream.Collectors; import static java.util.Collections.emptyList; import static java.util.Collections.shuffle; import static org.elasticsearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY; import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; @SuppressWarnings("HiddenField") public abstract class EngineTestCase extends ESTestCase { protected final ShardId shardId = new ShardId(new Index("index", "_na_"), 0); protected final AllocationId allocationId = AllocationId.newInitializing(); protected static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY); protected ThreadPool threadPool; protected TranslogHandler translogHandler; protected Store store; protected Store storeReplica; protected InternalEngine engine; protected InternalEngine replicaEngine; protected IndexSettings defaultSettings; protected String codecName; protected Path primaryTranslogDir; protected Path replicaTranslogDir; // A default primary term is used by engine instances created in this test. 
protected final PrimaryTermSupplier primaryTerm = new PrimaryTermSupplier(1L); // randomized in setUp()

    /** Refreshes {@code engine} and asserts exactly {@code numDocs} documents are searchable. */
    protected static void assertVisibleCount(Engine engine, int numDocs) throws IOException {
        assertVisibleCount(engine, numDocs, true);
    }

    /** Asserts exactly {@code numDocs} documents are searchable, optionally refreshing first. */
    protected static void assertVisibleCount(Engine engine, int numDocs, boolean refresh) throws IOException {
        if (refresh) {
            engine.refresh("test");
        }
        try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
            final TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(new MatchAllDocsQuery(), collector);
            assertThat(collector.getTotalHits(), equalTo(numDocs));
        }
    }

    /** Index settings applied to every engine created by this test case. */
    protected Settings indexSettings() {
        // TODO randomize more settings
        return Settings.builder()
            .put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), "1h") // make sure this doesn't kick in on us
            .put(EngineConfig.INDEX_CODEC_SETTING.getKey(), codecName)
            .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(
                IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.getKey(),
                between(10, 10 * IndexSettings.MAX_REFRESH_LISTENERS_PER_SHARD.get(Settings.EMPTY))
            )
            .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), between(0, 1000))
            .build();
    }

    /**
     * Creates the shared fixtures: a randomized codec, primary and replica stores,
     * translog directories, and a primary and replica engine recovered from empty state.
     */
    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        primaryTerm.set(randomLongBetween(1, Long.MAX_VALUE));
        CodecService codecService = new CodecService(null);
        String name = Codec.getDefault().getName();
        if (Arrays.asList(codecService.availableCodecs()).contains(name)) {
            // some codecs are read only so we only take the ones that we have in the service and randomly
            // selected by lucene test case.
            codecName = name;
        } else {
            codecName = "default";
        }
        defaultSettings = IndexSettingsModule.newIndexSettings("test", indexSettings());
        threadPool = new TestThreadPool(getClass().getName());
        store = createStore();
        storeReplica = createStore();
        Lucene.cleanLuceneIndex(store.directory());
        Lucene.cleanLuceneIndex(storeReplica.directory());
        primaryTranslogDir = createTempDir("translog-primary");
        translogHandler = createTranslogHandler(defaultSettings);
        engine = createEngine(store, primaryTranslogDir);
        LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig();
        assertEquals(engine.config().getCodec().getName(), codecService.codec(codecName).getName());
        assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
        if (randomBoolean()) {
            engine.config().setEnableGcDeletes(false);
        }
        replicaTranslogDir = createTempDir("translog-replica");
        replicaEngine = createEngine(storeReplica, replicaTranslogDir);
        currentIndexWriterConfig = replicaEngine.getCurrentIndexWriterConfig();
        assertEquals(replicaEngine.config().getCodec().getName(), codecService.codec(codecName).getName());
        assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
        if (randomBoolean()) {
            // NOTE(review): this second branch toggles gc-deletes on the primary `engine`
            // again, not on the just-created `replicaEngine` — possibly a copy-paste;
            // confirm against upstream before changing behavior.
            engine.config().setEnableGcDeletes(false);
        }
    }

    /** Copy of {@code config} with the given global checkpoint supplier substituted. */
    public EngineConfig copy(EngineConfig config, LongSupplier globalCheckpointSupplier) {
        return new EngineConfig(
            config.getShardId(),
            config.getThreadPool(),
            config.getIndexSettings(),
            config.getWarmer(),
            config.getStore(),
            config.getMergePolicy(),
            config.getAnalyzer(),
            config.getSimilarity(),
            new CodecService(null),
            config.getEventListener(),
            config.getQueryCache(),
            config.getQueryCachingPolicy(),
            config.getTranslogConfig(),
            config.getFlushMergesAfter(),
            config.getExternalRefreshListener(),
            Collections.emptyList(), // internal refresh listeners are intentionally dropped
            config.getIndexSort(),
            config.getCircuitBreakerService(),
            globalCheckpointSupplier,
            config.retentionLeasesSupplier(),
            config.getPrimaryTermSupplier(),
            config.getSnapshotCommitSupplier(),
            config.getLeafSorter()
        );
    }

    /** Copy of {@code config} with the given analyzer substituted. */
    public EngineConfig copy(EngineConfig config, Analyzer analyzer) {
        return new EngineConfig(
            config.getShardId(),
            config.getThreadPool(),
            config.getIndexSettings(),
            config.getWarmer(),
            config.getStore(),
            config.getMergePolicy(),
            analyzer,
            config.getSimilarity(),
            new CodecService(null),
            config.getEventListener(),
            config.getQueryCache(),
            config.getQueryCachingPolicy(),
            config.getTranslogConfig(),
            config.getFlushMergesAfter(),
            config.getExternalRefreshListener(),
            Collections.emptyList(), // internal refresh listeners are intentionally dropped
            config.getIndexSort(),
            config.getCircuitBreakerService(),
            config.getGlobalCheckpointSupplier(),
            config.retentionLeasesSupplier(),
            config.getPrimaryTermSupplier(),
            config.getSnapshotCommitSupplier(),
            config.getLeafSorter()
        );
    }

    /** Copy of {@code config} with the given merge policy substituted. */
    public EngineConfig copy(EngineConfig config, MergePolicy mergePolicy) {
        return new EngineConfig(
            config.getShardId(),
            config.getThreadPool(),
            config.getIndexSettings(),
            config.getWarmer(),
            config.getStore(),
            mergePolicy,
            config.getAnalyzer(),
            config.getSimilarity(),
            new CodecService(null),
            config.getEventListener(),
            config.getQueryCache(),
            config.getQueryCachingPolicy(),
            config.getTranslogConfig(),
            config.getFlushMergesAfter(),
            config.getExternalRefreshListener(),
            Collections.emptyList(), // internal refresh listeners are intentionally dropped
            config.getIndexSort(),
            config.getCircuitBreakerService(),
            config.getGlobalCheckpointSupplier(),
            config.retentionLeasesSupplier(),
            config.getPrimaryTermSupplier(),
            config.getSnapshotCommitSupplier(),
            config.getLeafSorter()
        );
    }

    /**
     * Verifies engine invariants (no open translog refs, consistent history, seq-no
     * uniqueness) on any still-open engines, then closes engines, stores and the
     * thread pool; the finally block guarantees cleanup even if an assertion fails.
     */
    @Override
    @After
    public void tearDown() throws Exception {
        super.tearDown();
        try {
            if (engine != null && engine.isClosed.get() == false) {
                engine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs();
                assertNoInFlightDocuments(engine);
                assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine);
                assertMaxSeqNoInCommitUserData(engine);
                assertAtMostOneLuceneDocumentPerSequenceNumber(engine);
            }
            if (replicaEngine != null && replicaEngine.isClosed.get() == false) {
                replicaEngine.getTranslog().getDeletionPolicy().assertNoOpenTranslogRefs();
                assertNoInFlightDocuments(replicaEngine);
                assertConsistentHistoryBetweenTranslogAndLuceneIndex(replicaEngine);
                assertMaxSeqNoInCommitUserData(replicaEngine);
                assertAtMostOneLuceneDocumentPerSequenceNumber(replicaEngine);
            }
        } finally {
            IOUtils.close(replicaEngine, storeReplica, engine, store, () -> terminate(threadPool));
        }
    }

    protected static LuceneDocument testDocumentWithTextField() {
        return testDocumentWithTextField("test");
    }

    /** A document with a single stored text field named "value". */
    protected static LuceneDocument testDocumentWithTextField(String value) {
        LuceneDocument document = testDocument();
        document.add(new TextField("value", value, Field.Store.YES));
        return document;
    }

    protected static LuceneDocument testDocument() {
        return new LuceneDocument();
    }

    public static ParsedDocument createParsedDoc(String id, String routing) {
        return testParsedDocument(id, routing, testDocumentWithTextField(), new BytesArray("{ \"value\" : \"test\" }"), null);
    }

    public static ParsedDocument createParsedDoc(String id, String routing, boolean recoverySource) {
        return testParsedDocument(
            id,
            routing,
            testDocumentWithTextField(),
            new BytesArray("{ \"value\" : \"test\" }"),
            null,
            recoverySource
        );
    }

    protected static ParsedDocument testParsedDocument(
        String id,
        String routing,
        LuceneDocument document,
        BytesReference source,
        Mapping mappingUpdate
    ) {
        return testParsedDocument(id, routing, document, source, mappingUpdate, false);
    }

    /**
     * Builds a {@link ParsedDocument} with the metadata fields (_id, _version, seq-no)
     * a real indexing pipeline would add; stores the source either under the normal
     * _source field or as a recovery source depending on {@code recoverySource}.
     */
    protected static ParsedDocument testParsedDocument(
        String id,
        String routing,
        LuceneDocument document,
        BytesReference source,
        Mapping mappingUpdate,
        boolean recoverySource
    ) {
        Field uidField = new Field("_id", Uid.encodeId(id), IdFieldMapper.Defaults.FIELD_TYPE);
        Field versionField = new NumericDocValuesField("_version", 0);
        SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
        document.add(uidField);
        document.add(versionField);
        document.add(seqID.seqNo);
        document.add(seqID.seqNoDocValue);
        document.add(seqID.primaryTerm);
        BytesRef ref = source.toBytesRef();
        if (recoverySource) {
            document.add(new StoredField(SourceFieldMapper.RECOVERY_SOURCE_NAME, ref.bytes, ref.offset, ref.length));
            document.add(new NumericDocValuesField(SourceFieldMapper.RECOVERY_SOURCE_NAME, 1));
        } else {
            document.add(new StoredField(SourceFieldMapper.NAME, ref.bytes, ref.offset, ref.length));
        }
        return new ParsedDocument(versionField, seqID, id, routing, Arrays.asList(document), source, XContentType.JSON, mappingUpdate);
    }

    /**
     * Returns a factory producing documents with a "nested_field" nested object; the
     * integer argument controls how many sub-fields the nested object carries (0 = none).
     */
    public static CheckedBiFunction<String, Integer, ParsedDocument, IOException> nestedParsedDocFactory() throws Exception {
        final MapperService mapperService = createMapperService();
        final String nestedMapping = Strings.toString(
            XContentFactory.jsonBuilder()
                .startObject()
                .startObject("type")
                .startObject("properties")
                .startObject("nested_field")
                .field("type", "nested")
                .endObject()
                .endObject()
                .endObject()
                .endObject()
        );
        final DocumentMapper nestedMapper = mapperService.merge(
            "type",
            new CompressedXContent(nestedMapping),
            MapperService.MergeReason.MAPPING_UPDATE
        );
        return (docId, nestedFieldValues) -> {
            final XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value");
            if (nestedFieldValues > 0) {
                XContentBuilder nestedField = source.startObject("nested_field");
                for (int i = 0; i < nestedFieldValues; i++) {
                    nestedField.field("field-" + i, "value-" + i);
                }
                source.endObject();
            }
            source.endObject();
            return nestedMapper.parse(new SourceToParse(docId, BytesReference.bytes(source), XContentType.JSON));
        };
    }

    protected Store createStore() throws IOException {
        return createStore(newDirectory());
    }

    protected Store createStore(final Directory directory) throws IOException {
        return createStore(INDEX_SETTINGS, directory);
    }

    protected Store createStore(final IndexSettings indexSettings, final Directory directory) throws IOException {
        return new Store(shardId, indexSettings, directory, new DummyShardLock(shardId));
    }

    protected Translog createTranslog(LongSupplier primaryTermSupplier) throws IOException {
        return createTranslog(primaryTranslogDir, primaryTermSupplier);
    }

    /** Creates a fresh, empty translog at {@code translogPath}. */
    protected Translog createTranslog(Path translogPath, LongSupplier primaryTermSupplier) throws IOException {
        TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE);
        String translogUUID = Translog.createEmptyTranslog(
            translogPath,
            SequenceNumbers.NO_OPS_PERFORMED,
            shardId,
            primaryTermSupplier.getAsLong()
        );
        return new Translog(
            translogConfig,
            translogUUID,
            new TranslogDeletionPolicy(),
            () -> SequenceNumbers.NO_OPS_PERFORMED,
            primaryTermSupplier,
            seqNo -> {}
        );
    }

    protected TranslogHandler createTranslogHandler(IndexSettings indexSettings) {
        return new TranslogHandler(xContentRegistry(), indexSettings);
    }

    // ---- createEngine overloads: each delegates to the most general form below. ----

    protected InternalEngine createEngine(Store store, Path translogPath) throws IOException {
        return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null);
    }

    protected InternalEngine createEngine(Store store, Path translogPath, LongSupplier globalCheckpointSupplier) throws IOException {
        return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier);
    }

    protected InternalEngine createEngine(
        Store store,
        Path translogPath,
        BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier
    ) throws IOException {
        return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null, localCheckpointTrackerSupplier, null);
    }

    protected InternalEngine createEngine(
        Store store,
        Path translogPath,
        BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
        ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation
    ) throws IOException {
        return createEngine(
            defaultSettings,
            store,
            translogPath,
            newMergePolicy(),
            null,
            localCheckpointTrackerSupplier,
            null,
            seqNoForOperation
        );
    }

    protected InternalEngine createEngine(IndexSettings indexSettings, Store store, Path translogPath, MergePolicy mergePolicy) throws IOException {
        return createEngine(indexSettings, store, translogPath, mergePolicy, null);
    }

    protected InternalEngine createEngine(
        IndexSettings indexSettings,
        Store store,
        Path translogPath,
        MergePolicy mergePolicy,
        @Nullable IndexWriterFactory indexWriterFactory
    ) throws IOException {
        return createEngine(indexSettings, store, translogPath, mergePolicy, indexWriterFactory, null, null);
    }

    protected InternalEngine createEngine(
        IndexSettings indexSettings,
        Store store,
        Path translogPath,
        MergePolicy mergePolicy,
        @Nullable IndexWriterFactory indexWriterFactory,
        @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
        @Nullable LongSupplier globalCheckpointSupplier
    ) throws IOException {
        return createEngine(
            indexSettings,
            store,
            translogPath,
            mergePolicy,
            indexWriterFactory,
            localCheckpointTrackerSupplier,
            null,
            null,
            globalCheckpointSupplier
        );
    }

    protected InternalEngine createEngine(
        IndexSettings indexSettings,
        Store store,
        Path translogPath,
        MergePolicy mergePolicy,
        @Nullable IndexWriterFactory indexWriterFactory,
        @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
        @Nullable LongSupplier globalCheckpointSupplier,
        @Nullable ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation
    ) throws IOException {
        return createEngine(
            indexSettings,
            store,
            translogPath,
            mergePolicy,
            indexWriterFactory,
            localCheckpointTrackerSupplier,
            seqNoForOperation,
            null,
            globalCheckpointSupplier
        );
    }

    /** Most general settings-based overload: builds the {@link EngineConfig} and delegates. */
    protected InternalEngine createEngine(
        IndexSettings indexSettings,
        Store store,
        Path translogPath,
        MergePolicy mergePolicy,
        @Nullable IndexWriterFactory indexWriterFactory,
        @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
        @Nullable ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation,
        @Nullable Sort indexSort,
        @Nullable LongSupplier globalCheckpointSupplier
    ) throws IOException {
        EngineConfig config = config(indexSettings, store, translogPath, mergePolicy, null, indexSort, globalCheckpointSupplier);
        return createEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config);
    }

    protected InternalEngine createEngine(EngineConfig config) throws IOException {
        return createEngine(null, null, null, config);
    }

    /**
     * Creates the engine for {@code config}: bootstraps an empty index + translog when
     * the directory has no index yet, then recovers the new engine from its translog.
     */
    protected InternalEngine createEngine(
        @Nullable IndexWriterFactory indexWriterFactory,
        @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
        @Nullable ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation,
        EngineConfig config
    ) throws IOException {
        final Store store = config.getStore();
        final Directory directory = store.directory();
        if (Lucene.indexExists(directory) == false) {
            store.createEmpty();
            final String translogUuid = Translog.createEmptyTranslog(
                config.getTranslogConfig().getTranslogPath(),
                SequenceNumbers.NO_OPS_PERFORMED,
                shardId,
                primaryTerm.get()
            );
            store.associateIndexWithNewTranslog(translogUuid);
        }
        InternalEngine internalEngine = createInternalEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config);
        internalEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
        return internalEngine;
    }

    /** Engine with an explicit max-docs limit; no translog recovery is performed here. */
    public static InternalEngine createEngine(EngineConfig engineConfig, int maxDocs) {
        return new InternalEngine(engineConfig, maxDocs, LocalCheckpointTracker::new);
    }

    /** Hook allowing tests to substitute the {@link IndexWriter} an engine creates. */
    @FunctionalInterface
    public interface IndexWriterFactory {

        IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException;
    }

    /**
     * Generate a new sequence number and return it.
 Only works on InternalEngines */
public static long generateNewSeqNo(final Engine engine) {
    assert engine instanceof InternalEngine : "expected InternalEngine, got: " + engine.getClass();
    InternalEngine internalEngine = (InternalEngine) engine;
    return internalEngine.getLocalCheckpointTracker().generateSeqNo();
}

// Builds an InternalTestEngine, optionally overriding the IndexWriter, the
// LocalCheckpointTracker, and per-operation seq-no generation.
public static InternalEngine createInternalEngine(
    @Nullable final IndexWriterFactory indexWriterFactory,
    @Nullable final BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
    @Nullable final ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation,
    final EngineConfig config
) {
    if (localCheckpointTrackerSupplier == null) {
        return new InternalTestEngine(config) {
            @Override
            IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException {
                return (indexWriterFactory != null) ? indexWriterFactory.createWriter(directory, iwc) : super.createWriter(directory, iwc);
            }

            @Override
            protected long doGenerateSeqNoForOperation(final Operation operation) {
                return seqNoForOperation != null ? seqNoForOperation.applyAsLong(this, operation) : super.doGenerateSeqNoForOperation(operation);
            }
        };
    } else {
        return new InternalTestEngine(config, IndexWriter.MAX_DOCS, localCheckpointTrackerSupplier) {
            @Override
            IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException {
                return (indexWriterFactory != null) ? indexWriterFactory.createWriter(directory, iwc) : super.createWriter(directory, iwc);
            }

            @Override
            protected long doGenerateSeqNoForOperation(final Operation operation) {
                return seqNoForOperation != null ? seqNoForOperation.applyAsLong(this, operation) : super.doGenerateSeqNoForOperation(operation);
            }
        };
    }
}

// config(...) overload chain: each variant fills in defaults and delegates to a fuller one.
public EngineConfig config(
    IndexSettings indexSettings,
    Store store,
    Path translogPath,
    MergePolicy mergePolicy,
    ReferenceManager.RefreshListener refreshListener
) {
    return config(indexSettings, store, translogPath, mergePolicy, refreshListener, null, () -> SequenceNumbers.NO_OPS_PERFORMED);
}

public EngineConfig config(
    IndexSettings indexSettings,
    Store store,
    Path translogPath,
    MergePolicy mergePolicy,
    ReferenceManager.RefreshListener refreshListener,
    Sort indexSort,
    LongSupplier globalCheckpointSupplier
) {
    return config(
        indexSettings,
        store,
        translogPath,
        mergePolicy,
        refreshListener,
        indexSort,
        globalCheckpointSupplier,
        globalCheckpointSupplier == null ? null : () -> RetentionLeases.EMPTY
    );
}

public EngineConfig config(
    final IndexSettings indexSettings,
    final Store store,
    final Path translogPath,
    final MergePolicy mergePolicy,
    final ReferenceManager.RefreshListener refreshListener,
    final Sort indexSort,
    final LongSupplier globalCheckpointSupplier,
    final Supplier<RetentionLeases> retentionLeasesSupplier
) {
    return config(
        indexSettings,
        store,
        translogPath,
        mergePolicy,
        refreshListener,
        null,
        indexSort,
        globalCheckpointSupplier,
        retentionLeasesSupplier,
        new NoneCircuitBreakerService()
    );
}

public EngineConfig config(
    IndexSettings indexSettings,
    Store store,
    Path translogPath,
    MergePolicy mergePolicy,
    ReferenceManager.RefreshListener externalRefreshListener,
    ReferenceManager.RefreshListener internalRefreshListener,
    Sort indexSort,
    @Nullable LongSupplier maybeGlobalCheckpointSupplier,
    CircuitBreakerService breakerService
) {
    return config(
        indexSettings,
        store,
        translogPath,
        mergePolicy,
        externalRefreshListener,
        internalRefreshListener,
        indexSort,
        maybeGlobalCheckpointSupplier,
        maybeGlobalCheckpointSupplier == null ?
        null : () -> RetentionLeases.EMPTY,
        breakerService
    );
}

// The fullest config(...) variant: assembles a complete EngineConfig. When no global
// checkpoint supplier is given, a fresh ReplicationTracker provides both the global
// checkpoint and the retention leases.
public EngineConfig config(
    final IndexSettings indexSettings,
    final Store store,
    final Path translogPath,
    final MergePolicy mergePolicy,
    final ReferenceManager.RefreshListener externalRefreshListener,
    final ReferenceManager.RefreshListener internalRefreshListener,
    final Sort indexSort,
    final @Nullable LongSupplier maybeGlobalCheckpointSupplier,
    final @Nullable Supplier<RetentionLeases> maybeRetentionLeasesSupplier,
    final CircuitBreakerService breakerService
) {
    final IndexWriterConfig iwc = newIndexWriterConfig();
    final TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE);
    final Engine.EventListener eventListener = new Engine.EventListener() {
    }; // we don't need to notify anybody in this test
    final List<ReferenceManager.RefreshListener> extRefreshListenerList = externalRefreshListener == null ?
        emptyList() : Collections.singletonList(externalRefreshListener);
    final List<ReferenceManager.RefreshListener> intRefreshListenerList = internalRefreshListener == null ?
        emptyList() : Collections.singletonList(internalRefreshListener);
    final LongSupplier globalCheckpointSupplier;
    final Supplier<RetentionLeases> retentionLeasesSupplier;
    if (maybeGlobalCheckpointSupplier == null) {
        assert maybeRetentionLeasesSupplier == null;
        final ReplicationTracker replicationTracker = new ReplicationTracker(
            shardId,
            allocationId.getId(),
            indexSettings,
            randomNonNegativeLong(),
            SequenceNumbers.NO_OPS_PERFORMED,
            update -> {},
            () -> 0L,
            (leases, listener) -> listener.onResponse(new ReplicationResponse()),
            () -> SafeCommitInfo.EMPTY
        );
        globalCheckpointSupplier = replicationTracker;
        retentionLeasesSupplier = replicationTracker::getRetentionLeases;
    } else {
        assert maybeRetentionLeasesSupplier != null;
        globalCheckpointSupplier = maybeGlobalCheckpointSupplier;
        retentionLeasesSupplier = maybeRetentionLeasesSupplier;
    }
    return new EngineConfig(
        shardId,
        threadPool,
        indexSettings,
        null,
        store,
        mergePolicy,
        iwc.getAnalyzer(),
        iwc.getSimilarity(),
        new CodecService(null),
        eventListener,
        IndexSearcher.getDefaultQueryCache(),
        IndexSearcher.getDefaultQueryCachingPolicy(),
        translogConfig,
        TimeValue.timeValueMinutes(5),
        extRefreshListenerList,
        intRefreshListenerList,
        indexSort,
        breakerService,
        globalCheckpointSupplier,
        retentionLeasesSupplier,
        primaryTerm,
        IndexModule.DEFAULT_SNAPSHOT_COMMIT_SUPPLIER,
        null
    );
}

// Copies an existing EngineConfig onto a new store/translog path, forcing soft-deletes on.
protected EngineConfig config(EngineConfig config, Store store, Path translogPath) {
    IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
        "test",
        Settings.builder()
            .put(config.getIndexSettings().getSettings())
            .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
            .build()
    );
    TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, BigArrays.NON_RECYCLING_INSTANCE);
    return new EngineConfig(
        config.getShardId(),
        config.getThreadPool(),
        indexSettings,
        config.getWarmer(),
        store,
        config.getMergePolicy(),
        config.getAnalyzer(),
        config.getSimilarity(),
        new CodecService(null),
        config.getEventListener(),
        config.getQueryCache(),
        config.getQueryCachingPolicy(),
        translogConfig,
        config.getFlushMergesAfter(),
        config.getExternalRefreshListener(),
        config.getInternalRefreshListener(),
        config.getIndexSort(),
        config.getCircuitBreakerService(),
        config.getGlobalCheckpointSupplier(),
        config.retentionLeasesSupplier(),
        config.getPrimaryTermSupplier(),
        config.getSnapshotCommitSupplier(),
        config.getLeafSorter()
    );
}

protected EngineConfig noOpConfig(IndexSettings indexSettings, Store store, Path translogPath) {
    return noOpConfig(indexSettings, store, translogPath, null);
}

protected EngineConfig noOpConfig(IndexSettings indexSettings, Store store, Path translogPath, LongSupplier globalCheckpointSupplier) {
    return config(indexSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier);
}

// Shared fixture payloads used as document sources in tests.
protected static final BytesReference B_1 = new BytesArray(new byte[] { 1 });
protected static final BytesReference B_2 = new BytesArray(new byte[] { 2 });
protected static final BytesReference B_3 = new BytesArray(new byte[] { 3 });
protected static final BytesArray SOURCE = bytesArray("{}");

protected static BytesArray bytesArray(String string) {
    return new BytesArray(string.getBytes(Charset.defaultCharset()));
}

public static Term newUid(String id) {
    return new Term("_id", Uid.encodeId(id));
}

public static Term newUid(ParsedDocument doc) {
    return newUid(doc.id());
}

protected Engine.Get newGet(boolean realtime, ParsedDocument doc) {
    return new Engine.Get(realtime, realtime, doc.id());
}

protected Engine.Index indexForDoc(ParsedDocument doc) {
    return new Engine.Index(newUid(doc), primaryTerm.get(), doc);
}

// Builds a replica-origin index operation for the given doc/version/seq-no.
protected Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long seqNo, boolean isRetry) {
    return new Engine.Index(
        newUid(doc),
        doc,
        seqNo,
        primaryTerm.get(),
        version,
        null,
        Engine.Operation.Origin.REPLICA,
        System.nanoTime(),
        IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP,
        isRetry,
        SequenceNumbers.UNASSIGNED_SEQ_NO,
        0
    );
}

protected Engine.Delete
replicaDeleteForDoc(String id, long version, long seqNo, long startTime) {
    // Replica-origin delete; the primary term is fixed to 1 for these tests.
    return new Engine.Delete(
        id,
        newUid(id),
        seqNo,
        1,
        version,
        null,
        Engine.Operation.Origin.REPLICA,
        startTime,
        SequenceNumbers.UNASSIGNED_SEQ_NO,
        0
    );
}

protected static void assertVisibleCount(InternalEngine engine, int numDocs) throws IOException {
    assertVisibleCount(engine, numDocs, true);
}

// Asserts the engine exposes exactly numDocs live documents (optionally refreshing first).
protected static void assertVisibleCount(InternalEngine engine, int numDocs, boolean refresh) throws IOException {
    if (refresh) {
        engine.refresh("test");
    }
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        final TotalHitCountCollector collector = new TotalHitCountCollector();
        searcher.search(new MatchAllDocsQuery(), collector);
        assertThat(collector.getTotalHits(), equalTo(numDocs));
    }
}

// Generates a randomized index/delete history for a single document id, either as it
// would appear on a primary or on a replica (replica ops carry seq-nos and versions).
public static List<Engine.Operation> generateSingleDocHistory(
    boolean forReplica,
    VersionType versionType,
    long primaryTerm,
    int minOpCount,
    int maxOpCount,
    String docId
) {
    final int numOfOps = randomIntBetween(minOpCount, maxOpCount);
    final List<Engine.Operation> ops = new ArrayList<>();
    final Term id = newUid(docId);
    final int startWithSeqNo = 0;
    final String valuePrefix = (forReplica ? "r_" : "p_") + docId + "_";
    final boolean incrementTermWhenIntroducingSeqNo = randomBoolean();
    for (int i = 0; i < numOfOps; i++) {
        final Engine.Operation op;
        final long version;
        switch (versionType) {
            case INTERNAL:
                version = forReplica ? i : Versions.MATCH_ANY;
                break;
            case EXTERNAL:
                version = i;
                break;
            case EXTERNAL_GTE:
                version = randomBoolean() ? Math.max(i - 1, 0) : i;
                break;
            default:
                throw new UnsupportedOperationException("unknown version type: " + versionType);
        }
        if (randomBoolean()) {
            op = new Engine.Index(
                id,
                testParsedDocument(docId, null, testDocumentWithTextField(valuePrefix + i), SOURCE, null),
                forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
                forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm,
                version,
                forReplica ?
                    null : versionType,
                forReplica ? REPLICA : PRIMARY,
                System.currentTimeMillis(),
                -1,
                false,
                SequenceNumbers.UNASSIGNED_SEQ_NO,
                0
            );
        } else {
            op = new Engine.Delete(
                docId,
                id,
                forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO,
                forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm,
                version,
                forReplica ? null : versionType,
                forReplica ? REPLICA : PRIMARY,
                System.currentTimeMillis(),
                SequenceNumbers.UNASSIGNED_SEQ_NO,
                0
            );
        }
        ops.add(op);
    }
    return ops;
}

public List<Engine.Operation> generateHistoryOnReplica(
    int numOps,
    boolean allowGapInSeqNo,
    boolean allowDuplicate,
    boolean includeNestedDocs
) throws Exception {
    return generateHistoryOnReplica(numOps, 0L, allowGapInSeqNo, allowDuplicate, includeNestedDocs);
}

// Generates a shuffled replica history of index/delete/no-op operations, optionally with
// seq-no gaps, duplicated operations and nested documents.
public List<Engine.Operation> generateHistoryOnReplica(
    int numOps,
    long startingSeqNo,
    boolean allowGapInSeqNo,
    boolean allowDuplicate,
    boolean includeNestedDocs
) throws Exception {
    long seqNo = startingSeqNo;
    final int maxIdValue = randomInt(numOps * 2);
    final List<Engine.Operation> operations = new ArrayList<>(numOps);
    CheckedBiFunction<String, Integer, ParsedDocument, IOException> nestedParsedDocFactory = nestedParsedDocFactory();
    for (int i = 0; i < numOps; i++) {
        final String id = Integer.toString(randomInt(maxIdValue));
        final Engine.Operation.TYPE opType = randomFrom(Engine.Operation.TYPE.values());
        final boolean isNestedDoc = includeNestedDocs && opType == Engine.Operation.TYPE.INDEX && randomBoolean();
        final int nestedValues = between(0, 3);
        final long startTime = threadPool.relativeTimeInNanos();
        final int copies = allowDuplicate && rarely() ? between(2, 4) : 1;
        for (int copy = 0; copy < copies; copy++) {
            final ParsedDocument doc = isNestedDoc ?
                nestedParsedDocFactory.apply(id, nestedValues) : createParsedDoc(id, null);
            switch (opType) {
                case INDEX:
                    operations.add(
                        new Engine.Index(
                            EngineTestCase.newUid(doc),
                            doc,
                            seqNo,
                            primaryTerm.get(),
                            i,
                            null,
                            randomFrom(REPLICA, PEER_RECOVERY),
                            startTime,
                            -1,
                            true,
                            SequenceNumbers.UNASSIGNED_SEQ_NO,
                            0
                        )
                    );
                    break;
                case DELETE:
                    operations.add(
                        new Engine.Delete(
                            doc.id(),
                            EngineTestCase.newUid(doc),
                            seqNo,
                            primaryTerm.get(),
                            i,
                            null,
                            randomFrom(REPLICA, PEER_RECOVERY),
                            startTime,
                            SequenceNumbers.UNASSIGNED_SEQ_NO,
                            0
                        )
                    );
                    break;
                case NO_OP:
                    operations.add(
                        new Engine.NoOp(seqNo, primaryTerm.get(), randomFrom(REPLICA, PEER_RECOVERY), startTime, "test-" + i)
                    );
                    break;
                default:
                    throw new IllegalStateException("Unknown operation type [" + opType + "]");
            }
        }
        seqNo++;
        if (allowGapInSeqNo && rarely()) {
            seqNo++;
        }
    }
    Randomness.shuffle(operations);
    return operations;
}

// Replays the given single-doc history on a replica engine and verifies each result,
// interleaving random refreshes/flushes; finally checks the last indexed value is searchable.
public static void assertOpsOnReplica(
    final List<Engine.Operation> ops,
    final InternalEngine replicaEngine,
    boolean shuffleOps,
    final Logger logger
) throws IOException {
    final Engine.Operation lastOp = ops.get(ops.size() - 1);
    final String lastFieldValue;
    if (lastOp instanceof Engine.Index) {
        Engine.Index index = (Engine.Index) lastOp;
        lastFieldValue = index.docs().get(0).get("value");
    } else {
        // delete
        lastFieldValue = null;
    }
    if (shuffleOps) {
        int firstOpWithSeqNo = 0;
        while (firstOpWithSeqNo < ops.size() && ops.get(firstOpWithSeqNo).seqNo() < 0) {
            firstOpWithSeqNo++;
        }
        // shuffle ops but make sure legacy ops are first
        shuffle(ops.subList(0, firstOpWithSeqNo), random());
        shuffle(ops.subList(firstOpWithSeqNo, ops.size()), random());
    }
    boolean firstOp = true;
    for (Engine.Operation op : ops) {
        logger.info(
            "performing [{}], v [{}], seq# [{}], term [{}]",
            op.operationType().name().charAt(0),
            op.version(),
            op.seqNo(),
            op.primaryTerm()
        );
        if (op instanceof Engine.Index) {
            Engine.IndexResult result = replicaEngine.index((Engine.Index) op);
            // Replicas don't really care about the creation status of documents: this
            // allows to ignore the case where a document was found in the live version maps in
            // a delete state and return false for the created flag, in favor of code simplicity.
            // This check is just a regression signal so a decision can be made if a change is
            // intentional.
            assertThat(result.isCreated(), equalTo(firstOp));
            assertThat(result.getVersion(), equalTo(op.version()));
            assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
        } else {
            Engine.DeleteResult result = replicaEngine.delete((Engine.Delete) op);
            // Replicas don't really care about the found status of documents: this
            // allows to ignore the case where a document was found in the live version maps in
            // a delete state and return true for the found flag, in favor of code simplicity.
            // This check is just a regression signal so a decision can be made if a change is
            // intentional.
            assertThat(result.isFound(), equalTo(firstOp == false));
            assertThat(result.getVersion(), equalTo(op.version()));
            assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
        }
        if (randomBoolean()) {
            replicaEngine.refresh("test");
        }
        if (randomBoolean()) {
            replicaEngine.flush();
            replicaEngine.refresh("test");
        }
        firstOp = false;
    }
    assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1);
    if (lastFieldValue != null) {
        try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) {
            final TotalHitCountCollector collector = new TotalHitCountCollector();
            searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
            assertThat(collector.getTotalHits(), equalTo(1));
        }
    }
}

// Applies the operations concurrently from 3-5 threads, with occasional refresh/flush.
public static void concurrentlyApplyOps(List<Engine.Operation> ops, InternalEngine engine) throws InterruptedException {
    Thread[] thread = new Thread[randomIntBetween(3, 5)];
    CountDownLatch startGun = new CountDownLatch(thread.length);
    AtomicInteger offset = new AtomicInteger(-1);
    for (int i = 0; i < thread.length; i++) {
        thread[i] = new Thread(() -> {
            startGun.countDown();
            try {
                startGun.await();
            } catch (InterruptedException e) {
                throw new AssertionError(e);
            }
            int docOffset;
            while ((docOffset = offset.incrementAndGet()) < ops.size()) {
                try {
                    applyOperation(engine, ops.get(docOffset));
                    if ((docOffset + 1) % 4 == 0) {
                        engine.refresh("test");
                    }
                    if (rarely()) {
                        engine.flush();
                    }
                } catch (IOException e) {
                    throw new AssertionError(e);
                }
            }
        });
        thread[i].start();
    }
    for (int i = 0; i < thread.length; i++) {
        thread[i].join();
    }
}

public static void applyOperations(Engine engine, List<Engine.Operation> operations) throws IOException {
    for (Engine.Operation operation : operations) {
        applyOperation(engine, operation);
        if (randomInt(100) < 10) {
            engine.refresh("test");
        }
        if (rarely()) {
            engine.flush();
        }
    }
}

// Dispatches a single operation to the matching engine method.
public static Engine.Result applyOperation(Engine engine, Engine.Operation operation) throws IOException {
    final Engine.Result result;
    switch (operation.operationType()) {
        case INDEX:
            result = engine.index((Engine.Index) operation);
            break;
        case DELETE:
            result = engine.delete((Engine.Delete) operation);
            break;
        case NO_OP:
            result = engine.noOp((Engine.NoOp) operation);
            break;
        default:
            throw new IllegalStateException("No operation defined for [" + operation + "]");
    }
    return result;
}

/**
 * Gets a collection of tuples of docId, sequence number, and primary term of
 all live documents in the provided engine. */
public static List<DocIdSeqNoAndSource> getDocIds(Engine engine, boolean refresh) throws IOException {
    if (refresh) {
        engine.refresh("test_get_doc_ids");
    }
    try (Engine.Searcher searcher = engine.acquireSearcher("test_get_doc_ids", Engine.SearcherScope.INTERNAL)) {
        List<DocIdSeqNoAndSource> docs = new ArrayList<>();
        for (LeafReaderContext leafContext : searcher.getIndexReader().leaves()) {
            LeafReader reader = leafContext.reader();
            NumericDocValues seqNoDocValues = reader.getNumericDocValues(SeqNoFieldMapper.NAME);
            NumericDocValues primaryTermDocValues = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
            NumericDocValues versionDocValues = reader.getNumericDocValues(VersionFieldMapper.NAME);
            Bits liveDocs = reader.getLiveDocs();
            for (int i = 0; i < reader.maxDoc(); i++) {
                if (liveDocs == null || liveDocs.get(i)) {
                    if (primaryTermDocValues.advanceExact(i) == false) {
                        // We have to skip non-root docs because its _id field is not stored (indexed only).
                        continue;
                    }
                    final long primaryTerm = primaryTermDocValues.longValue();
                    Document doc = reader.document(i, Set.of(IdFieldMapper.NAME, SourceFieldMapper.NAME));
                    BytesRef binaryID = doc.getBinaryValue(IdFieldMapper.NAME);
                    String id = Uid.decodeId(Arrays.copyOfRange(binaryID.bytes, binaryID.offset, binaryID.offset + binaryID.length));
                    final BytesRef source = doc.getBinaryValue(SourceFieldMapper.NAME);
                    if (seqNoDocValues.advanceExact(i) == false) {
                        throw new AssertionError("seqNoDocValues not found for doc[" + i + "] id[" + id + "]");
                    }
                    final long seqNo = seqNoDocValues.longValue();
                    if (versionDocValues.advanceExact(i) == false) {
                        throw new AssertionError("versionDocValues not found for doc[" + i + "] id[" + id + "]");
                    }
                    final long version = versionDocValues.longValue();
                    docs.add(new DocIdSeqNoAndSource(id, source, seqNo, primaryTerm, version));
                }
            }
        }
        // Stable order: by seq-no, then primary term, then id.
        docs.sort(
            Comparator.comparingLong(DocIdSeqNoAndSource::getSeqNo)
                .thenComparingLong(DocIdSeqNoAndSource::getPrimaryTerm)
                .thenComparing((DocIdSeqNoAndSource::getId))
        );
        return docs;
    }
}

/**
 * Reads all engine operations that have been processed by the engine from Lucene index.
 * The returned operations are sorted and de-duplicated, thus each sequence number will have at most one operation.
 */
public static List<Translog.Operation> readAllOperationsInLucene(Engine engine) throws IOException {
    final List<Translog.Operation> operations = new ArrayList<>();
    try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", 0, Long.MAX_VALUE, false, randomBoolean(), randomBoolean())) {
        Translog.Operation op;
        while ((op = snapshot.next()) != null) {
            operations.add(op);
        }
    }
    return operations;
}

/**
 * Asserts the provided engine has a consistent document history between translog and Lucene index.
 */
public static void assertConsistentHistoryBetweenTranslogAndLuceneIndex(Engine engine) throws IOException {
    if (engine instanceof InternalEngine == false) {
        return;
    }
    final List<Translog.Operation> translogOps = new ArrayList<>();
    try (Translog.Snapshot snapshot = EngineTestCase.getTranslog(engine).newSnapshot()) {
        Translog.Operation op;
        while ((op = snapshot.next()) != null) {
            translogOps.add(op);
        }
    }
    final Map<Long, Translog.Operation> luceneOps = readAllOperationsInLucene(engine).stream()
        .collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity()));
    final long maxSeqNo = ((InternalEngine) engine).getLocalCheckpointTracker().getMaxSeqNo();
    for (Translog.Operation op : translogOps) {
        assertThat("translog operation [" + op + "] > max_seq_no[" + maxSeqNo + "]", op.seqNo(), lessThanOrEqualTo(maxSeqNo));
    }
    for (Translog.Operation op : luceneOps.values()) {
        assertThat("lucene operation [" + op + "] > max_seq_no[" + maxSeqNo + "]", op.seqNo(), lessThanOrEqualTo(maxSeqNo));
    }
    final long globalCheckpoint = EngineTestCase.getTranslog(engine).getLastSyncedGlobalCheckpoint();
    final long retainedOps = engine.config().getIndexSettings().getSoftDeleteRetentionOperations();
    final long minSeqNoToRetain;
    if (engine.config().getIndexSettings().isSoftDeleteEnabled()) {
        try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) {
            final long seqNoForRecovery = Long.parseLong(
                safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)
            ) + 1;
            minSeqNoToRetain = Math.min(seqNoForRecovery, globalCheckpoint + 1 - retainedOps);
        }
    } else {
        minSeqNoToRetain = engine.getMinRetainedSeqNo();
    }
    for (Translog.Operation translogOp : translogOps) {
        final Translog.Operation luceneOp = luceneOps.get(translogOp.seqNo());
        if (luceneOp == null) {
            // Missing from Lucene is only acceptable below the retention floor.
            if (minSeqNoToRetain <= translogOp.seqNo()) {
                fail(
                    "Operation not found seq# [" + translogOp.seqNo() + "], global checkpoint [" + globalCheckpoint + "], "
                        + "retention policy [" + retainedOps + "], maxSeqNo [" + maxSeqNo + "], translog op [" + translogOp + "]"
                );
            } else {
                continue;
            }
        }
        assertThat(luceneOp, notNullValue());
        assertThat(luceneOp.toString(), luceneOp.primaryTerm(), equalTo(translogOp.primaryTerm()));
        assertThat(luceneOp.opType(), equalTo(translogOp.opType()));
        if (luceneOp.opType() == Translog.Operation.Type.INDEX) {
            assertThat(luceneOp.getSource().source, equalTo(translogOp.getSource().source));
        }
    }
}

/**
 * Asserts that the max_seq_no stored in the commit's user_data is never smaller than seq_no of any document in the commit.
 */
public static void assertMaxSeqNoInCommitUserData(Engine engine) throws Exception {
    List<IndexCommit> commits = DirectoryReader.listCommits(engine.store.directory());
    for (IndexCommit commit : commits) {
        try (DirectoryReader reader = DirectoryReader.open(commit)) {
            assertThat(
                Long.parseLong(commit.getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
                greaterThanOrEqualTo(maxSeqNosInReader(reader))
            );
        }
    }
}

public static void assertAtMostOneLuceneDocumentPerSequenceNumber(Engine engine) throws IOException {
    if (engine instanceof InternalEngine) {
        try {
            engine.refresh("test");
            try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
                assertAtMostOneLuceneDocumentPerSequenceNumber(engine.config().getIndexSettings(), searcher.getDirectoryReader());
            }
        } catch (AlreadyClosedException ignored) {
            // engine was closed
        }
    }
}

// Verifies no sequence number maps to more than one Lucene document (including soft-deleted ones).
public static void assertAtMostOneLuceneDocumentPerSequenceNumber(IndexSettings indexSettings, DirectoryReader reader)
    throws IOException {
    Set<Long> seqNos = new HashSet<>();
    final DirectoryReader wrappedReader = indexSettings.isSoftDeleteEnabled() ? Lucene.wrapAllDocsLive(reader) : reader;
    for (LeafReaderContext leaf : wrappedReader.leaves()) {
        NumericDocValues primaryTermDocValues = leaf.reader().getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
        NumericDocValues seqNoDocValues = leaf.reader().getNumericDocValues(SeqNoFieldMapper.NAME);
        int docId;
        while ((docId = seqNoDocValues.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
            assertTrue(seqNoDocValues.advanceExact(docId));
            long seqNo = seqNoDocValues.longValue();
            assertThat(seqNo, greaterThanOrEqualTo(0L));
            if (primaryTermDocValues.advanceExact(docId)) {
                if (seqNos.add(seqNo) == false) {
                    IdStoredFieldLoader idLoader = new IdStoredFieldLoader(leaf.reader());
                    throw new AssertionError("found multiple documents for seq=" + seqNo + " id=" + idLoader.id(docId));
                }
            }
        }
    }
}

// Builds a minimal MapperService ("test" index, one shard, empty mapping) for tests.
public static MapperService createMapperService() throws IOException {
    IndexMetadata indexMetadata = IndexMetadata.builder("test")
        .settings(
            Settings.builder()
                .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1)
        )
        .putMapping("{\"properties\": {}}")
        .build();
    MapperService mapperService = MapperTestUtils.newMapperService(
        new NamedXContentRegistry(ClusterModule.getNamedXWriteables()),
        createTempDir(),
        Settings.EMPTY,
        "test"
    );
    mapperService.merge(indexMetadata, MapperService.MergeReason.MAPPING_UPDATE);
    return mapperService;
}

public static MappingLookup mappingLookup() {
    try {
        return createMapperService().mappingLookup();
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}

/**
 * Exposes a translog associated with the given engine for testing purpose.
 */
public static Translog getTranslog(Engine engine) {
    assert engine instanceof InternalEngine : "only InternalEngines have translogs, got: " + engine.getClass();
    InternalEngine internalEngine = (InternalEngine) engine;
    return internalEngine.getTranslog();
}

/**
 * Waits for all operations up to the provided sequence number to complete in the given internal engine.
 *
 * @param seqNo the sequence number that the checkpoint must advance to before this method returns
 * @throws InterruptedException if the thread was interrupted while blocking on the condition
 */
public static void waitForOpsToComplete(InternalEngine engine, long seqNo) throws Exception {
    assertBusy(() -> assertThat(engine.getLocalCheckpointTracker().getProcessedCheckpoint(), greaterThanOrEqualTo(seqNo)));
}

public static boolean hasSnapshottedCommits(Engine engine) {
    assert engine instanceof InternalEngine : "only InternalEngines have snapshotted commits, got: " + engine.getClass();
    InternalEngine internalEngine = (InternalEngine) engine;
    return internalEngine.hasSnapshottedCommits();
}

// Mutable primary-term holder usable wherever a LongSupplier is expected.
public static final class PrimaryTermSupplier implements LongSupplier {
    private final AtomicLong term;

    PrimaryTermSupplier(long initialTerm) {
        this.term = new AtomicLong(initialTerm);
    }

    public long get() {
        return term.get();
    }

    public void set(long newTerm) {
        this.term.set(newTerm);
    }

    @Override
    public long getAsLong() {
        return get();
    }
}

// Scans all seq-no doc values in the reader and returns the maximum found.
static long maxSeqNosInReader(DirectoryReader reader) throws IOException {
    long maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
    for (LeafReaderContext leaf : reader.leaves()) {
        final NumericDocValues seqNoDocValues = leaf.reader().getNumericDocValues(SeqNoFieldMapper.NAME);
        while (seqNoDocValues.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
            maxSeqNo = SequenceNumbers.max(maxSeqNo, seqNoDocValues.longValue());
        }
    }
    return maxSeqNo;
}

/**
 * Returns the number of times a version was looked up either from version map or from the index.
 */
public static long getNumVersionLookups(Engine engine) {
    return ((InternalEngine) engine).getNumVersionLookups();
}

public static long getInFlightDocCount(Engine engine) {
    if (engine instanceof InternalEngine) {
        return ((InternalEngine) engine).getInFlightDocCount();
    } else {
        return 0;
    }
}

public static void assertNoInFlightDocuments(Engine engine) throws Exception {
    assertBusy(() -> assertThat(getInFlightDocCount(engine), equalTo(0L)));
}

// DirectoryReader whose live docs are restricted to documents matching the given query.
public static final class MatchingDirectoryReader extends FilterDirectoryReader {
    private final Query query;

    public MatchingDirectoryReader(DirectoryReader in, Query query) throws IOException {
        super(in, new SubReaderWrapper() {
            @Override
            public LeafReader wrap(LeafReader leaf) {
                try {
                    final IndexSearcher searcher = new IndexSearcher(leaf);
                    searcher.setQueryCache(null);
                    final Weight weight = searcher.createWeight(query, ScoreMode.COMPLETE_NO_SCORES, 1.0f);
                    final Scorer scorer = weight.scorer(leaf.getContext());
                    final DocIdSetIterator iterator = scorer != null ?
                        scorer.iterator() : null;
                    final FixedBitSet liveDocs = new FixedBitSet(leaf.maxDoc());
                    if (iterator != null) {
                        for (int docId = iterator.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
                            if (leaf.getLiveDocs() == null || leaf.getLiveDocs().get(docId)) {
                                liveDocs.set(docId);
                            }
                        }
                    }
                    return new FilterLeafReader(leaf) {
                        @Override
                        public Bits getLiveDocs() {
                            return liveDocs;
                        }

                        @Override
                        public CacheHelper getCoreCacheHelper() {
                            return leaf.getCoreCacheHelper();
                        }

                        @Override
                        public CacheHelper getReaderCacheHelper() {
                            return null; // modify liveDocs
                        }
                    };
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            }
        });
        this.query = query;
    }

    @Override
    protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
        return new MatchingDirectoryReader(in, query);
    }

    @Override
    public CacheHelper getReaderCacheHelper() {
        // TODO: We should not return the ReaderCacheHelper if we modify the liveDocs,
        // but some caching components (e.g., global ordinals) require this cache key.
        return in.getReaderCacheHelper();
    }
}

// Randomly wraps a reader with MatchingDirectoryReader (match-all) or leaves it untouched.
public static CheckedFunction<DirectoryReader, DirectoryReader, IOException> randomReaderWrapper() {
    if (randomBoolean()) {
        return reader -> reader;
    } else {
        return reader -> new MatchingDirectoryReader(reader, new MatchAllDocsQuery());
    }
}

public static Function<Engine.Searcher, Engine.Searcher> randomSearcherWrapper() {
    if (randomBoolean()) {
        return Function.identity();
    } else {
        final CheckedFunction<DirectoryReader, DirectoryReader, IOException> readerWrapper = randomReaderWrapper();
        return searcher -> SearcherHelper.wrapSearcher(searcher, readerWrapper);
    }
}

// Fails if any lazily-loaded soft-deletes bits have actually been initialized.
public static void checkNoSoftDeletesLoaded(ReadOnlyEngine readOnlyEngine) {
    if (readOnlyEngine.lazilyLoadSoftDeletes == false) {
        throw new IllegalStateException("method should only be called when lazily loading soft-deletes is enabled");
    }
    try (Engine.Searcher searcher = readOnlyEngine.acquireSearcher("soft-deletes-check", Engine.SearcherScope.INTERNAL)) {
        for (LeafReaderContext ctx : searcher.getIndexReader().getContext().leaves()) {
            LazySoftDeletesDirectoryReaderWrapper.LazyBits lazyBits = lazyBits(ctx.reader());
            if (lazyBits != null && lazyBits.initialized()) {
                throw new IllegalStateException("soft-deletes loaded");
            }
        }
    }
}

// Unwraps filter readers until the LazyBits live-docs implementation is found.
@Nullable
private static LazySoftDeletesDirectoryReaderWrapper.LazyBits lazyBits(LeafReader reader) {
    if (reader instanceof LazySoftDeletesDirectoryReaderWrapper.LazySoftDeletesFilterLeafReader) {
        return ((LazySoftDeletesDirectoryReaderWrapper.LazySoftDeletesFilterLeafReader) reader).getLiveDocs();
    } else if (reader instanceof LazySoftDeletesDirectoryReaderWrapper.LazySoftDeletesFilterCodecReader) {
        return ((LazySoftDeletesDirectoryReaderWrapper.LazySoftDeletesFilterCodecReader) reader).getLiveDocs();
    } else if (reader instanceof FilterLeafReader) {
        final FilterLeafReader fReader = (FilterLeafReader) reader;
        return lazyBits(FilterLeafReader.unwrap(fReader));
    } else if (reader instanceof FilterCodecReader) {
        final FilterCodecReader fReader = (FilterCodecReader) reader;
        return lazyBits(FilterCodecReader.unwrap(fReader));
    } else if (reader instanceof SegmentReader) {
        return null;
    }
    // hard fail - we can't get the lazybits
    throw new IllegalStateException("Can not extract lazy bits from given index reader [" + reader + "]");
}
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jetbrains.python.psi;

import com.google.common.collect.ImmutableList;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.lexer.PythonLexer;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;

/**
 * Static helpers for dissecting Python string literals: extracting prefixes
 * ({@code u}, {@code b}, {@code r}, {@code f}, ...), opening/closing quotes and
 * the content between them.
 *
 * @author Mikhail Golubev
 */
public final class PyStringLiteralUtil {
  /**
   * Valid string prefix characters (lowercased) as defined in Python lexer.
   */
  public static final String PREFIX_CHARACTERS = "ubcrf";
  /**
   * Maximum length of a string prefix as defined in Python lexer.
   */
  public static final int MAX_PREFIX_LENGTH = 3;

  // Ordered longest-first so triple quotes are matched before single ones.
  private static final ImmutableList<String> QUOTES = ImmutableList.of("'''", "\"\"\"", "'", "\"");

  private static final Logger LOG = Logger.getInstance(PyStringLiteralUtil.class);

  private PyStringLiteralUtil() {
  }

  /**
   * Strips a matching pair of quotes, if present.
   * <pre>
   * 'text'  => text
   * "text"  => text
   * text    => text
   * "text   => "text
   * </pre>
   *
   * @return string without heading and trailing pair of ' or "
   */
  @NotNull
  public static String getStringValue(@NotNull String s) {
    return getStringValueTextRange(s).substring(s);
  }

  /**
   * Returns the range of {@code s} between its quotes (or the whole string if it is not quoted).
   */
  public static TextRange getStringValueTextRange(@NotNull String s) {
    final Pair<String, String> pair = getQuotes(s);
    if (pair == null) {
      return TextRange.allOf(s);
    }
    return TextRange.create(pair.getFirst().length(), s.length() - pair.getSecond().length());
  }

  /**
   * @return whether the given text is recognized as a valid string literal token by Python lexer
   */
  public static boolean isStringLiteralToken(@NotNull String text) {
    final PythonLexer lexer = new PythonLexer();
    lexer.start(text);
    // An f-string start token alone is enough; other string tokens must span the whole text.
    if (PyTokenTypes.FSTRING_START == lexer.getTokenType()) {
      return true;
    }
    return PyTokenTypes.STRING_NODES.contains(lexer.getTokenType()) && lexer.getTokenEnd() == lexer.getBufferEnd();
  }

  /**
   * Handles unicode and raw strings.
   * <pre>
   * sdfs     -> false
   * ur'x'    -> true
   * "string" -> true
   * </pre>
   *
   * @return false if no matched pair of quotes found, true otherwise
   */
  public static boolean isQuoted(@Nullable String text) {
    if (text == null) {
      return false;
    }
    return getQuotes(text) != null;
  }

  /**
   * Returns a pair where the first element is the prefix combined with the opening quote and the
   * second is the closing quote, or {@code null} when the literal is not properly quoted (e.g. the
   * closing quote is shorter than the opening one or missing altogether).
   * <p>
   * Examples:
   * <pre>
   * ur"foo"   -> ("ur\"", "\"")
   * ur'bar    -> null
   * """baz""" -> (""", """)
   * '''quux'  -> null
   * </pre>
   */
  @Nullable
  public static Pair<String, String> getQuotes(@NotNull String text) {
    final String prefix = getPrefix(text);
    final String unprefixed = text.substring(prefix.length());
    for (String candidate : QUOTES) {
      final Pair<String, String> result = getQuotes(unprefixed, prefix, candidate);
      if (result != null) {
        return result;
      }
    }
    return null;
  }

  /**
   * Returns the range of the string literal text between the opening quote and the closing one.
   * If the closing quote is either missing or mismatched, this range spans until the end of the literal.
   */
  @NotNull
  public static TextRange getContentRange(@NotNull String text) {
    LOG.assertTrue(isStringLiteralToken(text), "Text of a single string literal node expected");
    final int prefixLength = getPrefixLength(text);
    final String withoutPrefix = text.substring(prefixLength);
    // Triple quotes take precedence over single ones.
    final boolean tripleQuoted = withoutPrefix.startsWith("\"\"\"") || withoutPrefix.startsWith("'''");
    final int quoteLength = tripleQuoted ? 3 : 1;
    final String openingQuote = text.substring(prefixLength, prefixLength + quoteLength);
    final int contentStart = prefixLength + quoteLength;
    // Only trim the closing quote when the remainder actually ends with a matching one.
    final boolean properlyClosed = text.substring(contentStart).endsWith(openingQuote);
    final int contentEnd = properlyClosed ? text.length() - quoteLength : text.length();
    return new TextRange(contentStart, contentEnd);
  }

  /**
   * Finds the end offset of the string prefix starting from {@code startOffset} in the given char sequence.
   * String prefix may contain only up to {@link #MAX_PREFIX_LENGTH} characters from {@link #PREFIX_CHARACTERS}
   * (case insensitively).
   *
   * @return end offset of found string prefix
   */
  public static int getPrefixEndOffset(@NotNull CharSequence text, int startOffset) {
    final int limit = Math.min(startOffset + MAX_PREFIX_LENGTH, text.length());
    int pos = startOffset;
    while (pos < limit && PREFIX_CHARACTERS.indexOf(Character.toLowerCase(text.charAt(pos))) >= 0) {
      pos++;
    }
    return pos;
  }

  /**
   * @return length of the string prefix at the very beginning of {@code text}
   */
  public static int getPrefixLength(@NotNull String text) {
    return getPrefixEndOffset(text, 0);
  }

  /**
   * @return string prefix at the very beginning of {@code text}
   */
  @NotNull
  public static String getPrefix(@NotNull CharSequence text) {
    return getPrefix(text, 0);
  }

  /**
   * Extracts string prefix from the given char sequence using {@link #getPrefixEndOffset(CharSequence, int)}.
   *
   * @return extracted string prefix
   * @see #getPrefixEndOffset(CharSequence, int)
   */
  @NotNull
  public static String getPrefix(@NotNull CharSequence text, int startOffset) {
    return text.subSequence(startOffset, getPrefixEndOffset(text, startOffset)).toString();
  }

  /**
   * @return whether the given prefix contains either 'u' or 'U' character
   */
  public static boolean isUnicodePrefix(@NotNull String prefix) {
    return StringUtil.indexOfIgnoreCase(prefix, 'u', 0) >= 0;
  }

  /**
   * @return whether the given prefix contains either 'b' or 'B' character
   */
  public static boolean isBytesPrefix(@NotNull String prefix) {
    return StringUtil.indexOfIgnoreCase(prefix, 'b', 0) >= 0;
  }

  /**
   * @return whether the given prefix contains either 'r' or 'R' character
   */
  public static boolean isRawPrefix(@NotNull String prefix) {
    return StringUtil.indexOfIgnoreCase(prefix, 'r', 0) >= 0;
  }

  /**
   * @return whether the given prefix contains either 'f' or 'F' character
   */
  public static boolean isFormattedPrefix(@NotNull String prefix) {
    return StringUtil.indexOfIgnoreCase(prefix, 'f', 0) >= 0;
  }

  /**
   * @return alternative quote character, i.e. " for ' and ' for "
   */
  public static char flipQuote(char quote) {
    if (quote == '"') {
      return '\'';
    }
    return '"';
  }

  /**
   * Matches a single concrete quote against the (already prefix-stripped) text.
   * Requires that the text both starts and ends with the quote and is long enough
   * to contain two non-overlapping occurrences of it.
   */
  @Nullable
  private static Pair<String, String> getQuotes(@NotNull String text, @NotNull String prefix, @NotNull String quote) {
    final int quoteLength = quote.length();
    if (text.length() < 2 * quoteLength || !text.startsWith(quote) || !text.endsWith(quote)) {
      return null;
    }
    return Pair.create(prefix + text.substring(0, quoteLength), text.substring(text.length() - quoteLength));
  }

  /**
   * Returns the first string-value range for a string literal expression,
   * or the element's full range otherwise.
   */
  public static TextRange getTextRange(PsiElement element) {
    if (!(element instanceof PyStringLiteralExpression)) {
      return new TextRange(0, element.getTextLength());
    }
    final List<TextRange> ranges = ((PyStringLiteralExpression)element).getStringValueTextRanges();
    return ranges.get(0);
  }

  /**
   * Null-safe access to an expression's text.
   */
  @Nullable
  public static String getText(@Nullable PyExpression ex) {
    return ex == null ? null : ex.getText();
  }

  /**
   * Returns the unquoted string value for a string literal expression,
   * the raw text for any other element, or {@code null} for {@code null} input.
   */
  @Nullable
  public static String getStringValue(@Nullable PsiElement o) {
    if (o == null) {
      return null;
    }
    if (o instanceof PyStringLiteralExpression) {
      return ((PyStringLiteralExpression)o).getStringValue();
    }
    return o.getText();
  }

  /**
   * Removes a matched prefix-and-quote pair from around {@code text}, if present.
   */
  public static String stripQuotesAroundValue(String text) {
    final Pair<String, String> pair = getQuotes(text);
    if (pair == null) {
      return text;
    }
    return text.substring(pair.first.length(), text.length() - pair.second.length());
  }
}
package com.netflix.discovery.junit.resource;

import javax.ws.rs.core.UriBuilder;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import com.google.common.base.Preconditions;
import com.netflix.appinfo.ApplicationInfoManager;
import com.netflix.appinfo.DataCenterInfo;
import com.netflix.appinfo.EurekaInstanceConfig;
import com.netflix.appinfo.InstanceInfo;
import com.netflix.appinfo.LeaseInfo;
import com.netflix.appinfo.MyDataCenterInstanceConfig;
import com.netflix.config.ConfigurationManager;
import com.netflix.discovery.CacheRefreshedEvent;
import com.netflix.discovery.DefaultEurekaClientConfig;
import com.netflix.discovery.DiscoveryClient;
import com.netflix.discovery.DiscoveryManager;
import com.netflix.discovery.EurekaClient;
import com.netflix.discovery.EurekaClientConfig;
import com.netflix.discovery.shared.transport.SimpleEurekaHttpServer;
import com.netflix.discovery.shared.transport.jersey.Jersey1DiscoveryClientOptionalArgs;
import com.netflix.eventbus.impl.EventBusImpl;
import com.netflix.eventbus.spi.EventBus;
import com.netflix.eventbus.spi.InvalidSubscriberException;
import com.netflix.eventbus.spi.Subscribe;
import org.junit.rules.ExternalResource;

/**
 * JUnit rule for discovery client + collection of static methods for setting it up.
 */
public class DiscoveryClientResource extends ExternalResource {
    public static final String REMOTE_REGION = "myregion";
    public static final String REMOTE_ZONE = "myzone";
    public static final int CLIENT_REFRESH_RATE = 10;

    public static final String EUREKA_TEST_NAMESPACE = "eurekaTestNamespace.";

    // Tracks every property bound via bindProperty() so after() can undo them all.
    private static final Set<String> SYSTEM_PROPERTY_TRACKER = new HashSet<>();

    private final boolean registrationEnabled;
    private final boolean registryFetchEnabled;
    private final InstanceInfo instance;
    private final SimpleEurekaHttpServer eurekaHttpServer;
    private final Callable<Integer> portResolverCallable;
    private final List<String> remoteRegions;
    private final String vipFetch;
    private final String userName;
    private final String password;

    private EventBus eventBus;
    private ApplicationInfoManager applicationManager;
    private EurekaClient client;

    // Resources created via fork() — shut down together with this one in after().
    private final List<DiscoveryClientResource> forkedDiscoveryClientResources = new ArrayList<>();

    private ApplicationInfoManager applicationInfoManager;

    DiscoveryClientResource(DiscoveryClientRuleBuilder builder) {
        this.registrationEnabled = builder.registrationEnabled;
        this.registryFetchEnabled = builder.registryFetchEnabled;
        this.portResolverCallable = builder.portResolverCallable;
        this.eurekaHttpServer = builder.eurekaHttpServer;
        this.instance = builder.instance;
        this.remoteRegions = builder.remoteRegions;
        this.vipFetch = builder.vipFetch;
        this.userName = builder.userName;
        this.password = builder.password;
    }

    /**
     * Returns the {@link InstanceInfo} of the client managed by this resource.
     */
    public InstanceInfo getMyInstanceInfo() {
        return createApplicationManager().getInfo();
    }

    /**
     * Returns the event bus attached to the discovery client, creating the client lazily if needed.
     */
    public EventBus getEventBus() {
        if (client == null) {
            getClient(); // Lazy initialization
        }
        return eventBus;
    }

    public ApplicationInfoManager getApplicationInfoManager() {
        return applicationInfoManager;
    }

    /**
     * Lazily creates and returns the {@link EurekaClient} configured by this resource.
     *
     * @throws RuntimeException wrapping any failure during client construction
     */
    public EurekaClient getClient() {
        if (client == null) {
            try {
                applicationInfoManager = createApplicationManager();
                EurekaClientConfig clientConfig = createEurekaClientConfig();
                Jersey1DiscoveryClientOptionalArgs optionalArgs = new Jersey1DiscoveryClientOptionalArgs();
                eventBus = new EventBusImpl();
                optionalArgs.setEventBus(eventBus);
                client = new DiscoveryClient(applicationInfoManager, clientConfig, optionalArgs);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        return client;
    }

    /**
     * Blocks until the client's local registry cache is refreshed or the timeout elapses.
     *
     * @return true if a {@link CacheRefreshedEvent} was observed within the timeout
     */
    public boolean awaitCacheUpdate(long timeout, TimeUnit unit) throws InterruptedException {
        final CountDownLatch latch = new CountDownLatch(1);
        Object eventListener = new Object() {
            @Subscribe
            public void consume(CacheRefreshedEvent event) {
                latch.countDown();
            }
        };
        try {
            getEventBus().registerSubscriber(eventListener);
        } catch (InvalidSubscriberException e) {
            throw new IllegalStateException("Unexpected error during subscriber registration", e);
        }
        try {
            return latch.await(timeout, unit);
        } finally {
            // Always unregister so the listener does not leak into later refreshes.
            getEventBus().unregisterSubscriber(eventListener);
        }
    }

    /**
     * Lazily creates an {@link ApplicationInfoManager} with a fast (1s) lease renewal interval.
     */
    private ApplicationInfoManager createApplicationManager() {
        if (applicationManager == null) {
            EurekaInstanceConfig instanceConfig = new MyDataCenterInstanceConfig(EUREKA_TEST_NAMESPACE) {
                @Override
                public String getAppname() {
                    return "discoveryClientTest";
                }

                @Override
                public int getLeaseRenewalIntervalInSeconds() {
                    return 1;
                }
            };
            applicationManager = new ApplicationInfoManager(instanceConfig);
        }
        return applicationManager;
    }

    /**
     * Binds all client configuration properties under {@link #EUREKA_TEST_NAMESPACE} and
     * returns a config reading from that namespace.
     *
     * @throws IllegalStateException if neither a port resolver nor an HTTP server was configured
     */
    private EurekaClientConfig createEurekaClientConfig() throws Exception {
        // Cluster connectivity
        URI serviceURI;
        if (portResolverCallable != null) {
            serviceURI = new URI("http://localhost:" + portResolverCallable.call() + "/eureka/v2/");
        } else if (eurekaHttpServer != null) {
            serviceURI = eurekaHttpServer.getServiceURI();
        } else {
            throw new IllegalStateException("Either port or EurekaHttpServer must be configured");
        }
        if (userName != null) {
            serviceURI = UriBuilder.fromUri(serviceURI).userInfo(userName + ':' + password).build();
        }
        bindProperty(EUREKA_TEST_NAMESPACE + "serviceUrl.default", serviceURI.toString());
        if (remoteRegions != null && !remoteRegions.isEmpty()) {
            bindProperty(EUREKA_TEST_NAMESPACE + "fetchRemoteRegionsRegistry", String.join(",", remoteRegions));
        }

        // Registration
        bindProperty(EUREKA_TEST_NAMESPACE + "registration.enabled", Boolean.toString(registrationEnabled));
        bindProperty(EUREKA_TEST_NAMESPACE + "appinfo.initial.replicate.time", Integer.toString(0));
        bindProperty(EUREKA_TEST_NAMESPACE + "appinfo.replicate.interval", Integer.toString(1));

        // Registry fetch
        bindProperty(EUREKA_TEST_NAMESPACE + "shouldFetchRegistry", Boolean.toString(registryFetchEnabled));
        bindProperty(EUREKA_TEST_NAMESPACE + "client.refresh.interval", Integer.toString(1));
        if (vipFetch != null) {
            bindProperty(EUREKA_TEST_NAMESPACE + "registryRefreshSingleVipAddress", vipFetch);
        }

        return new DefaultEurekaClientConfig(EUREKA_TEST_NAMESPACE);
    }

    @Override
    protected void after() {
        if (client != null) {
            client.shutdown();
        }
        for (DiscoveryClientResource resource : forkedDiscoveryClientResources) {
            resource.after();
        }
        for (String property : SYSTEM_PROPERTY_TRACKER) {
            ConfigurationManager.getConfigInstance().clearProperty(property);
        }
        clearDiscoveryClientConfig();
    }

    /**
     * Returns a builder pre-populated from this resource whose {@code build()} also starts
     * the forked resource and registers it for shutdown with this one.
     * <p>
     * NOTE(review): {@code vipFetch} and basic-auth credentials are intentionally not
     * propagated to the fork (matching original behavior).
     */
    public DiscoveryClientRuleBuilder fork() {
        DiscoveryClientRuleBuilder builder = new DiscoveryClientRuleBuilder() {
            @Override
            public DiscoveryClientResource build() {
                DiscoveryClientResource clientResource = super.build();
                try {
                    clientResource.before();
                } catch (Throwable e) {
                    throw new IllegalStateException("Unexpected error during forking the client resource", e);
                }
                forkedDiscoveryClientResources.add(clientResource);
                return clientResource;
            }
        };
        builder.withInstanceInfo(instance)
                .connectWith(eurekaHttpServer)
                .withPortResolver(portResolverCallable)
                .withRegistration(registrationEnabled)
                .withRegistryFetch(registryFetchEnabled);
        // FIX: remoteRegions may be null when this resource was built without remote regions;
        // the unconditional toArray() call here previously threw NullPointerException.
        if (remoteRegions != null && !remoteRegions.isEmpty()) {
            builder.withRemoteRegions(remoteRegions.toArray(new String[0]));
        }
        return builder;
    }

    public static DiscoveryClientRuleBuilder newBuilder() {
        return new DiscoveryClientRuleBuilder();
    }

    /**
     * Binds the legacy (non-namespaced) "eureka." client properties for tests that use them.
     */
    public static void setupDiscoveryClientConfig(int serverPort, String path) {
        ConfigurationManager.getConfigInstance().setProperty("eureka.shouldFetchRegistry", "true");
        ConfigurationManager.getConfigInstance().setProperty("eureka.responseCacheAutoExpirationInSeconds", "10");
        ConfigurationManager.getConfigInstance().setProperty("eureka.client.refresh.interval", CLIENT_REFRESH_RATE);
        ConfigurationManager.getConfigInstance().setProperty("eureka.registration.enabled", "false");
        ConfigurationManager.getConfigInstance().setProperty("eureka.fetchRemoteRegionsRegistry", REMOTE_REGION);
        ConfigurationManager.getConfigInstance().setProperty("eureka.myregion.availabilityZones", REMOTE_ZONE);
        ConfigurationManager.getConfigInstance().setProperty("eureka.serviceUrl.default",
                "http://localhost:" + serverPort + path);
    }

    /**
     * Clears every property set by {@link #setupDiscoveryClientConfig(int, String)}.
     */
    public static void clearDiscoveryClientConfig() {
        // FIX: also clear shouldFetchRegistry and responseCacheAutoExpirationInSeconds,
        // which setupDiscoveryClientConfig() sets but were previously never cleared and
        // leaked into subsequent tests.
        ConfigurationManager.getConfigInstance().clearProperty("eureka.shouldFetchRegistry");
        ConfigurationManager.getConfigInstance().clearProperty("eureka.responseCacheAutoExpirationInSeconds");
        ConfigurationManager.getConfigInstance().clearProperty("eureka.client.refresh.interval");
        ConfigurationManager.getConfigInstance().clearProperty("eureka.registration.enabled");
        ConfigurationManager.getConfigInstance().clearProperty("eureka.fetchRemoteRegionsRegistry");
        ConfigurationManager.getConfigInstance().clearProperty("eureka.myregion.availabilityZones");
        ConfigurationManager.getConfigInstance().clearProperty("eureka.serviceUrl.default");
        ConfigurationManager.getConfigInstance().clearProperty("eureka.shouldEnforceFetchRegistryAtInit");
    }

    /**
     * Creates a standalone discovery client wired through {@link DiscoveryManager}.
     */
    public static EurekaClient setupDiscoveryClient(InstanceInfo clientInstanceInfo) {
        DefaultEurekaClientConfig config = new DefaultEurekaClientConfig();
        // setup config in advance, used in initialize converter
        ApplicationInfoManager applicationInfoManager =
                new ApplicationInfoManager(new MyDataCenterInstanceConfig(), clientInstanceInfo);
        DiscoveryManager.getInstance().setEurekaClientConfig(config);
        EurekaClient client = new DiscoveryClient(applicationInfoManager, config);
        return client;
    }

    /**
     * Creates a discovery client using the deprecated singleton ApplicationInfoManager path.
     */
    public static EurekaClient setupInjector(InstanceInfo clientInstanceInfo) {
        DefaultEurekaClientConfig config = new DefaultEurekaClientConfig();
        // setup config in advance, used in initialize converter
        DiscoveryManager.getInstance().setEurekaClientConfig(config);
        EurekaClient client = new DiscoveryClient(clientInstanceInfo, config);
        ApplicationInfoManager.getInstance().initComponent(new MyDataCenterInstanceConfig());
        return client;
    }

    /**
     * Returns an {@link InstanceInfo.Builder} pre-filled with unique test identity data.
     */
    public static InstanceInfo.Builder newInstanceInfoBuilder(int renewalIntervalInSecs) {
        InstanceInfo.Builder builder = InstanceInfo.Builder.newBuilder();
        builder.setIPAddr("10.10.101.00");
        builder.setHostName("Hosttt");
        builder.setAppName("EurekaTestApp-" + UUID.randomUUID());
        builder.setDataCenterInfo(new DataCenterInfo() {
            @Override
            public Name getName() {
                return Name.MyOwn;
            }
        });
        builder.setLeaseInfo(LeaseInfo.Builder.newBuilder().setRenewalIntervalInSecs(renewalIntervalInSecs).build());
        return builder;
    }

    /**
     * Sets a configuration property and records it for cleanup in {@link #after()}.
     */
    private static void bindProperty(String propertyName, String value) {
        SYSTEM_PROPERTY_TRACKER.add(propertyName);
        ConfigurationManager.getConfigInstance().setProperty(propertyName, value);
    }

    /**
     * Fluent builder for {@link DiscoveryClientResource}.
     */
    public static class DiscoveryClientRuleBuilder {
        private boolean registrationEnabled;
        private boolean registryFetchEnabled;
        private Callable<Integer> portResolverCallable;
        private InstanceInfo instance;
        private SimpleEurekaHttpServer eurekaHttpServer;
        private List<String> remoteRegions;
        private String vipFetch;
        private String userName;
        private String password;

        public DiscoveryClientRuleBuilder withInstanceInfo(InstanceInfo instance) {
            this.instance = instance;
            return this;
        }

        public DiscoveryClientRuleBuilder withRegistration(boolean enabled) {
            this.registrationEnabled = enabled;
            return this;
        }

        public DiscoveryClientRuleBuilder withRegistryFetch(boolean enabled) {
            this.registryFetchEnabled = enabled;
            return this;
        }

        public DiscoveryClientRuleBuilder withPortResolver(Callable<Integer> portResolverCallable) {
            this.portResolverCallable = portResolverCallable;
            return this;
        }

        public DiscoveryClientRuleBuilder connectWith(SimpleEurekaHttpServer eurekaHttpServer) {
            this.eurekaHttpServer = eurekaHttpServer;
            return this;
        }

        public DiscoveryClientRuleBuilder withRemoteRegions(String... remoteRegions) {
            if (this.remoteRegions == null) {
                this.remoteRegions = new ArrayList<>();
            }
            Collections.addAll(this.remoteRegions, remoteRegions);
            return this;
        }

        public DiscoveryClientRuleBuilder withVipFetch(String vipFetch) {
            this.vipFetch = vipFetch;
            return this;
        }

        public DiscoveryClientRuleBuilder basicAuthentication(String userName, String password) {
            Preconditions.checkNotNull(userName, "HTTP basic authentication user name is null");
            Preconditions.checkNotNull(password, "HTTP basic authentication password is null");
            this.userName = userName;
            this.password = password;
            return this;
        }

        public DiscoveryClientResource build() {
            return new DiscoveryClientResource(this);
        }
    }
}
package io.vertx.blueprint.microservice.cart.impl;

import io.vertx.blueprint.microservice.cache.CounterService;
import io.vertx.blueprint.microservice.cart.CartEvent;
import io.vertx.blueprint.microservice.cart.CheckoutResult;
import io.vertx.blueprint.microservice.cart.CheckoutService;
import io.vertx.blueprint.microservice.cart.ShoppingCart;
import io.vertx.blueprint.microservice.cart.ShoppingCartService;
import io.vertx.blueprint.microservice.common.functional.Functional;
import io.vertx.blueprint.microservice.order.Order;
import io.vertx.blueprint.microservice.product.ProductTuple;
import io.vertx.core.AsyncResult;
import io.vertx.core.Future;
import io.vertx.core.Handler;
import io.vertx.core.Vertx;
import io.vertx.core.http.HttpClient;
import io.vertx.core.json.JsonObject;
import io.vertx.servicediscovery.ServiceDiscovery;
import io.vertx.servicediscovery.types.EventBusService;
import io.vertx.servicediscovery.types.HttpEndpoint;

import java.util.List;
import java.util.stream.Collectors;

/**
 * A simple implementation for {@link CheckoutService}.
 * Orchestrates the checkout flow: cart retrieval, inventory check, order id
 * generation, order submission and checkout-event persistence — all composed
 * as a chain of Vert.x {@link Future}s.
 *
 * @author Eric Zhao
 */
public class CheckoutServiceImpl implements CheckoutService {

  private final Vertx vertx;
  private final ServiceDiscovery discovery;

  public CheckoutServiceImpl(Vertx vertx, ServiceDiscovery discovery) {
    this.vertx = vertx;
    this.discovery = discovery;
  }

  /**
   * Performs checkout for the given user: fetches the current cart, verifies
   * inventory, and on success creates and sends an order, then records a
   * checkout event. The composed result (or the first failure) is delivered
   * to {@code resultHandler}.
   */
  @Override
  public void checkout(String userId, Handler<AsyncResult<CheckoutResult>> resultHandler) {
    if (userId == null) {
      // Fail fast on missing user id — no service calls are made.
      resultHandler.handle(Future.failedFuture(new IllegalStateException("Invalid user")));
      return;
    }
    Future<ShoppingCart> cartFuture = getCurrentCart(userId);
    Future<CheckoutResult> orderFuture = cartFuture.compose(cart ->
      checkAvailableInventory(cart).compose(checkResult -> {
        if (checkResult.getBoolean("res")) {
          double totalPrice = calculateTotalPrice(cart);
          // create order instance
          Order order = new Order().setBuyerId(userId)
            .setPayId("TEST") // reserved field
            .setProducts(cart.getProductItems())
            .setTotalPrice(totalPrice);
          // set id and then send order, wait for reply
          return retrieveCounter("order")
            .compose(id -> sendOrderAwaitResult(order.setOrderId(id)))
            .compose(result -> saveCheckoutEvent(userId).map(v -> result));
        } else {
          // has insufficient inventory, fail
          return Future.succeededFuture(new CheckoutResult()
            .setMessage(checkResult.getString("message")));
        }
      })
    );
    orderFuture.setHandler(resultHandler);
  }

  /**
   * Fetch global counter of order from the cache infrastructure.
   *
   * @param key counter key (type)
   * @return async result of the counter
   */
  private Future<Long> retrieveCounter(String key) {
    Future<Long> future = Future.future();
    EventBusService.getProxy(discovery, CounterService.class, ar -> {
      if (ar.succeeded()) {
        CounterService service = ar.result();
        // Increment and fetch in one call; completes the future with the new value.
        service.addThenRetrieve(key, future.completer());
      } else {
        future.fail(ar.cause());
      }
    });
    return future;
  }

  /**
   * Send the order to the order microservice and wait for reply.
   *
   * @param order order data object
   * @return async result
   */
  private Future<CheckoutResult> sendOrderAwaitResult(Order order) {
    Future<CheckoutResult> future = Future.future();
    vertx.eventBus().send(CheckoutService.ORDER_EVENT_ADDRESS, order.toJson(), reply -> {
      if (reply.succeeded()) {
        // Reply body is the JSON form of a CheckoutResult.
        future.complete(new CheckoutResult((JsonObject) reply.result().body()));
      } else {
        future.fail(reply.cause());
      }
    });
    return future;
  }

  /**
   * Resolves the shopping cart service and fetches the user's current cart.
   * Fails with IllegalStateException when the cart is missing or empty.
   */
  private Future<ShoppingCart> getCurrentCart(String userId) {
    Future<ShoppingCartService> future = Future.future();
    EventBusService.getProxy(discovery, ShoppingCartService.class, future.completer());
    return future.compose(service -> {
      Future<ShoppingCart> cartFuture = Future.future();
      service.getShoppingCart(userId, cartFuture.completer());
      return cartFuture.compose(c -> {
        if (c == null || c.isEmpty())
          return Future.failedFuture(new IllegalStateException("Invalid shopping cart"));
        else
          return Future.succeededFuture(c);
      });
    });
  }

  /**
   * Sums amount * price over all line items in the cart.
   */
  private double calculateTotalPrice(ShoppingCart cart) {
    return cart.getProductItems().stream()
      .map(p -> p.getAmount() * p.getPrice()) // line subtotal: amount * unit price
      .reduce(0.0d, (a, b) -> a + b);
  }

  /**
   * Looks up the HTTP client for the "inventory-rest-api" endpoint via service discovery.
   */
  private Future<HttpClient> getInventoryEndpoint() {
    Future<HttpClient> future = Future.future();
    HttpEndpoint.getClient(discovery,
      new JsonObject().put("name", "inventory-rest-api"),
      future.completer());
    return future;
  }

  /**
   * Fetches the inventory count for one product over HTTP and packs id,
   * inventory and requested amount into a JsonObject.
   */
  private Future<JsonObject> getInventory(ProductTuple product, HttpClient client) {
    Future<Integer> future = Future.future();
    client.get("/" + product.getProductId(), response -> {
      if (response.statusCode() == 200) {
        response.bodyHandler(buffer -> {
          try {
            int inventory = Integer.valueOf(buffer.toString());
            future.complete(inventory);
          } catch (NumberFormatException ex) {
            // Non-numeric body — propagate as failure rather than completing with garbage.
            future.fail(ex);
          }
        });
      } else {
        future.fail("not_found:" + product.getProductId());
      }
    })
      .exceptionHandler(future::fail)
      .end();
    return future.map(inv -> new JsonObject()
      .put("id", product.getProductId())
      .put("inventory", inv)
      .put("amount", product.getAmount()));
  }

  /**
   * Check inventory for the current cart.
   *
   * @param cart shopping cart data object
   * @return async result
   */
  private Future<JsonObject> checkAvailableInventory(ShoppingCart cart) {
    Future<List<JsonObject>> allInventories = getInventoryEndpoint().compose(client -> {
      List<Future<JsonObject>> futures = cart.getProductItems()
        .stream()
        .map(product -> getInventory(product, client))
        .collect(Collectors.toList());
      // Release the discovered client once all inventory lookups complete.
      return Functional.allOfFutures(futures)
        .map(r -> {
          ServiceDiscovery.releaseServiceObject(discovery, client);
          return r;
        });
    });
    return allInventories.map(inventories -> {
      JsonObject result = new JsonObject();
      // get the list of products whose inventory is lower than the demand amount
      List<JsonObject> insufficient = inventories.stream()
        .filter(item -> item.getInteger("inventory") - item.getInteger("amount") < 0)
        .collect(Collectors.toList());
      // insufficient inventory exists
      if (insufficient.size() > 0) {
        String insufficientList = insufficient.stream()
          .map(item -> item.getString("id"))
          .collect(Collectors.joining(", "));
        result.put("message", String.format("Insufficient inventory available for product %s.", insufficientList))
          .put("res", false);
      } else {
        result.put("res", true);
      }
      return result;
    });
  }

  /**
   * Save checkout cart event for current user.
   *
   * @param userId user id
   * @return async result
   */
  private Future<Void> saveCheckoutEvent(String userId) {
    Future<ShoppingCartService> future = Future.future();
    EventBusService.getProxy(discovery, ShoppingCartService.class, future.completer());
    return future.compose(service -> {
      Future<Void> resFuture = Future.future();
      CartEvent event = CartEvent.createCheckoutEvent(userId);
      service.addCartEvent(event, resFuture.completer());
      return resFuture;
    });
  }
}
package org.hisp.dhis.webapi.controller;

/*
 * Copyright (c) 2004-2017, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

import org.hisp.dhis.analytics.AggregationType;
import org.hisp.dhis.analytics.AnalyticsService;
import org.hisp.dhis.analytics.AnalyticsUtils;
import org.hisp.dhis.analytics.DataQueryParams;
import org.hisp.dhis.analytics.DataQueryService;
import org.hisp.dhis.analytics.OutputFormat;
import org.hisp.dhis.common.DisplayProperty;
import org.hisp.dhis.common.Grid;
import org.hisp.dhis.common.IdScheme;
import org.hisp.dhis.common.cache.CacheStrategy;
import org.hisp.dhis.dxf2.datavalueset.DataValueSet;
import org.hisp.dhis.system.grid.GridUtils;
import org.hisp.dhis.webapi.mvc.annotation.ApiVersion;
import org.hisp.dhis.common.DhisApiVersion;
import org.hisp.dhis.webapi.utils.ContextUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

import javax.servlet.http.HttpServletResponse;
import java.util.Date;
import java.util.Set;

import static org.hisp.dhis.common.DimensionalObjectUtils.getItemsFromParam;

/**
 * Web API controller serving aggregated analytics data in multiple
 * representations (JSON/JSONP, XML, HTML, ...) from the same query-parameter
 * contract. Each handler builds a {@link DataQueryParams} from the request,
 * configures caching headers and delegates to {@link AnalyticsService}.
 *
 * @author Lars Helge Overland
 */
@Controller
@ApiVersion( { DhisApiVersion.DEFAULT, DhisApiVersion.ALL } )
public class AnalyticsController
{
    private static final String RESOURCE_PATH = "/analytics";
    private static final String DATA_VALUE_SET_PATH = "/dataValueSet";
    private static final String RAW_DATA_PATH = "/rawData";

    @Autowired
    private DataQueryService dataQueryService;

    @Autowired
    private AnalyticsService analyticsService;

    @Autowired
    private ContextUtils contextUtils;

    // -------------------------------------------------------------------------
    // Resources
    // -------------------------------------------------------------------------

    /**
     * Returns aggregated analytics data as JSON (or JSONP). The grid is
     * returned directly and serialized by the framework.
     */
    @RequestMapping( value = RESOURCE_PATH, method = RequestMethod.GET, produces = { "application/json", "application/javascript" } )
    public @ResponseBody Grid getJson( // JSON, JSONP
        @RequestParam Set<String> dimension,
        @RequestParam( required = false ) Set<String> filter,
        @RequestParam( required = false ) AggregationType aggregationType,
        @RequestParam( required = false ) String measureCriteria,
        @RequestParam( required = false ) String preAggregationMeasureCriteria,
        @RequestParam( required = false ) boolean skipMeta,
        @RequestParam( required = false ) boolean skipData,
        @RequestParam( required = false ) boolean skipRounding,
        @RequestParam( required = false ) boolean completedOnly,
        @RequestParam( required = false ) boolean hierarchyMeta,
        @RequestParam( required = false ) boolean ignoreLimit,
        @RequestParam( required = false ) boolean hideEmptyRows,
        @RequestParam( required = false ) boolean showHierarchy,
        @RequestParam( required = false ) boolean includeNumDen,
        @RequestParam( required = false ) DisplayProperty displayProperty,
        @RequestParam( required = false ) IdScheme outputIdScheme,
        @RequestParam( required = false ) IdScheme inputIdScheme,
        @RequestParam( required = false ) String approvalLevel,
        @RequestParam( required = false ) Date relativePeriodDate,
        @RequestParam( required = false ) String userOrgUnit,
        @RequestParam( required = false ) String columns,
        @RequestParam( required = false ) String rows,
        DhisApiVersion apiVersion,
        Model model,
        HttpServletResponse response ) throws Exception
    {
        // Translate the raw request parameters into an analytics query.
        DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria,
            preAggregationMeasureCriteria, skipMeta, skipData, skipRounding, completedOnly, hierarchyMeta, ignoreLimit,
            hideEmptyRows, showHierarchy, includeNumDen, displayProperty, outputIdScheme, inputIdScheme, false,
            approvalLevel, relativePeriodDate, userOrgUnit, apiVersion );

        // Cache headers respect the system-wide caching setting.
        contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_JSON,
            CacheStrategy.RESPECT_SYSTEM_SETTING, null, false, params.getLatestEndDate() );
        return analyticsService.getAggregatedDataValues( params, getItemsFromParam( columns ), getItemsFromParam( rows ) );
    }

    /**
     * Returns aggregated analytics data as XML written to the response stream.
     * Mirrors {@link #getJson} parameter-for-parameter.
     */
    @RequestMapping( value = RESOURCE_PATH + ".xml", method = RequestMethod.GET )
    public void getXml(
        @RequestParam Set<String> dimension,
        @RequestParam( required = false ) Set<String> filter,
        @RequestParam( required = false ) AggregationType aggregationType,
        @RequestParam( required = false ) String measureCriteria,
        @RequestParam( required = false ) String preAggregationMeasureCriteria,
        @RequestParam( required = false ) boolean skipMeta,
        @RequestParam( required = false ) boolean skipData,
        @RequestParam( required = false ) boolean skipRounding,
        @RequestParam( required = false ) boolean completedOnly,
        @RequestParam( required = false ) boolean hierarchyMeta,
        @RequestParam( required = false ) boolean ignoreLimit,
        @RequestParam( required = false ) boolean hideEmptyRows,
        @RequestParam( required = false ) boolean showHierarchy,
        @RequestParam( required = false ) boolean includeNumDen,
        @RequestParam( required = false ) DisplayProperty displayProperty,
        @RequestParam( required = false ) IdScheme outputIdScheme,
        @RequestParam( required = false ) IdScheme inputIdScheme,
        @RequestParam( required = false ) String approvalLevel,
        @RequestParam( required = false ) Date relativePeriodDate,
        @RequestParam( required = false ) String userOrgUnit,
        @RequestParam( required = false ) String columns,
        @RequestParam( required = false ) String rows,
        DhisApiVersion apiVersion,
        Model model,
        HttpServletResponse response ) throws Exception
    {
        DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria,
            preAggregationMeasureCriteria, skipMeta, skipData, skipRounding, completedOnly, hierarchyMeta, ignoreLimit,
            hideEmptyRows, showHierarchy, includeNumDen, displayProperty, outputIdScheme, inputIdScheme, false,
            approvalLevel, relativePeriodDate, userOrgUnit, apiVersion );

        contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_XML,
            CacheStrategy.RESPECT_SYSTEM_SETTING, null, false, params.getLatestEndDate() );
        Grid grid = analyticsService.getAggregatedDataValues( params, getItemsFromParam( columns ), getItemsFromParam( rows ) );
        GridUtils.toXml( grid, response.getOutputStream() );
    }

    /**
     * Returns aggregated analytics data rendered as HTML.
     * Mirrors {@link #getJson} parameter-for-parameter.
     */
    @RequestMapping( value = RESOURCE_PATH + ".html", method = RequestMethod.GET )
    public void getHtml(
        @RequestParam Set<String> dimension,
        @RequestParam( required = false ) Set<String> filter,
        @RequestParam( required = false ) AggregationType aggregationType,
        @RequestParam( required = false ) String measureCriteria,
        @RequestParam( required = false ) String preAggregationMeasureCriteria,
        @RequestParam( required = false ) boolean skipMeta,
        @RequestParam( required = false ) boolean skipData,
        @RequestParam( required = false ) boolean skipRounding,
        @RequestParam( required = false ) boolean completedOnly,
        @RequestParam( required = false ) boolean hierarchyMeta,
        @RequestParam( required = false ) boolean ignoreLimit,
        @RequestParam( required = false ) boolean hideEmptyRows,
        @RequestParam( required = false ) boolean showHierarchy,
        @RequestParam( required = false ) boolean includeNumDen,
        @RequestParam( required = false ) DisplayProperty displayProperty,
        @RequestParam( required = false ) IdScheme outputIdScheme,
        @RequestParam( required = false ) IdScheme inputIdScheme,
        @RequestParam( required = false ) String approvalLevel,
        @RequestParam( required = false ) Date relativePeriodDate,
        @RequestParam( required = false ) String userOrgUnit,
        @RequestParam( required = false ) String columns,
        @RequestParam( required = false ) String rows,
        DhisApiVersion apiVersion,
        Model model,
        HttpServletResponse response ) throws Exception
    {
        DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria,
            preAggregationMeasureCriteria, skipMeta, skipData, skipRounding, completedOnly, hierarchyMeta, ignoreLimit,
            hideEmptyRows, showHierarchy, includeNumDen, displayProperty, outputIdScheme, inputIdScheme, false,
approvalLevel, relativePeriodDate, userOrgUnit, apiVersion ); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_HTML, CacheStrategy.RESPECT_SYSTEM_SETTING, null, false, params.getLatestEndDate() ); Grid grid = analyticsService.getAggregatedDataValues( params, getItemsFromParam( columns ), getItemsFromParam( rows ) ); GridUtils.toHtml( grid, response.getWriter() ); } @RequestMapping( value = RESOURCE_PATH + ".html+css", method = RequestMethod.GET ) public void getHtmlCss( @RequestParam Set<String> dimension, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) AggregationType aggregationType, @RequestParam( required = false ) String measureCriteria, @RequestParam( required = false ) String preAggregationMeasureCriteria, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean skipRounding, @RequestParam( required = false ) boolean completedOnly, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean ignoreLimit, @RequestParam( required = false ) boolean hideEmptyRows, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) boolean includeNumDen, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, @RequestParam( required = false ) String approvalLevel, @RequestParam( required = false ) Date relativePeriodDate, @RequestParam( required = false ) String userOrgUnit, @RequestParam( required = false ) String columns, @RequestParam( required = false ) String rows, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria, preAggregationMeasureCriteria, skipMeta, skipData, 
skipRounding, completedOnly, hierarchyMeta, ignoreLimit, hideEmptyRows, showHierarchy, includeNumDen, displayProperty, outputIdScheme, inputIdScheme, false, approvalLevel, relativePeriodDate, userOrgUnit, apiVersion ); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_HTML, CacheStrategy.RESPECT_SYSTEM_SETTING, null, false, params.getLatestEndDate() ); Grid grid = analyticsService.getAggregatedDataValues( params, getItemsFromParam( columns ), getItemsFromParam( rows ) ); GridUtils.toHtmlCss( grid, response.getWriter() ); } @RequestMapping( value = RESOURCE_PATH + ".csv", method = RequestMethod.GET ) public void getCsv( @RequestParam Set<String> dimension, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) AggregationType aggregationType, @RequestParam( required = false ) String measureCriteria, @RequestParam( required = false ) String preAggregationMeasureCriteria, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean skipRounding, @RequestParam( required = false ) boolean completedOnly, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean ignoreLimit, @RequestParam( required = false ) boolean hideEmptyRows, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) boolean includeNumDen, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, @RequestParam( required = false ) String approvalLevel, @RequestParam( required = false ) Date relativePeriodDate, @RequestParam( required = false ) String userOrgUnit, @RequestParam( required = false ) String columns, @RequestParam( required = false ) String rows, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams 
params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria, preAggregationMeasureCriteria, skipMeta, skipData, skipRounding, completedOnly, hierarchyMeta, ignoreLimit, hideEmptyRows, showHierarchy, includeNumDen, displayProperty, outputIdScheme, inputIdScheme, false, approvalLevel, relativePeriodDate, userOrgUnit, apiVersion ); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_CSV, CacheStrategy.RESPECT_SYSTEM_SETTING, "data.csv", true, params.getLatestEndDate() ); Grid grid = analyticsService.getAggregatedDataValues( params, getItemsFromParam( columns ), getItemsFromParam( rows ) ); GridUtils.toCsv( grid, response.getWriter() ); } @RequestMapping( value = RESOURCE_PATH + ".xls", method = RequestMethod.GET ) public void getXls( @RequestParam Set<String> dimension, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) AggregationType aggregationType, @RequestParam( required = false ) String measureCriteria, @RequestParam( required = false ) String preAggregationMeasureCriteria, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean skipRounding, @RequestParam( required = false ) boolean completedOnly, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean ignoreLimit, @RequestParam( required = false ) boolean hideEmptyRows, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) boolean includeNumDen, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, @RequestParam( required = false ) String approvalLevel, @RequestParam( required = false ) Date relativePeriodDate, @RequestParam( required = false ) String userOrgUnit, @RequestParam( required = false ) String columns, 
@RequestParam( required = false ) String rows, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria, preAggregationMeasureCriteria, skipMeta, skipData, skipRounding, completedOnly, hierarchyMeta, ignoreLimit, hideEmptyRows, showHierarchy, includeNumDen, displayProperty, outputIdScheme, inputIdScheme, false, approvalLevel, relativePeriodDate, userOrgUnit, apiVersion ); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_EXCEL, CacheStrategy.RESPECT_SYSTEM_SETTING, "data.xls", true, params.getLatestEndDate() ); Grid grid = analyticsService.getAggregatedDataValues( params, getItemsFromParam( columns ), getItemsFromParam( rows ) ); GridUtils.toXls( grid, response.getOutputStream() ); } @RequestMapping( value = RESOURCE_PATH + ".jrxml", method = RequestMethod.GET ) public void getJrxml( @RequestParam Set<String> dimension, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) AggregationType aggregationType, @RequestParam( required = false ) String measureCriteria, @RequestParam( required = false ) String preAggregationMeasureCriteria, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean skipRounding, @RequestParam( required = false ) boolean completedOnly, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean ignoreLimit, @RequestParam( required = false ) boolean hideEmptyRows, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) boolean includeNumDen, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, @RequestParam( required = false ) OutputFormat outputFormat, 
@RequestParam( required = false ) Integer approvalLevel, @RequestParam( required = false ) Date relativePeriodDate, @RequestParam( required = false ) String userOrgUnit, @RequestParam( required = false ) String columns, @RequestParam( required = false ) String rows, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, null, null, null, true, false, false, false, false, false, false, false, false, null, null, null, false, null, null, null, apiVersion ); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_XML, CacheStrategy.RESPECT_SYSTEM_SETTING, "data.jrxml", false, params.getLatestEndDate() ); Grid grid = analyticsService.getAggregatedDataValues( params ); GridUtils.toJrxml( grid, null, response.getWriter() ); } @RequestMapping( value = RESOURCE_PATH + "/debug/sql", method = RequestMethod.GET, produces = { "text/html", "text/plain" } ) public @ResponseBody String getDebugSql( @RequestParam Set<String> dimension, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) AggregationType aggregationType, @RequestParam( required = false ) String measureCriteria, @RequestParam( required = false ) String preAggregationMeasureCriteria, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean skipRounding, @RequestParam( required = false ) boolean completedOnly, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean ignoreLimit, @RequestParam( required = false ) boolean hideEmptyRows, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) boolean includeNumDen, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, 
@RequestParam( required = false ) String approvalLevel, @RequestParam( required = false ) Date relativePeriodDate, @RequestParam( required = false ) String userOrgUnit, @RequestParam( required = false ) String columns, @RequestParam( required = false ) String rows, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria, preAggregationMeasureCriteria, skipMeta, skipData, skipRounding, completedOnly, hierarchyMeta, ignoreLimit, hideEmptyRows, showHierarchy, includeNumDen, displayProperty, outputIdScheme, inputIdScheme, false, approvalLevel, relativePeriodDate, userOrgUnit, apiVersion ); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_TEXT, CacheStrategy.NO_CACHE, "debug.sql", false, params.getLatestEndDate() ); return AnalyticsUtils.getDebugDataSql( params ); } // ------------------------------------------------------------------------- // Raw data // ------------------------------------------------------------------------- @RequestMapping( value = RESOURCE_PATH + RAW_DATA_PATH + ".json", method = RequestMethod.GET ) public @ResponseBody Grid getRawDataJson( @RequestParam Set<String> dimension, @RequestParam( required = false ) Date startDate, @RequestParam( required = false ) Date endDate, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, @RequestParam( required = false ) String userOrgUnit, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams params = dataQueryService.getFromUrl( dimension, null, 
null, null, null, skipMeta, skipData, false, false, hierarchyMeta, false, false, showHierarchy, false, displayProperty, outputIdScheme, inputIdScheme, false, null, null, userOrgUnit, apiVersion ); params = DataQueryParams.newBuilder( params ) .withStartDate( startDate ) .withEndDate( endDate ).build(); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_JSON, CacheStrategy.RESPECT_SYSTEM_SETTING, null, false, params.getLatestEndDate() ); return analyticsService.getRawDataValues( params ); } @RequestMapping( value = RESOURCE_PATH + RAW_DATA_PATH + ".csv", method = RequestMethod.GET ) public void getRawDataCsv( @RequestParam Set<String> dimension, @RequestParam( required = false ) Date startDate, @RequestParam( required = false ) Date endDate, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, @RequestParam( required = false ) String userOrgUnit, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams params = dataQueryService.getFromUrl( dimension, null, null, null, null, skipMeta, skipData, false, false, hierarchyMeta, false, false, showHierarchy, false, displayProperty, outputIdScheme, inputIdScheme, false, null, null, userOrgUnit, apiVersion ); params = DataQueryParams.newBuilder( params ) .withStartDate( startDate ) .withEndDate( endDate ).build(); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_CSV, CacheStrategy.RESPECT_SYSTEM_SETTING, null, false, params.getLatestEndDate() ); Grid grid = analyticsService.getRawDataValues( params ); GridUtils.toCsv( grid, response.getWriter() ); } // 
------------------------------------------------------------------------- // Data value set // ------------------------------------------------------------------------- @RequestMapping( value = RESOURCE_PATH + DATA_VALUE_SET_PATH + ".xml", method = RequestMethod.GET ) public @ResponseBody DataValueSet getDataValueSetXml( @RequestParam Set<String> dimension, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) AggregationType aggregationType, @RequestParam( required = false ) String measureCriteria, @RequestParam( required = false ) String preAggregationMeasureCriteria, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean skipRounding, @RequestParam( required = false ) boolean completedOnly, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean ignoreLimit, @RequestParam( required = false ) boolean hideEmptyRows, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) boolean includeNumDen, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, @RequestParam( required = false ) boolean duplicatesOnly, @RequestParam( required = false ) String approvalLevel, @RequestParam( required = false ) Date relativePeriodDate, @RequestParam( required = false ) String userOrgUnit, @RequestParam( required = false ) String columns, @RequestParam( required = false ) String rows, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria, preAggregationMeasureCriteria, skipMeta, skipData, skipRounding, completedOnly, hierarchyMeta, ignoreLimit, hideEmptyRows, showHierarchy, includeNumDen, displayProperty, 
outputIdScheme, inputIdScheme, duplicatesOnly, approvalLevel, relativePeriodDate, userOrgUnit, apiVersion ); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_XML, CacheStrategy.RESPECT_SYSTEM_SETTING, null, false, params.getLatestEndDate() ); return analyticsService.getAggregatedDataValueSet( params ); } @RequestMapping( value = RESOURCE_PATH + DATA_VALUE_SET_PATH + ".json", method = RequestMethod.GET ) public @ResponseBody DataValueSet getDataValueSetJson( @RequestParam Set<String> dimension, @RequestParam( required = false ) Set<String> filter, @RequestParam( required = false ) AggregationType aggregationType, @RequestParam( required = false ) String measureCriteria, @RequestParam( required = false ) String preAggregationMeasureCriteria, @RequestParam( required = false ) boolean skipMeta, @RequestParam( required = false ) boolean skipData, @RequestParam( required = false ) boolean skipRounding, @RequestParam( required = false ) boolean completedOnly, @RequestParam( required = false ) boolean hierarchyMeta, @RequestParam( required = false ) boolean ignoreLimit, @RequestParam( required = false ) boolean hideEmptyRows, @RequestParam( required = false ) boolean showHierarchy, @RequestParam( required = false ) boolean includeNumDen, @RequestParam( required = false ) DisplayProperty displayProperty, @RequestParam( required = false ) IdScheme outputIdScheme, @RequestParam( required = false ) IdScheme inputIdScheme, @RequestParam( required = false ) boolean duplicatesOnly, @RequestParam( required = false ) String approvalLevel, @RequestParam( required = false ) Date relativePeriodDate, @RequestParam( required = false ) String userOrgUnit, @RequestParam( required = false ) String columns, @RequestParam( required = false ) String rows, DhisApiVersion apiVersion, Model model, HttpServletResponse response ) throws Exception { DataQueryParams params = dataQueryService.getFromUrl( dimension, filter, aggregationType, measureCriteria, 
preAggregationMeasureCriteria, skipMeta, skipData, skipRounding, completedOnly, hierarchyMeta, ignoreLimit, hideEmptyRows, showHierarchy, includeNumDen, displayProperty, outputIdScheme, inputIdScheme, duplicatesOnly, approvalLevel, relativePeriodDate, userOrgUnit, apiVersion ); contextUtils.configureAnalyticsResponse( response, ContextUtils.CONTENT_TYPE_JSON, CacheStrategy.RESPECT_SYSTEM_SETTING, null, false, params.getLatestEndDate() ); return analyticsService.getAggregatedDataValueSet( params ); } }
/* * Copyright 2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.integtests.fixtures; import com.google.common.base.Throwables; import org.junit.runner.Description; import org.junit.runner.RunWith; import org.junit.runner.Runner; import org.junit.runner.manipulation.Filter; import org.junit.runner.manipulation.Filterable; import org.junit.runner.manipulation.NoTestsRemainException; import org.junit.runner.notification.Failure; import org.junit.runner.notification.RunListener; import org.junit.runner.notification.RunNotifier; import org.junit.runners.BlockJUnit4ClassRunner; import org.junit.runners.Suite; import org.junit.runners.model.InitializationError; import org.junit.runners.model.RunnerBuilder; import javax.annotation.Nullable; import java.lang.annotation.Annotation; import java.lang.reflect.InvocationTargetException; import java.util.*; /** * A base class for those test runners which execute a test multiple times. 
 */
public abstract class AbstractMultiTestRunner extends Runner implements Filterable {
    protected final Class<?> target;
    // Populated lazily by initExecutions(); each Execution is one pass over the test class.
    private final List<Execution> executions = new ArrayList<Execution>();
    // Cached composite description; invalidated when a filter is applied.
    private Description description;
    // Description of the unmodified target class, used as a template for each execution.
    private Description templateDescription;

    protected AbstractMultiTestRunner(Class<?> target) {
        this.target = target;
    }

    @Override
    public Description getDescription() {
        initDescription();
        return description;
    }

    @Override
    public void run(RunNotifier notifier) {
        initDescription();
        for (Execution execution : executions) {
            execution.run(notifier);
        }
    }

    /**
     * Forwards the filter to every execution, then drops the cached description
     * so it is rebuilt on the next request.
     */
    public void filter(Filter filter) throws NoTestsRemainException {
        initExecutions();
        for (Execution execution : executions) {
            execution.filter(filter);
        }
        invalidateDescription();
    }

    // Builds the template description once, then asks the subclass to register
    // its executions and initializes each of them against the template.
    private void initExecutions() {
        if (executions.isEmpty()) {
            try {
                Runner descriptionProvider = createRunnerFor(Arrays.asList(target), Collections.<Filter>emptyList());
                templateDescription = descriptionProvider.getDescription();
            } catch (InitializationError initializationError) {
                // NOTE(review): Throwables.propagate is deprecated in recent Guava;
                // consider wrapping explicitly in a RuntimeException instead.
                throw Throwables.propagate(initializationError);
            }
            createExecutions();
            for (Execution execution : executions) {
                execution.init(target, templateDescription);
            }
        }
    }

    private void initDescription() {
        initExecutions();
        if (description == null) {
            description = Description.createSuiteDescription(target);
            for (Execution execution : executions) {
                execution.addDescriptions(description);
            }
        }
    }

    private void invalidateDescription() {
        description = null;
        templateDescription = null;
    }

    /** Subclasses register their executions here via {@link #add(Execution)}. */
    protected abstract void createExecutions();

    protected void add(Execution execution) {
        executions.add(execution);
    }

    // Creates a Suite over the target classes. A class annotated with @RunWith
    // (anywhere in its hierarchy) runs under its declared runner, unless that
    // runner is itself an AbstractMultiTestRunner (which would recurse);
    // otherwise a plain BlockJUnit4ClassRunner is used.
    private static Runner createRunnerFor(List<? extends Class<?>> targetClasses, final List<Filter> filters) throws InitializationError {
        RunnerBuilder runnerBuilder = new RunnerBuilder() {
            @Override
            public Runner runnerForClass(Class<?> testClass) throws Throwable {
                for (Class<?> candidate = testClass; candidate != null; candidate = candidate.getSuperclass()) {
                    RunWith runWith = candidate.getAnnotation(RunWith.class);
                    if (runWith != null && !AbstractMultiTestRunner.class.isAssignableFrom(runWith.value())) {
                        try {
                            // Assumes the runner's first public constructor takes the test class
                            // — the standard JUnit runner constructor convention.
                            Runner r = (Runner) runWith.value().getConstructors()[0].newInstance(testClass);
                            return filter(r);
                        } catch (InvocationTargetException e) {
                            // Unwrap to surface the runner's own failure.
                            throw e.getTargetException();
                        }
                    }
                }
                return filter(new BlockJUnit4ClassRunner(testClass));
            }

            //we need to filter at the level child runners because the suite is not doing the right thing here
            private Runner filter(Runner r) {
                for (Filter filter : filters) {
                    try {
                        ((Filterable)r).filter(filter);
                    } catch (NoTestsRemainException e) {
                        //ignore - an empty child runner is acceptable here
                    }
                }
                return r;
            }
        };
        return new Suite(runnerBuilder, targetClasses.toArray(new Class<?>[targetClasses.size()]));
    }

    /**
     * One pass over the target test class. Tracks which tests are enabled or
     * disabled for this pass and maps the runner's descriptions to
     * execution-specific ones (suffixed with the execution's display name).
     */
    protected static abstract class Execution implements Filterable {
        protected Class<?> target;
        private Description templateDescription;
        // Maps original (template) descriptions to this execution's renamed descriptions.
        private final Map<Description, Description> descriptionTranslations = new HashMap<Description, Description>();
        // These sets hold the ORIGINAL (source) descriptions added in map().
        private final Set<Description> enabledTests = new LinkedHashSet<Description>();
        private final Set<Description> disabledTests = new LinkedHashSet<Description>();
        private final List<Filter> filters = new LinkedList<Filter>();

        final void init(Class<?> target, Description templateDescription) {
            this.target = target;
            this.templateDescription = templateDescription;
        }

        private Runner createExecutionRunner() throws InitializationError {
            List<? extends Class<?>> targetClasses = loadTargetClasses();
            return createRunnerFor(targetClasses, filters);
        }

        final void addDescriptions(Description parent) {
            map(templateDescription, parent);
        }

        final void run(final RunNotifier notifier) {
            // A nested notifier lets us translate descriptions and inject
            // before()/after() hooks without touching the caller's notifier.
            RunNotifier nested = new RunNotifier();
            NestedRunListener nestedListener = new NestedRunListener(notifier);
            nested.addListener(nestedListener);
            try {
                runEnabledTests(nested);
            } finally {
                nestedListener.cleanup();
            }
            // Report disabled tests as started-then-ignored so they still appear in results.
            for (Description disabledTest : disabledTests) {
                nested.fireTestStarted(disabledTest);
                nested.fireTestIgnored(disabledTest);
            }
        }

        private void runEnabledTests(RunNotifier nested) {
            if (enabledTests.isEmpty()) {
                return;
            }
            Runner runner;
            try {
                runner = createExecutionRunner();
            } catch (Throwable t) {
                // Fall back to a runner that reports the construction failure as a test failure.
                runner = new CannotExecuteRunner(getDisplayName(), target, t);
            }
            try {
                if (!disabledTests.isEmpty()) {
                    ((Filterable) runner).filter(new Filter() {
                        @Override
                        public boolean shouldRun(Description description) {
                            return !disabledTests.contains(description);
                        }

                        @Override
                        public String describe() {
                            return "disabled tests";
                        }
                    });
                }
            } catch (NoTestsRemainException e) {
                // Everything was filtered out; nothing to run.
                return;
            }
            runner.run(nested);
        }

        // Returns the execution-specific description for a template description,
        // or the description itself if no translation was recorded.
        private Description translateDescription(Description description) {
            return descriptionTranslations.containsKey(description) ? descriptionTranslations.get(description) : description;
        }

        public void filter(Filter filter) throws NoTestsRemainException {
            filters.add(filter);
            for (Map.Entry<Description, Description> entry : descriptionTranslations.entrySet()) {
                if (!filter.shouldRun(entry.getKey())) {
                    // NOTE(review): enabledTests/disabledTests are populated with the
                    // SOURCE descriptions (entry keys) in map(), yet the removal here
                    // uses the mapped values — verify this asymmetry is intentional.
                    enabledTests.remove(entry.getValue());
                    disabledTests.remove(entry.getValue());
                }
            }
        }

        /** Hook invoked once before the first test of this execution starts. */
        protected void before() {
        }

        /** Hook invoked once after the execution finishes (only if before() ran). */
        protected void after() {
        }

        // Recursively mirrors the template description tree under 'parent',
        // renaming test methods to "<method> [<displayName>](<class>)" and
        // classifying each as enabled or disabled via isTestEnabled().
        private void map(Description source, Description parent) {
            for (Description child : source.getChildren()) {
                Description mappedChild;
                if (child.getMethodName() != null) {
                    mappedChild = Description.createSuiteDescription(String.format("%s [%s](%s)", child.getMethodName(), getDisplayName(), child.getClassName()));
                    parent.addChild(mappedChild);
                    if (!isTestEnabled(new TestDescriptionBackedTestDetails(source, child))) {
                        disabledTests.add(child);
                    } else {
                        enabledTests.add(child);
                    }
                } else {
                    mappedChild = Description.createSuiteDescription(child.getClassName());
                }
                descriptionTranslations.put(child, mappedChild);
                map(child, parent);
            }
        }

        /**
         * Returns a display name for this execution. Used in the JUnit descriptions for test execution.
         */
        protected abstract String getDisplayName();

        /**
         * Returns true if the given test should be executed, false if it should be ignored. Default is true.
         */
        protected boolean isTestEnabled(TestDetails testDetails) {
            return true;
        }

        /**
         * Checks that this execution can be executed, throwing an exception if not.
         */
        protected void assertCanExecute() {
        }

        /**
         * Loads the target classes for this execution. Default is the target class that this runner was constructed with.
         */
        protected List<? extends Class<?>> loadTargetClasses() {
            return Collections.singletonList(target);
        }

        // A stand-in runner used when the real execution runner could not be
        // created; reports the stored failure as a single failed "test".
        private static class CannotExecuteRunner extends Runner {
            private final Description description;
            private final Throwable failure;

            public CannotExecuteRunner(String displayName, Class<?> testClass, Throwable failure) {
                description = Description.createSuiteDescription(String.format("%s(%s)", displayName, testClass.getName()));
                this.failure = failure;
            }

            @Override
            public Description getDescription() {
                return description;
            }

            @Override
            public void run(RunNotifier notifier) {
                Description description = getDescription();
                notifier.fireTestStarted(description);
                notifier.fireTestFailure(new Failure(description, failure));
                notifier.fireTestFinished(description);
            }
        }

        // Relays events from the nested runner to the outer notifier,
        // translating descriptions and driving the before()/after() hooks.
        private class NestedRunListener extends RunListener {
            private final RunNotifier notifier;
            boolean started;
            boolean complete;

            public NestedRunListener(RunNotifier notifier) {
                this.notifier = notifier;
            }

            @Override
            public void testStarted(Description description) {
                Description translated = translateDescription(description);
                notifier.fireTestStarted(translated);
                // Run the one-time setup lazily on the first started test.
                if (!started && !complete) {
                    try {
                        assertCanExecute();
                        started = true;
                        before();
                    } catch (Throwable t) {
                        // Surface setup failures against the current test.
                        notifier.fireTestFailure(new Failure(translated, t));
                    }
                }
            }

            @Override
            public void testFailure(Failure failure) {
                Description translated = translateDescription(failure.getDescription());
                notifier.fireTestFailure(new Failure(translated, failure.getException()));
            }

            @Override
            public void testAssumptionFailure(Failure failure) {
                Description translated = translateDescription(failure.getDescription());
                notifier.fireTestAssumptionFailed(new Failure(translated, failure.getException()));
            }

            @Override
            public void testIgnored(Description description) {
                Description translated = translateDescription(description);
                notifier.fireTestIgnored(translated);
            }

            @Override
            public void testFinished(Description description) {
                Description translated = translateDescription(description);
                notifier.fireTestFinished(translated);
            }

            public void cleanup() {
                if (started) {
                    after();
                }
                // Prevent further tests (ignored) from triggering start actions
                complete = true;
            }
        }
    }

    public interface TestDetails {
        /**
         * Locates the given annotation for the test. May be inherited from test class.
         */
        @Nullable
        <A extends Annotation> A getAnnotation(Class<A> type);
    }

    // TestDetails backed by a JUnit Description pair: the test itself plus its
    // parent (class-level) description, consulted as an annotation fallback.
    private static class TestDescriptionBackedTestDetails implements TestDetails {
        private final Description parent;
        private final Description test;

        private TestDescriptionBackedTestDetails(Description parent, Description test) {
            this.parent = parent;
            this.test = test;
        }

        @Override
        public String toString() {
            return test.toString();
        }

        public <A extends Annotation> A getAnnotation(Class<A> type) {
            A annotation = test.getAnnotation(type);
            if (annotation != null) {
                return annotation;
            }
            // Fall back to the class-level annotation.
            return parent.getAnnotation(type);
        }
    }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.refactoring.introduceVariable; import com.intellij.codeInsight.CodeInsightUtil; import com.intellij.codeInsight.completion.JavaCompletionUtil; import com.intellij.codeInsight.daemon.impl.quickfix.AddNewArrayExpressionFix; import com.intellij.codeInsight.highlighting.HighlightManager; import com.intellij.codeInsight.lookup.LookupManager; import com.intellij.codeInsight.unwrap.ScopeHighlighter; import com.intellij.codeInspection.RemoveRedundantTypeArgumentsUtil; import com.intellij.featureStatistics.FeatureUsageTracker; import com.intellij.featureStatistics.ProductivityFeatureNames; import com.intellij.ide.util.PropertiesComponent; import com.intellij.java.JavaBundle; import com.intellij.java.refactoring.JavaRefactoringBundle; import com.intellij.lang.LanguageRefactoringSupport; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.lang.refactoring.RefactoringSupportProvider; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.CommandProcessor; import com.intellij.openapi.diagnostic.Attachment; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.editor.SelectionModel; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.wm.WindowManager; import com.intellij.psi.*; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import 
com.intellij.psi.codeStyle.JavaCodeStyleSettings; import com.intellij.psi.codeStyle.SuggestedNameInfo; import com.intellij.psi.codeStyle.VariableKind; import com.intellij.psi.impl.PsiDiamondTypeUtil; import com.intellij.psi.impl.source.jsp.jspJava.JspCodeBlock; import com.intellij.psi.impl.source.jsp.jspJava.JspHolderMethod; import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil; import com.intellij.psi.impl.source.tree.java.ReplaceExpressionUtil; import com.intellij.psi.search.searches.ReferencesSearch; import com.intellij.psi.util.*; import com.intellij.refactoring.*; import com.intellij.refactoring.chainCall.ChainCallExtractor; import com.intellij.refactoring.introduce.inplace.AbstractInplaceIntroducer; import com.intellij.refactoring.introduce.inplace.OccurrencesChooser; import com.intellij.refactoring.introduce.inplace.OccurrencesChooser.ReplaceChoice; import com.intellij.refactoring.introduceField.ElementToWorkOn; import com.intellij.refactoring.listeners.RefactoringEventData; import com.intellij.refactoring.listeners.RefactoringEventListener; import com.intellij.refactoring.ui.TypeSelectorManagerImpl; import com.intellij.refactoring.util.CommonRefactoringUtil; import com.intellij.refactoring.util.RefactoringUIUtil; import com.intellij.refactoring.util.RefactoringUtil; import com.intellij.refactoring.util.occurrences.ExpressionOccurrenceManager; import com.intellij.refactoring.util.occurrences.NotInSuperCallOccurrenceFilter; import com.intellij.util.IncorrectOperationException; import com.intellij.util.Processor; import com.intellij.util.SlowOperations; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.MultiMap; import com.siyeh.ig.psiutils.CommentTracker; import com.siyeh.ig.psiutils.VariableAccessUtils; import com.siyeh.ipp.psiutils.ErrorUtil; import one.util.streamex.EntryStream; import one.util.streamex.StreamEx; import org.jetbrains.annotations.*; import java.util.*; import java.util.function.Consumer; 
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * @author dsl
 */
public abstract class IntroduceVariableBase extends IntroduceHandlerBase {
  /**
   * Describes which occurrences of the selected expression should be replaced by
   * the new variable (none / all-but-writes / all, optionally as a stream-chain
   * extraction).
   */
  public static class JavaReplaceChoice implements OccurrencesChooser.BaseReplaceChoice {
    public static final JavaReplaceChoice NO = new JavaReplaceChoice(ReplaceChoice.NO, null, false);
    public static final JavaReplaceChoice NO_WRITE = new JavaReplaceChoice(ReplaceChoice.NO_WRITE, null, false);
    public static final JavaReplaceChoice ALL = new JavaReplaceChoice(ReplaceChoice.ALL, null, false);

    private final @Nls String myDescription; // custom UI text; null means delegate to myChoice
    private final boolean myChain;
    private final ReplaceChoice myChoice;

    JavaReplaceChoice(@NotNull ReplaceChoice choice, @Nullable @Nls String description, boolean chain) {
      myChoice = choice;
      myDescription = description;
      myChain = chain;
    }

    @Override
    public boolean isAll() {
      return myChoice.isAll();
    }

    public boolean isChain() {
      return myChain;
    }

    /** Returns the occurrences this choice selects from the given manager. */
    public PsiExpression[] filter(ExpressionOccurrenceManager manager) {
      switch (myChoice) {
        case NO:
          return new PsiExpression[]{manager.getMainOccurence()};
        case NO_WRITE:
          // Skip occurrences used as assignment targets.
          return StreamEx.of(manager.getOccurrences()).filter(expr -> !PsiUtil.isAccessedForWriting(expr)).toArray(PsiExpression.EMPTY_ARRAY);
        case ALL:
          return manager.getOccurrences();
        default:
          throw new IllegalStateException("Unexpected value: " + myChoice);
      }
    }

    @Override
    public String formatDescription(int occurrencesCount) {
      return myDescription == null ? myChoice.formatDescription(occurrencesCount) : myDescription;
    }

    @Override
    public String toString() {
      // For debug/test purposes
      return formatDescription(0);
    }

    /** A variant of ALL restricted to occurrences lexically inside {@code parent}. */
    @NotNull
    private static IntroduceVariableBase.JavaReplaceChoice allOccurrencesInside(PsiElement parent,
                                                                                int sameKeywordCount,
                                                                                String finalKeyword) {
      return new JavaReplaceChoice(ReplaceChoice.ALL, null, false) {
        @Override
        public PsiExpression[] filter(ExpressionOccurrenceManager manager) {
          return StreamEx.of(manager.getOccurrences())
            .filter(expr -> PsiTreeUtil.isAncestor(parent, getPhysicalElement(expr), true))
            .toArray(PsiExpression.EMPTY_ARRAY);
        }

        @Override
        public String formatDescription(int occurrencesCount) {
          return JavaRefactoringBundle.message("replace.occurrences.inside.statement", occurrencesCount, finalKeyword, sameKeywordCount);
        }
      };
    }
  }

  private static final Logger LOG = Logger.getInstance(IntroduceVariableBase.class);
  @NonNls private static final String PREFER_STATEMENTS_OPTION = "introduce.variable.prefer.statements";
  @NonNls private static final String REFACTORING_ID = "refactoring.extractVariable";

  // Marks an expression whose extraction requires adding parentheses (semantics warning).
  public static final Key<Boolean> NEED_PARENTHESIS = Key.create("NEED_PARENTHESIS");

  private JavaVariableInplaceIntroducer myInplaceIntroducer;

  /** Suggests names for the new variable, using the expression itself as the anchor. */
  public static SuggestedNameInfo getSuggestedName(@Nullable PsiType type, @NotNull final PsiExpression expression) {
    return getSuggestedName(type, expression, expression);
  }

  /**
   * Suggests unique variable names for {@code expression} of {@code type},
   * resolving uniqueness against the scope around {@code anchor}.
   */
  public static SuggestedNameInfo getSuggestedName(@Nullable PsiType type,
                                                   @NotNull final PsiExpression expression,
                                                   final PsiElement anchor) {
    final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(expression.getProject());
    final SuggestedNameInfo nameInfo = codeStyleManager.suggestVariableName(VariableKind.LOCAL_VARIABLE, null, expression, type);
    final String[] strings = JavaCompletionUtil
      .completeVariableNameForRefactoring(codeStyleManager, type, VariableKind.LOCAL_VARIABLE, nameInfo);
    final SuggestedNameInfo.Delegate delegate = new SuggestedNameInfo.Delegate(strings, nameInfo);
    return codeStyleManager.suggestUniqueVariableName(delegate, anchor, true);
  }

  /**
   * Entry point for the refactoring action. With no selection, tries to derive one
   * from the caret (whole-line expression or a chooser over enclosing expressions);
   * with a selection, extracts directly.
   */
  @Override
  public void invoke(@NotNull final Project project, final Editor editor, final PsiFile file, DataContext dataContext) {
    final SelectionModel selectionModel = editor.getSelectionModel();
    if (!selectionModel.hasSelection()) {
      final int offset = editor.getCaretModel().getOffset();
      Pair<TextRange, List<PsiExpression>> rangeAndExpressions = getExpressionsAndSelectionRange(project, editor, file, offset);
      TextRange suggestedSelection = rangeAndExpressions.getFirst();
      if (suggestedSelection != null) {
        selectionModel.setSelection(suggestedSelection.getStartOffset(), suggestedSelection.getEndOffset());
      }
      else {
        // Ambiguous caret position: let the user pick among the enclosing expressions.
        final PsiElement[] statementsInRange = findStatementsAtOffset(editor, file, offset);
        List<PsiExpression> expressions = rangeAndExpressions.getSecond();
        IntroduceTargetChooser.showChooser(editor, expressions,
                                           new Pass<>() {
                                             @Override
                                             public void pass(final PsiExpression selectedValue) {
                                               invoke(project, editor, file, selectedValue.getTextRange().getStartOffset(), selectedValue.getTextRange().getEndOffset());
                                             }
                                           },
                                           new PsiExpressionTrimRenderer.RenderFunction(),
                                           RefactoringBundle.message("introduce.target.chooser.expressions.title"),
                                           preferredSelection(statementsInRange, expressions), ScopeHighlighter.NATURAL_RANGER);
        return;
      }
    }
    if (invoke(project, editor, file, selectionModel.getSelectionStart(), selectionModel.getSelectionEnd()) &&
        LookupManager.getActiveLookup(editor) == null) {
      selectionModel.removeSelection();
    }
  }

  /**
   * Computes a selection range to use for the caret position, or (null, candidates)
   * when a chooser is needed. First tries the whole caret line as one expression,
   * then collects enclosing extractable expressions.
   */
  @NotNull
  public static Pair<TextRange, List<PsiExpression>> getExpressionsAndSelectionRange(@NotNull final Project project,
                                                                                     final Editor editor,
                                                                                     final PsiFile file,
                                                                                     int offset) {
    final PsiElement[] statementsInRange = findStatementsAtOffset(editor, file, offset);
    Document document = editor.getDocument();
    int line = document.getLineNumber(offset);
    TextRange lineRange = TextRange.create(document.getLineStartOffset(line),
Math.min(document.getLineEndOffset(line) + 1, document.getTextLength()));

    //try line selection
    if (statementsInRange.length == 1 && selectLineAtCaret(offset, statementsInRange)) {
      final PsiExpression expressionInRange = findExpressionInRange(project, file, lineRange.getStartOffset(), lineRange.getEndOffset());
      if (expressionInRange != null && getErrorMessage(expressionInRange) == null) {
        return Pair.create(lineRange, Collections.singletonList(expressionInRange));
      }
    }

    // Keep only expressions that live inside a statement or a field initializer.
    final List<PsiExpression> expressions = ContainerUtil
      .filter(collectExpressions(file, editor, offset), expression -> RefactoringUtil.getParentStatement(expression, false) != null ||
                                                                      PsiTreeUtil.getParentOfType(expression, PsiField.class, true, PsiStatement.class) != null);
    if (expressions.isEmpty()) {
      return Pair.create(lineRange, Collections.emptyList());
    }
    else if (!isChooserNeeded(expressions)) {
      return Pair.create(expressions.get(0).getTextRange(), expressions);
    }
    else {
      return Pair.create(null, expressions);
    }
  }

  /** A single candidate needs the chooser only when it is an anonymous-class creation. */
  public static boolean isChooserNeeded(List<? extends PsiExpression> expressions) {
    if (expressions.size() == 1) {
      final PsiExpression expression = expressions.get(0);
      return expression instanceof PsiNewExpression && ((PsiNewExpression)expression).getAnonymousClass() != null;
    }
    return true;
  }

  /** True when the caret is outside the statement's own range, or line-mode is preferred. */
  public static boolean selectLineAtCaret(int offset, PsiElement[] statementsInRange) {
    TextRange range = statementsInRange[0].getTextRange();
    if (statementsInRange[0] instanceof PsiExpressionStatement) {
      range = ((PsiExpressionStatement)statementsInRange[0]).getExpression().getTextRange();
    }

    return range.getStartOffset() > offset ||
           range.getEndOffset() <= offset ||
           isPreferStatements();
  }

  /**
   * Index to preselect in the expression chooser: the statement's own expression
   * for an incomplete statement, the second entry when the innermost expression is
   * a plain local-variable reference, otherwise no preselection (-1).
   */
  public static int preferredSelection(PsiElement[] statementsInRange, List<? extends PsiExpression> expressions) {
    int selection;
    if (statementsInRange.length == 1 &&
        statementsInRange[0] instanceof PsiExpressionStatement &&
        PsiUtilCore.hasErrorElementChild(statementsInRange[0])) {
      selection = expressions.indexOf(((PsiExpressionStatement)statementsInRange[0]).getExpression());
    }
    else {
      PsiExpression expression = expressions.get(0);
      if (expression instanceof PsiReferenceExpression && ((PsiReferenceExpression)expression).resolve() instanceof PsiLocalVariable) {
        selection = 1;
      }
      else {
        selection = -1;
      }
    }
    return selection;
  }

  public static boolean isPreferStatements() {
    return Boolean.valueOf(PropertiesComponent.getInstance().getBoolean(PREFER_STATEMENTS_OPTION)) || Registry.is(PREFER_STATEMENTS_OPTION, false);
  }

  public static List<PsiExpression> collectExpressions(final PsiFile file, final Editor editor, final int offset) {
    return collectExpressions(file, editor, offset, false);
  }

  public static List<PsiExpression> collectExpressions(final PsiFile file, final Editor editor, final int offset, boolean acceptVoid) {
    return collectExpressions(file, editor.getDocument(), offset, acceptVoid);
  }

  /**
   * Collects the chain of enclosing extractable expressions around {@code offset},
   * innermost first. The caret is nudged left over ';', ')', '.' and '}' so that a
   * caret just past an expression still finds it. void-typed expressions are
   * skipped unless {@code acceptVoid}.
   */
  public static List<PsiExpression> collectExpressions(final PsiFile file, final Document document, final int offset, boolean acceptVoid) {
    CharSequence text = document.getCharsSequence();
    int correctedOffset = offset;
    int textLength = document.getTextLength();
    if (offset >= textLength) {
      correctedOffset = textLength - 1;
    }
    else if (!Character.isJavaIdentifierPart(text.charAt(offset))) {
      correctedOffset--;
    }
    if (correctedOffset < 0) {
      correctedOffset = offset;
    }
    else if (!Character.isJavaIdentifierPart(text.charAt(correctedOffset))) {
      if (text.charAt(correctedOffset) == ';') {//initially caret on the end of line
        correctedOffset--;
      }
      if (correctedOffset < 0 || text.charAt(correctedOffset) != ')' && text.charAt(correctedOffset) != '.' && text.charAt(correctedOffset) != '}') {
        correctedOffset = offset;
      }
    }
    final PsiElement elementAtCaret = file.findElementAt(correctedOffset);
    final List<PsiExpression> expressions = new ArrayList<>();
    PsiExpression expression = PsiTreeUtil.getParentOfType(elementAtCaret, PsiExpression.class);
    while (expression != null) {
      if (!expressions.contains(expression) && !(expression instanceof PsiParenthesizedExpression) && !(expression instanceof PsiSuperExpression) &&
          (acceptVoid || !PsiType.VOID.equals(expression.getType()))) {
        if (isExtractable(expression)) {
          expressions.add(expression);
        }
      }
      expression = PsiTreeUtil.getParentOfType(expression, PsiExpression.class);
    }
    return expressions;
  }

  /**
   * An expression can be extracted unless it is an assignment, or a bare reference
   * to a class/package (method-call qualifiers excepted).
   */
  public static boolean isExtractable(PsiExpression expression) {
    if (expression instanceof PsiMethodReferenceExpression) {
      return true;
    }
    else if (!(expression instanceof PsiAssignmentExpression)) {
      if (!(expression instanceof PsiReferenceExpression)) {
        return true;
      }
      else {
        if (!(expression.getParent() instanceof PsiMethodCallExpression)) {
          final PsiElement resolve = ((PsiReferenceExpression)expression).resolve();
          if (!(resolve instanceof PsiClass) && !(resolve instanceof PsiPackage)) {
            return true;
          }
        }
      }
    }
    return false;
  }

  /** Statements on the caret's whole document line. */
  public static PsiElement[] findStatementsAtOffset(final Editor editor, final PsiFile file, final int offset) {
    final Document document = editor.getDocument();
    final int lineNumber = document.getLineNumber(offset);
    final int lineStart = document.getLineStartOffset(lineNumber);
    final int lineEnd = document.getLineEndOffset(lineNumber);

    return CodeInsightUtil.findStatementsInRange(file, lineStart, lineEnd);
  }

  private boolean invoke(final Project project, final Editor editor, PsiFile file, int startOffset, int endOffset) {
    FeatureUsageTracker.getInstance().triggerFeatureUsed(ProductivityFeatureNames.REFACTORING_INTRODUCE_VARIABLE);
    PsiDocumentManager.getInstance(project).commitAllDocuments();

    return invokeImpl(project, findExpressionInRange(project, file, startOffset,
tempExpr : null;
  }

  /**
   * Finds the expression covered by [startOffset, endOffset): an exact expression,
   * a lone expression/return/switch statement, or a synthesized expression for a
   * partial selection.
   */
  private static PsiExpression findExpressionInRange(Project project, PsiFile file, int startOffset, int endOffset) {
    PsiExpression tempExpr = CodeInsightUtil.findExpressionInRange(file, startOffset, endOffset);
    if (tempExpr == null) {
      PsiElement[] statements = CodeInsightUtil.findStatementsInRange(file, startOffset, endOffset);
      if (statements.length == 1) {
        if (statements[0] instanceof PsiExpressionStatement) {
          tempExpr = ((PsiExpressionStatement) statements[0]).getExpression();
        }
        else if (statements[0] instanceof PsiReturnStatement) {
          tempExpr = ((PsiReturnStatement)statements[0]).getReturnValue();
        }
        else if (statements[0] instanceof PsiSwitchStatement) {
          // Re-parse the switch as a (switch-expression) candidate and remember
          // where it came from via user data.
          PsiExpression expr = JavaPsiFacade.getElementFactory(project).createExpressionFromText(statements[0].getText(), statements[0]);
          TextRange range = statements[0].getTextRange();
          final RangeMarker rangeMarker = FileDocumentManager.getInstance().getDocument(file.getVirtualFile()).createRangeMarker(range);
          expr.putUserData(ElementToWorkOn.TEXT_RANGE, rangeMarker);
          expr.putUserData(ElementToWorkOn.PARENT, statements[0]);
          return expr;
        }
      }
    }

    if (tempExpr == null) {
      tempExpr = getSelectedExpression(project, file, startOffset, endOffset);
    }
    return isExtractable(tempExpr) ? tempExpr : null;
  }

  /**
   * @return can return NotNull value although extraction will fail: reason could be retrieved from {@link #getErrorMessage(PsiExpression)}
   */
  public static PsiExpression getSelectedExpression(final Project project, PsiFile file, int startOffset, int endOffset) {
    final InjectedLanguageManager injectedLanguageManager = InjectedLanguageManager.getInstance(project);
    PsiElement elementAtStart = file.findElementAt(startOffset);
    if (elementAtStart == null || elementAtStart instanceof PsiWhiteSpace || elementAtStart instanceof PsiComment) {
      // Snap the start past whitespace/comments.
      final PsiElement element = PsiTreeUtil.skipWhitespacesAndCommentsForward(elementAtStart);
      if (element != null) {
        startOffset = element.getTextOffset();
        elementAtStart = file.findElementAt(startOffset);
      }
      if (elementAtStart == null) {
        if (injectedLanguageManager.isInjectedFragment(file)) {
          return getSelectionFromInjectedHost(project, file, injectedLanguageManager, startOffset, endOffset);
        }
        else {
          return null;
        }
      }
      startOffset = elementAtStart.getTextOffset();
    }
    PsiElement elementAtEnd = file.findElementAt(endOffset - 1);
    if (elementAtEnd == null || elementAtEnd instanceof PsiWhiteSpace || elementAtEnd instanceof PsiComment) {
      // Snap the end back over whitespace/comments.
      elementAtEnd = PsiTreeUtil.skipWhitespacesAndCommentsBackward(elementAtEnd);
      if (elementAtEnd == null) return null;
      endOffset = elementAtEnd.getTextRange().getEndOffset();
    }

    if (endOffset <= startOffset) return null;

    PsiElement elementAt = PsiTreeUtil.findCommonParent(elementAtStart, elementAtEnd);
    if (elementAt instanceof PsiExpressionStatement) {
      return ((PsiExpressionStatement)elementAt).getExpression();
    }
    final PsiExpression containingExpression = PsiTreeUtil.getParentOfType(elementAt, PsiExpression.class, false);

    if (containingExpression != null && containingExpression == elementAtEnd && startOffset == containingExpression.getTextOffset()) {
      return containingExpression;
    }

    if (containingExpression == null || containingExpression instanceof PsiLambdaExpression) {
      if (injectedLanguageManager.isInjectedFragment(file)) {
        return getSelectionFromInjectedHost(project, file, injectedLanguageManager, startOffset, endOffset);
      }
      elementAt = null;
    }
    final PsiLiteralExpression literalExpression = PsiTreeUtil.getParentOfType(elementAt, PsiLiteralExpression.class);

    final PsiLiteralExpression startLiteralExpression = PsiTreeUtil.getParentOfType(elementAtStart, PsiLiteralExpression.class);
    final PsiLiteralExpression endLiteralExpression = PsiTreeUtil.getParentOfType(file.findElementAt(endOffset), PsiLiteralExpression.class);

    final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(project);
    String text = null;
    PsiExpression tempExpr;
    try {
      text = file.getText().subSequence(startOffset, endOffset).toString();
      String prefix = null;
      // If the selection starts/ends inside a string literal, synthesize a valid
      // string expression by adding quotes and concatenation pre/suffixes.
      if (startLiteralExpression != null) {
        final int startExpressionOffset = startLiteralExpression.getTextOffset();
        if (startOffset == startExpressionOffset + 1) {
          text = "\"" + text;
        }
        else if (startOffset > startExpressionOffset + 1){
          prefix = "\" + ";
          text = "\"" + text;
        }
      }

      String suffix = null;
      if (endLiteralExpression != null) {
        final int endExpressionOffset = endLiteralExpression.getTextOffset() + endLiteralExpression.getTextLength();
        if (endOffset == endExpressionOffset - 1) {
          text += "\"";
        }
        else if (endOffset < endExpressionOffset - 1) {
          suffix = " + \"";
          text += "\"";
        }
      }

      if (literalExpression != null && text.equals(literalExpression.getText())) return literalExpression;

      final PsiElement parent = literalExpression != null ? literalExpression : elementAt;
      tempExpr = elementFactory.createExpressionFromText(text, parent);

      if (ErrorUtil.containsDeepError(tempExpr)) return null;

      tempExpr.putUserData(ElementToWorkOn.PREFIX, prefix);
      tempExpr.putUserData(ElementToWorkOn.SUFFIX, suffix);

      final RangeMarker rangeMarker =
        FileDocumentManager.getInstance().getDocument(file.getVirtualFile()).createRangeMarker(startOffset, endOffset);
      tempExpr.putUserData(ElementToWorkOn.TEXT_RANGE, rangeMarker);

      if (parent != null) {
        tempExpr.putUserData(ElementToWorkOn.PARENT, parent);
      }
      else {
        // No expression parent: only allow the selection when it sits in a class-level
        // error element (incomplete code), and mark it as out-of-code-block.
        PsiErrorElement errorElement = elementAtStart instanceof PsiErrorElement
                                       ? (PsiErrorElement)elementAtStart
                                       : PsiTreeUtil.getNextSiblingOfType(elementAtStart, PsiErrorElement.class);
        if (errorElement == null) {
          errorElement = PsiTreeUtil.getParentOfType(elementAtStart, PsiErrorElement.class);
        }
        if (errorElement == null) return null;
        if (!(errorElement.getParent() instanceof PsiClass)) return null;
        tempExpr.putUserData(ElementToWorkOn.PARENT, errorElement);
        tempExpr.putUserData(ElementToWorkOn.OUT_OF_CODE_BLOCK, Boolean.TRUE);
      }

      final String fakeInitializer = "intellijidearulezzz";
      final int[] refIdx = new int[1];
      // Dry-run the replacement with a fake initializer to validate it type-checks.
      final PsiElement toBeExpression = createReplacement(fakeInitializer, project, prefix, suffix, parent, rangeMarker, refIdx);
      if (ErrorUtil.containsDeepError(toBeExpression)) return null;
      if (literalExpression != null && toBeExpression instanceof PsiExpression) {
        PsiType type = ((PsiExpression)toBeExpression).getType();
        if (type != null && !type.equals(literalExpression.getType())) {
          return null;
        }
      }
      else if (containingExpression != null) {
        PsiType containingExpressionType = containingExpression.getType();
        PsiType tempExprType = tempExpr.getType();
        if (containingExpressionType != null &&
            (tempExprType == null || !TypeConversionUtil.isAssignable(containingExpressionType, tempExprType))) {
          return null;
        }
      }
      final PsiReferenceExpression refExpr = PsiTreeUtil.getParentOfType(toBeExpression.findElementAt(refIdx[0]), PsiReferenceExpression.class);
      if (refExpr == null) return null;
      if (toBeExpression == refExpr && refIdx[0] > 0) {
        return null;
      }
      if (ReplaceExpressionUtil.isNeedParenthesis(refExpr.getNode(), tempExpr.getNode())) {
        tempExpr.putCopyableUserData(NEED_PARENTHESIS, Boolean.TRUE);
        return tempExpr;
      }
    }
    catch (IncorrectOperationException e) {
      // Selection isn't a parseable expression; a comma-separated argument-list
      // slice may still be extractable as a vararg array.
      if (elementAt instanceof PsiExpressionList) {
        final PsiElement parent = elementAt.getParent();
        return parent instanceof PsiCallExpression ? createArrayCreationExpression(text, startOffset, endOffset, (PsiCallExpression)parent) : null;
      }
      return null;
    }

    return tempExpr;
  }

  /** Maps the injected-fragment selection back to the host file and retries there. */
  private static PsiExpression getSelectionFromInjectedHost(Project project,
                                                            PsiFile file,
                                                            InjectedLanguageManager injectedLanguageManager, int startOffset, int endOffset) {
    final PsiLanguageInjectionHost injectionHost = injectedLanguageManager.getInjectionHost(file);
    return getSelectedExpression(project, injectionHost.getContainingFile(), injectedLanguageManager.injectedToHost(file, startOffset), injectedLanguageManager.injectedToHost(file, endOffset));
  }

  /** User-visible reason the expression cannot be extracted cleanly, or null. */
  @NlsContexts.DialogMessage
  @Nullable
  public static String getErrorMessage(PsiExpression expr) {
    final Boolean needParenthesis = expr.getCopyableUserData(NEED_PARENTHESIS);
    if (needParenthesis != null && needParenthesis.booleanValue()) {
      return JavaBundle.message("introduce.variable.change.semantics.warning");
    }
    if (expr instanceof PsiClassObjectAccessExpression && PsiUtil.hasErrorElementChild(expr)) {
      return JavaRefactoringBundle.message("selected.block.should.represent.an.expression");
    }
    return null;
  }

  /**
   * For a selection of trailing vararg arguments, builds the equivalent explicit
   * array-creation expression ("new T[]{...}"), or null when not applicable.
   */
  private static PsiExpression createArrayCreationExpression(String text, int startOffset, int endOffset, PsiCallExpression parent) {
    if (text == null || parent == null) return null;
    if (text.contains(",")) {
      PsiExpressionList argumentList = parent.getArgumentList();
      assert argumentList != null; // checked at call site
      final PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(parent.getProject());
      final JavaResolveResult resolveResult = parent.resolveMethodGenerics();
      final PsiMethod psiMethod = (PsiMethod)resolveResult.getElement();
      if (psiMethod == null || !psiMethod.isVarArgs()) return null;
      final PsiParameter[] parameters = psiMethod.getParameterList().getParameters();
      final PsiParameter varargParameter = parameters[parameters.length - 1];
      final PsiType type = varargParameter.getType();
      LOG.assertTrue(type instanceof PsiEllipsisType);
      final PsiArrayType psiType = (PsiArrayType)((PsiEllipsisType)type).toArrayType();
      final PsiExpression[] args = argumentList.getExpressions();
      final PsiSubstitutor psiSubstitutor = resolveResult.getSubstitutor();

      // The selection must start at or after the first vararg argument...
      if (args.length < parameters.length || startOffset < args[parameters.length - 1].getTextRange().getStartOffset()) return null;

      final PsiFile containingFile = parent.getContainingFile();

      // ...and both edges must align with whole arguments of this call.
      PsiElement startElement = containingFile.findElementAt(startOffset);
      while (startElement != null && startElement.getParent() != argumentList) {
        startElement = startElement.getParent();
      }
      if (!(startElement instanceof PsiExpression) || startOffset > startElement.getTextOffset()) return null;

      PsiElement endElement = containingFile.findElementAt(endOffset - 1);
      while (endElement != null && endElement.getParent() != argumentList) {
        endElement = endElement.getParent();
      }
      if (!(endElement instanceof PsiExpression) || endOffset < endElement.getTextRange().getEndOffset()) return null;

      final PsiType componentType = TypeConversionUtil.erasure(psiSubstitutor.substitute(psiType.getComponentType()));
      try {
        final PsiExpression expressionFromText =
          elementFactory.createExpressionFromText("new " + componentType.getCanonicalText() + "[]{" + text + "}", parent);
        final RangeMarker rangeMarker =
          FileDocumentManager.getInstance().getDocument(containingFile.getVirtualFile()).createRangeMarker(startOffset, endOffset);
        expressionFromText.putUserData(ElementToWorkOn.TEXT_RANGE, rangeMarker);
        expressionFromText.putUserData(ElementToWorkOn.PARENT, parent);
        return expressionFromText;
      }
      catch (IncorrectOperationException e) {
        return null;
      }
    }

    return null;
  }

  /** Candidate anchor blocks paired with all occurrences of the expression. */
  @NotNull
  public Pair<List<PsiElement>, List<PsiExpression>> getPossibleAnchorsAndOccurrences(final Project project, final PsiExpression expr) {
    OccurrencesInfo occurrencesInfo = buildOccurrencesInfo(project, expr);
    final LinkedHashMap<JavaReplaceChoice, List<PsiExpression>> occurrencesMap = occurrencesInfo.buildOccurrencesMap(expr);
    List<PsiElement> anchors = occurrencesMap.values().stream()
      .map(o -> getAnchor(o.toArray(PsiExpression.EMPTY_ARRAY)))
      .filter(Objects::nonNull)
      .flatMap(anchor -> IntroduceVariableTargetBlockChooser.getContainers(anchor, expr).stream())
      .distinct()
      .collect(Collectors.toList());
    return Pair.create(anchors, occurrencesInfo.myOccurrences);
  }

  /** Replace choices keyed by their user-visible description. */
  @NotNull
  public Map<String, JavaReplaceChoice> getPossibleReplaceChoices(final Project project, final PsiExpression expr) {
    OccurrencesInfo occurrencesInfo = buildOccurrencesInfo(project, expr);
    final LinkedHashMap<JavaReplaceChoice, List<PsiExpression>> occurrencesMap = occurrencesInfo.buildOccurrencesMap(expr);
    return occurrencesMap.entrySet().stream().collect(Collectors.toMap(
      entry -> entry.getKey().formatDescription(entry.getValue().size()),
      entry -> entry.getKey()
    ));
  }

  // NOTE(review): this overload computes the occurrences map but performs no
  // further action with `target`/`replaceChoice` — looks truncated; verify
  // against the intended caller before relying on it.
  public void invoke(final Project project,
                     final PsiExpression expr,
                     final PsiElement target,
                     final JavaReplaceChoice replaceChoice,
                     final Editor editor) {
    OccurrencesInfo info = buildOccurrencesInfo(project, expr);
    LinkedHashMap<JavaReplaceChoice, List<PsiExpression>> occurrencesMap = info.buildOccurrencesMap(expr);
  }

  @NotNull
  private OccurrencesInfo buildOccurrencesInfo(Project project, PsiExpression expr) {
    final PsiElement anchorStatement = getAnchor(expr);
    PsiElement tempContainer = checkAnchorStatement(project, null, anchorStatement);
    final ExpressionOccurrenceManager occurrenceManager = createOccurrenceManager(expr, tempContainer);
    final PsiExpression[] occurrences = occurrenceManager.getOccurrences();
    return new
OccurrencesInfo(occurrences);
  }

  /**
   * Finds the entry in {@code occurrencesMap} whose key renders the same
   * description as {@code replaceChoice}, or null when none matches.
   */
  @Nullable
  private static JavaReplaceChoice findChoice(@NotNull LinkedHashMap<JavaReplaceChoice, List<PsiExpression>> occurrencesMap,
                                              @NotNull JavaReplaceChoice replaceChoice) {
    // BUG FIX: descriptions must be compared with equals() — formatDescription()
    // builds a fresh String, so reference identity (==) practically never matched.
    // Also guard against ContainerUtil.find() returning null (no match), which
    // previously caused an NPE on .getKey(); the method is @Nullable anyway.
    Map.Entry<JavaReplaceChoice, List<PsiExpression>> found =
      ContainerUtil.find(occurrencesMap.entrySet(),
                         entry -> entry.getKey().formatDescription(0).equals(replaceChoice.formatDescription(0)));
    return found == null ? null : found.getKey();
  }

  @Override
  protected boolean invokeImpl(final Project project, final PsiExpression expr, final Editor editor) {
    return invokeImpl(project, expr, null, null, editor);
  }

  /**
   * Core driver of the refactoring: validates the selected expression (type known,
   * not void, no escaping pattern variables, ...) and then runs either the inplace
   * or the dialog-based introduction.
   */
  public boolean invokeImpl(final Project project,
                            final PsiExpression expr,
                            @Nullable PsiElement targetContainer,
                            @Nullable JavaReplaceChoice replaceChoice,
                            final Editor editor) {
    if (expr != null) {
      final String errorMessage = getErrorMessage(expr);
      if (errorMessage != null) {
        showErrorMessage(project, editor, RefactoringBundle.getCannotRefactorMessage(errorMessage));
        return false;
      }
    }

    if (expr != null && expr.getParent() instanceof PsiExpressionStatement) {
      FeatureUsageTracker.getInstance().triggerFeatureUsed("refactoring.introduceVariable.incompleteStatement");
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("expression:" + expr);
    }

    if (expr == null || !expr.isPhysical()) {
      // No (physical) expression: maybe the caret is on a previously-introduced
      // variable that can be re-assigned instead.
      if (ReassignVariableUtil.reassign(editor)) return false;
      if (expr == null) {
        String message = RefactoringBundle.getCannotRefactorMessage(JavaRefactoringBundle.message("selected.block.should.represent.an.expression"));
        showErrorMessage(project, editor, message);
        return false;
      }
    }

    String enumInSwitchError = RefactoringUtil.checkEnumConstantInSwitchLabel(expr);
    if (enumInSwitchError != null) {
      showErrorMessage(project, editor, enumInSwitchError);
      return false;
    }

    final PsiType originalType = RefactoringUtil.getTypeByExpressionWithExpectedType(expr);
    if (originalType == null || LambdaUtil.notInferredType(originalType)) {
      String message = RefactoringBundle.getCannotRefactorMessage(JavaRefactoringBundle.message("unknown.expression.type"));
      showErrorMessage(project, editor,
message);
      return false;
    }

    if (PsiType.VOID.equals(originalType)) {
      String message = RefactoringBundle.getCannotRefactorMessage(JavaRefactoringBundle.message("selected.expression.has.void.type"));
      showErrorMessage(project, editor, message);
      return false;
    }

    try {
      // Verify the inferred type can actually be spelled as a declaration.
      JavaPsiFacade.getElementFactory(project).createTypeElementFromText(
        GenericsUtil.getVariableTypeByExpressionType(originalType).getCanonicalText(), expr);
    }
    catch (IncorrectOperationException ignore) {
      String message = RefactoringBundle.getCannotRefactorMessage(JavaRefactoringBundle.message("unknown.expression.type"));
      showErrorMessage(project, editor, message);
      return false;
    }

    // Extracting the expression must not break pattern variables referenced outside it.
    for (PsiPatternVariable variable : JavaPsiPatternUtil.getExposedPatternVariables(expr)) {
      if (VariableAccessUtils.getVariableReferences(variable, variable.getDeclarationScope()).stream()
        .anyMatch(ref -> !PsiTreeUtil.isAncestor(expr, ref, true))) {
        String message = RefactoringBundle.getCannotRefactorMessage(
          JavaRefactoringBundle.message("selected.expression.introduces.pattern.variable", variable.getName()));
        showErrorMessage(project, editor, message);
        return false;
      }
    }

    final PsiElement anchorStatement = getAnchor(expr);
    PsiElement tempContainer = checkAnchorStatement(project, editor, anchorStatement);
    if (tempContainer == null) return false;

    final PsiFile file = anchorStatement.getContainingFile();
    LOG.assertTrue(file != null, "expr.getContainingFile() == null");
    final PsiElement nameSuggestionContext = editor == null ? null : file.findElementAt(editor.getCaretModel().getOffset());
    final RefactoringSupportProvider supportProvider = LanguageRefactoringSupport.INSTANCE.forContext(expr);
    final boolean isInplaceAvailableOnDataContext =
      supportProvider != null &&
      editor.getSettings().isVariableInplaceRenameEnabled() &&
      supportProvider.isInplaceIntroduceAvailable(expr, nameSuggestionContext) &&
      (!ApplicationManager.getApplication().isUnitTestMode() || isInplaceAvailableInTestMode()) &&
      !isInJspHolderMethod(expr);

    if (isInplaceAvailableOnDataContext) {
      final MultiMap<PsiElement, String> conflicts = new MultiMap<>();
      checkInLoopCondition(expr, conflicts);
      if (!conflicts.isEmpty()) {
        showErrorMessage(project, editor, StringUtil.join(conflicts.values(), "<br>"));
        return false;
      }
    }

    final ExpressionOccurrenceManager occurrenceManager = createOccurrenceManager(expr, tempContainer);
    final PsiExpression[] occurrences = occurrenceManager.getOccurrences();

    OccurrencesInfo occurrencesInfo = new OccurrencesInfo(occurrences);

    if (!CommonRefactoringUtil.checkReadOnlyStatus(project, file)) return false;

    final LinkedHashMap<JavaReplaceChoice, List<PsiExpression>> occurrencesMap = occurrencesInfo.buildOccurrencesMap(expr);

    // Callback invoked once a replace-choice is known (from caller, chooser, or null
    // meaning "ask via dialog"); performs the actual extraction.
    class IntroduceVariablePass extends Pass<JavaReplaceChoice> {
      boolean wasSucceed = true;

      @Override
      public void pass(final JavaReplaceChoice choice) {
        Consumer<JavaReplaceChoice> dialogIntroduce = c -> CommandProcessor.getInstance().executeCommand(project, () -> introduce(c), getRefactoringName(), null);
        if (choice == null) {
          dialogIntroduce.accept(null);
        }
        else {
          SlowOperations.allowSlowOperations(
            () -> inplaceIntroduce(project, editor, choice, targetContainer, occurrenceManager, originalType, dialogIntroduce));
        }
      }

      private void introduce(@Nullable JavaReplaceChoice choice) {
        if (!anchorStatement.isValid()) {
          return;
        }
        final Editor topLevelEditor;
        if (!InjectedLanguageManager.getInstance(project).isInjectedFragment(anchorStatement.getContainingFile())) {
          topLevelEditor = InjectedLanguageUtil.getTopLevelEditor(editor);
        }
        else {
          topLevelEditor = editor;
        }

        PsiVariable variable = null;
        try {
          boolean hasWriteAccess = occurrencesInfo.myHasWriteAccess;
          final InputValidator validator = new InputValidator(IntroduceVariableBase.this, project, occurrenceManager);
          final TypeSelectorManagerImpl typeSelectorManager = new TypeSelectorManagerImpl(project, originalType, expr, occurrences);
          boolean inFinalContext = occurrenceManager.isInFinalContext();
          final IntroduceVariableSettings settings =
            getSettings(project, topLevelEditor, expr, occurrences, typeSelectorManager, inFinalContext, hasWriteAccess, validator, anchorStatement, choice);
          if (!settings.isOK()) {
            wasSucceed = false;
            return;
          }
          JavaReplaceChoice finalChoice = settings.getReplaceChoice();
          PsiExpression[] selectedOccurrences = finalChoice.filter(occurrenceManager);
          if (selectedOccurrences.length == 0) {
            showErrorMessage(project, editor, JavaRefactoringBundle.message("introduce.variable.no.matching.occurrences"));
            wasSucceed = false;
            return;
          }
          final PsiElement chosenAnchor = getAnchor(selectedOccurrences);
          if (chosenAnchor == null) {
            // Diagnostics: attach the source with every selected occurrence
            // bracketed so the failure can be reproduced from the log.
            String text = file.getText();
            String textWithOccurrences = StreamEx.of(selectedOccurrences)
              .map(e -> getPhysicalElement(e).getTextRange())
              .flatMapToEntry(range -> EntryStream.of(range.getStartOffset(), "[", range.getEndOffset(), "]").toMap())
              .sortedBy(Map.Entry::getKey)
              .prepend(0, "")
              .append(text.length(), "")
              .map(Function.identity())
              .pairMap((prev, next) -> text.substring(prev.getKey(), next.getKey()) + next.getValue())
              .joining();
            LOG.error("Unable to find anchor for a new variable; selectedOccurrences.length = "+selectedOccurrences.length,
                      new Attachment("source.java", textWithOccurrences));
            return;
          }

          final RefactoringEventData beforeData = new RefactoringEventData();
          beforeData.addElement(expr);
          project.getMessageBus()
            .syncPublisher(RefactoringEventListener.REFACTORING_EVENT_TOPIC).refactoringStarted(REFACTORING_ID, beforeData);
          variable = VariableExtractor.introduce(project, expr, topLevelEditor, chosenAnchor, selectedOccurrences, settings);
        }
        finally {
          // Always publish the "done" event, even when extraction failed (variable == null).
          final RefactoringEventData afterData = new RefactoringEventData();
          afterData.addElement(variable);
          project.getMessageBus()
            .syncPublisher(RefactoringEventListener.REFACTORING_EVENT_TOPIC).refactoringDone(REFACTORING_ID, afterData);
        }
      }
    }

    final IntroduceVariablePass callback = new IntroduceVariablePass();
    if (replaceChoice != null) {
      callback.pass(findChoice(occurrencesMap, replaceChoice));
    }
    else if (!isInplaceAvailableOnDataContext) {
      callback.pass(null);
    }
    else {
      String title = occurrencesInfo.myChainMethodName != null && occurrences.length == 1
                     ? JavaRefactoringBundle.message("replace.lambda.chain.detected")
                     : RefactoringBundle.message("replace.multiple.occurrences.found");
      createOccurrencesChooser(editor).showChooser(callback, occurrencesMap, title);
    }
    return callback.wasSucceed;
  }

  /** Chooser that prefers the recorded TEXT_RANGE marker over the PSI range. */
  @NotNull
  public static OccurrencesChooser<PsiExpression> createOccurrencesChooser(Editor editor) {
    return new OccurrencesChooser<>(editor) {
      @Override
      protected TextRange getOccurrenceRange(PsiExpression occurrence) {
        RangeMarker rangeMarker = occurrence.getUserData(ElementToWorkOn.TEXT_RANGE);
        if (rangeMarker != null) {
          return new TextRange(rangeMarker.getStartOffset(), rangeMarker.getEndOffset());
        }
        return occurrence.getTextRange();
      }
    };
  }

  /**
   * Runs the inplace (in-editor) introduction for the given choice, falling back
   * to the dialog flow via {@code dialogIntroduce} where applicable.
   */
  private void inplaceIntroduce(@NotNull Project project,
                                Editor editor,
                                @NotNull JavaReplaceChoice choice,
                                @Nullable PsiElement targetContainer,
                                @NotNull ExpressionOccurrenceManager occurrenceManager,
                                @NotNull PsiType originalType,
                                @NotNull Consumer<JavaReplaceChoice> dialogIntroduce) {
    boolean inFinalContext = occurrenceManager.isInFinalContext();
    PsiExpression expr = occurrenceManager.getMainOccurence();
    PsiExpression[] selectedOccurrences = choice.filter(occurrenceManager);
    final InputValidator validator = new InputValidator(this, project, occurrenceManager);
    final TypeSelectorManagerImpl typeSelectorManager = new
TypeSelectorManagerImpl(project, originalType, expr, selectedOccurrences); typeSelectorManager.setAllOccurrences(true); boolean hasWriteAccess = ContainerUtil.exists(selectedOccurrences, occ -> PsiUtil.isAccessedForWriting(occ)); final PsiElement chosenAnchor = getAnchor(selectedOccurrences); final IntroduceVariableSettings settings = getSettings(project, editor, expr, selectedOccurrences, typeSelectorManager, inFinalContext, hasWriteAccess, validator, chosenAnchor, choice); if (choice.isChain()) { myInplaceIntroducer = new ChainCallInplaceIntroducer(project, settings, chosenAnchor, editor, expr, selectedOccurrences, typeSelectorManager, getRefactoringName()); if (!myInplaceIntroducer.startInplaceIntroduceTemplate()) { dialogIntroduce.accept(choice); } } else { final boolean cantChangeFinalModifier = hasWriteAccess || inFinalContext && choice.isAll() || chosenAnchor instanceof PsiSwitchLabelStatementBase; Pass<PsiElement> callback = new Pass<>() { @Override public void pass(final PsiElement container) { PsiElement anchor = container instanceof PsiLambdaExpression ? getAnchor(container) : container; if (checkAnchorStatement(project, editor, anchor) == null) { return; } myInplaceIntroducer = new JavaVariableInplaceIntroducer(project, settings, anchor, editor, expr, cantChangeFinalModifier, selectedOccurrences, typeSelectorManager, getRefactoringName()); if (!myInplaceIntroducer.startInplaceIntroduceTemplate()) { dialogIntroduce.accept(choice); } } }; if (targetContainer != null) { callback.pass(targetContainer); } else { IntroduceVariableTargetBlockChooser.chooseTargetAndPerform(editor, chosenAnchor, expr, callback); } } } public static boolean canBeExtractedWithoutExplicitType(PsiExpression expr) { if (PsiUtil.isLanguageLevel10OrHigher(expr)) { PsiType type = getNormalizedType(expr); if (type != null && !PsiType.NULL.equals(type) && PsiTypesUtil.isDenotableType(type, expr)) { PsiExpression copy = (PsiExpression)(type instanceof PsiDisjunctionType ? 
expr.copy() : LambdaUtil.copyWithExpectedType(expr, type));
      // 'var' is only safe if the detached copy, typed by its own context,
      // still infers exactly the same type as the original expression.
      if (type.equals(getNormalizedType(copy))) {
        return true;
      }
    }
  }
  return false;
}

/**
 * Returns the expression's type, normalizing anonymous-class types to their base class type
 * (an anonymous class type is not denotable and cannot appear in a declaration).
 *
 * @return the normalized type, or {@code null} if the expression has no type
 */
@Nullable
private static PsiType getNormalizedType(PsiExpression expr) {
  PsiType type = expr.getType();
  PsiClass refClass = PsiUtil.resolveClassInType(type);
  if (refClass instanceof PsiAnonymousClass) {
    return ((PsiAnonymousClass)refClass).getBaseClassType();
  }
  return type;
}

/**
 * Finds the element before which the new variable declaration should be inserted for
 * {@code place}: normally the enclosing statement, or a field's initializer when the
 * expression lives inside one (enum constants excluded).
 *
 * @return the anchor element, or {@code null} if none could be found
 */
@Nullable
public static PsiElement getAnchor(PsiElement place) {
  place = getPhysicalElement(place);
  PsiElement anchorStatement = RefactoringUtil.getParentStatement(place, false);
  if (anchorStatement == null) {
    PsiField field = PsiTreeUtil.getParentOfType(place, PsiField.class, true, PsiStatement.class);
    if (field != null && !(field instanceof PsiEnumConstant)) {
      PsiExpression initializer = field.getInitializer();
      // Could be also an annotation argument
      if (PsiTreeUtil.isAncestor(initializer, place, false)) {
        anchorStatement = initializer;
      }
    }
  }
  return anchorStatement;
}

/**
 * Anchor for several occurrences: delegates to the single-occurrence overload when there is
 * exactly one, otherwise asks {@link RefactoringUtil} for a common anchor; a field anchor is
 * unwrapped to its initializer (enum constants excluded).
 */
static @Nullable PsiElement getAnchor(PsiExpression[] places) {
  if (places.length == 1) {
    return getAnchor(places[0]);
  }
  PsiElement anchor = RefactoringUtil.getAnchorElementForMultipleExpressions(places, null);
  return anchor instanceof PsiField && !(anchor instanceof PsiEnumConstant) ? ((PsiField)anchor).getInitializer() : anchor;
}

/**
 * Resolves {@code place} to its physical counterpart via the {@link ElementToWorkOn#PARENT}
 * user data, falling back to {@code place} itself when none is recorded.
 */
private static @NotNull PsiElement getPhysicalElement(PsiElement place) {
  PsiElement physicalElement = place.getUserData(ElementToWorkOn.PARENT);
  return physicalElement != null ?
physicalElement : place; } @Contract("_, _, null -> null") protected PsiElement checkAnchorStatement(Project project, Editor editor, PsiElement anchorStatement) { if (anchorStatement == null) { String message = JavaRefactoringBundle.message("refactoring.is.not.supported.in.the.current.context", getRefactoringName()); showErrorMessage(project, editor, message); return null; } if (checkAnchorBeforeThisOrSuper(project, editor, anchorStatement, getRefactoringName(), HelpID.INTRODUCE_VARIABLE)) return null; final PsiElement tempContainer = anchorStatement.getParent(); if (!(tempContainer instanceof PsiCodeBlock) && !RefactoringUtil.isLoopOrIf(tempContainer) && !(tempContainer instanceof PsiLambdaExpression) && (tempContainer.getParent() instanceof PsiLambdaExpression)) { String message = JavaRefactoringBundle.message("refactoring.is.not.supported.in.the.current.context", getRefactoringName()); showErrorMessage(project, editor, message); return null; } return tempContainer; } protected boolean isInplaceAvailableInTestMode() { return false; } private static ExpressionOccurrenceManager createOccurrenceManager(PsiExpression expr, PsiElement tempContainer) { Set<PsiVariable> vars = new HashSet<>(); SyntaxTraverser.psiTraverser().withRoot(expr) .filter(element -> element instanceof PsiReferenceExpression) .forEach(element -> { final PsiElement resolve = ((PsiReferenceExpression)element).resolve(); if (resolve instanceof PsiVariable) { vars.add((PsiVariable)resolve); } }); PsiElement containerParent = tempContainer; PsiElement lastScope = tempContainer; while (true) { if (containerParent instanceof PsiFile) break; if (containerParent instanceof PsiMethod) { PsiClass containingClass = ((PsiMethod)containerParent).getContainingClass(); if (containingClass == null || !PsiUtil.isLocalOrAnonymousClass(containingClass)) break; if (vars.stream().anyMatch(variable -> PsiTreeUtil.isAncestor(containingClass, variable, true))) { break; } } if (containerParent instanceof 
PsiLambdaExpression) { PsiParameter[] parameters = ((PsiLambdaExpression)containerParent).getParameterList().getParameters(); if (ContainerUtil.exists(parameters, vars::contains)) { break; } } if (containerParent instanceof PsiForStatement) { PsiForStatement forStatement = (PsiForStatement)containerParent; if (vars.stream().anyMatch(variable -> PsiTreeUtil.isAncestor(forStatement.getInitialization(), variable, true))) { break; } } containerParent = containerParent.getParent(); if (containerParent instanceof PsiCodeBlock) { lastScope = containerParent; } } return new ExpressionOccurrenceManager(expr, lastScope, NotInSuperCallOccurrenceFilter.INSTANCE); } private static boolean isInJspHolderMethod(PsiExpression expr) { final PsiElement parent1 = expr.getParent(); if (parent1 == null) { return false; } final PsiElement parent2 = parent1.getParent(); if (!(parent2 instanceof JspCodeBlock)) return false; final PsiElement parent3 = parent2.getParent(); return parent3 instanceof JspHolderMethod; } static boolean isFinalVariableOnLHS(PsiExpression expr) { if (expr instanceof PsiReferenceExpression && RefactoringUtil.isAssignmentLHS(expr)) { final PsiElement resolve = ((PsiReferenceExpression)expr).resolve(); if (resolve instanceof PsiVariable && ((PsiVariable)resolve).hasModifierProperty(PsiModifier.FINAL)) { //should be inserted after assignment return true; } } return false; } public static PsiExpression simplifyVariableInitializer(final PsiExpression initializer, final PsiType expectedType) { return simplifyVariableInitializer(initializer, expectedType, true); } public static PsiExpression simplifyVariableInitializer(final PsiExpression initializer, final PsiType expectedType, final boolean inDeclaration) { if (initializer instanceof PsiTypeCastExpression) { PsiExpression operand = ((PsiTypeCastExpression)initializer).getOperand(); if (operand != null) { PsiType operandType = operand.getType(); if (operandType != null && TypeConversionUtil.isAssignable(expectedType, 
operandType)) { return operand; } } } else if (initializer instanceof PsiNewExpression) { final PsiNewExpression newExpression = (PsiNewExpression)initializer; if (newExpression.getArrayInitializer() != null) { if (inDeclaration) { return newExpression.getArrayInitializer(); } } else { PsiJavaCodeReferenceElement ref = newExpression.getClassOrAnonymousClassReference(); if (ref != null) { final PsiExpression tryToDetectDiamondNewExpr = ((PsiVariable)JavaPsiFacade.getElementFactory(initializer.getProject()) .createVariableDeclarationStatement("x", expectedType, initializer, initializer).getDeclaredElements()[0]) .getInitializer(); if (tryToDetectDiamondNewExpr instanceof PsiNewExpression && PsiDiamondTypeUtil.canCollapseToDiamond((PsiNewExpression)tryToDetectDiamondNewExpr, (PsiNewExpression)tryToDetectDiamondNewExpr, expectedType)) { final PsiElement paramList = RemoveRedundantTypeArgumentsUtil.replaceExplicitWithDiamond(ref.getParameterList()); return PsiTreeUtil.getParentOfType(paramList, PsiNewExpression.class); } } } } return initializer; } /** * Ensure that diamond inside initializer is expanded, then replace variable type with var */ public static PsiElement expandDiamondsAndReplaceExplicitTypeWithVar(PsiTypeElement typeElement, PsiElement context) { PsiElement parent = typeElement.getParent(); if (parent instanceof PsiVariable) { PsiExpression copyVariableInitializer = ((PsiVariable)parent).getInitializer(); if (copyVariableInitializer instanceof PsiNewExpression) { final PsiDiamondType.DiamondInferenceResult diamondResolveResult = PsiDiamondTypeImpl.resolveInferredTypesNoCheck((PsiNewExpression)copyVariableInitializer, copyVariableInitializer); if (!diamondResolveResult.getInferredTypes().isEmpty()) { PsiDiamondTypeUtil.expandTopLevelDiamondsInside(copyVariableInitializer); } } else if (copyVariableInitializer instanceof PsiArrayInitializerExpression) { new AddNewArrayExpressionFix((PsiArrayInitializerExpression)copyVariableInitializer).doFix(); } else if 
(copyVariableInitializer instanceof PsiFunctionalExpression) { PsiTypeCastExpression castExpression = (PsiTypeCastExpression)JavaPsiFacade.getElementFactory(copyVariableInitializer.getProject()) .createExpressionFromText("(" + typeElement.getText() + ")a", copyVariableInitializer); Objects.requireNonNull(castExpression.getOperand()).replace(copyVariableInitializer); copyVariableInitializer.replace(castExpression); } } return new CommentTracker().replaceAndRestoreComments(typeElement, JavaPsiFacade.getElementFactory(context.getProject()).createTypeElementFromText("var", context)); } public static PsiElement replace(final PsiExpression expr1, final PsiExpression ref, final Project project) throws IncorrectOperationException { final PsiExpression expr2; if (expr1 instanceof PsiArrayInitializerExpression && expr1.getParent() instanceof PsiNewExpression) { expr2 = (PsiNewExpression) expr1.getParent(); } else { expr2 = RefactoringUtil.outermostParenthesizedExpression(expr1); } if (expr2.isPhysical() || expr1.getUserData(ElementToWorkOn.REPLACE_NON_PHYSICAL) != null) { return expr2.replace(ref); } else { final String prefix = expr1.getUserData(ElementToWorkOn.PREFIX); final String suffix = expr1.getUserData(ElementToWorkOn.SUFFIX); final PsiElement parent = expr1.getUserData(ElementToWorkOn.PARENT); final RangeMarker rangeMarker = expr1.getUserData(ElementToWorkOn.TEXT_RANGE); LOG.assertTrue(parent != null, expr1); return parent.replace(createReplacement(ref.getText(), project, prefix, suffix, parent, rangeMarker, new int[1])); } } private static PsiElement createReplacement(final @NonNls String refText, final Project project, final String prefix, final String suffix, final PsiElement parent, final RangeMarker rangeMarker, int[] refIdx) { String text = refText; if (parent != null) { final String allText = parent.getContainingFile().getText(); final TextRange parentRange = parent.getTextRange(); LOG.assertTrue(parentRange.getStartOffset() <= rangeMarker.getStartOffset(), 
parent + "; prefix:" + prefix + "; suffix:" + suffix); String beg = allText.substring(parentRange.getStartOffset(), rangeMarker.getStartOffset()); //noinspection SSBasedInspection (suggested replacement breaks behavior) if (StringUtil.stripQuotesAroundValue(beg).trim().isEmpty() && prefix == null) beg = ""; LOG.assertTrue(rangeMarker.getEndOffset() <= parentRange.getEndOffset(), parent + "; prefix:" + prefix + "; suffix:" + suffix); String end = allText.substring(rangeMarker.getEndOffset(), parentRange.getEndOffset()); //noinspection SSBasedInspection (suggested replacement breaks behavior) if (StringUtil.stripQuotesAroundValue(end).trim().isEmpty() && suffix == null) end = ""; final String start = beg + (prefix != null ? prefix : ""); refIdx[0] = start.length(); text = start + refText + (suffix != null ? suffix : "") + end; } final PsiElementFactory factory = JavaPsiFacade.getElementFactory(project); return parent instanceof PsiStatement ? factory.createStatementFromText(text, parent) : parent instanceof PsiCodeBlock ? 
factory.createCodeBlockFromText(text, parent) :
       factory.createExpressionFromText(text, parent);
}

/**
 * Introduce-variable from an existing local variable is not supported by this handler.
 */
@Override
protected boolean invokeImpl(Project project, PsiLocalVariable localVariable, Editor editor) {
  throw new UnsupportedOperationException();
}

/**
 * Highlights the replaced occurrences in the editor and shows a status-bar hint about
 * removing the highlighting. No-op when there is no editor or in unit-test mode.
 */
protected static void highlightReplacedOccurrences(Project project, Editor editor, PsiElement[] replacedOccurrences){
  if (editor == null) return;
  if (ApplicationManager.getApplication().isUnitTestMode()) return;
  HighlightManager highlightManager = HighlightManager.getInstance(project);
  highlightManager.addOccurrenceHighlights(editor, replacedOccurrences, EditorColors.SEARCH_RESULT_ATTRIBUTES, true, null);
  WindowManager.getInstance().getStatusBar(project).setInfo(RefactoringBundle.message("press.escape.to.remove.the.highlighting"));
}

/** Reports a refactoring error to the user; UI vs. headless behavior is subclass-specific. */
protected abstract void showErrorMessage(Project project, Editor editor, String message);

/** Hook for subclasses to surface conflicts; the base implementation reports none handled. */
protected boolean reportConflicts(MultiMap<PsiElement,String> conflicts, Project project, IntroduceVariableSettings settings){
  return false;
}

/**
 * Builds a non-interactive {@link IntroduceVariableSettings} snapshot for the given replace
 * choice: picks the first suggested name (falling back to "v"), decides {@code final} and
 * {@code var} declaration flags, and whether write occurrences are replaced.
 *
 * @param declareFinalIfAll declare the variable final when all occurrences are replaced
 * @param anyAssignmentLHS  whether any occurrence appears as an assignment target
 * @param replaceChoice     the occurrence-replacement strategy selected by the user
 */
public IntroduceVariableSettings getSettings(Project project, Editor editor,
                                             PsiExpression expr, PsiExpression[] occurrences,
                                             final TypeSelectorManagerImpl typeSelectorManager,
                                             boolean declareFinalIfAll,
                                             boolean anyAssignmentLHS,
                                             final InputValidator validator,
                                             PsiElement anchor,
                                             final JavaReplaceChoice replaceChoice) {
  final boolean replaceAll = replaceChoice.isAll();
  final SuggestedNameInfo suggestedName = getSuggestedName(typeSelectorManager.getDefaultType(), expr, anchor);
  final String variableName = suggestedName.names.length > 0 ? suggestedName.names[0] : "v";
  // Final when replacing all in a final-requiring context, per user/code-style preference,
  // or when anchored at a switch label.
  final boolean declareFinal = replaceAll && declareFinalIfAll ||
                               !anyAssignmentLHS && createFinals(anchor.getContainingFile()) ||
                               anchor instanceof PsiSwitchLabelStatementBase;
  // 'var' only when the type stays inferable, the user opted in, and this is not a chain extraction.
  final boolean declareVarType = canBeExtractedWithoutExplicitType(expr) && createVarType() && !replaceChoice.isChain();
  final boolean replaceWrite = anyAssignmentLHS && replaceAll;
  return new IntroduceVariableSettings() {
    @Override
    public @NlsSafe String getEnteredName() {
      return variableName;
    }

    @Override
    public boolean isReplaceAllOccurrences() {
      return replaceAll;
    }

    @Override
    public boolean isDeclareFinal() {
      return declareFinal;
    }

    @Override
    public boolean isDeclareVarType() {
      return declareVarType;
    }

    @Override
    public boolean isReplaceLValues() {
      return replaceWrite;
    }

    @Override
    public PsiType getSelectedType() {
      final PsiType selectedType = typeSelectorManager.getTypeSelector().getSelectedType();
      return selectedType != null ? selectedType : typeSelectorManager.getDefaultType();
    }

    @Override
    public JavaReplaceChoice getReplaceChoice() {
      return replaceChoice;
    }

    @Override
    public boolean isOK() {
      return true;
    }
  };
}

/**
 * Whether new locals should be declared {@code final}: the explicit user setting wins,
 * otherwise the file's code-style setting is used.
 */
public static boolean createFinals(@NotNull PsiFile file) {
  final Boolean createFinals = JavaRefactoringSettings.getInstance().INTRODUCE_LOCAL_CREATE_FINALS;
  return createFinals == null ? JavaCodeStyleSettings.getInstance(file).GENERATE_FINAL_LOCALS : createFinals.booleanValue();
}

/** Whether new locals should use {@code var}; defaults to {@code false} when unset. */
public static boolean createVarType() {
  final Boolean createVarType = JavaRefactoringSettings.getInstance().INTRODUCE_LOCAL_CREATE_VAR_TYPE;
  return createVarType != null && createVarType.booleanValue();
}

/**
 * Rejects anchors that are {@code this(...)}/{@code super(...)} delegation calls, since a new
 * statement cannot be inserted before them in a constructor. Shows an error hint and returns
 * {@code true} when the anchor is such a call.
 */
public static boolean checkAnchorBeforeThisOrSuper(final Project project, final Editor editor, final PsiElement tempAnchorElement, final @NlsContexts.DialogTitle String refactoringName, final String helpID) {
  if (tempAnchorElement instanceof PsiExpressionStatement) {
    PsiExpression enclosingExpr = ((PsiExpressionStatement)tempAnchorElement).getExpression();
    if (enclosingExpr instanceof PsiMethodCallExpression) {
      PsiMethod method = ((PsiMethodCallExpression)enclosingExpr).resolveMethod();
      if (method != null && method.isConstructor()) {
        //This is either 'this' or 'super', both must be the first in the respective constructor
        String message = RefactoringBundle.getCannotRefactorMessage(JavaRefactoringBundle.message("invalid.expression.context"));
        CommonRefactoringUtil.showErrorHint(project, editor, message, refactoringName, helpID);
        return true;
      }
    }
  }
  return false;
}

/** Validation callback for introduce-variable settings dialogs. */
public interface Validator {
  boolean isOK(IntroduceVariableSettings dialog);
}

/**
 * Records conflicts when the occurrence sits in a loop condition (except {@code while})
 * and references variables that are modified inside the loop: hoisting the expression out
 * of the condition would then change the loop's behavior.
 */
public static void checkInLoopCondition(PsiExpression occurence, MultiMap<PsiElement, String> conflicts) {
  final PsiElement loopForLoopCondition = RefactoringUtil.getLoopForLoopCondition(occurence);
  if (loopForLoopCondition == null || loopForLoopCondition instanceof PsiWhileStatement) return;
  final List<PsiVariable> referencedVariables = RefactoringUtil.collectReferencedVariables(occurence);
  final List<PsiVariable> modifiedInBody = new ArrayList<>();
  for (PsiVariable psiVariable : referencedVariables) {
    if (RefactoringUtil.isModifiedInScope(psiVariable, loopForLoopCondition)) {
      modifiedInBody.add(psiVariable);
    }
  }
  if (!modifiedInBody.isEmpty()) {
    for (PsiVariable variable : modifiedInBody) {
      final String message =
JavaRefactoringBundle.message("is.modified.in.loop.body", RefactoringUIUtil.getDescription(variable, false)); conflicts.putValue(variable, StringUtil.capitalize(message)); } conflicts.putValue(occurence, JavaRefactoringBundle.message("introducing.variable.may.break.code.logic")); } } @Override public AbstractInplaceIntroducer getInplaceIntroducer() { return myInplaceIntroducer; } public static class OccurrencesInfo { List<PsiExpression> myOccurrences; List<PsiExpression> myNonWrite; boolean myCantReplaceAll; boolean myCantReplaceAllButWrite; boolean myHasWriteAccess; final String myChainMethodName; public OccurrencesInfo(PsiExpression[] occurrences) { this(occurrences, true); } public OccurrencesInfo(PsiExpression[] occurrences, boolean chainCallPossible) { myOccurrences = Arrays.asList(occurrences); myNonWrite = new ArrayList<>(); myCantReplaceAll = false; myCantReplaceAllButWrite = false; for (PsiExpression occurrence : myOccurrences) { if (!RefactoringUtil.isAssignmentLHS(occurrence)) { myNonWrite.add(occurrence); } else if (isFinalVariableOnLHS(occurrence)) { myCantReplaceAll = true; } else if (!myNonWrite.isEmpty()){ myCantReplaceAllButWrite = true; myCantReplaceAll = true; } } myHasWriteAccess = myOccurrences.size() > myNonWrite.size() && myOccurrences.size() > 1; myChainMethodName = chainCallPossible ? 
getChainCallExtractor() : null; } private String getChainCallExtractor() { if (myHasWriteAccess || myOccurrences.isEmpty()) return null; PsiExpression expression = myOccurrences.get(0); // The whole lambda body selected if (myOccurrences.size() == 1 && expression.getParent() instanceof PsiLambdaExpression) return null; PsiElement parent = PsiTreeUtil.findCommonParent(myOccurrences); if (parent == null) return null; PsiType type = expression.getType(); PsiLambdaExpression lambda = PsiTreeUtil.getParentOfType(parent, PsiLambdaExpression.class, true, PsiStatement.class); ChainCallExtractor extractor = ChainCallExtractor.findExtractor(lambda, expression, type); if (extractor == null) return null; PsiParameter parameter = lambda.getParameterList().getParameters()[0]; if (!ReferencesSearch.search(parameter).forEach((Processor<PsiReference>)ref -> myOccurrences.stream().anyMatch(expr -> PsiTreeUtil.isAncestor(expr, ref.getElement(), false)))) { return null; } return extractor.getMethodName(parameter, expression, type); } @NotNull public LinkedHashMap<JavaReplaceChoice, List<PsiExpression>> buildOccurrencesMap(PsiExpression expr) { final LinkedHashMap<JavaReplaceChoice, List<PsiExpression>> occurrencesMap = new LinkedHashMap<>(); if (myChainMethodName != null) { if (myOccurrences.size() > 1 && !myCantReplaceAll) { occurrencesMap.put(JavaReplaceChoice.NO, Collections.singletonList(expr)); occurrencesMap.put(JavaReplaceChoice.ALL, myOccurrences); occurrencesMap.put(new JavaReplaceChoice(ReplaceChoice.ALL, null, true) { @Override public String formatDescription(int occurrencesCount) { return JavaRefactoringBundle.message("replace.all.and.extract", occurrencesCount, myChainMethodName); } }, myOccurrences); } else { JavaReplaceChoice noChain = new JavaReplaceChoice(ReplaceChoice.NO, JavaRefactoringBundle.message("replace.inside.current.lambda"), false); JavaReplaceChoice chain = new JavaReplaceChoice(ReplaceChoice.NO, 
JavaRefactoringBundle.message("replace.as.separate.operation", myChainMethodName), true); occurrencesMap.put(noChain, Collections.singletonList(expr)); occurrencesMap.put(chain, Collections.singletonList(expr)); } } else { occurrencesMap.put(JavaReplaceChoice.NO, Collections.singletonList(expr)); boolean hasWrite = myHasWriteAccess && !myCantReplaceAllButWrite; if (hasWrite && myNonWrite.contains(expr)) { occurrencesMap.put(JavaReplaceChoice.NO_WRITE, myNonWrite); } if (myOccurrences.size() > 1 && !myCantReplaceAll) { if (hasWrite) { JavaReplaceChoice choice = new JavaReplaceChoice( ReplaceChoice.ALL, myNonWrite.isEmpty() ? JavaRefactoringBundle.message("replace.all.occurrences.changes.semantics", myOccurrences.size()) : JavaRefactoringBundle.message("replace.all.read.and.write"), false); occurrencesMap.put(choice, myOccurrences); } else { generateScopeBasedChoices(expr, occurrencesMap); occurrencesMap.put(JavaReplaceChoice.ALL, myOccurrences); } } } return occurrencesMap; } private void generateScopeBasedChoices(PsiExpression expr, LinkedHashMap<JavaReplaceChoice, List<PsiExpression>> occurrencesMap) { // This comparator can correctly compare only elements that represent a single ancestor chain // i.e. 
for two compared elements a and b either a is ancestor of b or vice versa Comparator<PsiElement> treeOrder = (e1, e2) -> { if (PsiTreeUtil.isAncestor(e1, e2, true)) return 1; if (PsiTreeUtil.isAncestor(e2, e1, true)) return -1; return 0; }; PsiElement physical = getPhysicalElement(expr); TreeMap<PsiElement, List<PsiExpression>> groupByBlock = StreamEx.of(myOccurrences) .map(place -> (PsiExpression)getPhysicalElement(place)) .groupingBy(e -> PsiTreeUtil.findCommonParent(e, physical), () -> new TreeMap<>(treeOrder), Collectors.toList()); LOG.assertTrue(!groupByBlock.isEmpty()); List<PsiExpression> currentOccurrences = new ArrayList<>(); Map<String, Integer> counts = new HashMap<>(); groupByBlock.forEach((parent, occurrences) -> { PsiElement nextParent = groupByBlock.higherKey(parent); if (nextParent == null) return; currentOccurrences.addAll(occurrences); if (currentOccurrences.size() == 1) return; PsiElement current = parent.getParent(); @NonNls String keyword = null; while (current != nextParent) { if (current instanceof PsiIfStatement || current instanceof PsiWhileStatement || current instanceof PsiForStatement || current instanceof PsiTryStatement) { keyword = current.getFirstChild().getText(); } else if (current instanceof PsiDoWhileStatement) { keyword = "do-while"; } else if (current instanceof PsiForeachStatement) { keyword = "for-each"; } else if (current instanceof PsiLambdaExpression) { keyword = "lambda"; } if (keyword != null) { break; } current = current.getParent(); } if (keyword == null && nextParent instanceof PsiIfStatement) { PsiStatement thenBranch = ((PsiIfStatement)nextParent).getThenBranch(); PsiStatement elseBranch = ((PsiIfStatement)nextParent).getElseBranch(); if (PsiTreeUtil.isAncestor(thenBranch, parent, false)) { keyword = "if-then"; } else if (PsiTreeUtil.isAncestor(elseBranch, parent, false)) { keyword = "else"; } } if (keyword != null) { int sameKeywordCount = counts.merge(keyword, 1, Integer::sum); if (sameKeywordCount <= 2) { 
JavaReplaceChoice choice = JavaReplaceChoice.allOccurrencesInside(parent, sameKeywordCount, keyword); occurrencesMap.put(choice, new ArrayList<>(currentOccurrences)); } } }); } } protected static @NlsContexts.Command String getRefactoringName() { return RefactoringBundle.message("introduce.variable.title"); } }
// **********************************************************************
//
// <copyright>
//
//  BBN Technologies
//  10 Moulton Street
//  Cambridge, MA 02138
//  (617) 873-8000
//
//  Copyright (C) BBNT Solutions LLC. All rights reserved.
//
// </copyright>
// **********************************************************************
//
// $Source: /cvs/distapps/openmap/src/openmap/com/bbn/openmap/proj/coords/ECEFPoint.java,v $
// $RCSfile: ECEFPoint.java,v $
// $Revision: 1.9 $
// $Date: 2009/02/25 22:34:04 $
// $Author: dietrick $
//
// **********************************************************************
// **********************************************************************
//
// Based on coordinate conversion utilities in GeoTools
//
// Note: Height calculations are present, but commented out.
//
// **********************************************************************

package com.bbn.openmap.proj.coords;

import java.awt.geom.Point2D;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import com.bbn.openmap.MoreMath;
import com.bbn.openmap.proj.Ellipsoid;
import com.bbn.openmap.util.HashCodeUtil;

/**
 * From: http://www.commlinx.com.au/Datum%20Transformation%20Description.html :
 * <P>
 * The Cartesian coordinate frame of reference used in GPS/GLONASS is called
 * Earth-Centered, Earth-Fixed (ECEF). ECEF uses three-dimensional XYZ
 * coordinates (in meters) to describe the location of a GPS user or satellite.
 * The term "Earth-Centered" comes from the fact that the origin of the axis
 * (0,0,0) is located at the mass center of gravity (determined through years of
 * tracking satellite trajectories). The term "Earth-Fixed" implies that the
 * axes are fixed with respect to the earth (that is, they rotate with the
 * earth). The Z-axis pierces the North Pole, and the XY-axis defines the
 * equatorial plane.
 * <P>
 *
 * ECEF coordinates are expressed in a reference system that is related to
 * mapping representations. Because the earth has a complex shape, a simple, yet
 * accurate, method to approximate the earth's shape is required. The use of a
 * reference ellipsoid allows for the conversion of the ECEF coordinates to the
 * more commonly used geodetic-mapping coordinates of Latitude, Longitude, and
 * Altitude (LLA).
 * <P>
 *
 * Height above the ellipsoid is assumed to be 0 in both directions of the
 * conversion; the height computation is present but commented out below.
 */
public class ECEFPoint {

    /**
     * Tolerance, in meters, used by {@link #equals(Object)} for approximate
     * coordinate comparison.
     */
    protected static double EQUIVALENT_TOLERANCE = 0.001;

    // Geocentric coordinates, in meters.
    protected double x_ = 0.0; // toward the prime meridian
    protected double y_ = 0.0; // toward 90 degrees east
    protected double z_ = 0.0; // toward the North Pole

    /**
     * Construct a default ECEFPoint at the origin (0, 0, 0).
     */
    public ECEFPoint() {}

    /**
     * Construct an ECEFPoint from geocentric coordinates.
     *
     * @param x meters toward the prime meridian
     * @param y meters toward 90 degrees east
     * @param z meters toward the North Pole
     */
    public ECEFPoint(double x, double y, double z) {
        setECEF(x, y, z);
    }

    /**
     * Construct an ECEFPoint as a copy of another.
     *
     * @param pt ECEFPoint to copy coordinates from
     */
    public ECEFPoint(ECEFPoint pt) {
        x_ = pt.x_;
        y_ = pt.y_;
        z_ = pt.z_;
    }

    /**
     * Construct an ECEFPoint from float coordinates (widened to double).
     */
    public ECEFPoint(float x, float y, float z) {
        this((double) x, (double) y, (double) z);
    }

    /**
     * Convert a LatLonPoint to a new ECEFPoint, assuming the WGS 84 ellipsoid
     * and zero height.
     *
     * @param llpoint geodetic position to convert
     * @return a newly-allocated ECEFPoint
     */
    public static ECEFPoint LLtoECEF(LatLonPoint llpoint) {
        return LLtoECEF(llpoint, new ECEFPoint());
    }

    /**
     * Convert a LatLonPoint into the provided ECEFPoint (WGS 84, zero height).
     *
     * @param llpoint geodetic position to convert
     * @param ecef destination point; a new one is allocated if null
     * @return the destination point, for chaining
     */
    public static ECEFPoint LLtoECEF(LatLonPoint llpoint, ECEFPoint ecef) {
        if (ecef == null) {
            ecef = new ECEFPoint();
        }
        ecef.setLatLon(llpoint);
        return ecef;
    }

    /**
     * Returns a string representation of the object.
     *
     * @return String representation
     */
    public String toString() {
        return "ECEFPoint[x=" + x_ + ",y=" + y_ + ",z=" + z_ + "]";
    }

    /**
     * Set x (meters toward the prime meridian).
     */
    public void setx(double x) {
        x_ = x;
    }

    /**
     * Set y (meters toward 90 degrees east).
     */
    public void sety(double y) {
        y_ = y;
    }

    /**
     * Set z (meters toward the North Pole).
     */
    public void setz(double z) {
        z_ = z;
    }

    /**
     * Set all three geocentric coordinates at once.
     */
    public void setECEF(double x, double y, double z) {
        x_ = x;
        y_ = y;
        z_ = z;
    }

    /**
     * Copy the coordinates of another ECEFPoint into this one.
     */
    public void setECEF(ECEFPoint pt) {
        x_ = pt.x_;
        y_ = pt.y_;
        z_ = pt.z_;
    }

    /**
     * Get x (meters toward the prime meridian).
     */
    public double getx() {
        return x_;
    }

    /**
     * Get y (meters toward 90 degrees east).
     */
    public double gety() {
        return y_;
    }

    /**
     * Get z (meters toward the North Pole).
     */
    public double getz() {
        return z_;
    }

    /**
     * Determines whether two ECEFPoints are equal, to within
     * {@link #EQUIVALENT_TOLERANCE} meters on each axis.
     * <p>
     * NOTE(review): this tolerance-based equality is not consistent with
     * {@link #hashCode()}, which hashes the exact coordinate values — two
     * points that compare equal here can have different hash codes, so
     * instances are unreliable as hash-based collection keys. Confirm whether
     * callers depend on the approximate semantics before changing either.
     *
     * @param obj Object
     * @return Whether the two points are equal
     */
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final ECEFPoint pt = (ECEFPoint) obj;
        return (MoreMath.approximately_equal(x_, pt.x_, EQUIVALENT_TOLERANCE)
                && MoreMath.approximately_equal(y_, pt.y_, EQUIVALENT_TOLERANCE)
                && MoreMath.approximately_equal(z_, pt.z_, EQUIVALENT_TOLERANCE));
    }

    /**
     * Write this point as three doubles (x, y, z) to the stream.
     *
     * @param s DataOutputStream
     */
    public void write(DataOutputStream s) throws IOException {
        // Write my information
        s.writeDouble(x_);
        s.writeDouble(y_);
        s.writeDouble(z_);
    }

    /**
     * Read three doubles (x, y, z) from the stream, in the order written by
     * {@link #write(DataOutputStream)}.
     *
     * @param s DataInputStream
     */
    public void read(DataInputStream s) throws IOException {
        setECEF(s.readDouble(), s.readDouble(), s.readDouble());
    }

    /**
     * Set this ECEFPoint from a LatLonPoint, assuming WGS 84.
     *
     * @param pt LatLonPoint; getY() is latitude, getX() is longitude
     */
    public void setLatLon(LatLonPoint pt) {
        setLatLon(pt.getY(), pt.getX(), Ellipsoid.WGS_84);
    }

    /**
     * Set this ECEFPoint from a latitude/longitude pair (degrees), WGS 84.
     */
    public void setLatLon(float lat, float lon) {
        setLatLon(lat, lon, Ellipsoid.WGS_84);
    }

    /**
     * Set this ECEFPoint from a latitude/longitude pair (degrees) on the given
     * ellipsoid. Height above the ellipsoid is taken as 0.
     */
    public void setLatLon(double lat, double lon, Ellipsoid ellip) {

        final double a = ellip.radius; // semimajor (meters)
        final double b = ellip.polarRadius; // semiminor (meters)
        final double a2 = a * a;
        final double b2 = b * b;
        // First eccentricity squared of the ellipsoid.
        final double e2 = (a2 - b2) / a2;

        final double L = Math.toRadians(lon); // Longitude
        final double P = Math.toRadians(lat); // Latitude
        final double h = 0; // Height above the ellipsoid (m)

        final double cosLat = Math.cos(P);
        final double sinLat = Math.sin(P);

        // Prime-vertical radius of curvature at this latitude.
        final double rn = a / Math.sqrt(1 - e2 * (sinLat * sinLat));

        final double x = (rn + h) * cosLat * Math.cos(L); // X: Toward
        // prime
        // meridian
        final double y = (rn + h) * cosLat * Math.sin(L); // Y: Toward
        // East
        final double z = (rn * (1 - e2) + h) * sinLat; // Z: Toward
        // North

        this.setECEF(x, y, z);
    }

    /**
     * Return a LatLonPoint in WGS 84
     */
    public LatLonPoint getLatLon() {
        return getLatLon(new LatLonPoint.Double());
    }

    /**
     * Return a LatLonPoint in WGS 84, written into the provided instance.
     */
    public LatLonPoint getLatLon(LatLonPoint instance) {
        // getLatLon returns longitude as x, latitude as y; setLatLon
        // takes (lat, lon), hence the (getY, getX) swap.
        Point2D p = getLatLon(Ellipsoid.WGS_84, null);
        instance.setLatLon(p.getY(), p.getX());
        return instance;
    }

    /**
     * Return a Point2D in the given {@link Ellipsoid} with longitude as x and
     * latitude as y, using the single-iteration Bowring/Toms approximation
     * (see the Toms, Feb 1996 notation in the comments below).
     */
    public Point2D getLatLon(Ellipsoid ellip, Point2D ret) {
        if (ret == null) {
            ret = new Point2D.Double();
        }

        final double a = ellip.radius; // semimajor (meters)
        final double b = ellip.polarRadius; // semiminor (meters)
        final double a2 = a * a;
        final double b2 = b * b;
        final double e2 = (a2 - b2) / a2; // first eccentricity squared
        final double ep2 = (a2 - b2) / b2; // second eccentricity squared

        /**
         * Cosine of 67.5 degrees.
         */
        // final double COS_67P5 = 0.38268343236508977;
        /**
         * Toms region 1 constant.
         */
        final double AD_C = 1.0026000;

        final double x = x_; // Toward prime meridian
        final double y = y_; // Toward East
        final double z = z_; // Toward North

        // Note: The Java version of 'atan2' work correctly for x==0.
        // No need for special handling like in the C version.
        // No special handling neither for latitude. Formulas
        // below are generic enough, considering that 'atan'
        // work correctly with infinities (1/0).

        // Note: Variable names follow the notation used in Toms, Feb
        // 1996
        final double W2 = x * x + y * y; // square of distance from Z
        // axis
        final double W = Math.sqrt(W2); // distance from Z axis
        final double T0 = z * AD_C; // initial estimate of vertical
        // component
        final double S0 = Math.sqrt(T0 * T0 + W2); // initial estimate
        // of horizontal
        // component
        final double sin_B0 = T0 / S0; // sin(B0), B0 is estimate of
        // Bowring aux variable
        final double cos_B0 = W / S0; // cos(B0)
        final double sin3_B0 = sin_B0 * sin_B0 * sin_B0; // cube of
        // sin(B0)
        final double T1 = z + b * ep2 * sin3_B0; // corrected estimate
        // of vertical
        // component
        final double sum = W - a * e2 * (cos_B0 * cos_B0 * cos_B0); // numerator
        // of
        // cos(phi1)
        final double S1 = Math.sqrt(T1 * T1 + sum * sum); // corrected
        // estimate
        // of
        // horizontal
        // component
        final double sin_p1 = T1 / S1; // sin(phi1), phi1 is estimated
        // latitude
        final double cos_p1 = sum / S1; // cos(phi1)

        final double longitude = Math.toDegrees(Math.atan2(y, x));
        final double latitude = Math.toDegrees(Math.atan(sin_p1 / cos_p1));
        // final double height;

        // if (computeHeight) {
        // final double rn = a/Math.sqrt(1-e2*(sin_p1*sin_p1));
        // // Earth radius at location
        // if (cos_p1 >= +COS_67P5) height = W / +cos_p1 - rn;
        // else if (cos_p1 <= -COS_67P5) height = W / -cos_p1 - rn;
        // else height = z / sin_p1 + rn*(e2 - 1.0);
        // }

        ret.setLocation(longitude, latitude);
        // LatLonPoint ret = new LatLonPoint((float) latitude, (float) longitude);
        return ret;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#hashCode()
     */
    public int hashCode() {
        // Hashes the exact coordinates; see the NOTE(review) on equals()
        // about the approximate-equality inconsistency.
        int result = HashCodeUtil.SEED;
        result = HashCodeUtil.hash(result, x_);
        result = HashCodeUtil.hash(result, y_);
        result = HashCodeUtil.hash(result, z_);
        return result;
    }
}
/*

   Derby - Class org.apache.derby.impl.sql.compile.HashTableNode

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derby.impl.sql.compile;

import org.apache.derby.iapi.services.context.ContextManager;

import org.apache.derby.iapi.sql.compile.Optimizable;
import org.apache.derby.iapi.sql.compile.OptimizablePredicate;
import org.apache.derby.iapi.sql.compile.OptimizablePredicateList;
import org.apache.derby.iapi.sql.compile.Optimizer;
import org.apache.derby.iapi.sql.compile.CostEstimate;
import org.apache.derby.iapi.sql.compile.OptimizableList;
import org.apache.derby.iapi.sql.compile.Visitable;
import org.apache.derby.iapi.sql.compile.Visitor;
import org.apache.derby.iapi.sql.compile.RequiredRowOrdering;
import org.apache.derby.iapi.sql.compile.RowOrdering;
import org.apache.derby.iapi.sql.compile.AccessPath;

import org.apache.derby.iapi.reference.ClassName;

import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptor;

import org.apache.derby.iapi.sql.Activation;
import org.apache.derby.iapi.sql.ResultSet;

import org.apache.derby.iapi.error.StandardException;

import org.apache.derby.iapi.store.access.TransactionController;

import org.apache.derby.impl.sql.compile.ExpressionClassBuilder;
import org.apache.derby.impl.sql.compile.ActivationClassBuilder;

import org.apache.derby.iapi.services.compiler.MethodBuilder;

import org.apache.derby.iapi.services.loader.GeneratedMethod;

import org.apache.derby.iapi.services.sanity.SanityManager;

import org.apache.derby.catalog.types.ReferencedColumnsDescriptorImpl;
import org.apache.derby.iapi.services.io.FormatableArrayHolder;
import org.apache.derby.iapi.services.io.FormatableIntHolder;
import org.apache.derby.iapi.util.JBitSet;
import org.apache.derby.iapi.services.classfile.VMOpcode;

import java.util.Properties;

/**
 * A HashTableNode represents a result set where a hash table is built.
 *
 */

public class HashTableNode extends SingleChildResultSetNode
{
	// Single-table predicates, evaluated while filling the hash table.
	PredicateList	searchPredicateList;
	// Multi-table (equijoin) predicates, used as hash-lookup qualifiers.
	PredicateList	joinPredicateList;

	// Subqueries from the result column list.
	SubqueryList	pSubqueryList;
	// Subqueries from the predicate lists.
	SubqueryList	rSubqueryList;

	/**
	 * Initializer for a HashTableNode.
	 *
	 * @param childResult			The child result set
	 * @param tableProperties	Properties list associated with the table
	 * @param resultColumns			The RCL.
	 * @param searchPredicateList	Single table clauses
	 * @param joinPredicateList		Multi table clauses
	 * @param accessPath			The access path
	 * @param costEstimate			The cost estimate
	 * @param pSubqueryList			List of subqueries in RCL
	 * @param rSubqueryList			List of subqueries in Predicate lists
	 * @param hashKeyColumns		Hash key columns
	 */
	public void init(
			Object childResult,
			Object tableProperties,
			Object resultColumns,
			Object searchPredicateList,
			Object joinPredicateList,
			Object accessPath,
			Object costEstimate,
			Object pSubqueryList,
			Object rSubqueryList,
			Object hashKeyColumns)
	{
		super.init(childResult, tableProperties);
		// All parameters arrive as Object (node-factory convention) and are
		// downcast to their concrete compile-time types here.
		this.resultColumns = (ResultColumnList) resultColumns;
		this.searchPredicateList = (PredicateList) searchPredicateList;
		this.joinPredicateList = (PredicateList) joinPredicateList;
		this.trulyTheBestAccessPath = (AccessPathImpl) accessPath;
		this.costEstimate = (CostEstimate) costEstimate;
		this.pSubqueryList = (SubqueryList) pSubqueryList;
		this.rSubqueryList = (SubqueryList) rSubqueryList;
		setHashKeyColumns((int[]) hashKeyColumns);
	}

	/*
	 *  Optimizable interface
	 */

	/**
	 * @see Optimizable#modifyAccessPath
	 *
	 * @exception StandardException		Thrown on error
	 */
	public Optimizable modifyAccessPath(JBitSet outerTables, Optimizer optimizer)
		throws StandardException
	{
		// The access path was fixed when this node was created; nothing to do.
		return this;
	}

	/**
	 * Prints the sub-nodes of this object.  See QueryTreeNode.java for
	 * how tree printing is supposed to work.
	 *
	 * @param depth		The depth of this node in the tree
	 */

	public void printSubNodes(int depth)
	{
		if (SanityManager.DEBUG)
		{
			super.printSubNodes(depth);

			if (searchPredicateList != null)
			{
				printLabel(depth, "searchPredicateList: ");
				searchPredicateList.treePrint(depth + 1);
			}

			if (joinPredicateList != null)
			{
				printLabel(depth, "joinPredicateList: ");
				joinPredicateList.treePrint(depth + 1);
			}
		}
	}

	/**
     * For joins, the tree will be (nodes are left out if the clauses
	 * are empty):
	 *
	 *      ProjectRestrictResultSet -- for the having and the select list
	 *      SortResultSet -- for the group by list
	 *      ProjectRestrictResultSet -- for the where and the select list (if no group or having)
	 *      the result set for the fromList
	 *
	 *
	 * @exception StandardException		Thrown on error
     */
	public void generate(ActivationClassBuilder acb,
								MethodBuilder mb)
							throws StandardException
	{
		if (SanityManager.DEBUG)
        SanityManager.ASSERT(resultColumns != null, "Tree structure bad");

        //
        // If we are projecting and restricting the stream from a table
        // function, then give the table function all of the information that
        // it needs in order to push the projection and qualifiers into
        // the table function. See DERBY-4357.
        //
        if ( childResult instanceof FromVTI )
        {
            ((FromVTI) childResult).computeProjectionAndRestriction( searchPredicateList );
        }

		generateMinion( acb, mb, false);
	}

	/**
	 * General logic shared by Core compilation and by the Replication Filter
	 * compiler. A couple ResultSets (the ones used by PREPARE SELECT FILTER)
	 * implement this method.
	 *
	 * @param acb	The ExpressionClassBuilder for the class being built
	 * @param mb	the method  the expression will go into
	 *
	 *
	 * @exception StandardException		Thrown on error
	 */

	public void generateResultSet(ExpressionClassBuilder acb,
										MethodBuilder mb)
									throws StandardException
	{
		generateMinion( acb, mb, true);
	}

	/**
	 * Logic shared by generate() and generateResultSet().
	 * Emits the bytecode that constructs a HashTableResultSet at run time:
	 * restores the predicates into the tree, generates the search-clause
	 * expression method (if any), the equijoin qualifiers, the optional
	 * projection method, and finally the 14-argument factory call.
	 *
	 * @param acb	The ExpressionClassBuilder for the class being built
	 * @param mb	the method  the expression will go into
	 * @param genChildResultSet	true when called from generateResultSet()
	 *							(replication-filter path); selects which
	 *							generate method is invoked on the child
	 *
	 * @exception StandardException		Thrown on error
	 */

	private void generateMinion(ExpressionClassBuilder acb,
									 MethodBuilder mb, boolean genChildResultSet)
									throws StandardException
	{
		MethodBuilder	userExprFun;
		ValueNode		searchClause = null;
		// NOTE(review): equijoinClause is never assigned or read below —
		// presumably a leftover; confirm before removing.
		ValueNode		equijoinClause = null;


		/* The tableProperties, if non-null, must be correct to get this far.
		 * We simply call verifyProperties to set initialCapacity and
		 * loadFactor.
		 */
		verifyProperties(getDataDictionary());

		// build up the tree.

		/* Put the predicates back into the tree */
		if (searchPredicateList != null)
		{
			// Remove any redundant predicates before restoring
			searchPredicateList.removeRedundantPredicates();
			searchClause = searchPredicateList.restorePredicates();
			/* Allow the searchPredicateList to get garbage collected now
			 * that we're done with it.
			 */
			searchPredicateList = null;
		}

		// for the single table predicates, we generate an exprFun
		// that evaluates the expression of the clause
		// against the current row of the child's result.
		// if the restriction is empty, simply pass null
		// to optimize for run time performance.

   		// generate the function and initializer:
   		// Note: Boolean lets us return nulls (boolean would not)
   		// private Boolean exprN()
   		// {
   		//   return <<searchClause.generate(ps)>>;
   		// }
   		// static Method exprN = method pointer to exprN;


		// Map the result columns to the source columns
        ResultColumnList.ColumnMapping mappingArrays =
	            resultColumns.mapSourceColumns();

        int[] mapArray = mappingArrays.mapArray;

		int mapArrayItem = acb.addItem(new ReferencedColumnsDescriptorImpl(mapArray));

		// Save the hash key columns 
		FormatableIntHolder[] fihArray = 
				FormatableIntHolder.getFormatableIntHolders(hashKeyColumns()); 
		FormatableArrayHolder hashKeyHolder = new FormatableArrayHolder(fihArray);
		int hashKeyItem = acb.addItem(hashKeyHolder);

		/* Generate the HashTableResultSet:
		 *	arg1: childExpress - Expression for childResultSet
		 *  arg2: searchExpress - Expression for single table predicates
		 *	arg3 : equijoinExpress - Qualifier[] for hash table look up
		 *  arg4: projectExpress - Expression for projection, if any
		 *  arg5: resultSetNumber
		 *  arg6: mapArrayItem - item # for mapping of source columns
		 *  arg7: reuseResult - whether or not the result row can be reused
		 *						(ie, will it always be the same)
		 *  arg8: hashKeyItem - item # for int[] of hash column #s
		 *  arg9: removeDuplicates - don't remove duplicates in hash table (for now)
		 *  arg10: maxInMemoryRowCount - max row size for in-memory hash table
		 *  arg11: initialCapacity - initialCapacity for java.util.Hashtable
		 *  arg12 : loadFactor - loadFactor for java.util.Hashtable
		 *  arg13: estimated row count
		 *  arg14: estimated cost
		 *  arg15: close method
		 */

		acb.pushGetResultSetFactoryExpression(mb);

		if (genChildResultSet)
			childResult.generateResultSet(acb, mb);
		else
			childResult.generate((ActivationClassBuilder) acb, mb);

		/* Get the next ResultSet #, so that we can number this ResultSetNode, its
		 * ResultColumnList and ResultSet.
		 */
		assignResultSetNumber();

		/* Set the point of attachment in all subqueries attached
		 * to this node.
		 */
		// NOTE(review): the ASSERT below checks size() == 0 immediately
		// inside a size() > 0 guard, so it fires whenever subqueries are
		// present in a SANE build — it appears to document that hash table
		// nodes are never expected to carry attached subqueries; confirm.
		if (pSubqueryList != null && pSubqueryList.size() > 0)
		{
			pSubqueryList.setPointOfAttachment(resultSetNumber);
			if (SanityManager.DEBUG)
			{
				SanityManager.ASSERT(pSubqueryList.size() == 0,
					"pSubqueryList.size() expected to be 0");
			}
		}
		if (rSubqueryList != null && rSubqueryList.size() > 0)
		{
			rSubqueryList.setPointOfAttachment(resultSetNumber);
			if (SanityManager.DEBUG)
			{
				SanityManager.ASSERT(rSubqueryList.size() == 0,
					"rSubqueryList.size() expected to be 0");
			}
		}

		// Get the final cost estimate based on child's cost.
		costEstimate = childResult.getFinalCostEstimate();

		// if there is no searchClause, we just want to pass null.
		if (searchClause == null)
		{
		   	mb.pushNull(ClassName.GeneratedMethod);
		}
		else
		{
			// this sets up the method and the static field.
			// generates:
			// 	DataValueDescriptor userExprFun { }
			userExprFun = acb.newUserExprFun();

			// searchClause knows it is returning its value;

			/* generates:
			 *    return <searchClause.generate(acb)>;
			 * and adds it to userExprFun
			 * NOTE: The explicit cast to DataValueDescriptor is required
			 * since the searchClause may simply be a boolean column or subquery
			 * which returns a boolean.  For example:
			 *		where booleanColumn
			 */
			searchClause.generateExpression(acb, userExprFun);
			userExprFun.methodReturn();

			/* PUSHCOMPILER
			userSB.newReturnStatement(searchClause.generateExpression(acb, userSB));
			*/

			// we are done modifying userExprFun, complete it.
			userExprFun.complete();

	   		// searchClause is used in the final result set as an access of the new static
   			// field holding a reference to this new method.
			// generates:
			//	ActivationClass.userExprFun
			// which is the static field that "points" to the userExprFun
			// that evaluates the where clause.
   			acb.pushMethodReference(mb, userExprFun);
		}

		/* Generate the qualifiers for the look up into
		 * the hash table.
		 */
		joinPredicateList.generateQualifiers(acb, mb, (Optimizable) childResult,
												false);

		/* Determine whether or not reflection is needed for the projection.
		 * Reflection is not needed if all of the columns map directly to source
		 * columns.
		 */
		if (reflectionNeededForProjection())
		{
			// for the resultColumns, we generate a userExprFun
			// that creates a new row from expressions against
			// the current row of the child's result.
			// (Generate optimization: see if we can simply
			// return the current row -- we could, but don't, optimize
			// the function call out and have execution understand
			// that a null function pointer means take the current row
			// as-is, with the performance trade-off as discussed above.)

			/* Generate the Row function for the projection */
			resultColumns.generateCore(acb, mb, false);
		}
		else
		{
		   	mb.pushNull(ClassName.GeneratedMethod);
		}

		// Remaining positional arguments for getHashTableResultSet; order
		// must match the arg list documented above.
		mb.push(resultSetNumber);
		mb.push(mapArrayItem);
		mb.push(resultColumns.reusableResult());
		mb.push(hashKeyItem);
		mb.push(false);
		mb.push(-1L);
		mb.push(initialCapacity);
		mb.push(loadFactor);
		mb.push(costEstimate.singleScanRowCount());
		mb.push(costEstimate.getEstimatedCost());

		mb.callMethod(VMOpcode.INVOKEINTERFACE, (String) null, "getHashTableResultSet",
						ClassName.NoPutResultSet, 14);
	}

	/**
	 * Accept the visitor for all visitable children of this node.
	 * 
	 * @param v the visitor
	 *
	 * @exception StandardException on error
	 */
	void acceptChildren(Visitor v)
		throws StandardException
	{
		super.acceptChildren(v);

		if (searchPredicateList != null)
		{
			searchPredicateList = (PredicateList)searchPredicateList.accept(v);
		}

		if (joinPredicateList != null)
		{
			joinPredicateList = (PredicateList)joinPredicateList.accept(v);
		}
	}
}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Describes an association between a resource attachment and a transit gateway route table.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/TransitGatewayAssociation" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TransitGatewayAssociation implements Serializable, Cloneable {

    /** The ID of the transit gateway route table. */
    private String transitGatewayRouteTableId;
    /** The ID of the attachment. */
    private String transitGatewayAttachmentId;
    /** The ID of the resource. */
    private String resourceId;
    /** The resource type. */
    private String resourceType;
    /** The state of the association. */
    private String state;

    /**
     * Sets the ID of the transit gateway route table.
     *
     * @param transitGatewayRouteTableId
     *        The ID of the transit gateway route table.
     */
    public void setTransitGatewayRouteTableId(String transitGatewayRouteTableId) {
        this.transitGatewayRouteTableId = transitGatewayRouteTableId;
    }

    /**
     * Returns the ID of the transit gateway route table.
     *
     * @return The ID of the transit gateway route table.
     */
    public String getTransitGatewayRouteTableId() {
        return this.transitGatewayRouteTableId;
    }

    /**
     * Fluent setter for the transit gateway route table ID.
     *
     * @param transitGatewayRouteTableId
     *        The ID of the transit gateway route table.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayAssociation withTransitGatewayRouteTableId(String transitGatewayRouteTableId) {
        setTransitGatewayRouteTableId(transitGatewayRouteTableId);
        return this;
    }

    /**
     * Sets the ID of the attachment.
     *
     * @param transitGatewayAttachmentId
     *        The ID of the attachment.
     */
    public void setTransitGatewayAttachmentId(String transitGatewayAttachmentId) {
        this.transitGatewayAttachmentId = transitGatewayAttachmentId;
    }

    /**
     * Returns the ID of the attachment.
     *
     * @return The ID of the attachment.
     */
    public String getTransitGatewayAttachmentId() {
        return this.transitGatewayAttachmentId;
    }

    /**
     * Fluent setter for the attachment ID.
     *
     * @param transitGatewayAttachmentId
     *        The ID of the attachment.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayAssociation withTransitGatewayAttachmentId(String transitGatewayAttachmentId) {
        setTransitGatewayAttachmentId(transitGatewayAttachmentId);
        return this;
    }

    /**
     * Sets the ID of the resource.
     *
     * @param resourceId
     *        The ID of the resource.
     */
    public void setResourceId(String resourceId) {
        this.resourceId = resourceId;
    }

    /**
     * Returns the ID of the resource.
     *
     * @return The ID of the resource.
     */
    public String getResourceId() {
        return this.resourceId;
    }

    /**
     * Fluent setter for the resource ID.
     *
     * @param resourceId
     *        The ID of the resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TransitGatewayAssociation withResourceId(String resourceId) {
        setResourceId(resourceId);
        return this;
    }

    /**
     * Sets the resource type.
     *
     * @param resourceType
     *        The resource type.
     * @see TransitGatewayAttachmentResourceType
     */
    public void setResourceType(String resourceType) {
        this.resourceType = resourceType;
    }

    /**
     * Returns the resource type.
     *
     * @return The resource type.
     * @see TransitGatewayAttachmentResourceType
     */
    public String getResourceType() {
        return this.resourceType;
    }

    /**
     * Fluent setter for the resource type.
     *
     * @param resourceType
     *        The resource type.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TransitGatewayAttachmentResourceType
     */
    public TransitGatewayAssociation withResourceType(String resourceType) {
        setResourceType(resourceType);
        return this;
    }

    /**
     * Fluent setter for the resource type, taking the enum form.
     *
     * @param resourceType
     *        The resource type.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TransitGatewayAttachmentResourceType
     */
    public TransitGatewayAssociation withResourceType(TransitGatewayAttachmentResourceType resourceType) {
        this.resourceType = resourceType.toString();
        return this;
    }

    /**
     * Sets the state of the association.
     *
     * @param state
     *        The state of the association.
     * @see TransitGatewayAssociationState
     */
    public void setState(String state) {
        this.state = state;
    }

    /**
     * Returns the state of the association.
     *
     * @return The state of the association.
     * @see TransitGatewayAssociationState
     */
    public String getState() {
        return this.state;
    }

    /**
     * Fluent setter for the association state.
     *
     * @param state
     *        The state of the association.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TransitGatewayAssociationState
     */
    public TransitGatewayAssociation withState(String state) {
        setState(state);
        return this;
    }

    /**
     * Fluent setter for the association state, taking the enum form.
     *
     * @param state
     *        The state of the association.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see TransitGatewayAssociationState
     */
    public TransitGatewayAssociation withState(TransitGatewayAssociationState state) {
        this.state = state.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Emits "Label: value," per set field; the final field carries no
        // trailing comma (matching the generated-SDK format exactly).
        StringBuilder text = new StringBuilder("{");
        if (transitGatewayRouteTableId != null) {
            text.append("TransitGatewayRouteTableId: ").append(transitGatewayRouteTableId).append(",");
        }
        if (transitGatewayAttachmentId != null) {
            text.append("TransitGatewayAttachmentId: ").append(transitGatewayAttachmentId).append(",");
        }
        if (resourceId != null) {
            text.append("ResourceId: ").append(resourceId).append(",");
        }
        if (resourceType != null) {
            text.append("ResourceType: ").append(resourceType).append(",");
        }
        if (state != null) {
            text.append("State: ").append(state);
        }
        return text.append("}").toString();
    }

    /** Null-safe field comparison: true when both are null or a.equals(b). */
    private static boolean fieldEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof TransitGatewayAssociation)) {
            return false;
        }
        TransitGatewayAssociation that = (TransitGatewayAssociation) obj;
        return fieldEquals(transitGatewayRouteTableId, that.getTransitGatewayRouteTableId())
                && fieldEquals(transitGatewayAttachmentId, that.getTransitGatewayAttachmentId())
                && fieldEquals(resourceId, that.getResourceId())
                && fieldEquals(resourceType, that.getResourceType())
                && fieldEquals(state, that.getState());
    }

    @Override
    public int hashCode() {
        // 31-based accumulation over the fields in declaration order —
        // numerically identical to the generated prime * hash + field form.
        int hash = 1;
        for (Object field : new Object[] { transitGatewayRouteTableId, transitGatewayAttachmentId, resourceId,
                resourceType, state }) {
            hash = 31 * hash + (field == null ? 0 : field.hashCode());
        }
        return hash;
    }

    @Override
    public TransitGatewayAssociation clone() {
        try {
            return (TransitGatewayAssociation) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/*
 * Copyright 2016-2020 chronicle.software
 *
 * https://chronicle.software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.openhft.chronicle.wire;

import net.openhft.chronicle.bytes.Bytes;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.lang.annotation.RetentionPolicy;
import java.lang.reflect.Type;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;

import static org.junit.Assert.*;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;

/**
 * Parameterized round-trip tests covering the core {@link WireType}
 * implementations (BINARY with and without padding, TEXT, JSON).
 * Each test writes values through a freshly created wire and reads them
 * back, asserting symmetry of the write/read APIs.
 */
@SuppressWarnings("rawtypes")
@RunWith(value = Parameterized.class)
public class WireTests {

    // Wire implementation under test for this parameterized run.
    private final WireType wireType;
    // Whether the wire pads its output; applied in createWire(). Only the
    // BINARY entries of the test matrix vary this flag.
    private final boolean usePadding;

    @NotNull
    @Rule
    public TestName name = new TestName();

    public WireTests(WireType wireType, boolean usePadding) {
        this.wireType = wireType;
        this.usePadding = usePadding;
    }

    /**
     * Test matrix: BINARY is exercised both padded and unpadded; TEXT and
     * JSON are exercised unpadded only.
     */
    @NotNull
    @Parameterized.Parameters(name = "{index}: {0} padding: {1}")
    public static Collection<Object[]> data() {
        Object[][] list = {
                {WireType.BINARY, true},
                {WireType.BINARY, false},
                {WireType.TEXT, false},
                {WireType.JSON, false}
        };
        return Arrays.asList(list);
    }

    /**
     * Negative longs written in hex form via {@code int64_0x} (including the
     * extreme {@link Long#MIN_VALUE}) must read back exactly; a type literal
     * written in the same document must also round-trip.
     */
    @Test
    public void testHexLongNegativeTest() {
        final Bytes b = Bytes.elasticByteBuffer();
        final long expectedLong1 = -1;
        final long expectedLong2 = Long.MIN_VALUE;
        try {
            final Wire wire = createWire(b);
            try (DocumentContext dc = wire.writingDocument()) {
                dc.wire().write("w")
                        .int64_0x(expectedLong1);
                dc.wire().write("x")
                        .int64_0x(expectedLong2);
                dc.wire().write("y").typeLiteral(String.class);
            }
            try (DocumentContext dc = wire.readingDocument()) {
                long w = dc.wire().read("w").int64();
                assertEquals(expectedLong1, w);
                long x = dc.wire().read("x").int64();
                assertEquals(expectedLong2, x);
                Class<Object> y = dc.wire().read("y").typeLiteral();
                assertEquals(String.class, y);
            }
        } finally {
            b.releaseLast();
        }
    }

    /**
     * A type literal naming a class that cannot be resolved must still be
     * readable leniently, yielding a Type carrying the original name.
     */
    @Test
    public void testLenientTypeLiteral() {
        final Bytes b = Bytes.elasticByteBuffer();
        try {
            final Wire wire = createWire(b);
            try (DocumentContext dc = wire.writingDocument()) {
                dc.wire().write("w").typeLiteral("DoesntExist");
            }
            try (DocumentContext dc = wire.readingDocument()) {
                Type t = dc.wire().read("w").lenientTypeLiteral();
                assertEquals("DoesntExist", t.getTypeName());
            }
        } finally {
            b.releaseLast();
        }
    }

    /**
     * A {@link Date} must round-trip through the wire unchanged.
     * NOTE(review): unlike the surrounding tests, the Bytes buffer is never
     * released here — confirm whether the leak is intentional.
     */
    @Test
    public void testDate() {
        final Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        wire.getValueOut()
                .object(new Date(1234567890000L));
        assertEquals(new Date(1234567890000L), wire.getValueIn()
                .object(Date.class));
/*      Not sure why this would work
        final Date expectedDate = new Date(1234567890000L);
        String longDateInDefaultLocale = new SimpleDateFormat("EEE MMM d HH:mm:ss zzz yyyy").format(expectedDate);
        wire.getValueOut().object(longDateInDefaultLocale);
        assertEquals(expectedDate, wire.getValueIn()
                .object(Date.class));
        wire.getValueOut().object("2009-02-13 23:31:30.000");
        assertEquals(new Date(1234567890000L), wire.getValueIn()
                .object(Date.class));
*/
    }

    /** A {@link LocalDateTime} must round-trip; JSON needs an explicit type hint on read. */
    @Test
    public void testLocalDateTime() {
        final Bytes b = Bytes.elasticByteBuffer();
        try {
            final Wire wire = createWire(b);
            LocalDateTime expected = LocalDateTime.ofInstant(Instant.EPOCH, ZoneId.systemDefault());
            wire.getValueOut().object(expected);
            // is a hint needed?
            Class type = wireType == WireType.JSON ? LocalDateTime.class : Object.class;
            assertEquals(expected, wire.getValueIn().object(type));
        } finally {
            b.releaseLast();
        }
    }

    /** A {@link ZonedDateTime} must round-trip; JSON needs an explicit type hint on read. */
    @Test
    public void testZonedDateTime() {
        final Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        ZonedDateTime expected = ZonedDateTime.ofInstant(Instant.EPOCH, ZoneId.systemDefault());
        wire.getValueOut().object(expected);
        // is a hint needed?
        Class type = wireType == WireType.JSON ? ZonedDateTime.class : Object.class;
        assertEquals(expected, wire.getValueIn().object(type));
        b.releaseLast();
    }

    /** skipValue() must consume a value of either text or numeric type without error. */
    @Test
    public void testSkipValueWithNumbersAndStrings() {
        final Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        wire.write("value1").text("text");
        wire.write("number").int64(125);
        StringBuilder field;
        field = new StringBuilder();
        wire.read(field).skipValue();
        // System.out.println("read field=" + field.toString());
        field = new StringBuilder();
        wire.read(field).skipValue();
        // System.out.println("read field=" + field.toString());
        b.releaseLast();
    }

    /**
     * A written null must read back as null regardless of the requested
     * target type (plain Object, String, enum, or a marshallable class).
     */
    @Test
    public void testWriteNull() {
        final Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        wire.write().object(null);
        wire.write().object(null);
        wire.write().object(null);
        wire.write().object(null);
        @Nullable Object o = wire.read().object(Object.class);
        Assert.assertNull(o);
        @Nullable String s = wire.read().object(String.class);
        Assert.assertNull(s);
        @Nullable RetentionPolicy rp = wire.read().object(RetentionPolicy.class);
        Assert.assertNull(rp);
        @Nullable Circle c = wire.read().object(Circle.class);
        // this fails without the check.
        Assert.assertNull(c);
        b.releaseLast();
    }

    /** A marshallable holding a Class field must round-trip (JSON excluded). */
    @Test
    public void testClassTypedMarshallableObject() {
        assumeFalse(wireType == WireType.JSON);
        @NotNull TestClass testClass = new TestClass(Boolean.class);
        final Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        wire.write().typedMarshallable(testClass);
        @Nullable TestClass o = wire.read().typedMarshallable();
        assertEquals(Boolean.class, o.clazz());
        b.releaseLast();
    }

    /**
     * A field name unmatched in one read context must not leak into the next:
     * reading "first" in the SECOND document yields null because that field
     * only exists in the first document.
     * NOTE(review): the Bytes buffer is never released in this test — confirm
     * whether that is intentional.
     */
    @Test
    public void unknownFieldsAreClearedBetweenReadContexts() {
        final Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        try (final DocumentContext documentContext = wire.writingDocument()) {
            documentContext.wire().write("first").text("firstValue");
        }
        try (final DocumentContext documentContext = wire.writingDocument()) {
            documentContext.wire().write("second").text("secondValue");
        }
        try (final DocumentContext documentContext = wire.readingDocument()) {
            assertNull(documentContext.wire().read("not_there").text());
        }
        try (final DocumentContext documentContext = wire.readingDocument()) {
            assertNull(documentContext.wire().read("first").text());
        }
    }

    /**
     * readingPeekYaml() must be empty outside a reading document, render the
     * current document as YAML inside one, and remain stable across reads
     * within the same context. BINARY + padding only.
     */
    @Test
    public void testReadingPeekYaml() {
        assumeTrue(usePadding);
        assumeTrue(wireType == WireType.BINARY);
        Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        assertEquals("", wire.readingPeekYaml());
        try (@NotNull DocumentContext dc = wire.writingDocument(false)) {
            dc.wire().write("some-data!").marshallable(m -> {
                m.write("some-other-data").int64(0);
                assertEquals("", wire.readingPeekYaml());
            });
        }
        try (@NotNull DocumentContext dc = wire.writingDocument(false)) {
            dc.wire().write("some-new").marshallable(m -> {
                m.write("some-other--new-data").int64(0);
                assertEquals("", wire.readingPeekYaml());
            });
        }
        assertEquals("", wire.readingPeekYaml());
        try (@NotNull DocumentContext dc = wire.readingDocument()) {
            assertEquals("" +
                    "--- !!data #binary\n" +
                    "some-data!: {\n" +
                    " some-other-data: 0\n" +
                    "}\n", wire.readingPeekYaml());
            dc.wire().read("some-data");
            assertEquals("" +
                    "--- !!data #binary\n" +
                    "some-data!: {\n" +
                    " some-other-data: 0\n" +
                    "}\n", wire.readingPeekYaml());
        }
        assertEquals("", wire.readingPeekYaml());
        try (@NotNull DocumentContext dc = wire.writingDocument(false)) {
            dc.wire().write("some-data!").marshallable(m -> {
                m.write("some-other-data").int64(0);
                assertEquals("", wire.readingPeekYaml());
            });
        }
        try (@NotNull DocumentContext dc = wire.readingDocument()) {
            // Header offset differs between padded and unpadded layouts.
            int position = usePadding ? 40 : 37;
            assertEquals("" +
                    "# position: " + position + ", header: 0\n" +
                    "--- !!data #binary\n" +
                    "some-new: {\n" +
                    " some-other--new-data: 0\n" +
                    "}\n", wire.readingPeekYaml());
            dc.wire().read("some-data");
            assertEquals("" +
                    "# position: " + position + ", header: 0\n" +
                    "--- !!data #binary\n" +
                    "some-new: {\n" +
                    " some-other--new-data: 0\n" +
                    "}\n", wire.readingPeekYaml());
        }
        b.releaseLast();
    }

    /**
     * isPresent() must report true for a field that was written.
     * NOTE(review): the Bytes buffer is never released here, unlike most
     * other tests in this class — possible leak; confirm.
     */
    @Test
    public void isPresentReturnsTrueWhenValueIsPresent() {
        Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        wire.write("value").int32(12345);
        assertTrue(wire.read("value").isPresent());
    }

    /**
     * isPresent() must report false for a field that was never written.
     * NOTE(review): the Bytes buffer is never released here either — confirm.
     */
    @Test
    public void isPresentReturnsFalseWhenValueIsNotPresent() {
        Bytes b = Bytes.elasticByteBuffer();
        final Wire wire = createWire(b);
        wire.write("value").int32(12345);
        assertFalse(wire.read("anotherValue").isPresent());
    }

    /** Builds a wire of the parameterized type over the given bytes, applying the padding flag. */
    private Wire createWire(Bytes b) {
        final Wire wire = wireType.apply(b);
        wire.usePadding(usePadding);
        return wire;
    }

    /** Simple marshallable fixture carrying a Class-typed field. */
    static class TestClass extends SelfDescribingMarshallable {

        Class o;

        TestClass(Class o) {
            this.o = o;
        }

        Class clazz() {
            return o;
        }
    }
}
package org.apereo.cas.config;

import org.apache.commons.lang3.StringUtils;
import org.apereo.cas.CipherExecutor;
import org.apereo.cas.authentication.PseudoPlatformTransactionManager;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.core.ticket.TicketGrantingTicketProperties;
import org.apereo.cas.configuration.model.core.ticket.registry.TicketRegistryProperties;
import org.apereo.cas.configuration.model.core.util.EncryptionJwtSigningJwtCryptographyProperties;
import org.apereo.cas.configuration.support.Beans;
import org.apereo.cas.logout.LogoutManager;
import org.apereo.cas.ticket.DefaultTicketCatalog;
import org.apereo.cas.ticket.ExpirationPolicy;
import org.apereo.cas.ticket.ServiceTicketFactory;
import org.apereo.cas.ticket.TicketCatalog;
import org.apereo.cas.ticket.TicketCatalogConfigurer;
import org.apereo.cas.ticket.TicketFactory;
import org.apereo.cas.ticket.TicketGrantingTicketFactory;
import org.apereo.cas.ticket.UniqueTicketIdGenerator;
import org.apereo.cas.ticket.factory.DefaultProxyGrantingTicketFactory;
import org.apereo.cas.ticket.factory.DefaultProxyTicketFactory;
import org.apereo.cas.ticket.factory.DefaultServiceTicketFactory;
import org.apereo.cas.ticket.factory.DefaultTicketFactory;
import org.apereo.cas.ticket.factory.DefaultTicketGrantingTicketFactory;
import org.apereo.cas.ticket.proxy.ProxyGrantingTicketFactory;
import org.apereo.cas.ticket.proxy.ProxyHandler;
import org.apereo.cas.ticket.proxy.ProxyTicketFactory;
import org.apereo.cas.ticket.proxy.support.Cas10ProxyHandler;
import org.apereo.cas.ticket.proxy.support.Cas20ProxyHandler;
import org.apereo.cas.ticket.registry.CachingTicketRegistry;
import org.apereo.cas.ticket.registry.DefaultTicketRegistry;
import org.apereo.cas.ticket.registry.DefaultTicketRegistrySupport;
import org.apereo.cas.ticket.registry.NoOpLockingStrategy;
import org.apereo.cas.ticket.registry.TicketRegistry;
import org.apereo.cas.ticket.registry.TicketRegistrySupport;
import org.apereo.cas.ticket.registry.support.LockingStrategy;
import org.apereo.cas.ticket.support.AlwaysExpiresExpirationPolicy;
import org.apereo.cas.ticket.support.HardTimeoutExpirationPolicy;
import org.apereo.cas.ticket.support.MultiTimeUseOrTimeoutExpirationPolicy;
import org.apereo.cas.ticket.support.NeverExpiresExpirationPolicy;
import org.apereo.cas.ticket.support.RememberMeDelegatingExpirationPolicy;
import org.apereo.cas.ticket.support.ThrottledUseAndTimeoutExpirationPolicy;
import org.apereo.cas.ticket.support.TicketGrantingTicketExpirationPolicy;
import org.apereo.cas.ticket.support.TimeoutExpirationPolicy;
import org.apereo.cas.util.HostNameBasedUniqueTicketIdGenerator;
import org.apereo.cas.util.cipher.NoOpCipherExecutor;
import org.apereo.cas.util.cipher.ProtocolTicketCipherExecutor;
import org.apereo.cas.util.http.HttpClient;
import org.jasig.cas.client.ssl.HttpURLConnectionFactory;
import org.jasig.cas.client.validation.AbstractUrlBasedTicketValidator;
import org.jasig.cas.client.validation.Cas30ServiceTicketValidator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.annotation.TransactionManagementConfigurer;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import java.net.HttpURLConnection;
import java.net.URLConnection;
import java.util.List;
import java.util.Map;

/**
 * This is {@link CasCoreTicketsConfiguration}.
 * <p>
 * Declares the default (in-memory) ticket-registry beans, the ticket and
 * proxy-ticket factories, the protocol ticket cipher, and the expiration
 * policies for granting, service and proxy tickets. Every bean is guarded by
 * {@code @ConditionalOnMissingBean}, so any other module may override it by
 * supplying a bean of the same name.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@Configuration("casCoreTicketsConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
@EnableScheduling
@EnableAsync
@EnableTransactionManagement(proxyTargetClass = true)
@AutoConfigureAfter(value = {CasCoreUtilConfiguration.class, CasCoreTicketIdGeneratorsConfiguration.class})
public class CasCoreTicketsConfiguration implements TransactionManagementConfigurer {
    private static final Logger LOGGER = LoggerFactory.getLogger(CasCoreTicketsConfiguration.class);

    @Autowired
    private ApplicationContext applicationContext;

    @Autowired
    private CasConfigurationProperties casProperties;

    // Lazy: the map of per-ticket-type id generators is assembled by another
    // configuration class (see @AutoConfigureAfter) and may not exist yet.
    @Lazy
    @Autowired
    @Qualifier("uniqueIdGeneratorsMap")
    private Map<String, UniqueTicketIdGenerator> uniqueIdGeneratorsMap;

    @Autowired
    @Qualifier("ticketRegistry")
    private TicketRegistry ticketRegistry;

    @Autowired
    @Qualifier("supportsTrustStoreSslSocketFactoryHttpClient")
    private HttpClient httpClient;

    @Autowired
    @Qualifier("hostnameVerifier")
    private HostnameVerifier hostnameVerifier;

    @Autowired
    @Qualifier("sslContext")
    private SSLContext sslContext;

    /**
     * CAS 3.0 protocol ticket validator pointed at this server's own prefix,
     * wired with a connection factory that applies the configured SSL socket
     * factory and hostname verifier to HTTPS connections.
     */
    @ConditionalOnMissingBean(name = "casClientTicketValidator")
    @Bean
    public AbstractUrlBasedTicketValidator casClientTicketValidator() {
        final Cas30ServiceTicketValidator validator = new Cas30ServiceTicketValidator(casProperties.getServer().getPrefix());
        final HttpURLConnectionFactory factory = new HttpURLConnectionFactory() {
            private static final long serialVersionUID = 3692658214483917813L;

            @Override
            public HttpURLConnection buildHttpURLConnection(final URLConnection conn) {
                // Only HTTPS connections get the custom socket factory and
                // verifier; plain HTTP connections pass through unchanged.
                if (conn instanceof HttpsURLConnection) {
                    final HttpsURLConnection httpsConnection = (HttpsURLConnection) conn;
                    httpsConnection.setSSLSocketFactory(sslContext.getSocketFactory());
                    httpsConnection.setHostnameVerifier(hostnameVerifier);
                }
                return (HttpURLConnection) conn;
            }
        };
        validator.setURLConnectionFactory(factory);
        return validator;
    }

    /** PGT factory: shares the TGT id generator, TGT expiration policy and protocol cipher. */
    @ConditionalOnMissingBean(name = "defaultProxyGrantingTicketFactory")
    @Bean
    public ProxyGrantingTicketFactory defaultProxyGrantingTicketFactory() {
        return new DefaultProxyGrantingTicketFactory(
                ticketGrantingTicketUniqueIdGenerator(),
                grantingTicketExpirationPolicy(),
                protocolTicketCipherExecutor());
    }

    /** Proxy-ticket factory; lazy because the id-generator map is itself lazy. */
    @ConditionalOnMissingBean(name = "defaultProxyTicketFactory")
    @RefreshScope
    @Bean
    @Lazy
    public ProxyTicketFactory defaultProxyTicketFactory() {
        final boolean onlyTrackMostRecentSession = casProperties.getTicket().getTgt().isOnlyTrackMostRecentSession();
        return new DefaultProxyTicketFactory(proxyTicketExpirationPolicy(), uniqueIdGeneratorsMap,
                protocolTicketCipherExecutor(), onlyTrackMostRecentSession);
    }

    /** Host-name-based TGT id generator bounded by the configured max length. */
    @ConditionalOnMissingBean(name = "ticketGrantingTicketUniqueIdGenerator")
    @Bean
    @RefreshScope
    public UniqueTicketIdGenerator ticketGrantingTicketUniqueIdGenerator() {
        return new HostNameBasedUniqueTicketIdGenerator.TicketGrantingTicketIdGenerator(
                casProperties.getTicket().getTgt().getMaxLength(),
                casProperties.getHost().getName());
    }

    /** Host-name-based CAS 2.0 proxy ticket id generator. */
    @ConditionalOnMissingBean(name = "proxy20TicketUniqueIdGenerator")
    @Bean
    public UniqueTicketIdGenerator proxy20TicketUniqueIdGenerator() {
        return new HostNameBasedUniqueTicketIdGenerator.ProxyTicketIdGenerator(
                casProperties.getTicket().getPgt().getMaxLength(),
                casProperties.getHost().getName());
    }

    /** Service-ticket factory; lazy because the id-generator map is itself lazy. */
    @ConditionalOnMissingBean(name = "defaultServiceTicketFactory")
    @Bean
    @Lazy
    public ServiceTicketFactory defaultServiceTicketFactory() {
        final boolean onlyTrackMostRecentSession = casProperties.getTicket().getTgt().isOnlyTrackMostRecentSession();
        return new DefaultServiceTicketFactory(serviceTicketExpirationPolicy(),
                uniqueIdGeneratorsMap, onlyTrackMostRecentSession, protocolTicketCipherExecutor());
    }

    /** TGT factory built from the TGT id generator, expiration policy and protocol cipher. */
    @ConditionalOnMissingBean(name = "defaultTicketGrantingTicketFactory")
    @Bean
    public TicketGrantingTicketFactory defaultTicketGrantingTicketFactory() {
        return new DefaultTicketGrantingTicketFactory(ticketGrantingTicketUniqueIdGenerator(),
                grantingTicketExpirationPolicy(), protocolTicketCipherExecutor());
    }

    /** Composite factory aggregating the PGT, TGT, ST and PT factories. */
    @ConditionalOnMissingBean(name = "defaultTicketFactory")
    @Bean
    public TicketFactory defaultTicketFactory() {
        return new DefaultTicketFactory(defaultProxyGrantingTicketFactory(),
                defaultTicketGrantingTicketFactory(),
                defaultServiceTicketFactory(),
                defaultProxyTicketFactory());
    }

    /** CAS 1.0 proxy handler (proxying unsupported in the 1.0 protocol). */
    @ConditionalOnMissingBean(name = "proxy10Handler")
    @Bean
    public ProxyHandler proxy10Handler() {
        return new Cas10ProxyHandler();
    }

    /** CAS 2.0 proxy handler using the trust-store-aware HTTP client for PGT callbacks. */
    @ConditionalOnMissingBean(name = "proxy20Handler")
    @Bean
    public ProxyHandler proxy20Handler() {
        return new Cas20ProxyHandler(httpClient, proxy20TicketUniqueIdGenerator());
    }

    /**
     * Default in-memory ticket registry. Tickets do not survive a container
     * restart, hence the warning. When the in-memory "cache" option is on, a
     * {@link CachingTicketRegistry} wired to the logout manager is used
     * instead of the plain map-backed registry.
     */
    @ConditionalOnMissingBean(name = "ticketRegistry")
    @Bean
    public TicketRegistry ticketRegistry() {
        LOGGER.warn("Runtime memory is used as the persistence storage for retrieving and managing tickets. "
                + "Tickets that are issued during runtime will be LOST upon container restarts. This MAY impact SSO functionality.");
        final TicketRegistryProperties.InMemory mem = casProperties.getTicket().getRegistry().getInMemory();
        final CipherExecutor cipher = Beans.newTicketRegistryCipherExecutor(mem.getCrypto(), "inMemory");
        if (mem.isCache()) {
            // Fetched from the context rather than injected to avoid an
            // eager dependency on the logout machinery.
            final LogoutManager logoutManager = applicationContext.getBean("logoutManager", LogoutManager.class);
            return new CachingTicketRegistry(cipher, logoutManager);
        }
        return new DefaultTicketRegistry(mem.getInitialCapacity(), mem.getLoadFactor(), mem.getConcurrency(), cipher);
    }

    /** Registry support facade over whichever ticket registry is active. */
    @ConditionalOnMissingBean(name = "defaultTicketRegistrySupport")
    @Bean
    public TicketRegistrySupport defaultTicketRegistrySupport() {
        return new DefaultTicketRegistrySupport(ticketRegistry);
    }

    /**
     * TGT expiration policy entry point: the remember-me delegating policy
     * when remember-me is enabled, otherwise the plain TGT policy chain.
     */
    @ConditionalOnMissingBean(name = "grantingTicketExpirationPolicy")
    @Bean
    public ExpirationPolicy grantingTicketExpirationPolicy() {
        final TicketGrantingTicketProperties tgt = casProperties.getTicket().getTgt();
        if (tgt.getRememberMe().isEnabled()) {
            return rememberMeExpirationPolicy();
        }
        return ticketGrantingTicketExpirationPolicy();
    }

    /**
     * Delegating policy: remember-me sessions use a hard timeout; everything
     * else falls back to the standard TGT policy.
     */
    @Bean
    public ExpirationPolicy rememberMeExpirationPolicy() {
        final TicketGrantingTicketProperties tgt = casProperties.getTicket().getTgt();
        final HardTimeoutExpirationPolicy rememberMePolicy = new HardTimeoutExpirationPolicy(tgt.getRememberMe().getTimeToKillInSeconds());
        final RememberMeDelegatingExpirationPolicy p = new RememberMeDelegatingExpirationPolicy(rememberMePolicy);
        p.addPolicy(RememberMeDelegatingExpirationPolicy.PolicyTypes.REMEMBER_ME, rememberMePolicy);
        p.addPolicy(RememberMeDelegatingExpirationPolicy.PolicyTypes.DEFAULT, ticketGrantingTicketExpirationPolicy());
        return p;
    }

    /** Service tickets expire after N uses or a timeout, whichever comes first. */
    @ConditionalOnMissingBean(name = "serviceTicketExpirationPolicy")
    @Bean
    public ExpirationPolicy serviceTicketExpirationPolicy() {
        return new MultiTimeUseOrTimeoutExpirationPolicy.ServiceTicketExpirationPolicy(
                casProperties.getTicket().getSt().getNumberOfUses(),
                casProperties.getTicket().getSt().getTimeToKillInSeconds());
    }

    /** Proxy tickets expire after N uses or a timeout, whichever comes first. */
    @ConditionalOnMissingBean(name = "proxyTicketExpirationPolicy")
    @Bean
    public ExpirationPolicy proxyTicketExpirationPolicy() {
        return new MultiTimeUseOrTimeoutExpirationPolicy.ProxyTicketExpirationPolicy(
                casProperties.getTicket().getPt().getNumberOfUses(),
                casProperties.getTicket().getPt().getTimeToKillInSeconds());
    }

    /** No-op registry locking; clustered registries are expected to override this. */
    @ConditionalOnMissingBean(name = "lockingStrategy")
    @Bean
    public LockingStrategy lockingStrategy() {
        return new NoOpLockingStrategy();
    }

    /** Pseudo transaction manager — the in-memory registry needs no real transactions. */
    @ConditionalOnMissingBean(name = "ticketTransactionManager")
    @Bean
    public PlatformTransactionManager ticketTransactionManager() {
        return new PseudoPlatformTransactionManager();
    }

    /**
     * Cipher for protocol tickets: JWT-based signing/encryption when crypto
     * is enabled in configuration, otherwise a pass-through no-op cipher.
     */
    @RefreshScope
    @Bean
    @ConditionalOnMissingBean(name = "protocolTicketCipherExecutor")
    public CipherExecutor protocolTicketCipherExecutor() {
        final EncryptionJwtSigningJwtCryptographyProperties crypto = casProperties.getTicket().getCrypto();
        if (crypto.isEnabled()) {
            return new ProtocolTicketCipherExecutor(
                    crypto.getEncryption().getKey(),
                    crypto.getSigning().getKey(),
                    crypto.getAlg());
        }
        LOGGER.debug("Protocol tickets generated by CAS are not signed/encrypted.");
        return NoOpCipherExecutor.getInstance();
    }

    /**
     * Standard TGT expiration policy, selected by configuration precedence:
     * never-expire when both TTL and idle timeout are non-positive, then the
     * plain timeout policy, then hard/idle, then throttled, then hard-only,
     * and finally always-expire as the fallback.
     * NOTE(review): this ordering is load-bearing — the timeout policy wins
     * over hard/idle when both are configured; confirm before reordering.
     */
    @ConditionalOnMissingBean(name = "ticketGrantingTicketExpirationPolicy")
    @Bean
    public ExpirationPolicy ticketGrantingTicketExpirationPolicy() {
        final TicketGrantingTicketProperties tgt = casProperties.getTicket().getTgt();
        if (tgt.getMaxTimeToLiveInSeconds() <= 0 && tgt.getTimeToKillInSeconds() <= 0) {
            LOGGER.warn("Ticket-granting ticket expiration policy is set to NEVER expire tickets.");
            return new NeverExpiresExpirationPolicy();
        }
        if (tgt.getTimeout().getMaxTimeToLiveInSeconds() > 0) {
            LOGGER.debug("Ticket-granting ticket expiration policy is based on a timeout of [{}] seconds",
                    tgt.getTimeout().getMaxTimeToLiveInSeconds());
            return new TimeoutExpirationPolicy(tgt.getTimeout().getMaxTimeToLiveInSeconds());
        }
        if (tgt.getMaxTimeToLiveInSeconds() > 0 && tgt.getTimeToKillInSeconds() > 0) {
            LOGGER.debug("Ticket-granting ticket expiration policy is based on hard/idle timeouts of [{}]/[{}] seconds",
                    tgt.getMaxTimeToLiveInSeconds(), tgt.getTimeToKillInSeconds());
            return new TicketGrantingTicketExpirationPolicy(tgt.getMaxTimeToLiveInSeconds(), tgt.getTimeToKillInSeconds());
        }
        if (tgt.getThrottledTimeout().getTimeInBetweenUsesInSeconds() > 0
                && tgt.getThrottledTimeout().getTimeToKillInSeconds() > 0) {
            final ThrottledUseAndTimeoutExpirationPolicy p = new ThrottledUseAndTimeoutExpirationPolicy();
            p.setTimeToKillInSeconds(tgt.getThrottledTimeout().getTimeToKillInSeconds());
            p.setTimeInBetweenUsesInSeconds(tgt.getThrottledTimeout().getTimeInBetweenUsesInSeconds());
            LOGGER.debug("Ticket-granting ticket expiration policy is based on throttled timeouts");
            return p;
        }
        if (tgt.getHardTimeout().getTimeToKillInSeconds() > 0) {
            LOGGER.debug("Ticket-granting ticket expiration policy is based on a hard timeout of [{}] seconds",
                    tgt.getHardTimeout().getTimeToKillInSeconds());
            return new HardTimeoutExpirationPolicy(tgt.getHardTimeout().getTimeToKillInSeconds());
        }
        LOGGER.warn("Ticket-granting ticket expiration policy is set to ALWAYS expire tickets.");
        return new AlwaysExpiresExpirationPolicy();
    }

    /** @Transactional methods without an explicit manager use the (pseudo) ticket manager. */
    @Override
    public PlatformTransactionManager annotationDrivenTransactionManager() {
        return ticketTransactionManager();
    }

    /**
     * Ticket catalog assembled by letting every registered configurer add its
     * ticket definitions; the "$.+" strip removes CGLIB proxy suffixes from
     * the configurer class name used in the debug log.
     */
    @ConditionalOnMissingBean(name = "ticketCatalog")
    @Autowired
    @Bean
    public TicketCatalog ticketCatalog(final List<TicketCatalogConfigurer> configurers) {
        final DefaultTicketCatalog plan = new DefaultTicketCatalog();
        configurers.forEach(c -> {
            final String name = StringUtils.removePattern(c.getClass().getSimpleName(), "\\$.+");
            LOGGER.debug("Configuring ticket metadata registration plan [{}]", name);
            c.configureTicketCatalog(plan);
        });
        return plan;
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2020_03_01;

import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.management.network.v2020_03_01.implementation.ExpressRouteCircuitConnectionInner;
import com.microsoft.azure.arm.model.Indexable;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.network.v2020_03_01.implementation.NetworkManager;
import com.microsoft.azure.SubResource;

/**
 * Type representing ExpressRouteCircuitConnection.
 * <p>
 * NOTE: generated by AutoRest (see file header) — regenerate rather than
 * hand-editing. Follows the fluent definition/update staged-builder pattern:
 * read-only accessors below, {@link Definition} stages for creation and
 * {@link Update} stages for modification.
 */
public interface ExpressRouteCircuitConnection extends HasInner<ExpressRouteCircuitConnectionInner>, Indexable, Refreshable<ExpressRouteCircuitConnection>, Updatable<ExpressRouteCircuitConnection.Update>, HasManager<NetworkManager> {
    /**
     * @return the addressPrefix value.
     */
    String addressPrefix();

    /**
     * @return the authorizationKey value.
     */
    String authorizationKey();

    /**
     * @return the circuitConnectionStatus value.
     */
    CircuitConnectionStatus circuitConnectionStatus();

    /**
     * @return the etag value.
     */
    String etag();

    /**
     * @return the expressRouteCircuitPeering value.
     */
    SubResource expressRouteCircuitPeering();

    /**
     * @return the id value.
     */
    String id();

    /**
     * @return the ipv6CircuitConnectionConfig value.
     */
    Ipv6CircuitConnectionConfig ipv6CircuitConnectionConfig();

    /**
     * @return the name value.
     */
    String name();

    /**
     * @return the peerExpressRouteCircuitPeering value.
     */
    SubResource peerExpressRouteCircuitPeering();

    /**
     * @return the provisioningState value.
     */
    ProvisioningState provisioningState();

    /**
     * @return the type value.
     */
    String type();

    /**
     * The entirety of the ExpressRouteCircuitConnection definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithPeering, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of ExpressRouteCircuitConnection definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a ExpressRouteCircuitConnection definition.
         */
        interface Blank extends WithPeering {
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify Peering.
         */
        interface WithPeering {
           /**
            * Specifies resourceGroupName, circuitName, peeringName.
            * @param resourceGroupName The name of the resource group
            * @param circuitName The name of the express route circuit
            * @param peeringName The name of the peering
            * @return the next definition stage
            */
            WithCreate withExistingPeering(String resourceGroupName, String circuitName, String peeringName);
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify AddressPrefix.
         */
        interface WithAddressPrefix {
            /**
             * Specifies addressPrefix.
             * @param addressPrefix /29 IP address space to carve out Customer addresses for tunnels
             * @return the next definition stage
             */
            WithCreate withAddressPrefix(String addressPrefix);
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify AuthorizationKey.
         */
        interface WithAuthorizationKey {
            /**
             * Specifies authorizationKey.
             * @param authorizationKey The authorization key
             * @return the next definition stage
             */
            WithCreate withAuthorizationKey(String authorizationKey);
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify CircuitConnectionStatus.
         */
        interface WithCircuitConnectionStatus {
            /**
             * Specifies circuitConnectionStatus.
             * @param circuitConnectionStatus Express Route Circuit connection state. Possible values include: 'Connected', 'Connecting', 'Disconnected'
             * @return the next definition stage
             */
            WithCreate withCircuitConnectionStatus(CircuitConnectionStatus circuitConnectionStatus);
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify ExpressRouteCircuitPeering.
         */
        interface WithExpressRouteCircuitPeering {
            /**
             * Specifies expressRouteCircuitPeering.
             * @param expressRouteCircuitPeering Reference to Express Route Circuit Private Peering Resource of the circuit initiating connection
             * @return the next definition stage
             */
            WithCreate withExpressRouteCircuitPeering(SubResource expressRouteCircuitPeering);
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next definition stage
             */
            WithCreate withId(String id);
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify Ipv6CircuitConnectionConfig.
         */
        interface WithIpv6CircuitConnectionConfig {
            /**
             * Specifies ipv6CircuitConnectionConfig.
             * @param ipv6CircuitConnectionConfig IPv6 Address PrefixProperties of the express route circuit connection
             * @return the next definition stage
             */
            WithCreate withIpv6CircuitConnectionConfig(Ipv6CircuitConnectionConfig ipv6CircuitConnectionConfig);
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify Name.
         */
        interface WithName {
            /**
             * Specifies name.
             * @param name The name of the resource that is unique within a resource group. This name can be used to access the resource
             * @return the next definition stage
             */
            WithCreate withName(String name);
        }

        /**
         * The stage of the expressroutecircuitconnection definition allowing to specify PeerExpressRouteCircuitPeering.
         */
        interface WithPeerExpressRouteCircuitPeering {
            /**
             * Specifies peerExpressRouteCircuitPeering.
             * @param peerExpressRouteCircuitPeering Reference to Express Route Circuit Private Peering Resource of the peered circuit
             * @return the next definition stage
             */
            WithCreate withPeerExpressRouteCircuitPeering(SubResource peerExpressRouteCircuitPeering);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<ExpressRouteCircuitConnection>, DefinitionStages.WithAddressPrefix, DefinitionStages.WithAuthorizationKey, DefinitionStages.WithCircuitConnectionStatus, DefinitionStages.WithExpressRouteCircuitPeering, DefinitionStages.WithId, DefinitionStages.WithIpv6CircuitConnectionConfig, DefinitionStages.WithName, DefinitionStages.WithPeerExpressRouteCircuitPeering {
        }
    }
    /**
     * The template for a ExpressRouteCircuitConnection update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<ExpressRouteCircuitConnection>, UpdateStages.WithAddressPrefix, UpdateStages.WithAuthorizationKey, UpdateStages.WithCircuitConnectionStatus, UpdateStages.WithExpressRouteCircuitPeering, UpdateStages.WithId, UpdateStages.WithIpv6CircuitConnectionConfig, UpdateStages.WithName, UpdateStages.WithPeerExpressRouteCircuitPeering {
    }

    /**
     * Grouping of ExpressRouteCircuitConnection update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the expressroutecircuitconnection update allowing to specify AddressPrefix.
         */
        interface WithAddressPrefix {
            /**
             * Specifies addressPrefix.
             * @param addressPrefix /29 IP address space to carve out Customer addresses for tunnels
             * @return the next update stage
             */
            Update withAddressPrefix(String addressPrefix);
        }

        /**
         * The stage of the expressroutecircuitconnection update allowing to specify AuthorizationKey.
         */
        interface WithAuthorizationKey {
            /**
             * Specifies authorizationKey.
             * @param authorizationKey The authorization key
             * @return the next update stage
             */
            Update withAuthorizationKey(String authorizationKey);
        }

        /**
         * The stage of the expressroutecircuitconnection update allowing to specify CircuitConnectionStatus.
         */
        interface WithCircuitConnectionStatus {
            /**
             * Specifies circuitConnectionStatus.
             * @param circuitConnectionStatus Express Route Circuit connection state. Possible values include: 'Connected', 'Connecting', 'Disconnected'
             * @return the next update stage
             */
            Update withCircuitConnectionStatus(CircuitConnectionStatus circuitConnectionStatus);
        }

        /**
         * The stage of the expressroutecircuitconnection update allowing to specify ExpressRouteCircuitPeering.
         */
        interface WithExpressRouteCircuitPeering {
            /**
             * Specifies expressRouteCircuitPeering.
             * @param expressRouteCircuitPeering Reference to Express Route Circuit Private Peering Resource of the circuit initiating connection
             * @return the next update stage
             */
            Update withExpressRouteCircuitPeering(SubResource expressRouteCircuitPeering);
        }

        /**
         * The stage of the expressroutecircuitconnection update allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next update stage
             */
            Update withId(String id);
        }

        /**
         * The stage of the expressroutecircuitconnection update allowing to specify Ipv6CircuitConnectionConfig.
         */
        interface WithIpv6CircuitConnectionConfig {
            /**
             * Specifies ipv6CircuitConnectionConfig.
             * @param ipv6CircuitConnectionConfig IPv6 Address PrefixProperties of the express route circuit connection
             * @return the next update stage
             */
            Update withIpv6CircuitConnectionConfig(Ipv6CircuitConnectionConfig ipv6CircuitConnectionConfig);
        }

        /**
         * The stage of the expressroutecircuitconnection update allowing to specify Name.
         */
        interface WithName {
            /**
             * Specifies name.
             * @param name The name of the resource that is unique within a resource group. This name can be used to access the resource
             * @return the next update stage
             */
            Update withName(String name);
        }

        /**
         * The stage of the expressroutecircuitconnection update allowing to specify PeerExpressRouteCircuitPeering.
         */
        interface WithPeerExpressRouteCircuitPeering {
            /**
             * Specifies peerExpressRouteCircuitPeering.
             * @param peerExpressRouteCircuitPeering Reference to Express Route Circuit Private Peering Resource of the peered circuit
             * @return the next update stage
             */
            Update withPeerExpressRouteCircuitPeering(SubResource peerExpressRouteCircuitPeering);
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.sjms;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.WeakHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.stream.Collectors;

import javax.jms.Connection;
import javax.jms.ExceptionListener;
import javax.jms.JMSException;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.Session;

import org.apache.camel.Endpoint;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Processor;
import org.apache.camel.component.sjms.consumer.AbstractMessageHandler;
import org.apache.camel.component.sjms.consumer.InOnlyMessageHandler;
import org.apache.camel.component.sjms.consumer.InOutMessageHandler;
import org.apache.camel.component.sjms.jms.ConnectionResource;
import org.apache.camel.component.sjms.taskmanager.TimedTaskManager;
import org.apache.camel.component.sjms.tx.BatchTransactionCommitStrategy;
import org.apache.camel.component.sjms.tx.DefaultTransactionCommitStrategy;
import org.apache.camel.component.sjms.tx.SessionBatchTransactionSynchronization;
import org.apache.camel.component.sjms.tx.SessionTransactionSynchronization;
import org.apache.camel.spi.Synchronization;
import org.apache.camel.support.DefaultConsumer;
import org.apache.camel.util.backoff.BackOff;
import org.apache.camel.util.backoff.BackOffTimer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The SjmsConsumer is the base class for the SJMS MessageListener pool.
 *
 * <p>It maintains a pool of {@link MessageConsumerResources} (one JMS {@link Session}
 * plus {@link MessageConsumer} each), grouped per JMS {@link Connection}, and fills
 * the pool up to {@link #getConsumerCount()} entries on start. When
 * {@code reconnectOnError} is enabled on the endpoint, a JMS exception on a
 * connection tears down that connection's consumers and a back-off timer task
 * refills the pool.</p>
 */
public class SjmsConsumer extends DefaultConsumer {
    private static final Logger LOG = LoggerFactory.getLogger(SjmsConsumer.class);

    // Keyed weakly on the JMS Connection: once a broken connection is no longer
    // strongly referenced elsewhere, its entry can be collected.
    // All structural access is guarded by synchronized (consumers).
    private final Map<Connection, List<MessageConsumerResources>> consumers = new WeakHashMap<>();
    private ScheduledExecutorService scheduler;
    // Pending async start task, so doStop() can cancel an in-flight startup.
    private Future<?> asyncStart;
    // Back-off task used to retry pool refills after connection errors.
    private BackOffTimer.Task rescheduleTask;

    public SjmsConsumer(Endpoint endpoint, Processor processor) {
        super(endpoint, processor);
    }

    @Override
    public SjmsEndpoint getEndpoint() {
        return (SjmsEndpoint) super.getEndpoint();
    }

    /**
     * Starts the consumer: creates the scheduler and fills the consumer pool,
     * either asynchronously (on the component's start/stop executor) or inline,
     * depending on the endpoint's {@code asyncStartListener} flag.
     */
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        this.scheduler = getEndpoint().getCamelContext().getExecutorServiceManager().newDefaultScheduledThreadPool(this, "SjmsConsumer");
        if (getEndpoint().isAsyncStartListener()) {
            asyncStart = getEndpoint().getComponent().getAsyncStartStopExecutorService().submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        fillConsumersPool();
                    } catch (Throwable e) {
                        // Async start is best-effort by design: log and optionally retry later.
                        LOG.warn("Error starting listener container on destination: " + getDestinationName() + ". This exception will be ignored.", e);
                        if (getEndpoint().isReconnectOnError()) {
                            scheduleRefill(); //we should try to fill consumer pool on next time
                        }
                    }
                }

                @Override
                public String toString() {
                    // Gives the async task a readable name in thread dumps/logs
                    return "AsyncStartListenerTask[" + getDestinationName() + "]";
                }
            });
        } else {
            fillConsumersPool();
        }
    }

    /**
     * Adds consumers until the total across all connections reaches the
     * configured consumer count. Caller-visible state is only mutated while
     * holding the {@code consumers} lock.
     */
    private void fillConsumersPool() throws Exception {
        synchronized (consumers) {
            while (consumers.values().stream().collect(Collectors.summarizingInt(List::size)).getSum() < getConsumerCount()) {
                addConsumer();
            }
        }
    }

    /**
     * Closes the JMS consumer and session held by the given resource holder.
     * A transacted session is rolled back first so in-flight work is not
     * committed by the close. JMS failures are logged, not propagated.
     */
    public void destroyObject(MessageConsumerResources model) {
        try {
            if (model.getMessageConsumer() != null) {
                model.getMessageConsumer().close();
            }

            // If the resource holds a session, roll back any open transaction
            // before closing it.
            if (model.getSession() != null) {
                if (model.getSession().getTransacted()) {
                    try {
                        model.getSession().rollback();
                    } catch (Exception e) {
                        // Do nothing. Just make sure we are cleaned up
                    }
                }
                model.getSession().close();
            }
        } catch (JMSException ex) {
            LOG.warn("Exception caught on closing consumer", ex);
        }
    }

    /**
     * Stops the consumer: cancels any pending async start and refill task,
     * destroys all pooled consumers (async or inline per the endpoint's
     * {@code asyncStopListener} flag), and shuts the scheduler down.
     */
    @Override
    protected void doStop() throws Exception {
        super.doStop();
        if (asyncStart != null && !asyncStart.isDone()) {
            asyncStart.cancel(true);
        }
        if (rescheduleTask != null) {
            rescheduleTask.cancel();
        }
        if (getEndpoint().isAsyncStopListener()) {
            getEndpoint().getComponent().getAsyncStartStopExecutorService().submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        synchronized (consumers) {
                            consumers.values().stream().flatMap(Collection::stream).forEach(SjmsConsumer.this::destroyObject);
                            consumers.clear();
                        }
                    } catch (Throwable e) {
                        // Async stop is best-effort by design: log and continue.
                        LOG.warn("Error stopping listener container on destination: " + getDestinationName() + ". This exception will be ignored.", e);
                    }
                }

                @Override
                public String toString() {
                    return "AsyncStopListenerTask[" + getDestinationName() + "]";
                }
            });
        } else {
            synchronized (consumers) {
                consumers.values().stream().flatMap(Collection::stream).forEach(SjmsConsumer.this::destroyObject);
                consumers.clear();
            }
        }
        if (this.scheduler != null) {
            getEndpoint().getCamelContext().getExecutorServiceManager().shutdownGraceful(this.scheduler);
        }
    }

    /**
     * Creates a {@link MessageConsumerResources} with a dedicated {@link Session} required for transacted and InOut
     * consumers.
     *
     * <p>When {@code reconnectOnError} is set, a {@link ReconnectExceptionListener}
     * is installed on the connection (aggregated with any pre-existing listener)
     * so that connection failures trigger consumer teardown and a pool refill.
     * The borrowed connection is always returned to the resource pool.</p>
     */
    private void addConsumer() throws Exception {
        MessageConsumerResources answer;
        ConnectionResource connectionResource = getOrCreateConnectionResource();
        Connection conn = connectionResource.borrowConnection();
        try {
            Session session = conn.createSession(isTransacted(), isTransacted() ? Session.SESSION_TRANSACTED : Session.AUTO_ACKNOWLEDGE);
            MessageConsumer messageConsumer = getEndpoint().getJmsObjectFactory().createMessageConsumer(session, getEndpoint());
            MessageListener handler = createMessageHandler(session);
            messageConsumer.setMessageListener(handler);

            if (getEndpoint().isReconnectOnError()) {
                ExceptionListener exceptionListener = conn.getExceptionListener();
                ReconnectExceptionListener reconnectExceptionListener = new ReconnectExceptionListener(conn);
                if (exceptionListener == null) {
                    exceptionListener = reconnectExceptionListener;
                } else {
                    // Preserve the existing listener; ours is added alongside it.
                    exceptionListener = new AggregatedExceptionListener(exceptionListener, reconnectExceptionListener);
                }
                conn.setExceptionListener(exceptionListener);
            }

            answer = new MessageConsumerResources(session, messageConsumer);
            consumers.compute(conn, (key, oldValue) -> {
                if (oldValue == null) {
                    oldValue = new ArrayList<>();
                }
                oldValue.add(answer);
                return oldValue;
            });
        } catch (Exception e) {
            LOG.error("Unable to create the MessageConsumer", e);
            throw e;
        } finally {
            connectionResource.returnConnection(conn);
        }
    }

    /**
     * Helper factory method used to create a MessageListener based on the MEP
     *
     * @param session a session is only required if we are a transacted consumer
     * @return the listener
     */
    protected MessageListener createMessageHandler(Session session) {

        TransactionCommitStrategy commitStrategy;
        if (getTransactionCommitStrategy() != null) {
            commitStrategy = getTransactionCommitStrategy();
        } else if (getTransactionBatchCount() > 0) {
            commitStrategy = new BatchTransactionCommitStrategy(getTransactionBatchCount());
        } else {
            commitStrategy = new DefaultTransactionCommitStrategy();
        }

        Synchronization synchronization;
        if (commitStrategy instanceof BatchTransactionCommitStrategy) {
            // Batch commits need a timer so partial batches eventually commit.
            TimedTaskManager timedTaskManager = getEndpoint().getComponent().getTimedTaskManager();
            synchronization = new SessionBatchTransactionSynchronization(
                    timedTaskManager, session, commitStrategy, getTransactionBatchTimeout());
        } else {
            synchronization = new SessionTransactionSynchronization(session, commitStrategy);
        }

        AbstractMessageHandler messageHandler;
        if (getEndpoint().getExchangePattern().equals(ExchangePattern.InOnly)) {
            if (isTransacted() || isSynchronous()) {
                messageHandler = new InOnlyMessageHandler(getEndpoint(), scheduler, synchronization);
            } else {
                messageHandler = new InOnlyMessageHandler(getEndpoint(), scheduler);
            }
        } else {
            if (isTransacted() || isSynchronous()) {
                messageHandler = new InOutMessageHandler(getEndpoint(), scheduler, synchronization);
            } else {
                messageHandler = new InOutMessageHandler(getEndpoint(), scheduler);
            }
        }

        messageHandler.setSession(session);
        messageHandler.setProcessor(getAsyncProcessor());
        messageHandler.setSynchronous(isSynchronous());
        messageHandler.setTransacted(isTransacted());
        messageHandler.setSharedJMSSession(isSharedJMSSession());
        messageHandler.setTopic(isTopic());
        return messageHandler;
    }

    /**
     * @deprecated use {@link #getOrCreateConnectionResource()}
     */
    @Deprecated
    protected ConnectionResource getConnectionResource() {
        return getEndpoint().getConnectionResource();
    }

    /**
     * Returns the endpoint's connection resource, creating one lazily when the
     * endpoint has none configured.
     */
    protected ConnectionResource getOrCreateConnectionResource() {
        ConnectionResource answer = getEndpoint().getConnectionResource();
        if (answer == null) {
            answer = getEndpoint().createConnectionResource(this);
        }
        return answer;
    }

    /** Returns the configured JMS acknowledgement mode as its int value. */
    public int getAcknowledgementMode() {
        return getEndpoint().getAcknowledgementMode().intValue();
    }

    /**
     * Use to determine if transactions are enabled or disabled.
     *
     * @return true if transacted, otherwise false
     */
    public boolean isTransacted() {
        return getEndpoint().isTransacted();
    }

    /**
     * Use to determine if JMS session should be propagated to share with other SJMS endpoints.
     *
     * @return true if shared, otherwise false
     */
    public boolean isSharedJMSSession() {
        return getEndpoint().isSharedJMSSession();
    }

    /**
     * Use to determine whether or not to process exchanges synchronously.
     *
     * @return true if synchronous
     */
    public boolean isSynchronous() {
        return getEndpoint().isSynchronous();
    }

    /**
     * The destination name for this consumer.
     *
     * @return String
     */
    public String getDestinationName() {
        return getEndpoint().getDestinationName();
    }

    /**
     * Returns the number of consumer listeners.
     *
     * @return the consumerCount
     */
    public int getConsumerCount() {
        return getEndpoint().getConsumerCount();
    }

    /**
     * Flag set by the endpoint used by consumers and producers to determine if the consumer is a JMS Topic.
     *
     * @return the topic true if consumer is a JMS Topic, default is false
     */
    public boolean isTopic() {
        return getEndpoint().isTopic();
    }

    /**
     * Gets the JMS Message selector syntax.
     */
    public String getMessageSelector() {
        return getEndpoint().getMessageSelector();
    }

    /**
     * Gets the durable subscription Id.
     *
     * @return the durableSubscriptionId
     */
    public String getDurableSubscriptionId() {
        return getEndpoint().getDurableSubscriptionId();
    }

    /**
     * Gets the commit strategy.
     *
     * @return the transactionCommitStrategy
     */
    public TransactionCommitStrategy getTransactionCommitStrategy() {
        return getEndpoint().getTransactionCommitStrategy();
    }

    /**
     * If transacted, returns the number of messages to be processed before committing the transaction.
     *
     * @return the transactionBatchCount
     */
    public int getTransactionBatchCount() {
        return getEndpoint().getTransactionBatchCount();
    }

    /**
     * Returns the timeout value for batch transactions.
     *
     * @return long
     */
    public long getTransactionBatchTimeout() {
        return getEndpoint().getTransactionBatchTimeout();
    }

    /**
     * Back-off timer callback: attempts one pool refill.
     *
     * @return true to keep retrying (refill failed), false to stop (succeeded)
     */
    private boolean refillPool(BackOffTimer.Task task) {
        LOG.debug("Refill consumers pool task running");
        try {
            fillConsumersPool();
            LOG.info("Refill consumers pool completed (attempt: {})", task.getCurrentAttempts());
            return false;
        } catch (Exception ex) {
            LOG.warn(
                    "Refill consumers pool failed (attempt: {}) due to: {}. Will try again in {} millis. (stacktrace in DEBUG level)",
                    task.getCurrentAttempts(), ex.getMessage(), task.getCurrentDelay());
            if (LOG.isDebugEnabled()) {
                LOG.debug("Refill consumers pool failed", ex);
            }
        }
        return true;
    }

    /**
     * Schedules a back-off refill task unless one is already active, using the
     * endpoint's reconnect back-off delay.
     */
    private void scheduleRefill() {
        if (rescheduleTask == null || rescheduleTask.getStatus() != BackOffTimer.Task.Status.Active) {
            BackOff backOff = BackOff.builder().delay(getEndpoint().getReconnectBackOff()).build();
            rescheduleTask = new BackOffTimer(scheduler).schedule(backOff, this::refillPool);
        }
    }

    /**
     * JMS {@link ExceptionListener} that, on a connection failure, destroys the
     * consumers pooled for that connection and schedules a pool refill. Holds the
     * connection weakly so it does not keep a dead connection alive.
     */
    private final class ReconnectExceptionListener implements ExceptionListener {
        private final WeakReference<Connection> connection;

        private ReconnectExceptionListener(Connection connection) {
            this.connection = new WeakReference<>(connection);
        }

        @Override
        public void onException(JMSException exception) {
            LOG.debug("Handling JMSException for reconnecting", exception);
            Connection currentConnection = connection.get();
            if (currentConnection != null) {
                synchronized (consumers) {
                    List<MessageConsumerResources> toClose = consumers.get(currentConnection);
                    if (toClose != null) {
                        toClose.forEach(SjmsConsumer.this::destroyObject);
                    }
                    consumers.remove(currentConnection);
                }
                scheduleRefill();
            }
        }

        //hash and equals to prevent multiple instances for same connection
        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            ReconnectExceptionListener that = (ReconnectExceptionListener) o;
            return Objects.equals(connection.get(), that.connection.get());
        }

        @Override
        public int hashCode() {
            final Connection currentConnection = this.connection.get();
            return currentConnection == null ? 0 : currentConnection.hashCode();
        }
    }
}
/*
 *
 * Copyright 2017 Crown Copyright
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package stroom.security.identity.account;

import stroom.util.shared.HasIntegerId;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Mutable bean describing a user account in the identity service, serialized
 * to/from JSON with Jackson (null fields omitted via {@code NON_NULL}).
 *
 * <p>Carries audit fields (create/update time and user), login statistics,
 * and state flags ({@code enabled}, {@code inactive}, {@code locked}, etc.).
 * {@link #getStatus()} derives a single display status from those flags and is
 * excluded from serialization.</p>
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
public class Account implements HasIntegerId {

    @JsonProperty
    private Integer id;
    @JsonProperty
    private Integer version;
    @JsonProperty
    private Long createTimeMs;
    @JsonProperty
    private Long updateTimeMs;
    @JsonProperty
    private String createUser;
    @JsonProperty
    private String updateUser;
    @JsonProperty
    private String userId;
    @JsonProperty
    private String email;
    @JsonProperty
    private String firstName;
    @JsonProperty
    private String lastName;
    @JsonProperty
    private String comments;
    @JsonProperty
    private int loginCount;
    @JsonProperty
    private int loginFailures;
    @JsonProperty
    private Long lastLoginMs;
    @JsonProperty
    private Long reactivatedMs;
    @JsonProperty
    private boolean forcePasswordChange;
    @JsonProperty
    private boolean neverExpires;
    @JsonProperty
    private boolean enabled;
    @JsonProperty
    private boolean inactive;
    @JsonProperty
    private boolean locked;
    @JsonProperty
    private boolean processingAccount;

    // No-arg constructor for frameworks/manual construction via setters.
    public Account() {
    }

    /**
     * Full constructor used by Jackson for JSON deserialization; each parameter
     * maps one-to-one onto the field of the same name.
     */
    @JsonCreator
    public Account(@JsonProperty("id") final Integer id,
                   @JsonProperty("version") final Integer version,
                   @JsonProperty("createTimeMs") final Long createTimeMs,
                   @JsonProperty("updateTimeMs") final Long updateTimeMs,
                   @JsonProperty("createUser") final String createUser,
                   @JsonProperty("updateUser") final String updateUser,
                   @JsonProperty("userId") final String userId,
                   @JsonProperty("email") final String email,
                   @JsonProperty("firstName") final String firstName,
                   @JsonProperty("lastName") final String lastName,
                   @JsonProperty("comments") final String comments,
                   @JsonProperty("loginCount") final int loginCount,
                   @JsonProperty("loginFailures") final int loginFailures,
                   @JsonProperty("lastLoginMs") final Long lastLoginMs,
                   @JsonProperty("reactivatedMs") final Long reactivatedMs,
                   @JsonProperty("forcePasswordChange") final boolean forcePasswordChange,
                   @JsonProperty("neverExpires") final boolean neverExpires,
                   @JsonProperty("enabled") final boolean enabled,
                   @JsonProperty("inactive") final boolean inactive,
                   @JsonProperty("locked") final boolean locked,
                   @JsonProperty("processingAccount") final boolean processingAccount) {
        this.id = id;
        this.version = version;
        this.createTimeMs = createTimeMs;
        this.updateTimeMs = updateTimeMs;
        this.createUser = createUser;
        this.updateUser = updateUser;
        this.userId = userId;
        this.email = email;
        this.firstName = firstName;
        this.lastName = lastName;
        this.comments = comments;
        this.loginCount = loginCount;
        this.loginFailures = loginFailures;
        this.lastLoginMs = lastLoginMs;
        this.reactivatedMs = reactivatedMs;
        this.forcePasswordChange = forcePasswordChange;
        this.neverExpires = neverExpires;
        this.enabled = enabled;
        this.inactive = inactive;
        this.locked = locked;
        this.processingAccount = processingAccount;
    }

    @Override
    public Integer getId() {
        return id;
    }

    public void setId(final Integer id) {
        this.id = id;
    }

    public Integer getVersion() {
        return version;
    }

    public void setVersion(final Integer version) {
        this.version = version;
    }

    public Long getCreateTimeMs() {
        return createTimeMs;
    }

    public void setCreateTimeMs(final Long createTimeMs) {
        this.createTimeMs = createTimeMs;
    }

    public Long getUpdateTimeMs() {
        return updateTimeMs;
    }

    public void setUpdateTimeMs(final Long updateTimeMs) {
        this.updateTimeMs = updateTimeMs;
    }

    public String getCreateUser() {
        return createUser;
    }

    public void setCreateUser(final String createUser) {
        this.createUser = createUser;
    }

    public String getUpdateUser() {
        return updateUser;
    }

    public void setUpdateUser(final String updateUser) {
        this.updateUser = updateUser;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(final String userId) {
        this.userId = userId;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(final String email) {
        this.email = email;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(final String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(final String lastName) {
        this.lastName = lastName;
    }

    public String getComments() {
        return comments;
    }

    public void setComments(final String comments) {
        this.comments = comments;
    }

    public int getLoginCount() {
        return loginCount;
    }

    public void setLoginCount(final int loginCount) {
        this.loginCount = loginCount;
    }

    public int getLoginFailures() {
        return loginFailures;
    }

    public void setLoginFailures(final int loginFailures) {
        this.loginFailures = loginFailures;
    }

    public Long getLastLoginMs() {
        return lastLoginMs;
    }

    public void setLastLoginMs(final Long lastLoginMs) {
        this.lastLoginMs = lastLoginMs;
    }

    public Long getReactivatedMs() {
        return reactivatedMs;
    }

    public void setReactivatedMs(final Long reactivatedMs) {
        this.reactivatedMs = reactivatedMs;
    }

    public boolean isForcePasswordChange() {
        return forcePasswordChange;
    }

    public void setForcePasswordChange(final boolean forcePasswordChange) {
        this.forcePasswordChange = forcePasswordChange;
    }

    public boolean isNeverExpires() {
        return neverExpires;
    }

    public void setNeverExpires(final boolean neverExpires) {
        this.neverExpires = neverExpires;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(final boolean enabled) {
        this.enabled = enabled;
    }

    public boolean isInactive() {
        return inactive;
    }

    public void setInactive(final boolean inactive) {
        this.inactive = inactive;
    }

    public boolean isLocked() {
        return locked;
    }

    public void setLocked(final boolean locked) {
        this.locked = locked;
    }

    public boolean isProcessingAccount() {
        return processingAccount;
    }

    public void setProcessingAccount(final boolean processingAccount) {
        this.processingAccount = processingAccount;
    }

    /**
     * Derives a single display status from the state flags.
     * Precedence: locked > inactive > enabled; anything else is "Disabled".
     * Not serialized ({@code @JsonIgnore}).
     */
    @JsonIgnore
    public String getStatus() {
        if (locked) {
            return "Locked";
        } else if (inactive) {
            return "Inactive";
        } else if (enabled) {
            return "Enabled";
        } else {
            return "Disabled";
        }
    }

    @Override
    public String toString() {
        return "Account{" +
                "id=" + id +
                ", version=" + version +
                ", createTimeMs=" + createTimeMs +
                ", updateTimeMs=" + updateTimeMs +
                ", createUser='" + createUser + '\'' +
                ", updateUser='" + updateUser + '\'' +
                ", userId='" + userId + '\'' +
                ", email='" + email + '\'' +
                ", firstName='" + firstName + '\'' +
                ", lastName='" + lastName + '\'' +
                ", comments='" + comments + '\'' +
                ", loginCount=" + loginCount +
                ", loginFailures=" + loginFailures +
                ", lastLoginMs=" + lastLoginMs +
                ", reactivatedMs=" + reactivatedMs +
                ", forcePasswordChange=" + forcePasswordChange +
                ", neverExpires=" + neverExpires +
                ", enabled=" + enabled +
                ", inactive=" + inactive +
                ", locked=" + locked +
                ", processingAccount=" + processingAccount +
                '}';
    }
}
/*
 * Copyright 2015 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jbpm.integrationtests;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

import java.util.ArrayList;
import java.util.List;

import org.drools.core.command.runtime.rule.FireAllRulesCommand;
import org.drools.core.definitions.rule.impl.RuleImpl;
import org.drools.core.event.DebugProcessEventListener;
import org.jbpm.test.util.AbstractBaseTest;
import org.junit.Test;
import org.kie.api.command.Command;
import org.kie.api.definition.type.FactType;
import org.kie.api.event.rule.DebugAgendaEventListener;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.rule.AgendaFilter;
import org.kie.api.runtime.rule.Match;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.command.CommandFactory;
import org.kie.internal.definition.KnowledgePackage;
import org.kie.internal.io.ResourceFactory;
import org.kie.internal.runtime.StatefulKnowledgeSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Integration tests for {@link AgendaFilter} behaviour inside ruleflow
 * processes: filtering by salience (JBRULES-3374), cancelling named
 * activations (JBRULES-3376), and listener retrieval (JBRULES-3378).
 */
public class AgendaFilterTest extends AbstractBaseTest {

    private static final Logger logger = LoggerFactory.getLogger(AgendaFilterTest.class);

    /**
     * JBRULES-3374: a salience-based filter should only let rules fire while
     * salience is non-increasing; with R1 (salience 3) and R2 (salience 2) in
     * node1, exactly two rules fire.
     */
    @Test
    public void testAgendaFilter() { // JBRULES-3374
        String drl = "package org.drools\n" +
                " \n" +
                "import org.jbpm.integrationtests.AgendaFilterTest.Message\n" +
                " \n" +
                "rule \"R1\"\n" +
                "ruleflow-group \"node1\"\n" +
                "no-loop \n" +
                "salience 3\n" +
                " when\n" +
                " Message( status == Message.GOODBYE, myMessage : message )\n" +
                " then\n" +
                " System.out.println( \"R1\" );\n" +
                "end\n" +
                "\n" +
                "rule \"R2\"\n" +
                "ruleflow-group \"node1\"\n" +
                "no-loop \n" +
                "salience 2\n" +
                " when\n" +
                " m : Message( status == Message.HELLO, myMessage : message )\n" +
                " then\n" +
                " System.out.println( \"R2\" );\n" +
                " m.setMessage( \"Goodbye cruel world\" );\n" +
                " m.setStatus( Message.GOODBYE );\n" +
                " update( m );\n" +
                "end\n" +
                "\n" +
                "rule \"R3\"\n" +
                "ruleflow-group \"node2\"\n" +
                "no-loop \n" +
                " when\n" +
                " m: Message( status == Message.GOODBYE, myMessage : message )\n" +
                " then\n" +
                " System.out.println( \"R3\" );\n" +
                " m.setStatus(5);\n" +
                " update (m);\n" +
                " \n" +
                "end";

        String rf = "<?xml version=\"1.0\" encoding=\"UTF-8\"?> \n" +
                "<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
                " xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                " xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
                " type=\"RuleFlow\" name=\"flow\" id=\"process-test\" package-name=\"com.sample\" >\n" +
                "\n" +
                " <header>\n" +
                " </header>\n" +
                "\n" +
                " <nodes>\n" +
                " <start id=\"1\" name=\"Start\" x=\"122\" y=\"96\" width=\"48\" height=\"48\" />\n" +
                " <ruleSet id=\"2\" name=\"Node1\" x=\"277\" y=\"96\" width=\"80\" height=\"48\" ruleFlowGroup=\"node1\" />\n" +
                " <ruleSet id=\"3\" name=\"Node2\" x=\"433\" y=\"98\" width=\"80\" height=\"48\" ruleFlowGroup=\"node2\" />\n" +
                " <end id=\"4\" name=\"End\" x=\"645\" y=\"96\" width=\"48\" height=\"48\" />\n" +
                " </nodes>\n" +
                "\n" +
                " <connections>\n" +
                " <connection from=\"1\" to=\"2\" />\n" +
                " <connection from=\"2\" to=\"3\" />\n" +
                " <connection from=\"3\" to=\"4\" />\n" +
                " </connections>\n" +
                "\n" +
                "</process>";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource(rf.getBytes()), ResourceType.DRF );
        if ( kbuilder.hasErrors() ) {
            fail( kbuilder.getErrors().toString() );
        }

        KieSession ksession = createKieSession(kbuilder.getKnowledgePackages().toArray(new KnowledgePackage[0]));

        // go !
        Message message = new Message();
        message.setMessage("Hello World");
        message.setStatus(Message.HELLO);
        ksession.insert(message);
        ksession.startProcess("process-test");

        SalienceFilter filter = new SalienceFilter();
        int fired = ksession.fireAllRules(filter);
        // R2 then R1 fire; R3 is rejected by the filter
        assertEquals(2, fired);
    }

    /** Simple fact used by the DRL in {@link #testAgendaFilter()}. */
    public static class Message {

        public static final int HELLO = 0;
        public static final int GOODBYE = 1;

        private String message;
        private int status;

        public String getMessage() {
            return this.message;
        }

        public void setMessage(String message) {
            this.message = message;
        }

        public int getStatus() {
            return this.status;
        }

        public void setStatus(int status) {
            this.status = status;
        }
    }

    /**
     * Accepts activations only while rule salience does not exceed the first
     * salience seen; lower-salience matches are rejected (and logged).
     */
    public static class SalienceFilter implements AgendaFilter {

        private Integer currentSalience = null;

        public boolean accept(Match activation) {
            RuleImpl rule = (RuleImpl) activation.getRule();

            // FIX: the original guarded the null salience only when initialising
            // currentSalience, then dereferenced rule.getSalience() unguarded in
            // the comparison — an NPE for any rule without an explicit salience
            // evaluated after the first match. Compute the guarded value once
            // and reuse it for both.
            int salience = rule.getSalience() != null
                    ? Integer.parseInt(rule.getSalience().toString())
                    : 0;

            if (currentSalience == null) {
                currentSalience = salience;
            }

            boolean nocancel = currentSalience >= salience;
            if (!nocancel) {
                logger.info("cancelling -> {}", rule.getName());
            }
            return nocancel;
        }
    }

    /**
     * JBRULES-3376: verifies that a filter cancelling the "Cancel" rule does
     * not break repeated fireAllRules calls inside a batch execution.
     */
    @Test
    public void testActivationCancelled() { // JBRULES-3376
        String drl = "package org.jboss.qa.brms.agendafilter\n" +
                "declare CancelFact\n" +
                " cancel : boolean = true\n" +
                "end\n" +
                "rule NoCancel\n" +
                " ruleflow-group \"rfg\"\n" +
                " when\n" +
                " $fact : CancelFact ( cancel == false )\n" +
                " then\n" +
                " System.out.println(\"No cancel...\");\n" +
                " modify ($fact) {\n" +
                " setCancel(true);\n" +
                " }\n" +
                "end\n" +
                "rule PresenceOfBothFacts\n" +
                " ruleflow-group \"rfg\"\n" +
                " salience -1\n" +
                " when\n" +
                " $fact1 : CancelFact( cancel == false )\n" +
                " $fact2 : CancelFact( cancel == true )\n" +
                " then\n" +
                " System.out.println(\"Both facts!\");\n" +
                "end\n" +
                "rule PresenceOfFact\n" +
                " ruleflow-group \"rfg\"\n" +
                " when\n" +
                " $fact : CancelFact( )\n" +
                " then\n" +
                " System.out.println(\"We have a \" + ($fact.isCancel() ? \"\" : \"non-\") + \"cancelling fact!\");\n" +
                "end\n" +
                "rule Cancel\n" +
                " ruleflow-group \"rfg\"\n" +
                " when\n" +
                " $fact : CancelFact ( cancel == true )\n" +
                " then\n" +
                " System.out.println(\"Cancel!\");\n" +
                "end";

        String rf = "<?xml version=\"1.0\" encoding=\"UTF-8\"?> \n" +
                "<process xmlns=\"http://drools.org/drools-5.0/process\"\n" +
                " xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                " xs:schemaLocation=\"http://drools.org/drools-5.0/process drools-processes-5.0.xsd\"\n" +
                " type=\"RuleFlow\" name=\"flow\" id=\"bz761715\" package-name=\"org.jboss.qa.brms.agendafilter\" >\n" +
                " <header>\n" +
                " </header>\n" +
                " <nodes>\n" +
                " <start id=\"1\" name=\"Start\" x=\"16\" y=\"16\" width=\"48\" height=\"48\" />\n" +
                " <ruleSet id=\"2\" name=\"Rule\" x=\"208\" y=\"16\" width=\"80\" height=\"48\" ruleFlowGroup=\"rfg\" />\n" +
                " <actionNode id=\"3\" name=\"Script\" x=\"320\" y=\"16\" width=\"80\" height=\"48\" >\n" +
                " <action type=\"expression\" dialect=\"java\" >System.out.println(\"Finishing process...\");</action>\n" +
                " </actionNode>\n" +
                " <end id=\"4\" name=\"End\" x=\"432\" y=\"16\" width=\"48\" height=\"48\" />\n" +
                " <actionNode id=\"5\" name=\"Script\" x=\"96\" y=\"16\" width=\"80\" height=\"48\" >\n" +
                " <action type=\"expression\" dialect=\"java\" >System.out.println(\"Starting process...\");</action>\n" +
                " </actionNode>\n" +
                " </nodes>\n" +
                " <connections>\n" +
                " <connection from=\"5\" to=\"2\" />\n" +
                " <connection from=\"2\" to=\"3\" />\n" +
                " <connection from=\"3\" to=\"4\" />\n" +
                " <connection from=\"1\" to=\"5\" />\n" +
                " </connections>\n" +
                "</process>";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(drl.getBytes()), ResourceType.DRL );
        kbuilder.add( ResourceFactory.newByteArrayResource(rf.getBytes()), ResourceType.DRF );
        if ( kbuilder.hasErrors() ) {
            fail( kbuilder.getErrors().toString() );
        }

        KnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
        kbase.addKnowledgePackages( kbuilder.getKnowledgePackages() );

        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        ksession.addEventListener(new DebugAgendaEventListener());
        ksession.addEventListener(new DebugProcessEventListener());

        List<Command<?>> commands = new ArrayList<Command<?>>();
        commands.add(CommandFactory.newInsert(newCancelFact(ksession, false)));
        commands.add(CommandFactory.newInsert(newCancelFact(ksession, true)));
        commands.add(CommandFactory.newStartProcess("bz761715"));
        commands.add(new FireAllRulesCommand(new CancelAgendaFilter()));
        commands.add(new FireAllRulesCommand(new CancelAgendaFilter()));
        commands.add(new FireAllRulesCommand(new CancelAgendaFilter()));

        ksession.execute(CommandFactory.newBatchExecution(commands));
    }

    /**
     * Creates an instance of the declared CancelFact type with the given
     * {@code cancel} value; returns null only if reflection fails (logged via
     * printStackTrace, matching the original best-effort behaviour).
     */
    private Object newCancelFact(StatefulKnowledgeSession ksession, boolean cancel) {
        FactType type = ksession.getKieBase().getFactType("org.jboss.qa.brms.agendafilter", "CancelFact");

        Object instance = null;
        try {
            instance = type.newInstance();
            type.set(instance, "cancel", cancel);
        } catch (IllegalAccessException ex) {
            ex.printStackTrace();
        } catch (InstantiationException ex) {
            ex.printStackTrace();
        }
        return instance;
    }

    /** Rejects any activation of the rule named "Cancel". */
    public static class CancelAgendaFilter implements AgendaFilter {
        public boolean accept(Match activation) {
            return !"Cancel".equals(activation.getRule().getName());
        }
    }

    /**
     * JBRULES-3378: listener getters on a fresh session must not throw.
     */
    @Test
    public void testGetListeners() { // JBRULES-3378
        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        if (kbuilder.hasErrors()) {
            throw new RuntimeException(kbuilder.getErrors().toString());
        }

        StatefulKnowledgeSession ksession = kbuilder.newKnowledgeBase().newStatefulKnowledgeSession();

        ksession.getAgendaEventListeners();
        ksession.getProcessEventListeners();
        ksession.getRuleRuntimeEventListeners();

        ksession.dispose();
    }
}
package org.apache.lucene.analysis.miscellaneous; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.ArrayList; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.KeywordAttribute; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.fst.ByteSequenceOutputs; import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.FST.Arc; import org.apache.lucene.util.fst.FST.BytesReader; /** * Provides the ability to override any {@link KeywordAttribute} aware stemmer * with custom dictionary-based stemming. 
*/
public final class StemmerOverrideFilter extends TokenFilter {
  // Dictionary of term -> stem overrides, backed by an FST (see nested StemmerOverrideMap).
  private final StemmerOverrideMap stemmerOverrideMap;
  private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
  private final KeywordAttribute keywordAtt = addAttribute(KeywordAttribute.class);
  // Reader over the FST's bytes; null when the map has no FST (no overrides at all).
  private final BytesReader fstReader;
  // Scratch arc reused across tokens to avoid per-token allocation during FST traversal.
  private final Arc<BytesRef> scratchArc = new FST.Arc<>();
  // Scratch char buffer for UTF-8 -> UTF-16 conversion of the matched stem; grown on demand.
  private char[] spare = new char[0];

  /**
   * Create a new StemmerOverrideFilter, performing dictionary-based stemming
   * with the provided <code>dictionary</code>.
   * <p>
   * Any dictionary-stemmed terms will be marked with {@link KeywordAttribute}
   * so that they will not be stemmed with stemmers down the chain.
   * </p>
   *
   * @param input the token stream to filter
   * @param stemmerOverrideMap the dictionary of term overrides to apply
   */
  public StemmerOverrideFilter(final TokenStream input, final StemmerOverrideMap stemmerOverrideMap) {
    super(input);
    this.stemmerOverrideMap = stemmerOverrideMap;
    fstReader = stemmerOverrideMap.getBytesReader();
  }

  /**
   * Replaces the current term with its dictionary override, if one exists,
   * and marks the overridden term as a keyword. Terms already flagged as
   * keywords are left untouched.
   */
  @Override
  public boolean incrementToken() throws IOException {
    if (input.incrementToken()) {
      if (fstReader == null) {
        // No overrides
        return true;
      }
      if (!keywordAtt.isKeyword()) { // don't muck with already-keyworded terms
        final BytesRef stem = stemmerOverrideMap.get(termAtt.buffer(), termAtt.length(), scratchArc, fstReader);
        if (stem != null) {
          // Grow the scratch buffer so the UTF-16 form of the stem is guaranteed to fit.
          spare = ArrayUtil.grow(termAtt.buffer(), stem.length);
          final int length = UnicodeUtil.UTF8toUTF16(stem, spare);
          if (spare != termAtt.buffer()) {
            // ArrayUtil.grow allocated a new array: copy the converted stem into the term.
            termAtt.copyBuffer(spare, 0, length);
          } else {
            // Conversion happened in place inside the term's own buffer; just fix the length.
            termAtt.setLength(length);
          }
          // Prevent downstream stemmers from re-stemming the overridden term.
          keywordAtt.setKeyword(true);
        }
      }
      return true;
    } else {
      return false;
    }
  }

  /**
   * A read-only 4-byte FST backed map that allows fast case-insensitive key
   * value lookups for {@link StemmerOverrideFilter}
   */
  // TODO maybe we can generalize this and reuse this map somehow?
  public final static class StemmerOverrideMap {
    private final FST<BytesRef> fst;
    private final boolean ignoreCase;

    /**
     * Creates a new {@link StemmerOverrideMap}
     * @param fst the fst to lookup the overrides
     * @param ignoreCase if the keys case should be ingored
     */
    public StemmerOverrideMap(FST<BytesRef> fst, boolean ignoreCase) {
      this.fst = fst;
      this.ignoreCase = ignoreCase;
    }

    /**
     * Returns a {@link BytesReader} to pass to the {@link #get(char[], int, FST.Arc, FST.BytesReader)} method.
     * Returns <code>null</code> when this map has no FST (i.e. no overrides).
     */
    public BytesReader getBytesReader() {
      if (fst == null) {
        return null;
      } else {
        return fst.getBytesReader();
      }
    }

    /**
     * Returns the value mapped to the given key or <code>null</code> if the key is not in the FST dictionary.
     * <p>
     * Walks the FST one code point at a time (lowercasing each code point when
     * {@code ignoreCase} is set), accumulating arc outputs; a match only counts
     * if the final arc is a final state.
     */
    public BytesRef get(char[] buffer, int bufferLen, Arc<BytesRef> scratchArc, BytesReader fstReader) throws IOException {
      BytesRef pendingOutput = fst.outputs.getNoOutput();
      BytesRef matchOutput = null;
      int bufUpto = 0;
      fst.getFirstArc(scratchArc);
      while (bufUpto < bufferLen) {
        // Iterate by code point, not by char, so surrogate pairs are handled as one symbol.
        final int codePoint = Character.codePointAt(buffer, bufUpto, bufferLen);
        if (fst.findTargetArc(ignoreCase ? Character.toLowerCase(codePoint) : codePoint, scratchArc, scratchArc, fstReader) == null) {
          // No arc for this symbol: the key is not in the dictionary.
          return null;
        }
        pendingOutput = fst.outputs.add(pendingOutput, scratchArc.output);
        bufUpto += Character.charCount(codePoint);
      }
      if (scratchArc.isFinal()) {
        matchOutput = fst.outputs.add(pendingOutput, scratchArc.nextFinalOutput);
      }
      // Null when the traversal consumed the key but did not end on a final state.
      return matchOutput;
    }
  }

  /**
   * This builder builds an {@link FST} for the {@link StemmerOverrideFilter}
   */
  public static class Builder {
    // De-duplicates input keys; the id returned by add(...) indexes outputValues.
    private final BytesRefHash hash = new BytesRefHash();
    // Scratch UTF-8 form of the current input key.
    private final BytesRefBuilder spare = new BytesRefBuilder();
    // Override outputs, parallel to the ids assigned by 'hash'.
    private final ArrayList<CharSequence> outputValues = new ArrayList<>();
    private final boolean ignoreCase;
    // Scratch char buffer used for on-the-fly lowercasing when ignoreCase is set.
    private final CharsRefBuilder charsSpare = new CharsRefBuilder();

    /**
     * Creates a new {@link Builder} with ignoreCase set to <code>false</code>
     */
    public Builder() {
      this(false);
    }

    /**
     * Creates a new {@link Builder}
     * @param ignoreCase if the input case should be ignored.
     */
    public Builder(boolean ignoreCase) {
      this.ignoreCase = ignoreCase;
    }

    /**
     * Adds an input string and it's stemmer override output to this builder.
     *
     * @param input the input char sequence
     * @param output the stemmer override output char sequence
     * @return <code>false</code> iff the input has already been added to this builder otherwise <code>true</code>.
     */
    public boolean add(CharSequence input, CharSequence output) {
      final int length = input.length();
      if (ignoreCase) {
        // convert on the fly to lowercase
        // NOTE(review): this assumes Character.toLowerCase never changes the number of
        // chars a code point occupies, since 'length' is taken from the original input.
        charsSpare.grow(length);
        final char[] buffer = charsSpare.chars();
        for (int i = 0; i < length; ) {
          i += Character.toChars(Character.toLowerCase(Character.codePointAt(input, i)), buffer, i);
        }
        spare.copyChars(buffer, 0, length);
      } else {
        spare.copyChars(input, 0, length);
      }
      // BytesRefHash.add returns a negative id when the key was already present.
      if (hash.add(spare.get()) >= 0) {
        outputValues.add(output);
        return true;
      }
      return false;
    }

    /**
     * Returns an {@link StemmerOverrideMap} to be used with the {@link StemmerOverrideFilter}
     * @return an {@link StemmerOverrideMap} to be used with the {@link StemmerOverrideFilter}
     * @throws IOException if an {@link IOException} occurs;
     */
    public StemmerOverrideMap build() throws IOException {
      ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton();
      org.apache.lucene.util.fst.Builder<BytesRef> builder = new org.apache.lucene.util.fst.Builder<>(FST.INPUT_TYPE.BYTE4, outputs);
      // FST construction requires keys in UTF-8/Unicode sort order.
      final int[] sort = hash.sort(BytesRef.getUTF8SortedAsUnicodeComparator());
      IntsRefBuilder intsSpare = new IntsRefBuilder();
      final int size = hash.size();
      BytesRef spare = new BytesRef();
      for (int i = 0; i < size; i++) {
        int id = sort[i];
        BytesRef bytesRef = hash.get(id, spare);
        // Re-expand the stored UTF-8 key into code points (BYTE4 input type).
        intsSpare.copyUTF8Bytes(bytesRef);
        builder.add(intsSpare.get(), new BytesRef(outputValues.get(id)));
      }
      return new StemmerOverrideMap(builder.finish(), ignoreCase);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.test.spring; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.camel.impl.DefaultDebugger; import org.apache.camel.impl.InterceptSendToMockEndpointStrategy; import org.apache.camel.management.JmxSystemPropertyKeys; import org.apache.camel.spi.Breakpoint; import org.apache.camel.spi.Debugger; import org.apache.camel.spring.SpringCamelContext; import org.apache.camel.test.ExcludingPackageScanClassResolver; import org.apache.camel.test.spring.CamelSpringTestHelper.DoToSpringCamelContextsStrategy; import org.apache.camel.util.CastUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.AnnotationConfigUtils; import org.springframework.context.support.GenericApplicationContext; import 
org.springframework.core.annotation.AnnotationUtils;
import org.springframework.test.context.MergedContextConfiguration;
import org.springframework.test.context.support.AbstractContextLoader;
import org.springframework.test.context.support.AbstractGenericContextLoader;
import org.springframework.test.context.support.GenericXmlContextLoader;
import org.springframework.util.StringUtils;

/**
 * Replacement for the default {@link GenericXmlContextLoader} that provides hooks for
 * processing some class level Camel related test annotations.
 */
public class CamelSpringTestContextLoader extends AbstractContextLoader {

    private static final Logger LOG = LoggerFactory.getLogger(CamelSpringTestContextLoader.class);

    /**
     * Modeled after the Spring implementation in {@link AbstractGenericContextLoader},
     * this method creates and refreshes the application context while providing for
     * processing of additional Camel specific post-refresh actions.  We do not provide the
     * pre-post hooks for customization seen in {@link AbstractGenericContextLoader} because
     * they probably are unnecessary for 90+% of users.
     * <p/>
     * For some functionality, we cannot use {@link TestExecutionListener} because we need
     * to both produce the desired outcome during application context loading, and also cleanup
     * after ourselves even if the test class never executes.  Thus the listeners, which
     * only run if the application context is successfully initialized are insufficient to
     * provide the behavior described above.
     */
    @Override
    public ApplicationContext loadContext(MergedContextConfiguration mergedConfig) throws Exception {
        Class<?> testClass = getTestClass();

        if (LOG.isDebugEnabled()) {
            LOG.debug(String.format("Loading ApplicationContext for merged context configuration [%s].", mergedConfig));
        }

        try {
            GenericApplicationContext context = createContext(testClass);
            context.getEnvironment().setActiveProfiles(mergedConfig.getActiveProfiles());
            loadBeanDefinitions(context, mergedConfig);
            return loadContext(context, testClass);
        } finally {
            // Always restore global state, even when context loading blows up,
            // so a failed test class cannot poison subsequent test classes.
            cleanup(testClass);
        }
    }

    /**
     * Modeled after the Spring implementation in {@link AbstractGenericContextLoader},
     * this method creates and refreshes the application context while providing for
     * processing of additional Camel specific post-refresh actions.  We do not provide the
     * pre-post hooks for customization seen in {@link AbstractGenericContextLoader} because
     * they probably are unnecessary for 90+% of users.
     * <p/>
     * For some functionality, we cannot use {@link TestExecutionListener} because we need
     * to both produce the desired outcome during application context loading, and also cleanup
     * after ourselves even if the test class never executes.  Thus the listeners, which
     * only run if the application context is successfully initialized are insufficient to
     * provide the behavior described above.
     */
    @Override
    public ApplicationContext loadContext(String... locations) throws Exception {
        Class<?> testClass = getTestClass();

        if (LOG.isDebugEnabled()) {
            LOG.debug("Loading ApplicationContext for locations [" + StringUtils.arrayToCommaDelimitedString(locations) + "].");
        }

        try {
            GenericApplicationContext context = createContext(testClass);
            loadBeanDefinitions(context, locations);
            return loadContext(context, testClass);
        } finally {
            cleanup(testClass);
        }
    }

    /**
     * Returns &quot;<code>-context.xml</code>&quot;.
     */
    @Override
    public String getResourceSuffix() {
        return "-context.xml";
    }

    /**
     * Performs the bulk of the Spring application context loading/customization.
     *
     * @param context the partially configured context.  The context should have the bean definitions loaded, but nothing else.
     * @param testClass the test class being executed
     *
     * @return the initialized (refreshed) Spring application context
     *
     * @throws Exception if there is an error during initialization/customization
     */
    protected ApplicationContext loadContext(GenericApplicationContext context, Class<?> testClass) throws Exception {
        AnnotationConfigUtils.registerAnnotationConfigProcessors(context);

        // Pre CamelContext(s) instantiation setup
        handleDisableJmx(context, testClass);

        // Temporarily disable CamelContext start while the contexts are instantiated.
        SpringCamelContext.setNoStart(true);
        context.refresh();
        context.registerShutdownHook();
        // Turn CamelContext startup back on since the context's have now been instantiated.
        SpringCamelContext.setNoStart(false);

        // Post CamelContext(s) instantiation but pre CamelContext(s) start setup
        handleProvidesBreakpoint(context, testClass);
        handleShutdownTimeout(context, testClass);
        handleMockEndpoints(context, testClass);
        handleMockEndpointsAndSkip(context, testClass);
        handleLazyLoadTypeConverters(context, testClass);

        // CamelContext(s) startup
        handleCamelContextStartup(context, testClass);

        return context;
    }

    /**
     * Cleanup/restore global state to defaults / pre-test values after the test setup
     * is complete.
     *
     * @param testClass the test class being executed
     */
    protected void cleanup(Class<?> testClass) {
        SpringCamelContext.setNoStart(false);

        if (testClass.isAnnotationPresent(DisableJmx.class)) {
            if (CamelSpringTestHelper.getOriginalJmxDisabled() == null) {
                System.clearProperty(JmxSystemPropertyKeys.DISABLED);
            } else {
                System.setProperty(JmxSystemPropertyKeys.DISABLED,
                    CamelSpringTestHelper.getOriginalJmxDisabled());
            }
        }
    }

    protected void loadBeanDefinitions(GenericApplicationContext context, MergedContextConfiguration mergedConfig) {
        (new XmlBeanDefinitionReader(context)).loadBeanDefinitions(mergedConfig.getLocations());
    }

    protected void loadBeanDefinitions(GenericApplicationContext context, String... locations) {
        (new XmlBeanDefinitionReader(context)).loadBeanDefinitions(locations);
    }

    /**
     * Returns all public methods defined in {@code clazz} and its superclasses/interfaces.
     *
     * @param clazz the class whose methods should be collected
     * @return the collected methods
     */
    protected Collection<Method> getAllMethods(Class<?> clazz) {
        Set<Method> methods = new HashSet<Method>();
        Class<?> currentClass = clazz;

        while (currentClass != null) {
            // Fix: harvest from the class currently being walked (was clazz.getMethods(),
            // which made the superclass walk a no-op and collected the same set each pass).
            methods.addAll(Arrays.asList(currentClass.getMethods()));
            currentClass = currentClass.getSuperclass();
        }

        return methods;
    }

    /**
     * Creates the Spring application context, optionally with a parent context that
     * configures package-scan route exclusions based on {@link ExcludeRoutes}.
     *
     * @param testClass the test class that is being executed
     *
     * @return the created Spring context (not yet refreshed)
     */
    protected GenericApplicationContext createContext(Class<?> testClass) {
        GenericApplicationContext routeExcludingContext = null;

        if (testClass.isAnnotationPresent(ExcludeRoutes.class)) {
            Class<?>[] excludedClasses = testClass.getAnnotation(ExcludeRoutes.class).value();

            if (excludedClasses.length > 0) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Setting up package scanning excluded classes as ExcludeRoutes "
                            + "annotation was found.  Excluding [" + StringUtils.arrayToCommaDelimitedString(excludedClasses) + "].");
                }

                // Build a parent context that only hosts the excluding resolver so the
                // real context inherits it during package scanning.
                routeExcludingContext = new GenericApplicationContext();
                routeExcludingContext.registerBeanDefinition("excludingResolver",
                        new RootBeanDefinition(ExcludingPackageScanClassResolver.class));
                routeExcludingContext.refresh();

                ExcludingPackageScanClassResolver excludingResolver =
                        routeExcludingContext.getBean("excludingResolver", ExcludingPackageScanClassResolver.class);
                List<Class<?>> excluded = CastUtils.cast(Arrays.asList(excludedClasses));
                excludingResolver.setExcludedClasses(new HashSet<Class<?>>(excluded));
            } else {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Not enabling package scanning excluded classes as ExcludeRoutes "
                            + "annotation was found but no classes were excluded.");
                }
            }
        }

        GenericApplicationContext context;

        if (routeExcludingContext != null) {
            context = new GenericApplicationContext(routeExcludingContext);
        } else {
            context = new GenericApplicationContext();
        }

        return context;
    }

    /**
     * Handles disabling of JMX on Camel contexts based on {@link DisableJmx}.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    protected void handleDisableJmx(GenericApplicationContext context, Class<?> testClass) {
        // Remember the pre-test value so cleanup(Class) can restore it.
        CamelSpringTestHelper.setOriginalJmxDisabledValue(System.getProperty(JmxSystemPropertyKeys.DISABLED));

        if (testClass.isAnnotationPresent(DisableJmx.class)) {
            if (testClass.getAnnotation(DisableJmx.class).value()) {
                LOG.info("Disabling Camel JMX globally as DisableJmx annotation was found "
                        + "and disableJmx is set to true.");
                System.setProperty(JmxSystemPropertyKeys.DISABLED, "true");
            } else {
                LOG.info("Enabling Camel JMX as DisableJmx annotation was found "
                        + "and disableJmx is set to false.");
                System.clearProperty(JmxSystemPropertyKeys.DISABLED);
            }
        } else {
            LOG.info("Disabling Camel JMX globally for tests by default.  Use the DisableJMX annotation to "
                    + "override the default setting.");
            System.setProperty(JmxSystemPropertyKeys.DISABLED, "true");
        }
    }

    /**
     * Handles the processing of the {@link ProvidesBreakpoint} annotation on a test class.
     * Methods annotated with {@link ProvidesBreakpoint} must be public, static, take no
     * arguments, and return a {@link Breakpoint}; the returned breakpoints are installed
     * on every Camel context in the Spring context.
     *
     * @param context the initialized Spring context containing the Camel context(s) to insert breakpoints into
     * @param testClass the test class being processed
     *
     * @throws Exception if there is an error processing the class
     */
    protected void handleProvidesBreakpoint(GenericApplicationContext context, Class<?> testClass) throws Exception {
        Collection<Method> methods = getAllMethods(testClass);
        final List<Breakpoint> breakpoints = new LinkedList<Breakpoint>();

        for (Method method : methods) {
            if (AnnotationUtils.findAnnotation(method, ProvidesBreakpoint.class) != null) {
                Class<?>[] argTypes = method.getParameterTypes();
                if (argTypes.length != 0) {
                    throw new IllegalArgumentException("Method [" + method.getName()
                            + "] is annotated with ProvidesBreakpoint but is not a no-argument method.");
                } else if (!Breakpoint.class.isAssignableFrom(method.getReturnType())) {
                    throw new IllegalArgumentException("Method [" + method.getName()
                            + "] is annotated with ProvidesBreakpoint but does not return a Breakpoint.");
                } else if (!Modifier.isStatic(method.getModifiers())) {
                    throw new IllegalArgumentException("Method [" + method.getName()
                            + "] is annotated with ProvidesBreakpoint but is not static.");
                } else if (!Modifier.isPublic(method.getModifiers())) {
                    throw new IllegalArgumentException("Method [" + method.getName()
                            + "] is annotated with ProvidesBreakpoint but is not public.");
                }

                try {
                    breakpoints.add((Breakpoint) method.invoke(null, new Object[] {}));
                } catch (Exception e) {
                    throw new RuntimeException("Method [" + method.getName()
                            + "] threw exception during evaluation.", e);
                }
            }
        }

        if (breakpoints.size() != 0) {
            CamelSpringTestHelper.doToSpringCamelContexts(context, new DoToSpringCamelContextsStrategy() {

                @Override
                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    Debugger debugger = camelContext.getDebugger();
                    if (debugger == null) {
                        debugger = new DefaultDebugger();
                        camelContext.setDebugger(debugger);
                    }

                    for (Breakpoint breakpoint : breakpoints) {
                        LOG.info("Adding Breakpoint [{}] to CamelContext with name [{}].", breakpoint, contextName);
                        debugger.addBreakpoint(breakpoint);
                    }
                }
            });
        }
    }

    /**
     * Handles updating shutdown timeouts on Camel contexts based on {@link ShutdownTimeout}.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    protected void handleShutdownTimeout(GenericApplicationContext context, Class<?> testClass) throws Exception {
        final int shutdownTimeout;
        final TimeUnit shutdownTimeUnit;
        if (testClass.isAnnotationPresent(ShutdownTimeout.class)) {
            shutdownTimeout = testClass.getAnnotation(ShutdownTimeout.class).value();
            shutdownTimeUnit = testClass.getAnnotation(ShutdownTimeout.class).timeUnit();
        } else {
            // Default when the annotation is absent: 10 seconds.
            shutdownTimeout = 10;
            shutdownTimeUnit = TimeUnit.SECONDS;
        }

        CamelSpringTestHelper.doToSpringCamelContexts(context, new DoToSpringCamelContextsStrategy() {

            @Override
            public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                LOG.info("Setting shutdown timeout to [{} {}] on CamelContext with name [{}].",
                        new Object[] {shutdownTimeout, shutdownTimeUnit, contextName});
                camelContext.getShutdownStrategy().setTimeout(shutdownTimeout);
                camelContext.getShutdownStrategy().setTimeUnit(shutdownTimeUnit);
            }
        });
    }

    /**
     * Handles auto-intercepting of endpoints with mocks based on {@link MockEndpoints}.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    protected void handleMockEndpoints(GenericApplicationContext context, Class<?> testClass) throws Exception {
        if (testClass.isAnnotationPresent(MockEndpoints.class)) {
            final String mockEndpoints = testClass.getAnnotation(MockEndpoints.class).value();
            CamelSpringTestHelper.doToSpringCamelContexts(context, new DoToSpringCamelContextsStrategy() {

                @Override
                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    LOG.info("Enabling auto mocking of endpoints matching pattern [{}] on "
                            + "CamelContext with name [{}].", mockEndpoints, contextName);
                    camelContext.addRegisterEndpointCallback(new InterceptSendToMockEndpointStrategy(mockEndpoints));
                }
            });
        }
    }

    /**
     * Handles auto-intercepting of endpoints with mocks based on {@link MockEndpointsAndSkip} and skipping the
     * original endpoint.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    protected void handleMockEndpointsAndSkip(GenericApplicationContext context, Class<?> testClass) throws Exception {
        // Fix: this handler must react to @MockEndpointsAndSkip, not @MockEndpoints;
        // previously @MockEndpoints alone would also (wrongly) skip the real endpoints.
        if (testClass.isAnnotationPresent(MockEndpointsAndSkip.class)) {
            final String mockEndpoints = testClass.getAnnotation(MockEndpointsAndSkip.class).value();
            CamelSpringTestHelper.doToSpringCamelContexts(context, new DoToSpringCamelContextsStrategy() {

                @Override
                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    LOG.info("Enabling auto mocking and skipping of endpoints matching pattern [{}] on "
                            + "CamelContext with name [{}].", mockEndpoints, contextName);
                    // The boolean 'true' requests skipping delivery to the original endpoint.
                    camelContext.addRegisterEndpointCallback(new InterceptSendToMockEndpointStrategy(mockEndpoints, true));
                }
            });
        }
    }

    /**
     * Handles lazy loading of type converters based on {@link LazyLoadTypeConverters}.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    @SuppressWarnings("deprecation")
    protected void handleLazyLoadTypeConverters(GenericApplicationContext context, Class<?> testClass) throws Exception {
        final boolean lazy;

        if (testClass.isAnnotationPresent(LazyLoadTypeConverters.class)) {
            lazy = testClass.getAnnotation(LazyLoadTypeConverters.class).value();
        } else {
            lazy = true;
        }

        if (lazy) {
            CamelSpringTestHelper.doToSpringCamelContexts(context, new DoToSpringCamelContextsStrategy() {

                @Override
                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    LOG.info("Enabling lazy loading of type converters on "
                            + "CamelContext with name [{}].", contextName);
                    camelContext.setLazyLoadTypeConverters(lazy);
                }
            });
        }
    }

    /**
     * Handles starting of Camel contexts based on {@link UseAdviceWith} and other state in the JVM.
     *
     * @param context the initialized Spring context
     * @param testClass the test class being executed
     */
    protected void handleCamelContextStartup(GenericApplicationContext context, Class<?> testClass) throws Exception {
        boolean skip = "true".equalsIgnoreCase(System.getProperty("skipStartingCamelContext"));
        if (skip) {
            LOG.info("Skipping starting CamelContext(s) as system property "
                    + "skipStartingCamelContext is set to be true.");
        } else if (testClass.isAnnotationPresent(UseAdviceWith.class)) {
            if (testClass.getAnnotation(UseAdviceWith.class).value()) {
                LOG.info("Skipping starting CamelContext(s) as UseAdviceWith annotation was found "
                        + "and isUseAdviceWith is set to true.");
                skip = true;
            } else {
                LOG.info("Starting CamelContext(s) as UseAdviceWith annotation was found, but "
                        + "isUseAdviceWith is set to false.");
                skip = false;
            }
        }

        if (!skip) {
            CamelSpringTestHelper.doToSpringCamelContexts(context, new DoToSpringCamelContextsStrategy() {

                @Override
                public void execute(String contextName, SpringCamelContext camelContext) throws Exception {
                    LOG.info("Starting CamelContext with name [{}].", contextName);
                    camelContext.start();
                }
            });
        }
    }

    /**
     * Returns the class under test in order to enable inspection of annotations while the
     * Spring context is being created.
     *
     * @return the test class that is being executed
     * @see CamelSpringTestHelper
     */
    protected Class<?> getTestClass() {
        return CamelSpringTestHelper.getTestClass();
    }
}
/**
 * Copyright 2013 Benjamin Lerer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.horizondb.db.btree;

import io.horizondb.db.btree.InternalNode;
import io.horizondb.db.btree.LeafNode;
import io.horizondb.db.btree.Node;
import io.horizondb.db.btree.NodeProxy;
import io.horizondb.test.AssertCollections;

import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

import org.junit.Assert;

import static java.util.Collections.singletonMap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Assertion utilities for verifying the content of B+Tree nodes.
 *
 * @author Benjamin
 *
 */
public final class AssertNodes {

    /**
     * Asserts that the given node is a leaf node holding exactly the single specified mapping.
     *
     * @param actual the node under test
     * @param key the expected key
     * @param value the expected value
     * @throws IOException if an I/O problem occurs.
     */
    public static <K extends Comparable<K>, V> void assertLeafNodeContains(Node<K, V> actual, K key, V value) throws IOException {

        assertLeafNodeContains(actual, singletonMap(key, value));
    }

    /**
     * Asserts that the given node is a leaf node holding exactly the two specified mappings.
     *
     * @param actual the node under test
     * @param key1 the first key
     * @param value1 the value associated to the first key
     * @param key2 the second key
     * @param value2 the value associated to the second key
     * @throws IOException if an I/O problem occurs.
     */
    public static <K extends Comparable<K>, V> void assertLeafNodeContains(Node<K, V> actual,
                                                                           K key1,
                                                                           V value1,
                                                                           K key2,
                                                                           V value2) throws IOException {

        assertLeafNodeContains(actual, newMap(key1, value1, key2, value2));
    }

    /**
     * Asserts that the given node is a leaf node holding exactly the three specified mappings.
     *
     * @param actual the node under test
     * @param key1 the first key
     * @param value1 the value associated to the first key
     * @param key2 the second key
     * @param value2 the value associated to the second key
     * @param key3 the third key
     * @param value3 the value associated to the third key
     * @throws IOException if an I/O problem occurs.
     */
    public static <K extends Comparable<K>, V> void assertLeafNodeContains(Node<K, V> actual,
                                                                           K key1,
                                                                           V value1,
                                                                           K key2,
                                                                           V value2,
                                                                           K key3,
                                                                           V value3) throws IOException {

        assertLeafNodeContains(actual, newMap(key1, value1, key2, value2, key3, value3));
    }

    /**
     * Asserts that the given node is a leaf node holding exactly the four specified mappings.
     *
     * @param actual the node under test
     * @param key1 the first key
     * @param value1 the value associated to the first key
     * @param key2 the second key
     * @param value2 the value associated to the second key
     * @param key3 the third key
     * @param value3 the value associated to the third key
     * @param key4 the fourth key
     * @param value4 the value associated to the fourth key
     * @throws IOException if an I/O problem occurs.
     */
    public static <K extends Comparable<K>, V> void assertLeafNodeContains(Node<K, V> actual,
                                                                           K key1,
                                                                           V value1,
                                                                           K key2,
                                                                           V value2,
                                                                           K key3,
                                                                           V value3,
                                                                           K key4,
                                                                           V value4) throws IOException {

        assertLeafNodeContains(actual, newMap(key1, value1, key2, value2, key3, value3, key4, value4));
    }

    /**
     * Asserts that the given node is a leaf node with no mappings at all.
     *
     * @param actual the node under test
     * @throws IOException if an I/O problem occurs.
     */
    public static <K extends Comparable<K>, V> void assertLeafNodeEmpty(Node<K, V> actual) throws IOException {

        assertLeafNodeContains(actual, Collections.<K, V> emptyMap());
    }

    /**
     * Asserts that the given node is a leaf node holding exactly the specified mappings.
     *
     * @param actual the node under test
     * @param expected the expected key-value pairs.
     * @throws IOException if an I/O problem occurs.
     */
    private static <K extends Comparable<K>, V> void assertLeafNodeContains(Node<K, V> actual, Map<K, V> expected) throws IOException {

        Node<K, V> actualNode = resolve(actual);

        assertTrue("The node must be a leaf node but is: " + actualNode.getClass(), actualNode instanceof LeafNode);

        assertEquals(expected, ((LeafNode<K, V>) actualNode).toMap());
    }

    /**
     * Asserts that the given node is an internal node holding exactly the specified keys.
     *
     * @param actual the node under test
     * @param keys the expected keys.
     * @throws IOException if an I/O problem occurs.
     */
    @SafeVarargs
    public final static <K extends Comparable<K>, V> void assertInternalNode(Node<K, V> actual, K... keys) throws IOException {

        Node<K, V> actualNode = resolve(actual);

        Assert.assertTrue("The node must be an internal node.", actualNode instanceof InternalNode);

        AssertCollections.assertIterableContains(((InternalNode<K, V>) actualNode).getKeys(), keys);
    }

    /**
     * Dereferences a {@link NodeProxy} into its underlying node; any other node is returned unchanged.
     *
     * @param node the node to dereference
     * @return the underlying node
     * @throws IOException if an I/O problem occurs while loading the node.
     */
    private static <K extends Comparable<K>, V> Node<K, V> resolve(Node<K, V> node) throws IOException {

        if (node instanceof NodeProxy) {

            return ((NodeProxy<K, V>) node).loadNode();
        }
        return node;
    }

    /**
     * Must not be instantiated: this class only provides static assertion helpers.
     */
    private AssertNodes() {
    }

    /**
     * Builds an insertion-ordered map from the two given key-value pairs.
     *
     * @param key1 the first key
     * @param value1 the value associated to the first key
     * @param key2 the second key
     * @param value2 the value associated to the second key
     * @return a new <code>LinkedHashMap</code> with the specified keys and values.
     */
    private static <K, V> Map<K, V> newMap(K key1, V value1, K key2, V value2) {

        Map<K, V> map = new LinkedHashMap<>();
        map.put(key1, value1);
        map.put(key2, value2);
        return map;
    }

    /**
     * Builds an insertion-ordered map from the three given key-value pairs.
     *
     * @param key1 the first key
     * @param value1 the value associated to the first key
     * @param key2 the second key
     * @param value2 the value associated to the second key
     * @param key3 the third key
     * @param value3 the value associated to the third key
     * @return a new <code>LinkedHashMap</code> with the specified keys and values.
     */
    private static <K, V> Map<K, V> newMap(K key1, V value1, K key2, V value2, K key3, V value3) {

        Map<K, V> map = new LinkedHashMap<>();
        map.put(key1, value1);
        map.put(key2, value2);
        map.put(key3, value3);
        return map;
    }

    /**
     * Builds an insertion-ordered map from the four given key-value pairs.
     *
     * @param key1 the first key
     * @param value1 the value associated to the first key
     * @param key2 the second key
     * @param value2 the value associated to the second key
     * @param key3 the third key
     * @param value3 the value associated to the third key
     * @param key4 the fourth key
     * @param value4 the value associated to the fourth key
     * @return a new <code>LinkedHashMap</code> with the specified keys and values.
     */
    private static <K, V> Map<K, V> newMap(K key1, V value1, K key2, V value2, K key3, V value3, K key4, V value4) {

        Map<K, V> map = new LinkedHashMap<>();
        map.put(key1, value1);
        map.put(key2, value2);
        map.put(key3, value3);
        map.put(key4, value4);
        return map;
    }
}
/*----------------------------------------------------------------------------*/
/* Copyright (c) FIRST 2008. All Rights Reserved. */
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
/*----------------------------------------------------------------------------*/

package edu.wpi.first.wpilibj.defaultCode;

import com.sun.squawk.util.MathUtils;
import edu.wpi.first.wpilibj.AnalogModule;
import edu.wpi.first.wpilibj.CANJaguar;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Dashboard;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.DigitalModule;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Servo;
import edu.wpi.first.wpilibj.Solenoid;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.Watchdog;

/**
 * Iterative-mode FRC robot: CAN-Jaguar mecanum-style drive base, a two-joint
 * arm (boom + stick) with a wrist, a pneumatic gripper with roller servos,
 * line-following sensors for autonomous, and a minibot-deploy servo.
 */
public class DefaultRobot extends IterativeRobot {

    // Driver station handle and the latest sampled Enhanced I/O button states
    // (indices are one less than the I/O channel numbers).
    DriverStation ds;
    boolean[] dsButtons = new boolean[14];

    // Drive system: one CANJaguar per wheel, wrapped by RobotDrive.
    //MotorControl m_robotDrive;
    RobotDrive m_robotDrive;
    CANJaguar frontLeftJag;
    CANJaguar frontRightJag;
    CANJaguar rearLeftJag;
    CANJaguar rearRightJag;

    // Drive-mode flags plus previous-cycle button values used for
    // rising-edge detection in teleopPeriodic().
    boolean arcadeDrive = true;
    boolean override = true;
    int lastArcadeButton = 0;
    int lastOverButton = 0;
    int lastGripperButton = 0;

    // Arm joint controllers (left/right pairs driven together) and pneumatics.
    CANJaguar boomRightJag;
    CANJaguar boomLeftJag;
    CANJaguar stickRightJag;
    CANJaguar stickLeftJag;
    CANJaguar wristJag;
    Compressor compressor;
    int lastStartingButton = 0;

    // Gripper roller servos, the gripper limit switch and its solenoid.
    Servo topLeftRoller;
    Servo topRightRoller;
    Servo bottomLeftRoller;
    Servo bottomRightRoller;
    DigitalInput gripperButton;
    Solenoid gripperSolenoid;
    double rollerValue = 0;

    // Current arm joint angles — assumed degrees, recomputed from the pot
    // readings each loop (TODO confirm the 0.5 initial values are placeholders).
    double boomAngle = 0.5;
    double stickAngle = 0.5;
    double wristAngle = 0.5;

    // Arm trajectory state: wrist-position targets interpolated in small
    // increments from the "last" values toward the "target" values.
    // Units are presumably inches — verify against getWristCoordinates().
    double lastTargetHeight = 20;
    double lastTargetLength = 20;
    double lastTargetWrist = 0;
    double currentHeight = 20;
    double currentLength = 20;
    double targetLength = 20;
    double targetHeight = 20;
    double targetWrist = 0;
    double heightIncrement = 0;
    double lengthIncrement = 0;
    double wristIncrement = 0;
    int middlePeg = 0;
    int numIncrements = 1;

    // Arm preset state machine (see autonomousPeriodic): 10/101/102 = scoring
    // sequence, 0/3/31 = stow sequence, -1 = direct-to-target.
    int currentPreset = 10;
    int presetStage = 0;

    // Target-reached bookkeeping (only referenced from commented-out tuning
    // code in the periodic routines).
    boolean boomHitTarget = false;
    boolean stickHitTarget = false;
    boolean wristHitTarget = false;
    int boomStillBuffer = 0;
    int stickStillBuffer = 0;
    int wristStillBuffer = 0;
    boolean presetButton = false;
    double lastWristPot = 1.61;
    int dT = 1;
    double integral = 0;
    double lastError = 0;

    // Minibot deployment servo and latch.
    Servo deployServo;
    boolean deploy = false;

    // Line-following sensors; lastThreeLines holds the last three encoded
    // readings (hundreds = left, tens = middle, ones = right).
    DigitalInput leftLineSensor;
    DigitalInput middleLineSensor;
    DigitalInput rightLineSensor;
    int[] lastThreeLines = new int[3];
    int lineLoops = 0;

    // Autonomous progress flags and loop-stamp counters.
    boolean hitY = false;
    boolean hasStopped = false;
    int ignoreLines = 0;
    boolean doAutoArmStuff = false;
    int yLoops = 0;
    int dropLoops = 0;
    boolean droppedTube = false;
    int backwardsLoops = 0;

    int m_dsPacketsReceivedInCurrentSecond; // keep track of the ds packets received in the current second

    // Declare variables for the joystick being used.
    Joystick m_joystick;
    static final int NUM_BUTTONS = 10;
    int[] m_buttons = new int[NUM_BUTTONS];

    // Local variables to count the number of periodic loops performed.
    int m_autoPeriodicLoops;
    int m_disabledPeriodicLoops;
    int m_telePeriodicLoops;

    /**
     * Constructor for this "BuiltinDefaultCode" Class.
     *
     * The constructor creates all of the objects used for the different inputs and outputs of
     * the robot. Essentially, the constructor defines the input/output mapping for the robot,
     * providing named objects for each of the robot interfaces.
*/
    public DefaultRobot() {
        System.out.println("BuiltinDefaultCode Constructor Started\n");

        ds = DriverStation.getInstance();

        // Drive Jaguars on CAN IDs 4/5 (front L/R) and 3/2 (rear L/R).
        // CANJaguar construction can throw; failures are logged and the
        // references stay null (later calls are wrapped in try/catch too).
        try {
            frontLeftJag = new CANJaguar(4);
            frontRightJag = new CANJaguar(5);
            rearLeftJag = new CANJaguar(3);
            rearRightJag = new CANJaguar(2);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
        // NOTE(review): a large commented-out experiment (encoder-based
        // kPosition mode on the rear-left Jaguar, and a kSpeed MotorControl
        // variant) was removed here for readability.

        m_robotDrive = new RobotDrive(frontLeftJag, rearLeftJag, frontRightJag, rearRightJag);
        // All four drive motors are inverted.
        m_robotDrive.setInvertedMotor(RobotDrive.MotorType.kFrontLeft, true);
        m_robotDrive.setInvertedMotor(RobotDrive.MotorType.kRearLeft, true);
        m_robotDrive.setInvertedMotor(RobotDrive.MotorType.kFrontRight, true);
        m_robotDrive.setInvertedMotor(RobotDrive.MotorType.kRearRight, true);

        // Arm Jaguars (boom 6/9, stick 7/8, wrist 10) start in kPosition mode
        // so their potentiometer references can be configured, then are
        // switched back to kPercentVbus for direct voltage control.
        try {
            boomRightJag = new CANJaguar(6, CANJaguar.ControlMode.kPosition);
            boomLeftJag = new CANJaguar(9, CANJaguar.ControlMode.kPosition);
            stickRightJag = new CANJaguar(7, CANJaguar.ControlMode.kPosition);
            stickLeftJag = new CANJaguar(8, CANJaguar.ControlMode.kPosition);
            wristJag = new CANJaguar(10, CANJaguar.ControlMode.kPosition);

            // Retry the mode change up to five times, sleeping 100 ms for the
            // CAN bus to settle between attempts.
            for (int setThatMode = 0; setThatMode < 5; setThatMode++) {
                boomRightJag.changeControlMode(CANJaguar.ControlMode.kPosition);
                boomLeftJag.changeControlMode(CANJaguar.ControlMode.kPosition);
                stickRightJag.changeControlMode(CANJaguar.ControlMode.kPosition);
                stickLeftJag.changeControlMode(CANJaguar.ControlMode.kPosition);
                wristJag.changeControlMode(CANJaguar.ControlMode.kPosition);
                Thread.sleep(100);
                // BUG(review): the stray ';' terminating this if() makes the
                // following break unconditional — the loop always exits after
                // one iteration regardless of whether the mode change took.
                // (The second loop below has the intended form.)
                if (boomRightJag.getControlMode() == CANJaguar.ControlMode.kPosition
                        && boomLeftJag.getControlMode() == CANJaguar.ControlMode.kPosition
                        && stickLeftJag.getControlMode() == CANJaguar.ControlMode.kPosition
                        && stickRightJag.getControlMode() == CANJaguar.ControlMode.kPosition
                        && wristJag.getControlMode() == CANJaguar.ControlMode.kPosition);
                break;
            }

            // Joint positions come from potentiometers on each Jaguar.
            boomRightJag.setPositionReference(CANJaguar.PositionReference.kPotentiometer);
            boomLeftJag.setPositionReference(CANJaguar.PositionReference.kPotentiometer);
            stickRightJag.setPositionReference(CANJaguar.PositionReference.kPotentiometer);
            stickLeftJag.setPositionReference(CANJaguar.PositionReference.kPotentiometer);
            wristJag.setPositionReference(CANJaguar.PositionReference.kPotentiometer);

            // Switch back to percent-Vbus (voltage) mode; verified with a break.
            for (int setThatMode = 0; setThatMode < 5; setThatMode++) {
                boomRightJag.changeControlMode(CANJaguar.ControlMode.kPercentVbus);
                boomLeftJag.changeControlMode(CANJaguar.ControlMode.kPercentVbus);
                stickRightJag.changeControlMode(CANJaguar.ControlMode.kPercentVbus);
                stickLeftJag.changeControlMode(CANJaguar.ControlMode.kPercentVbus);
                wristJag.changeControlMode(CANJaguar.ControlMode.kPercentVbus);
                Thread.sleep(100);
                if (boomRightJag.getControlMode() == CANJaguar.ControlMode.kPercentVbus
                        && boomLeftJag.getControlMode() == CANJaguar.ControlMode.kPercentVbus
                        && stickLeftJag.getControlMode() == CANJaguar.ControlMode.kPercentVbus
                        && stickRightJag.getControlMode() == CANJaguar.ControlMode.kPercentVbus
                        && wristJag.getControlMode() == CANJaguar.ControlMode.kPercentVbus)
                    break;
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }

        // Pneumatics: gripper solenoid on channel 1, compressor on 1/1.
        gripperSolenoid = new Solenoid(1);
        compressor = new Compressor(1, 1);
        compressor.start();

        // Gripper roller servos (PWM 2-5), gripper limit switch (DIO 2) and
        // the minibot deploy servo (PWM 1).
        topLeftRoller = new Servo(2);
        topRightRoller = new Servo(3);
        bottomLeftRoller = new Servo(4);
        bottomRightRoller = new Servo(5);
        gripperButton = new DigitalInput(2);
        deployServo = new Servo(1);

        // Line-following sensors on DIO 14/13/12 (left/middle/right).
        leftLineSensor = new DigitalInput(14);
        middleLineSensor = new DigitalInput(13);
        rightLineSensor = new DigitalInput(12);

        m_dsPacketsReceivedInCurrentSecond = 0;

        // Define joysticks being used at USB port #1 on the Drivers Station.
        m_joystick = new Joystick(1);
        for (int buttonDex = 0; buttonDex < NUM_BUTTONS; buttonDex++) {
            // The indices in m_buttons will be one less than the numbers on the
            // controller, since arrays begin at 0.
            m_buttons[buttonDex] = (int) (m_joystick.getRawButton(buttonDex + 1) ? 1 : 0);
        }

        // Initialize counters to record the number of loops completed in
        // autonomous and teleop modes.
        m_autoPeriodicLoops = 0;
        m_disabledPeriodicLoops = 0;
        m_telePeriodicLoops = 0;

        System.out.println("BuiltinDefaultCode Constructor Completed\n");
    }

    /********************************** Init Routines *************************************/

    /** One-time robot initialization; all setup happens in the constructor. */
    public void robotInit() {
        // Actions which would be performed once (and only once) upon initialization of the
        // robot would be put here.
        System.out.println("RobotInit() completed.\n");
    }

    /** Entering disabled: reset the loop counter/timers and retract the minibot latch. */
    public void disabledInit() {
        m_disabledPeriodicLoops = 0; // Reset the loop counter for disabled mode
        startSec = (int) (Timer.getUsClock() / 1000000.0);
        printSec = startSec + 1;
        deploy = false;
    }

    /**
     * Entering autonomous: read the arm potentiometers, convert them to joint
     * angles, reset all line-following state, and seed the first arm trajectory
     * (preset 10, interpolated toward height 20 / length 18).
     */
    public void autonomousInit() {
        m_autoPeriodicLoops = 0;
        // NOTE(review): a commented-out variant that gated doAutoArmStuff on
        // nonzero pot readings was removed here for readability.
        try {
            double bLJP = boomLeftJag.getPosition();
            double sLJP = stickRightJag.getPosition();
            double wJP = wristJag.getPosition();
            // Pot fraction (0..1) -> degrees; assumes a 270-degree pot offset
            // by -45 and mirrored — TODO confirm against the pot mounting.
            boomAngle = (180 - (bLJP * 270 - 45));
            stickAngle = (180 - (sLJP * 270 - 45));
            wristAngle = (180 - (wJP * 270 - 45));
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
        double[] armCoords = getWristCoordinates(boomAngle, stickAngle);
        currentHeight = armCoords[1];
        currentLength = armCoords[0];

        // Reset all autonomous progress state.
        lineLoops = 0;
        dropLoops = 0;
        yLoops = 0;
        hitY = false;
        hasStopped = false;
        droppedTube = false;
        backwardsLoops = 0;
        ignoreLines = 0;

        // Seed the arm preset sequence and its linear interpolation: one step
        // per ~3" of height or ~1" of length, whichever needs more steps.
        currentPreset = 10;
        targetHeight = 20;
        targetLength = 18;
        lastTargetHeight = Math.max(currentHeight, 20.0);
        lastTargetLength = currentLength;
        lastTargetWrist = wristAngle;
        targetWrist = 0;
        middlePeg = 1;
        int yIncrements = (int) Math.abs(Math.floor((targetHeight - lastTargetHeight) / 3.0)) + 1;
        int xIncrements = (int) Math.abs(Math.floor((targetLength - lastTargetLength) / 1.0)) + 1;
        numIncrements = Math.max(xIncrements, yIncrements);
        heightIncrement = (targetHeight - lastTargetHeight) / (double) numIncrements;
        lengthIncrement = (targetLength - lastTargetLength) / (double) numIncrements;
        wristIncrement = (targetWrist - lastTargetWrist) / (double) numIncrements;
    }

    /** Entering teleop: reset the teleop loop and DS-packet counters. */
    public void teleopInit() {
        m_telePeriodicLoops = 0; // Reset the loop counter for teleop mode
        m_dsPacketsReceivedInCurrentSecond = 0; // Reset the number of dsPackets in current second
    }
    /********************************** Periodic Routines *************************************/

    // Wall-clock bookkeeping set in disabledInit() (seconds since boot).
    static int printSec;
    static int startSec;

    /**
     * Disabled loop: keep the dashboard and watchdog alive, sample the line
     * sensors, refresh the arm angles from the pots, and read the operator-
     * console pots (values currently discarded — debug prints removed).
     */
    public void disabledPeriodic() {
        updateDashboard();

        // Encode the line sensors into one int: hundreds = left, tens = middle,
        // ones = right (sensors read false over the line).
        // NOTE(review): lineLoops is reset to 0 further down every call, so
        // this always writes lastThreeLines[0].
        lastThreeLines[lineLoops % 3] = 0;
        lastThreeLines[lineLoops % 3] += (leftLineSensor.get()) ? 0 : 100;
        lastThreeLines[lineLoops % 3] += (middleLineSensor.get()) ? 0 : 10;
        lastThreeLines[lineLoops % 3] += (rightLineSensor.get()) ? 0 : 1;
        lineLoops++;

        // feed the user watchdog at every period when disabled
        Watchdog.getInstance().feed();

        // increment the number of disabled periodic loops completed
        m_disabledPeriodicLoops++;

        // Keep autonomous state zeroed while disabled.
        lineLoops = 0;
        dropLoops = 0;
        yLoops = 0;
        hitY = false;
        hasStopped = false;
        droppedTube = false;
        backwardsLoops = 0;
        ignoreLines = 0;
        deploy = false;

        // Track the arm angles from the pots so autonomousInit() starts from
        // an up-to-date pose. (Debug prints removed.)
        try {
            float bLJP = (float) boomLeftJag.getPosition();
            float sLJP = (float) stickRightJag.getPosition();
            float wJP = (float) wristJag.getPosition();
            boomAngle = (180 - (bLJP * 270 - 45));
            stickAngle = (180 - (sLJP * 270 - 45));
            wristAngle = (180 - (wJP * 270 - 45));
            double[] armCoords = getWristCoordinates(boomAngle, stickAngle);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }

        // Operator-console pots (Enhanced I/O analog 2/4/6); read for the
        // (removed) debug print only — the values are unused here.
        double bPot = 0;
        double sPot = 0;
        double wPot = 0;
        try {
            bPot = ds.getEnhancedIO().getAnalogIn(2);
            sPot = ds.getEnhancedIO().getAnalogIn(4);
            wPot = ds.getEnhancedIO().getAnalogIn(6);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Autonomous loop: follow the line until the T ("Y") or stop bar, run the
     * arm preset state machine to score the tube, release it, then back away.
     * Enabled only when the console wrist pot is above half scale.
     */
    public void autonomousPeriodic() {
        updateDashboard();
        // feed the user watchdog at every period when in autonomous
        Watchdog.getInstance().feed();
        m_autoPeriodicLoops++;
        deployServo.setAngle(140);

        // Path selection: straight-line mode is currently hard-wired; the
        // stick pot used to choose fork direction (code commented out).
        boolean straightLine = true;
        boolean goLeft = false;
        boolean doAuto = false;
        try {
            double sPot = ds.getEnhancedIO().getAnalogIn(4); // stick
            double wPot = ds.getEnhancedIO().getAnalogIn(6); // wrist
            if (wPot > 0.5 * 3.3) doAuto = true; // wrist pot past half scale arms autonomous
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }

        if (doAuto) {
            if (!(hasStopped || hitY)) {
                // --- Line following ---
                // Encoded reading: hundreds = left, tens = middle, ones = right.
                int lineReading = 0;
                lineReading += (leftLineSensor.get()) ? 0 : 100;
                lineReading += (middleLineSensor.get()) ? 0 : 10;
                lineReading += (rightLineSensor.get()) ? 0 : 1;
                double rightMotors = 0;
                double leftMotors = 0;
                switch (lineReading) {
                    case 0:
                        // Lost the line: steer by the average of the last three
                        // readings (ones digit -> right, hundreds digit -> left).
                        rightMotors = 0.7 * (lastThreeLines[0] % 10 + lastThreeLines[1] % 10 + lastThreeLines[2] % 10) / 3.0;
                        leftMotors = 0.7 * (Math.floor(lastThreeLines[0] / 100) % 10 + Math.floor(lastThreeLines[1] / 100) % 10 + Math.floor(lastThreeLines[2] / 100) % 10) / 3.0;
                        break;
                    case 1: // line under right sensor: veer right
                        leftMotors = 0.55;
                        rightMotors = 0.7;
                        break;
                    case 11:
                        leftMotors = 0.6;
                        rightMotors = 0.7;
                        break;
                    case 10: // centered: full speed
                        leftMotors = 0.8;
                        rightMotors = 0.8;
                        break;
                    case 110:
                        leftMotors = 0.7;
                        rightMotors = 0.6;
                        break;
                    case 100: // line under left sensor: veer left
                        leftMotors = 0.7;
                        rightMotors = 0.55;
                        break;
                    case 111:
                        // All three sensors: the stop bar (straight path) or the
                        // fork of a Y path.
                        if (straightLine) {
                            hasStopped = true;
                            System.out.print("STOPPED! | ");
                            dropLoops = m_autoPeriodicLoops;
                        } else {
                            if (!goLeft) {
                                leftMotors = 0.7;
                                rightMotors = -0.7;
                                lineReading = 100;
                            } else {
                                leftMotors = -0.7;
                                rightMotors = 0.7;
                                lineReading = 1;
                            }
                            hitY = true;
                            System.out.print("HIT! | ");
                            yLoops = m_autoPeriodicLoops;
                        }
                        break;
                    case 101:
                        // Outer sensors only: also treated as the Y fork.
                        if (!straightLine) {
                            if (!goLeft) {
                                leftMotors = 0.7;
                                rightMotors = -0.7;
                                lineReading = 100;
                            } else {
                                leftMotors = -0.7;
                                rightMotors = 0.7;
                                lineReading = 1;
                            }
                            hitY = true;
                            System.out.print("HIT! | ");
                            yLoops = m_autoPeriodicLoops;
                        }
                        break;
                    default:
                        break;
                }
                // Motors are inverted, hence the negation; 0.87 is a global
                // speed scale.
                m_robotDrive.tankDrive(-leftMotors * 0.87, -rightMotors * 0.87);
                if (lineReading > 0) lastThreeLines[lineLoops % 3] = lineReading;
                lineLoops++;
            } else if (currentPreset == -1 && m_autoPeriodicLoops < dropLoops + 10) {
                // Brief pause right after releasing the tube.
            } else if (!droppedTube) {
                // At the peg: open the gripper and start a retract trajectory
                // (down 6", in 6", wrist to 65 degrees).
                presetStage = 0;
                lastTargetHeight = Math.max(currentHeight, 20.0);
                lastTargetLength = currentLength;
                lastTargetWrist = wristAngle;
                targetWrist = 65;
                middlePeg = 0;
                targetHeight = currentHeight - 6;
                targetLength = currentLength - 6;
                gripperSolenoid.set(true);
                droppedTube = true;
                currentPreset = 0;
                int yIncrements = (int) Math.abs(Math.floor((targetHeight - lastTargetHeight) / 3.0)) + 1;
                int xIncrements = (int) Math.abs(Math.floor((targetLength - lastTargetLength) / 1.0)) + 1;
                numIncrements = Math.max(xIncrements, yIncrements);
                heightIncrement = (targetHeight - lastTargetHeight) / (double) numIncrements;
                lengthIncrement = (targetLength - lastTargetLength) / (double) numIncrements;
                wristIncrement = (targetWrist - lastTargetWrist) / (double) numIncrements;
            } else if (currentPreset == 0) {
                // Back away while the stow presets run (drive is inverted, so
                // +0.5/+0.5 presumably drives backwards — TODO confirm).
                System.out.println("DERP");
                m_robotDrive.tankDrive(0.5, 0.5);
                backwardsLoops = m_autoPeriodicLoops;
            } else if (currentPreset == -1) {
                System.out.println("DERP");
                m_robotDrive.tankDrive(0.5, 0.5);
            }

            // Refresh joint angles from the pots.
            try {
                double bLJP = boomLeftJag.getPosition();
                double sLJP = stickRightJag.getPosition();
                double wJP = wristJag.getPosition();
                boomAngle = (180 - (bLJP * 270 - 45));
                stickAngle = (180 - (sLJP * 270 - 45));
                wristAngle = (180 - (wJP * 270 - 45));
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
            double[] armCoords = getWristCoordinates(boomAngle, stickAngle);
            currentHeight = armCoords[1];
            currentLength = armCoords[0];

            // --- Arm preset state machine ---
            double[] targetAngles = {90, 90};
            double targetWristA = 90;
            if (currentPreset > -1) {
                if ((presetStage + 1) % numIncrements == 0) {
                    // Finished the current trajectory: advance to the next preset.
                    // 10 -> 101 -> 102 raises toward the peg; 0 -> 3 -> 31 stows.
                    boolean defaultCase = false;
                    switch (currentPreset) {
                        case 10:
                            targetHeight = (92 + 8 * middlePeg) / 2.0;
                            targetLength = 18;
                            currentPreset = 101;
                            targetWrist = 45;
                            break;
                        case 101:
                            targetHeight = 92 + 8 * middlePeg;
                            targetLength = 17;
                            currentPreset = 102;
                            targetWrist = 110;
                            break;
                        case 0:
                            targetHeight = 20;
                            targetLength = 18;
                            targetWrist = 45;
                            currentPreset = 3;
                            break;
                        case 3:
                            targetHeight = 20;
                            targetLength = 18;
                            targetWrist = 0;
                            gripperSolenoid.set(false);
                            currentPreset = 31;
                            break;
                        default:
                            currentPreset = -1;
                            defaultCase = true;
                            targetAngles = getArmAngles(targetLength, targetHeight);
                            targetWristA = targetWrist;
                            break;
                    }
                    presetStage = 0;
                    if (!defaultCase) {
                        // Re-seed the interpolation from the current pose.
                        lastTargetHeight = currentHeight;
                        lastTargetLength = currentLength;
                        lastTargetWrist = wristAngle;
                        int yIncrements = (int) Math.abs(Math.floor((targetHeight - lastTargetHeight) / 3.0)) + 1;
                        int xIncrements = (int) Math.abs(Math.floor((targetLength - lastTargetLength) / 1.0)) + 1;
                        numIncrements = Math.max(xIncrements, yIncrements);
                        heightIncrement = (targetHeight - lastTargetHeight) / (double) numIncrements;
                        lengthIncrement = (targetLength - lastTargetLength) / (double) numIncrements;
                        wristIncrement = (targetWrist - lastTargetWrist) / (double) numIncrements;
                        targetAngles[0] = boomAngle;
                        targetAngles[1] = stickAngle;
                        targetWristA = wristAngle;
                    }
                } else {
                    // Step the interpolation and convert the wrist waypoint to
                    // joint angles via inverse kinematics.
                    presetStage++;
                    double tX = lastTargetLength + lengthIncrement * presetStage;
                    double tY = lastTargetHeight + heightIncrement * presetStage;
                    targetWristA = lastTargetWrist + wristIncrement * presetStage;
                    targetAngles = getArmAngles(tX, tY);
                    if ((presetStage + 1) % numIncrements == 0) {
                        // Debug dump of the final waypoint and its pot targets.
                        System.out.println("" + targetAngles[0] + ", " + targetAngles[1] + ", " + targetWristA);
                        double targetboom = 1 - (targetAngles[0] + 45) / 270.0;
                        double targetstick = 1 - (targetAngles[1] + 45) / 270.0;
                        double targetwrist = 1 - (targetWristA + 45) / 270.0;
                        System.out.println("Pots: " + (float) targetboom + ", " + (float) targetstick + ", " + (float) targetwrist);
                        float TboomAngle = (float) (180 - (targetboom * 270 - 45));
                        float TstickAngle = (float) (180 - (targetstick * 270 - 45));
                        float TwristAngle = (float) (180 - (targetwrist * 270 - 45));
                        System.out.println("Recalculated Angles:" + TboomAngle + ", " + TstickAngle + ", " + TwristAngle + "");
                    }
                }
            } else {
                targetAngles = getArmAngles(targetLength, targetHeight);
                targetWristA = targetWrist;
            }

            // --- Boom proportional drive (asymmetric gain; gravity assists
            // one direction). Output is volts, clamped to +/-9 with a 3.25 V
            // minimum effective command and a 1.5 V deadband.
            double boomV = 0;
            double targetboom = 1 - (targetAngles[0] + 45) / 270.0;
            double boomDiff = 0;
            try {
                // Pot reading of exactly 0 is treated as "no data".
                if (boomLeftJag.getPosition() != 0) boomDiff = targetboom - boomLeftJag.getPosition();
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
            double boomGain = 60;
            if (boomDiff > 0) boomGain = 230;
            boomV = (-1 * boomGain * boomDiff);
            double boomMaxV = 9 * 1;
            // NOTE(review): `boomV < boomMaxV` looks like it was meant to be
            // `boomV < 0`; it happens to clamp correctly anyway.
            if (Math.abs(boomV) > boomMaxV) boomV = (boomV < boomMaxV) ? -boomMaxV : boomMaxV;
            if (Math.abs(boomV) > 1.5) {
                if (Math.abs(boomV) < 3.25) boomV = (boomV < 0) ? -3.25 : 3.25;
            } else boomV = 0;
            try {
                // setX in kPercentVbus mode takes -1..1, hence /12 V.
                boomLeftJag.setX(boomV / 12.0);
                boomRightJag.setX(boomV / 12.0);
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }

            // --- Stick proportional drive (gain 150, clamp +/-9 V,
            // minimum 3.35 V, deadband 1.0 V).
            double stickV = 0;
            double targetstick = 1 - (targetAngles[1] + 45) / 270.0;
            double stickDiff = 0;
            try {
                if (stickRightJag.getPosition() != 0) stickDiff = targetstick - stickRightJag.getPosition();
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
            stickV = 1 * (150 * stickDiff);
            if (Math.abs(stickV) > 9) stickV = (stickV < 0) ? -9 : 9;
            if (Math.abs(stickV) > 1.0) {
                if (Math.abs(stickV) < 3.35) stickV = (stickV < 0) ? -3.35 : 3.35;
            } else stickV = 0;
            try {
                stickLeftJag.setX(stickV / 12.0);
                stickRightJag.setX(stickV / 12.0);
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }

            // --- Wrist proportional drive (gain 35, clamp +/-12 V,
            // minimum 1.0 V, deadband 0.2 V).
            double wristV = 0;
            double targetwrist = 1 - (targetWristA + 45) / 270.0;
            double wristDiff = 0;
            try {
                if (wristJag.getPosition() != 0) wristDiff = targetwrist - wristJag.getPosition();
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
            wristV = -1 * (35 * wristDiff);
            if (Math.abs(wristV) > 12) wristV = (wristV < 0) ? -12 : 12;
            if (Math.abs(wristV) > 0.2) {
                if (Math.abs(wristV) < 1.0) wristV = (wristV < 0) ? -1.0 : 1.0;
            } else wristV = 0;
            try {
                wristJag.setX(wristV / 12.0);
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
        }
    }

    /**
     * Teleop loop: edge-detect driver buttons (drive-mode/override/gripper
     * toggles), drive arcade or tank, run the gripper rollers, deploy the
     * minibot, and servo each arm joint toward the console pot targets (or
     * raw joystick axes when override is active).
     */
    public void teleopPeriodic() {
        // feed the user watchdog at every period when in autonomous
        Watchdog.getInstance().feed();

        // increment the number of teleop periodic loops completed
        m_telePeriodicLoops++;

        // increment DS packets received
        m_dsPacketsReceivedInCurrentSecond++;

        updateDashboard();

        // Refresh joint angles / wrist position from the pots.
        try {
            double bLJP = boomLeftJag.getPosition();
            double sLJP = stickRightJag.getPosition();
            double wJP = wristJag.getPosition();
            boomAngle = (180 - (bLJP * 270 - 45));
            stickAngle = (180 - (sLJP * 270 - 45));
            wristAngle = (180 - (wJP * 270 - 45));
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
        double[] armCoords = getWristCoordinates(boomAngle, stickAngle);
        currentHeight = armCoords[1];
        currentLength = armCoords[0];

        // Update which buttons are being pressed (previous values kept for
        // rising-edge detection).
        lastOverButton = m_buttons[9];
        lastArcadeButton = m_buttons[2];
        lastStartingButton = m_buttons[3];
        lastGripperButton = m_buttons[5];
        for (int buttonDex = 0; buttonDex < NUM_BUTTONS; buttonDex++) {
            m_buttons[buttonDex] = (int) (m_joystick.getRawButton(buttonDex + 1) ? 1 : 0);
        }

        // Rising-edge toggles: button 3 = arcade/tank, button 10 = override,
        // button 6 = gripper solenoid.
        if (m_buttons[2] == 1 && lastArcadeButton == 0) {
            arcadeDrive = !arcadeDrive;
        }
        if (m_buttons[9] == 1 && lastOverButton == 0) {
            override = !override;
        }
        if (m_buttons[5] == 1 && lastGripperButton == 0) {
            gripperSolenoid.set(!gripperSolenoid.get());
        }

        // Operator console: pots default to mid-scale (3.3 V reference) if the
        // Enhanced I/O read fails; buttons are active-low.
        double bPot = 0.5 * 3.3;
        double sPot = 0.5 * 3.3;
        double wPot = 0.5 * 3.3;
        boolean rotateUp = false;
        boolean rotateDown = false;
        boolean deployment = false;
        try {
            ds.getEnhancedIO().setLED(1, arcadeDrive);
            ds.getEnhancedIO().setLED(2, (m_buttons[6] == 1));
            ds.getEnhancedIO().setLED(3, override);
            for (int buttonDex = 0; buttonDex < 14; buttonDex++) {
                dsButtons[buttonDex] = !ds.getEnhancedIO().getDigital(buttonDex + 1);
            }
            rotateUp = dsButtons[9];
            rotateDown = dsButtons[7];
            deployment = dsButtons[11];
            // deploy: 12 // down: 8 // up: 10 (console channel numbers)
            bPot = ds.getEnhancedIO().getAnalogIn(2);
            sPot = ds.getEnhancedIO().getAnalogIn(4);
            wPot = ds.getEnhancedIO().getAnalogIn(6);
            // NOTE(review): a commented-out pot-jog variant (adjusting
            // targetWrist/targetLength/targetHeight from pot deltas) was
            // removed here for readability.
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }

        // Joystick axes with a 0.15 deadband.
        double joyAx2 = m_joystick.getRawAxis(2);
        double joyAx3 = m_joystick.getRawAxis(3);
        double joyAx4 = m_joystick.getRawAxis(4);
        if (Math.abs(joyAx2) < 0.15) joyAx2 = 0;
        if (Math.abs(joyAx3) < 0.15) joyAx3 = 0;
        if (Math.abs(joyAx4) < 0.15) joyAx4 = 0;
        double halfArm = 1.0; // if(currentHeight > 50) halfArm = 0.66;

        if (!override) {
            if (arcadeDrive) {
                // Button 7 reverses, button 8 slows turning.
                m_robotDrive.arcadeDrive((1 - 2 * m_buttons[6]) * joyAx2 * halfArm, joyAx4 / (3 + (m_buttons[7] * 3)) * 2 * halfArm, false);
            } else {
                if (m_buttons[6] == 1) m_robotDrive.tankDrive(-joyAx3, -joyAx2);
                else m_robotDrive.tankDrive(joyAx2, joyAx3);
            }
        }

        // Code for turning the servo to deploy the minibot; once latched,
        // deploy stays true for the rest of the match.
        if (m_buttons[4] == 1 || deployment) deploy = true;
        if (deploy) // was: && m_telePeriodicLoops > 50*110
            deployServo.setAngle(80);
        else deployServo.setAngle(150);

        // Gripper follows console button 1; rollers spin per the up/down
        // buttons, or auto-intake when the gripper limit switch is pressed.
        // Servo center values measured: TL 0.41452991..., TR/BL 0.44017094...,
        // BR 0.42307692...
        if (!override) {
            if (gripperSolenoid.get() != dsButtons[0]) {
                gripperSolenoid.set(dsButtons[0]);
            }
            if (rotateUp || rotateDown) {
                double rotateValue = 0;
                rotateValue -= (rotateUp) ? 0.5 : 0;
                rotateValue += (rotateDown) ? 0.5 : 0;
                topLeftRoller.set((0.41452991452991456) + rotateValue);
                topRightRoller.set(0.44017094017094016 - rotateValue);
                bottomRightRoller.set(0.44017094017094016 - rotateValue);
                bottomLeftRoller.set((0.4230769230769231) + rotateValue);
            } else if (gripperButton.get() && !dsButtons[0]) {
                topLeftRoller.set((0.41452991452991456) + 0.5);
                topRightRoller.set(0.44017094017094016 - 0.5);
                bottomLeftRoller.set(0.44017094017094016 - 0.5);
                bottomRightRoller.set((0.4230769230769231) + 0.5);
            } else {
                topLeftRoller.set((0.41452991452991456));
                topRightRoller.set(0.44017094017094016);
                bottomLeftRoller.set(0.44017094017094016);
                bottomRightRoller.set((0.4230769230769231));
            }
        }

        // --- Boom: pot-target proportional drive (normal) or preset/manual
        // voltage (override). Same clamp/min/deadband scheme as autonomous.
        double boomV = 0;
        if (!override) {
            // Console pot volts -> pot fraction; calibration constants
            // measured empirically — TODO confirm.
            double targetboom = (3.3 - bPot) / 3.71018053 + 0.06610589;
            double boomDiff = 0;
            try {
                if (boomLeftJag.getPosition() != 0) boomDiff = targetboom - boomLeftJag.getPosition();
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
            // NOTE(review): a commented-out hit-target/settling scheme was
            // removed here for readability.
            double boomGain = 170;
            if (boomDiff > 0) boomGain = 80;
            boomV = ((boomGain * -1) * boomDiff);
            double boomMaxV = 9 * 1;
            if (Math.abs(boomV) > boomMaxV) boomV = (boomV < boomMaxV) ? -boomMaxV : boomMaxV;
            if (Math.abs(boomV) > 1.5) {
                if (Math.abs(boomV) < 3.25) boomV = (boomV < 0) ? -3.25 : 3.25;
            } else boomV = 0;
        } else {
            if (lastStartingButton == 1) {
                // Button 4 held in override: drive to the fixed starting pose.
                double targetboom = 0.41152954;
                double boomDiff = 0;
                try {
                    if (boomLeftJag.getPosition() != 0) boomDiff = targetboom - boomLeftJag.getPosition();
                } catch (Exception e) {
                    System.out.println(e.getMessage());
                }
                boomV = (-160 * boomDiff);
                if (Math.abs(boomV) > 9) boomV = (boomV < 0) ? -9 : 9;
                if (Math.abs(boomV) > 0.65) {
                    if (Math.abs(boomV) < 2.5) boomV = (boomV < 0) ? -3.25 : 3.25;
                } else boomV = 0;
            } else boomV = 12 * -m_joystick.getRawAxis(4);
        }
        try {
            boomLeftJag.setX(boomV / 12.0);
            boomRightJag.setX(boomV / 12.0);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }

        // --- Stick: same structure as the boom.
        double stickV = 0;
        if (!override) {
            double targetstick = sPot / 3.490056 + 0.01808; // .1155
            double stickDiff = 0;
            try {
                if (stickRightJag.getPosition() != 0) stickDiff = targetstick - stickRightJag.getPosition();
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
            stickV = ((150.0 * 1) * stickDiff);
            double stickMaxV = 9 * 1;
            if (Math.abs(stickV) > stickMaxV) stickV = (stickV < stickMaxV) ? -stickMaxV : stickMaxV;
            if (Math.abs(stickV) > 1.0) {
                if (Math.abs(stickV) < 3.35) stickV = (stickV < 0) ? -3.35 : 3.35;
            } else stickV = 0;
        } else {
            if (lastStartingButton == 1) {
                double targetstick = 0.7986296;
                double stickDiff = 0;
                try {
                    if (stickRightJag.getPosition() != 0) stickDiff = targetstick - stickRightJag.getPosition();
                } catch (Exception e) {
                    System.out.println(e.getMessage());
                }
                stickV = 1 * (150 * stickDiff);
                if (Math.abs(stickV) > 12) stickV = (stickV < 0) ? -12 : 12;
                if (Math.abs(stickV) > 0.85) {
                    if (Math.abs(stickV) < 3.35) stickV = (stickV < 0) ? -3.35 : 3.35;
                } else stickV = 0;
            } else stickV = 7 * -m_joystick.getRawAxis(3);
        }
        try {
            stickLeftJag.setX(stickV / 12.0);
            stickRightJag.setX(stickV / 12.0);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }

        // --- Wrist: same structure, smaller gains.
        double wristV = 0;
        if (!override) {
            double targetwrist = (1 - wPot / 3.51774071) - 0.03755193;
            double wristDiff = 0;
            try {
                if (wristJag.getPosition() != 0) wristDiff = targetwrist - wristJag.getPosition();
            } catch (Exception e) {
                System.out.println(e.getMessage());
            }
            wristV = ((-35.0 * 1) * wristDiff);
            double wristMaxV = 9 * 1;
            if (Math.abs(wristV) > wristMaxV) wristV = (wristV < wristMaxV) ? -wristMaxV : wristMaxV;
            if (Math.abs(wristV) > 0.2) {
                if (Math.abs(wristV) < 1.0) wristV = (wristV < 0) ? -1.0 : 1.0;
            } else wristV = 0;
        } else {
            if (lastStartingButton == 1) {
                double targetwrist = 0.8758545;
                double wristDiff = 0;
                try {
                    if (wristJag.getPosition() != 0) wristDiff = targetwrist - wristJag.getPosition();
                } catch (Exception e) {
                    System.out.println(e.getMessage());
                }
                wristV = -1 * (35 * wristDiff);
                if (Math.abs(wristV) > 12) wristV = (wristV < 0) ? -12 : 12;
                if (Math.abs(wristV) > 0.2) {
                    if (Math.abs(wristV) < 1.0) wristV = (wristV < 0) ? -1.0 : 1.0;
                } else wristV = 0;
            } else wristV = 4 * -m_joystick.getRawAxis(2);
        }
        try {
            wristJag.setX(wristV / 12.0);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
    }

    /**
     * Inverse kinematics: convert a wrist position (length, height) to joint
     * angles via the law of cosines. Link lengths 36 and 44 and the 23 shoulder
     * offset are presumably inches — TODO confirm against the arm drawings.
     *
     * @param wristX horizontal wrist distance from the shoulder.
     * @param wristY wrist height (23 is subtracted as the shoulder height).
     * @return {boom angle from horizontal, stick angle from boom}, in degrees.
     */
    double[] getArmAngles(double wristX, double wristY) {
        double hypotenuse = Math.sqrt(MathUtils.pow(wristX, 2) + MathUtils.pow(wristY - 23, 2));
        double stickBoomAngle = MathUtils.acos((36 * 36 + 44 * 44 - hypotenuse * hypotenuse) / (2 * 36 * 44));
        double hypotenuseHorizontalAngle = MathUtils.atan2((wristY - 23), wristX);
        double hypotenuseBoomAngle = MathUtils.acos((36 * 36 + hypotenuse * hypotenuse - 44 * 44) / (2 * 36 * hypotenuse));
        double boomHorizontalAngle = (hypotenuseHorizontalAngle + hypotenuseBoomAngle);
        double[] armAngles = {boomHorizontalAngle / Math.PI * 180, stickBoomAngle / Math.PI * 180};
        return armAngles;
    }

    /**
     * Forward kinematics: inverse of {@link #getArmAngles} — convert joint
     * angles (degrees) back to the wrist position.
     *
     * @param boomAngle boom angle from horizontal, degrees.
     * @param stickAngle stick angle relative to the boom, degrees.
     * @return {horizontal distance, height} of the wrist.
     */
    double[] getWristCoordinates(double boomAngle, double stickAngle) {
        double hypotenuse = Math.sqrt(36 * 36 + 44 * 44 - 2 * 36 * 44 * Math.cos(stickAngle / 180.0 * Math.PI));
        double hypotenuseBoomAngle = MathUtils.acos((36 * 36 + hypotenuse * hypotenuse - 44 * 44) / (2 * 36 * hypotenuse));
        double hypotenuseHorizontalAngle = boomAngle / 180.0 * Math.PI - hypotenuseBoomAngle;
        double xCoord = hypotenuse * Math.cos(hypotenuseHorizontalAngle);
        double yCoord = hypotenuse * Math.sin(hypotenuseHorizontalAngle) + 23;
        double[] wristCoords = {xCoord, yCoord};
        return wristCoords;
    }

    // Nominal periodic rate. NOTE(review): non-standard capitalization kept
    // for compatibility with existing callers.
    int GetLoopsPerSec() {
        return 20;
    }

    /**
     * Push analog and digital module readings to the low-priority dashboard
     * packet. The nested braces mirror the dashboard cluster structure; each
     * addCluster() must be paired with a finalizeCluster().
     */
    void updateDashboard() {
        Dashboard lowDashData = ds.getDashboardPackerLow();
        lowDashData.addCluster();
        {
            lowDashData.addCluster();
            { // analog modules
                lowDashData.addCluster();
                {
                    for (int i = 1; i <= 8; i++) {
                        lowDashData.addFloat((float) AnalogModule.getInstance(1).getAverageVoltage(i));
                    }
                }
                lowDashData.finalizeCluster();
                // NOTE(review): the commented-out second analog module cluster
                // was removed here for readability.
            }
            lowDashData.finalizeCluster();
            lowDashData.addCluster();
            { // digital modules
                lowDashData.addCluster();
                {
                    lowDashData.addCluster();
                    {
                        int module = 1;
                        lowDashData.addByte(DigitalModule.getInstance(module).getRelayForward());
                        lowDashData.addByte(DigitalModule.getInstance(module).getRelayForward());
                        lowDashData.addShort(DigitalModule.getInstance(module).getAllDIO());
                        lowDashData.addShort(DigitalModule.getInstance(module).getDIODirection());
                        lowDashData.addCluster();
                        {
                            for (int i = 1; i <= 10; i++) {
                                lowDashData.addByte((byte) DigitalModule.getInstance(module).getPWM(i));
                            }
                        }
                        lowDashData.finalizeCluster();
                    }
                    lowDashData.finalizeCluster();
                    // NOTE(review): the commented-out second digital module
                    // cluster was removed here for readability.
                }
                lowDashData.finalizeCluster();
            }
            lowDashData.finalizeCluster();
        }
        lowDashData.finalizeCluster();
        lowDashData.addByte(Solenoid.getAllFromDefaultModule());
        lowDashData.commit();
    }
}
/* Derby - Class org.apache.derby.impl.store.raw.data.UpdateOperation Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.store.raw.data; import org.apache.derby.iapi.reference.SQLState; import org.apache.derby.impl.store.raw.data.BasePage; import org.apache.derby.iapi.services.sanity.SanityManager; import org.apache.derby.iapi.services.io.FormatIdUtil; import org.apache.derby.iapi.services.io.StoredFormatIds; import org.apache.derby.iapi.error.StandardException; import org.apache.derby.iapi.store.raw.Page; import org.apache.derby.iapi.store.raw.RecordHandle; import org.apache.derby.iapi.store.raw.Transaction; import org.apache.derby.iapi.store.raw.log.LogInstant; import org.apache.derby.iapi.store.raw.xact.RawTransaction; import org.apache.derby.iapi.types.DataValueDescriptor; import org.apache.derby.iapi.services.io.FormatableBitSet; import org.apache.derby.iapi.util.ByteArray; import org.apache.derby.iapi.services.io.CompressedNumber; import org.apache.derby.iapi.services.io.DynamicByteArrayOutputStream; import org.apache.derby.iapi.services.io.DynamicByteArrayOutputStream; import java.io.OutputStream; import java.io.ObjectOutput; import java.io.ObjectInput; import java.io.IOException; import org.apache.derby.iapi.services.io.LimitObjectInput; /** 
Represents the update of a particular row on a page.

	<PRE>
	@derby.formatId	LOGOP_UPDATE
		the formatId is written by FormatIdOutputStream when this object is
		written out by writeObject
	@derby.purpose	update a record on the page
	@derby.upgrade
	@derby.diskLayout
		PhysicalPageOperation	the super class
		doMeSlot(CompressedInt)	the slot the updated record is in
		recordId(CompressedInt)	the recordId of the updated record

		OptionalData	The new image of the record (length included),
						follow by the old image of the record (length included)
	@derby.endFormat
	</PRE>
*/
public final class UpdateOperation extends PhysicalPageOperation
{
    protected int doMeSlot;   // record slot - only valid during a doMe() operation
    protected int recordId;   // record id

    transient protected int nextColumn;       // next column that needs to be updated in a row.
    transient protected ByteArray preparedLog;

    /**
     * Builds the log operation for an in-place row update and immediately
     * writes its optional data (new image followed by old image) into the
     * log buffer so it is ready for logging.
     *
     * @param t                the transaction performing the update
     * @param page             the page holding the record
     * @param slot             slot of the record being updated
     * @param recordId         id of the record being updated
     * @param row              the new column values
     * @param validColumns     which columns of the row are being updated
     *                         (null means all columns)
     * @param realStartColumn  restart column for a multi-pass long-row log,
     *                         or -1 for the first pass
     * @param logBuffer        buffer to log into, or null to get one from t
     * @param realSpaceOnPage  space available on the page for the row image
     * @param headRowHandle    handle of the head row piece of a long row
     * @exception StandardException wraps any IOException from buffer writes
     */
    public UpdateOperation(
    RawTransaction              t,
    BasePage                    page,
    int                         slot,
    int                         recordId,
    Object[]                    row,
    FormatableBitSet            validColumns,
    int                         realStartColumn,
    DynamicByteArrayOutputStream logBuffer,
    int                         realSpaceOnPage,
    RecordHandle                headRowHandle)
        throws StandardException
    {
        super(page);

        this.doMeSlot = slot;
        this.recordId = recordId;
        this.nextColumn = -1;

        // RESOLVE SRW-DJD/YYZ
        try {
            writeOptionalDataToBuffer(t, (DynamicByteArrayOutputStream) logBuffer,
                row, validColumns, realStartColumn,
                realSpaceOnPage, headRowHandle);
        } catch (IOException ioe) {
            throw StandardException.newException(
                    SQLState.DATA_UNEXPECTED_EXCEPTION, ioe);
        }
    }

    /*
     * Formatable methods
     */

    // no-arg constructor, required by Formatable
    public UpdateOperation() { super(); }

    /**
     * Write this operation out: superclass state, then the slot and record id
     * in compressed-int form (matching the @derby.diskLayout above).
     */
    public void writeExternal(ObjectOutput out) throws IOException
    {
        super.writeExternal(out);
        CompressedNumber.writeInt(out, doMeSlot);
        CompressedNumber.writeInt(out, recordId);
    }

    /**
        Read this in
        @exception IOException error reading from log stream
        @exception ClassNotFoundException log stream corrupted
    */
    public void readExternal(ObjectInput in)
         throws IOException, ClassNotFoundException
    {
        super.readExternal(in);
        doMeSlot = CompressedNumber.readInt(in);
        recordId = CompressedNumber.readInt(in);
    }

    /**
        Return my format identifier.
    */
    public int getTypeFormatId() {
        return StoredFormatIds.LOGOP_UPDATE;
    }

    /**
        Return the last column of the row this operation logged
    */
    public int getNextStartColumn() {
        return nextColumn;
    }

    /*
     * Loggable methods
     */

    /**
        Store the new record directly over the old record, the implementation
        of storeRecord is responsible for removing any old data.

        @exception StandardException Thrown by methods I call
        @exception IOException Thrown by methods I call

        @see BasePage#storeRecord
        @see org.apache.derby.iapi.store.raw.Loggable#doMe
    */
    public void doMe(Transaction xact, LogInstant instant, LimitObjectInput in)
         throws StandardException, IOException
    {
        this.page.storeRecord(instant, doMeSlot, false, in);
    }

    /*
     * PhysicalPageOperation methods
     */

    /**
        Store the old record directly over the new record, the implementation
        of storeRecord is responsible for removing any new data.

        @exception StandardException Thrown by methods I call
        @exception IOException Thrown by methods I call

        @see BasePage#storeRecord
        @see PhysicalPageOperation#undoMe
    */
    public void undoMe(Transaction xact, BasePage undoPage, LogInstant CLRInstant,
                       LimitObjectInput in)
         throws StandardException, IOException
    {
        // The record may be in a different slot on the undo page; find it by id.
        int slot = undoPage.findRecordById(recordId, Page.FIRST_SLOT_NUMBER);

        // skip the after image of the record
        undoPage.skipRecord(in);

        undoPage.storeRecord(CLRInstant, slot, false, in);
        undoPage.setAuxObject(null);
    }

    /*
        methods to support prepared log
        the following two methods should not be called during recover
    */

    public ByteArray getPreparedLog()
    {
        return (this.preparedLog);
    }

    /**
        Write out the changed colums of new record (from the row) followed by
        changed columns of the old record (from the page).

        @exception StandardException Thrown by methods I call
        @exception IOException Thrown by methods I call
    */
    private void writeOptionalDataToBuffer(
    RawTransaction              t,
    DynamicByteArrayOutputStream logBuffer,
    Object[]                    row,
    FormatableBitSet            validColumns,
    int                         realStartColumn,
    int                         realSpaceOnPage,
    RecordHandle                headRowHandle)
        throws StandardException, IOException
    {
        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(this.page != null);
        }

        // First pass (-1) gets a fresh log buffer from the transaction;
        // continuation passes keep writing into the caller-supplied buffer.
        if (realStartColumn == (-1)) {
            logBuffer = t.getLogBuffer();
        }

        int optionalDataStart = logBuffer.getPosition();

        if (SanityManager.DEBUG) {
            SanityManager.ASSERT(
                (realStartColumn != -1 || optionalDataStart == 0),
                "Buffer for writing optional data should start at position 0");
        }

        // Log the new image of the changed columns; returns -1 if the whole
        // row fit, otherwise the next column to continue from (long row).
        this.nextColumn =
            this.page.logRow(
                doMeSlot, false, recordId, row, validColumns,
                logBuffer, 0, Page.INSERT_OVERFLOW, realStartColumn,
                realSpaceOnPage, 100);

        FormatableBitSet loggedColumns = validColumns;

        // If this update results in moving columns off the current page to
        // another page, then we must log the before image values of the columns
        // being moved (deleted from this page) in addition to logging the
        // columns actually being changed as part of the update.
        if ((nextColumn != -1) && (validColumns != null))
        {
            // if nextColumn is not -1, then this must be an update which moves
            // columns off of the current page.  If validColumns == null then
            // we are logging all of the before image columns anyway.

            // get total number of fields of the old record.
            int numberFields = page.getHeaderAtSlot(doMeSlot).getNumberFields();

            // create new bit map, copying all bits that were set in original
            loggedColumns = new FormatableBitSet(validColumns);

            // make sure there is room in the bit map to add the columns being
            // deleted from the end of the row.
            // The important thing is that endField must be at least as big as
            // the number of columns in the entire record (including previous
            // pages of a long row) up to the end of this page.
            int endField = nextColumn + numberFields;
            loggedColumns.grow(endField);
            // now include all columns being deleted.
            // This actually sets too many bits in this bit set but
            // logRecord will just ignore the extra bits.
            for (int i = nextColumn; i < endField; i++)
            {
                loggedColumns.set(i);
            }
        }

        // log the old version of the changed data
        this.page.logRecord(
            doMeSlot, BasePage.LOG_RECORD_FOR_UPDATE,
            recordId, loggedColumns, logBuffer, headRowHandle);

        // get length of all the optional data.
        optionalDataStart = logBuffer.getBeginPosition();
        int optionalDataLength = logBuffer.getPosition() - optionalDataStart;

        // set the position to the beginning of the buffer
        logBuffer.setPosition(optionalDataStart);

        this.preparedLog = new ByteArray(
            logBuffer.getByteArray(), optionalDataStart, optionalDataLength);
    }

    /*
     * PageBasicOperation
     */

    /**
     * restore the before image of the page
     *
     * @exception StandardException Standard Derby Error Policy
     * @exception IOException problem reading the complete log record from the
     *                        input stream
     */
    public void restoreMe(Transaction xact, BasePage undoPage,
                          LogInstant CLRInstant, LimitObjectInput in)
         throws StandardException, IOException
    {
        undoMe(xact, undoPage, CLRInstant, in);
    }

    public String toString()
    {
        if (SanityManager.DEBUG)
        {
            return super.toString() +
                "Update " +
                " Slot=" + doMeSlot +
                " recordId=" + recordId;
        }
        else
            return null;
    }
}
// Copyright 2018 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe.serialization; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ClassToInstanceMap; import com.google.common.collect.ImmutableClassToInstanceMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; import com.google.devtools.build.lib.skyframe.serialization.Memoizer.Serializer; import com.google.devtools.build.lib.skyframe.serialization.ObjectCodec.MemoizationStrategy; import com.google.devtools.build.lib.skyframe.serialization.SerializationException.NoCodecException; import com.google.protobuf.CodedOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.annotation.CheckReturnValue; import javax.annotation.Nullable; /** * Stateful class for providing additional context to a single serialization "session". 
This class * is thread-safe so long as {@link #serializer} is null (which also implies that {@link * #allowFuturesToBlockWritingOn}) is false). If it is not null, this class is not thread-safe and * should only be accessed on a single thread for serializing one object (that may involve * serializing other objects contained in it). */ public class SerializationContext implements SerializationDependencyProvider { private final ObjectCodecRegistry registry; private final ImmutableClassToInstanceMap<Object> dependencies; @Nullable private final Memoizer.Serializer serializer; private final Set<Class<?>> explicitlyAllowedClasses; /** Initialized lazily. */ @Nullable private List<ListenableFuture<Void>> futuresToBlockWritingOn; private final boolean allowFuturesToBlockWritingOn; private SerializationContext( ObjectCodecRegistry registry, ImmutableClassToInstanceMap<Object> dependencies, @Nullable Serializer serializer, boolean allowFuturesToBlockWritingOn) { this.registry = registry; this.dependencies = dependencies; this.serializer = serializer; this.allowFuturesToBlockWritingOn = allowFuturesToBlockWritingOn; explicitlyAllowedClasses = serializer != null ? new HashSet<>() : ImmutableSet.of(); } @VisibleForTesting public SerializationContext( ObjectCodecRegistry registry, ImmutableClassToInstanceMap<Object> dependencies) { this(registry, dependencies, /*serializer=*/ null, /*allowFuturesToBlockWritingOn=*/ false); } @VisibleForTesting public SerializationContext(ImmutableClassToInstanceMap<Object> dependencies) { this(AutoRegistry.get(), dependencies); } // TODO(shahan): consider making codedOut a member of this class. 
public void serialize(Object object, CodedOutputStream codedOut) throws IOException, SerializationException { serializeInternal(object, /*customMemoizationStrategy=*/ null, codedOut); } void serializeWithAdHocMemoizationStrategy( Object object, MemoizationStrategy memoizationStrategy, CodedOutputStream codedOut) throws IOException, SerializationException { serializeInternal(object, memoizationStrategy, codedOut); } private void serializeInternal( Object object, @Nullable MemoizationStrategy customMemoizationStrategy, CodedOutputStream codedOut) throws IOException, SerializationException { ObjectCodecRegistry.CodecDescriptor descriptor = recordAndGetDescriptorIfNotConstantMemoizedOrNull(object, codedOut); if (descriptor != null) { if (serializer == null) { descriptor.serialize(this, object, codedOut); } else { @SuppressWarnings("unchecked") ObjectCodec<Object> castCodec = (ObjectCodec<Object>) descriptor.getCodec(); MemoizationStrategy memoizationStrategy = customMemoizationStrategy != null ? customMemoizationStrategy : castCodec.getStrategy(); serializer.serialize(this, object, castCodec, codedOut, memoizationStrategy); } } } @Override public <T> T getDependency(Class<T> type) { return checkNotNull(dependencies.getInstance(type), "Missing dependency of type %s", type); } /** * Returns a {@link SerializationContext} that will memoize values it encounters (using reference * equality) in a new memoization table. The returned context should be used instead of the * original: memoization may only occur when using the returned context. Calls must be in pairs * with {@link DeserializationContext#getMemoizingContext} in the corresponding deserialization * code. * * <p>This method is idempotent: calling it on an already memoizing context will return the same * context. 
*/ @CheckReturnValue public SerializationContext getMemoizingContext() { if (serializer != null) { return this; } return getNewMemoizingContext(/*allowFuturesToBlockWritingOn=*/ false); } /** * Returns a {@link SerializationContext} that will memoize values as described in {@link * #getMemoizingContext} and additionally permits attaching futures through {@link * #addFutureToBlockWritingOn}. */ @CheckReturnValue public SerializationContext getMemoizingAndBlockingOnWriteContext() { checkState(serializer == null, "Should only be called on base serializationContext"); checkState(!allowFuturesToBlockWritingOn, "Should only be called on base serializationContext"); return getNewMemoizingContext(/*allowFuturesToBlockWritingOn=*/ true); } /** * Returns a memoizing {@link SerializationContext}, as getMemoizingContext above. Unlike * getMemoizingContext, this method is not idempotent - the returned context will always be fresh. */ public SerializationContext getNewMemoizingContext() { return getNewMemoizingContext(allowFuturesToBlockWritingOn); } private SerializationContext getNewMemoizingContext(boolean allowFuturesToBlockWritingOn) { return new SerializationContext( registry, dependencies, new Memoizer.Serializer(), allowFuturesToBlockWritingOn); } /** * Returns a new {@link SerializationContext} mostly identical to this one, but with a dependency * map composed by applying overrides to this context's dependencies. * * <p>The given {@code dependencyOverrides} may contain keys already present (in which case the * dependency will be replaced) or new keys (in which case the dependency will be added). * * <p>Must only be called on a base context (no memoization state), since changing dependencies * may change deserialization semantics. 
*/ @CheckReturnValue public SerializationContext withDependencyOverrides(ClassToInstanceMap<?> dependencyOverrides) { checkState(serializer == null, "Must only be called on base SerializationContext"); return new SerializationContext( registry, ImmutableClassToInstanceMap.builder() .putAll(Maps.filterKeys(dependencies, k -> !dependencyOverrides.containsKey(k))) .putAll(dependencyOverrides) .build(), /*serializer=*/ null, allowFuturesToBlockWritingOn); } /** * Register a {@link ListenableFuture} that must complete successfully before the serialized bytes * generated using this context can be written remotely. Failure of the future implies a bug or * other unrecoverable error that should crash this JVM, which is done by invoking {@link * FutureCallback#onFailure} on the given {@code crashTerminatingCallback}. */ public void addFutureToBlockWritingOn( ListenableFuture<Void> future, FutureCallback<Void> crashTerminatingCallback) { checkState(allowFuturesToBlockWritingOn, "This context cannot block on a future"); if (futuresToBlockWritingOn == null) { futuresToBlockWritingOn = new ArrayList<>(); } Futures.addCallback(future, crashTerminatingCallback, MoreExecutors.directExecutor()); futuresToBlockWritingOn.add(future); } /** * Creates a future that succeeds when all futures stored in this context via {@link * #addFutureToBlockWritingOn} have succeeded, or null if no such futures were stored. */ @Nullable public ListenableFuture<Void> createFutureToBlockWritingOn() { return futuresToBlockWritingOn != null ? Futures.whenAllSucceed(futuresToBlockWritingOn) .call(() -> null, MoreExecutors.directExecutor()) : null; } /** * Asserts during serialization that the encoded class of this codec has been explicitly * whitelisted for serialization (using {@link #addExplicitlyAllowedClass}). 
Codecs for objects * that are expensive to serialize and that should only be encountered in a limited number of * types of {@link com.google.devtools.build.skyframe.SkyValue}s should call this method to check * that the object is being serialized as part of an expected {@link * com.google.devtools.build.skyframe.SkyValue}, like {@link * com.google.devtools.build.lib.packages.Package} inside {@link * com.google.devtools.build.lib.skyframe.PackageValue}. */ public <T> void checkClassExplicitlyAllowed(Class<T> allowedClass, T objectForDebugging) throws SerializationException { if (serializer == null) { throw new SerializationException( "Cannot check explicitly allowed class " + allowedClass + " without memoization (" + objectForDebugging + ")"); } if (!explicitlyAllowedClasses.contains(allowedClass)) { throw new SerializationException( allowedClass + " not explicitly allowed (allowed classes were: " + explicitlyAllowedClasses + ") and object is " + objectForDebugging); } } /** * Adds an explicitly allowed class for this serialization context, which must be a memoizing * context. Must be called by any codec that transitively serializes an object whose codec calls * {@link #checkClassExplicitlyAllowed}. * * <p>Normally called by codecs for {@link com.google.devtools.build.skyframe.SkyValue} subclasses * that know they may encounter an object that is expensive to serialize, like {@link * com.google.devtools.build.lib.skyframe.PackageValue} and {@link * com.google.devtools.build.lib.packages.Package} or {@link * com.google.devtools.build.lib.analysis.ConfiguredTargetValue} and {@link * com.google.devtools.build.lib.analysis.configuredtargets.RuleConfiguredTarget}. * * <p>In case of an unexpected failure from {@link #checkClassExplicitlyAllowed}, it should first * be determined if the inclusion of the expensive object is legitimate, before it is whitelisted * using this method. 
*/ public void addExplicitlyAllowedClass(Class<?> allowedClass) throws SerializationException { if (serializer == null) { throw new SerializationException( "Cannot add explicitly allowed class %s without memoization: " + allowedClass); } explicitlyAllowedClasses.add(allowedClass); } private boolean writeNullOrConstant(@Nullable Object object, CodedOutputStream codedOut) throws IOException { if (object == null) { codedOut.writeSInt32NoTag(0); return true; } Integer tag = registry.maybeGetTagForConstant(object); if (tag != null) { codedOut.writeSInt32NoTag(tag); return true; } return false; } @Nullable private ObjectCodecRegistry.CodecDescriptor recordAndGetDescriptorIfNotConstantMemoizedOrNull( @Nullable Object object, CodedOutputStream codedOut) throws IOException, NoCodecException { if (writeNullOrConstant(object, codedOut)) { return null; } if (serializer != null) { Integer memoizedIndex = serializer.getMemoizedIndex(object); if (memoizedIndex != null) { // Subtract 1 so it will be negative and not collide with null. codedOut.writeSInt32NoTag(-memoizedIndex - 1); return null; } } ObjectCodecRegistry.CodecDescriptor descriptor = registry.getCodecDescriptorForObject(object); codedOut.writeSInt32NoTag(descriptor.getTag()); return descriptor; } }
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package sg.atom.core.datastructure.collection; import java.util.Iterator; import java.util.NoSuchElementException; import sg.atom.corex.math.MathUtils; /** An unordered map where the values are ints. This implementation is a cuckoo hash map using 3 hashes, random walking, and a * small stash for problematic keys. Null keys are not allowed. No allocation is done except when growing the table size. <br> * <br> * This map performs very fast get, containsKey, and remove (typically O(1), worst case O(log(n))). Put may be a bit slower, * depending on hash collisions. Load factors greater than 0.91 greatly increase the chances the map will have to rehash to the * next higher POT size. 
* @author Nathan Sweet */ public class ObjectIntMap<K> implements Iterable<ObjectIntMap.Entry<K>> { private static final int PRIME1 = 0xbe1f14b1; private static final int PRIME2 = 0xb4b82e39; private static final int PRIME3 = 0xced1c241; public int size; K[] keyTable; int[] valueTable; int capacity, stashSize; private float loadFactor; private int hashShift, mask, threshold; private int stashCapacity; private int pushIterations; private Entries entries1, entries2; private Values values1, values2; private Keys keys1, keys2; /** Creates a new map with an initial capacity of 32 and a load factor of 0.8. This map will hold 25 items before growing the * backing table. */ public ObjectIntMap () { this(32, 0.8f); } /** Creates a new map with a load factor of 0.8. This map will hold initialCapacity * 0.8 items before growing the backing * table. */ public ObjectIntMap (int initialCapacity) { this(initialCapacity, 0.8f); } /** Creates a new map with the specified initial capacity and load factor. This map will hold initialCapacity * loadFactor items * before growing the backing table. */ public ObjectIntMap (int initialCapacity, float loadFactor) { if (initialCapacity < 0) throw new IllegalArgumentException("initialCapacity must be >= 0: " + initialCapacity); if (initialCapacity > 1 << 30) throw new IllegalArgumentException("initialCapacity is too large: " + initialCapacity); capacity = MathUtils.nextPowerOfTwo(initialCapacity); if (loadFactor <= 0) throw new IllegalArgumentException("loadFactor must be > 0: " + loadFactor); this.loadFactor = loadFactor; threshold = (int)(capacity * loadFactor); mask = capacity - 1; hashShift = 31 - Integer.numberOfTrailingZeros(capacity); stashCapacity = Math.max(3, (int)Math.ceil(Math.log(capacity)) * 2); pushIterations = Math.max(Math.min(capacity, 8), (int)Math.sqrt(capacity) / 8); keyTable = (K[])new Object[capacity + stashCapacity]; valueTable = new int[keyTable.length]; } /** Creates a new map identical to the specified map. 
*/ public ObjectIntMap (ObjectIntMap<? extends K> map) { this(map.capacity, map.loadFactor); stashSize = map.stashSize; System.arraycopy(map.keyTable, 0, keyTable, 0, map.keyTable.length); System.arraycopy(map.valueTable, 0, valueTable, 0, map.valueTable.length); size = map.size; } public void put (K key, int value) { if (key == null) throw new IllegalArgumentException("key cannot be null."); K[] keyTable = this.keyTable; // Check for existing keys. int hashCode = key.hashCode(); int index1 = hashCode & mask; K key1 = keyTable[index1]; if (key.equals(key1)) { valueTable[index1] = value; return; } int index2 = hash2(hashCode); K key2 = keyTable[index2]; if (key.equals(key2)) { valueTable[index2] = value; return; } int index3 = hash3(hashCode); K key3 = keyTable[index3]; if (key.equals(key3)) { valueTable[index3] = value; return; } // Update key in the stash. for (int i = capacity, n = i + stashSize; i < n; i++) { if (key.equals(keyTable[i])) { valueTable[i] = value; return; } } // Check for empty buckets. if (key1 == null) { keyTable[index1] = key; valueTable[index1] = value; if (size++ >= threshold) resize(capacity << 1); return; } if (key2 == null) { keyTable[index2] = key; valueTable[index2] = value; if (size++ >= threshold) resize(capacity << 1); return; } if (key3 == null) { keyTable[index3] = key; valueTable[index3] = value; if (size++ >= threshold) resize(capacity << 1); return; } push(key, value, index1, key1, index2, key2, index3, key3); } public void putAll (ObjectIntMap<K> map) { for (Entry<K> entry : map.entries()) put(entry.key, entry.value); } /** Skips checks for existing keys. */ private void putResize (K key, int value) { // Check for empty buckets. 
int hashCode = key.hashCode(); int index1 = hashCode & mask; K key1 = keyTable[index1]; if (key1 == null) { keyTable[index1] = key; valueTable[index1] = value; if (size++ >= threshold) resize(capacity << 1); return; } int index2 = hash2(hashCode); K key2 = keyTable[index2]; if (key2 == null) { keyTable[index2] = key; valueTable[index2] = value; if (size++ >= threshold) resize(capacity << 1); return; } int index3 = hash3(hashCode); K key3 = keyTable[index3]; if (key3 == null) { keyTable[index3] = key; valueTable[index3] = value; if (size++ >= threshold) resize(capacity << 1); return; } push(key, value, index1, key1, index2, key2, index3, key3); } private void push (K insertKey, int insertValue, int index1, K key1, int index2, K key2, int index3, K key3) { K[] keyTable = this.keyTable; int[] valueTable = this.valueTable; int mask = this.mask; // Push keys until an empty bucket is found. K evictedKey; int evictedValue; int i = 0, pushIterations = this.pushIterations; do { // Replace the key and value for one of the hashes. switch (MathUtils.random(2)) { case 0: evictedKey = key1; evictedValue = valueTable[index1]; keyTable[index1] = insertKey; valueTable[index1] = insertValue; break; case 1: evictedKey = key2; evictedValue = valueTable[index2]; keyTable[index2] = insertKey; valueTable[index2] = insertValue; break; default: evictedKey = key3; evictedValue = valueTable[index3]; keyTable[index3] = insertKey; valueTable[index3] = insertValue; break; } // If the evicted key hashes to an empty bucket, put it there and stop. 
int hashCode = evictedKey.hashCode(); index1 = hashCode & mask; key1 = keyTable[index1]; if (key1 == null) { keyTable[index1] = evictedKey; valueTable[index1] = evictedValue; if (size++ >= threshold) resize(capacity << 1); return; } index2 = hash2(hashCode); key2 = keyTable[index2]; if (key2 == null) { keyTable[index2] = evictedKey; valueTable[index2] = evictedValue; if (size++ >= threshold) resize(capacity << 1); return; } index3 = hash3(hashCode); key3 = keyTable[index3]; if (key3 == null) { keyTable[index3] = evictedKey; valueTable[index3] = evictedValue; if (size++ >= threshold) resize(capacity << 1); return; } if (++i == pushIterations) break; insertKey = evictedKey; insertValue = evictedValue; } while (true); putStash(evictedKey, evictedValue); } private void putStash (K key, int value) { if (stashSize == stashCapacity) { // Too many pushes occurred and the stash is full, increase the table size. resize(capacity << 1); put(key, value); return; } // Store key in the stash. int index = capacity + stashSize; keyTable[index] = key; valueTable[index] = value; stashSize++; size++; } /** @param defaultValue Returned if the key was not associated with a value. */ public int get (K key, int defaultValue) { int hashCode = key.hashCode(); int index = hashCode & mask; if (!key.equals(keyTable[index])) { index = hash2(hashCode); if (!key.equals(keyTable[index])) { index = hash3(hashCode); if (!key.equals(keyTable[index])) return getStash(key, defaultValue); } } return valueTable[index]; } private int getStash (K key, int defaultValue) { K[] keyTable = this.keyTable; for (int i = capacity, n = i + stashSize; i < n; i++) if (key.equals(keyTable[i])) return valueTable[i]; return defaultValue; } /** Returns the key's current value and increments the stored value. If the key is not in the map, defaultValue + increment is * put into the map. 
*/ public int getAndIncrement (K key, int defaultValue, int increment) { int hashCode = key.hashCode(); int index = hashCode & mask; if (!key.equals(keyTable[index])) { index = hash2(hashCode); if (!key.equals(keyTable[index])) { index = hash3(hashCode); if (!key.equals(keyTable[index])) return getAndIncrementStash(key, defaultValue, increment); } } int value = valueTable[index]; valueTable[index] = value + increment; return value; } private int getAndIncrementStash (K key, int defaultValue, int increment) { K[] keyTable = this.keyTable; for (int i = capacity, n = i + stashSize; i < n; i++) if (key.equals(keyTable[i])) { int value = valueTable[i]; valueTable[i] = value + increment; return value; } put(key, defaultValue + increment); return defaultValue; } public int remove (K key, int defaultValue) { int hashCode = key.hashCode(); int index = hashCode & mask; if (key.equals(keyTable[index])) { keyTable[index] = null; int oldValue = valueTable[index]; size--; return oldValue; } index = hash2(hashCode); if (key.equals(keyTable[index])) { keyTable[index] = null; int oldValue = valueTable[index]; size--; return oldValue; } index = hash3(hashCode); if (key.equals(keyTable[index])) { keyTable[index] = null; int oldValue = valueTable[index]; size--; return oldValue; } return removeStash(key, defaultValue); } int removeStash (K key, int defaultValue) { K[] keyTable = this.keyTable; for (int i = capacity, n = i + stashSize; i < n; i++) { if (key.equals(keyTable[i])) { int oldValue = valueTable[i]; removeStashIndex(i); size--; return oldValue; } } return defaultValue; } void removeStashIndex (int index) { // If the removed location was not last, move the last tuple to the removed location. stashSize--; int lastIndex = capacity + stashSize; if (index < lastIndex) { keyTable[index] = keyTable[lastIndex]; valueTable[index] = valueTable[lastIndex]; } } /** Reduces the size of the backing arrays to be the specified capacity or less. 
If the capacity is already less, nothing is * done. If the map contains more items than the specified capacity, the next highest power of two capacity is used instead. */ public void shrink (int maximumCapacity) { if (maximumCapacity < 0) throw new IllegalArgumentException("maximumCapacity must be >= 0: " + maximumCapacity); if (size > maximumCapacity) maximumCapacity = size; if (capacity <= maximumCapacity) return; maximumCapacity = MathUtils.nextPowerOfTwo(maximumCapacity); resize(maximumCapacity); } /** Clears the map and reduces the size of the backing arrays to be the specified capacity if they are larger. */ public void clear (int maximumCapacity) { if (capacity <= maximumCapacity) { clear(); return; } size = 0; resize(maximumCapacity); } public void clear () { if (size == 0) return; K[] keyTable = this.keyTable; for (int i = capacity + stashSize; i-- > 0;) keyTable[i] = null; size = 0; stashSize = 0; } /** Returns true if the specified value is in the map. Note this traverses the entire map and compares every value, which may be * an expensive operation. */ public boolean containsValue (int value) { int[] valueTable = this.valueTable; for (int i = capacity + stashSize; i-- > 0;) if (valueTable[i] == value) return true; return false; } public boolean containsKey (K key) { int hashCode = key.hashCode(); int index = hashCode & mask; if (!key.equals(keyTable[index])) { index = hash2(hashCode); if (!key.equals(keyTable[index])) { index = hash3(hashCode); if (!key.equals(keyTable[index])) return containsKeyStash(key); } } return true; } private boolean containsKeyStash (K key) { K[] keyTable = this.keyTable; for (int i = capacity, n = i + stashSize; i < n; i++) if (key.equals(keyTable[i])) return true; return false; } /** Returns the key for the specified value, or null if it is not in the map. Note this traverses the entire map and compares * every value, which may be an expensive operation. 
*/ public K findKey (int value) { int[] valueTable = this.valueTable; for (int i = capacity + stashSize; i-- > 0;) if (valueTable[i] == value) return keyTable[i]; return null; } /** Increases the size of the backing array to accommodate the specified number of additional items. Useful before adding many * items to avoid multiple backing array resizes. */ public void ensureCapacity (int additionalCapacity) { int sizeNeeded = size + additionalCapacity; if (sizeNeeded >= threshold) resize(MathUtils.nextPowerOfTwo((int)(sizeNeeded / loadFactor))); } private void resize (int newSize) { int oldEndIndex = capacity + stashSize; capacity = newSize; threshold = (int)(newSize * loadFactor); mask = newSize - 1; hashShift = 31 - Integer.numberOfTrailingZeros(newSize); stashCapacity = Math.max(3, (int)Math.ceil(Math.log(newSize)) * 2); pushIterations = Math.max(Math.min(newSize, 8), (int)Math.sqrt(newSize) / 8); K[] oldKeyTable = keyTable; int[] oldValueTable = valueTable; keyTable = (K[])new Object[newSize + stashCapacity]; valueTable = new int[newSize + stashCapacity]; int oldSize = size; size = 0; stashSize = 0; if (oldSize > 0) { for (int i = 0; i < oldEndIndex; i++) { K key = oldKeyTable[i]; if (key != null) putResize(key, oldValueTable[i]); } } } private int hash2 (int h) { h *= PRIME2; return (h ^ h >>> hashShift) & mask; } private int hash3 (int h) { h *= PRIME3; return (h ^ h >>> hashShift) & mask; } public String toString () { if (size == 0) return "{}"; StringBuilder buffer = new StringBuilder(32); buffer.append('{'); K[] keyTable = this.keyTable; int[] valueTable = this.valueTable; int i = keyTable.length; while (i-- > 0) { K key = keyTable[i]; if (key == null) continue; buffer.append(key); buffer.append('='); buffer.append(valueTable[i]); break; } while (i-- > 0) { K key = keyTable[i]; if (key == null) continue; buffer.append(", "); buffer.append(key); buffer.append('='); buffer.append(valueTable[i]); } buffer.append('}'); return buffer.toString(); } public 
Entries<K> iterator () { return entries(); } /** Returns an iterator for the entries in the map. Remove is supported. Note that the same iterator instance is returned each * time this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration. */ public Entries<K> entries () { if (entries1 == null) { entries1 = new Entries(this); entries2 = new Entries(this); } if (!entries1.valid) { entries1.reset(); entries1.valid = true; entries2.valid = false; return entries1; } entries2.reset(); entries2.valid = true; entries1.valid = false; return entries2; } /** Returns an iterator for the values in the map. Remove is supported. Note that the same iterator instance is returned each * time this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration. */ public Values values () { if (values1 == null) { values1 = new Values(this); values2 = new Values(this); } if (!values1.valid) { values1.reset(); values1.valid = true; values2.valid = false; return values1; } values2.reset(); values2.valid = true; values1.valid = false; return values2; } /** Returns an iterator for the keys in the map. Remove is supported. Note that the same iterator instance is returned each time * this method is called. Use the {@link Entries} constructor for nested or multithreaded iteration. 
*/ public Keys<K> keys () { if (keys1 == null) { keys1 = new Keys(this); keys2 = new Keys(this); } if (!keys1.valid) { keys1.reset(); keys1.valid = true; keys2.valid = false; return keys1; } keys2.reset(); keys2.valid = true; keys1.valid = false; return keys2; } static public class Entry<K> { public K key; public int value; public String toString () { return key + "=" + value; } } static private class MapIterator<K> { public boolean hasNext; final ObjectIntMap<K> map; int nextIndex, currentIndex; boolean valid = true; public MapIterator (ObjectIntMap<K> map) { this.map = map; reset(); } public void reset () { currentIndex = -1; nextIndex = -1; findNextIndex(); } void findNextIndex () { hasNext = false; K[] keyTable = map.keyTable; for (int n = map.capacity + map.stashSize; ++nextIndex < n;) { if (keyTable[nextIndex] != null) { hasNext = true; break; } } } public void remove () { if (currentIndex < 0) throw new IllegalStateException("next must be called before remove."); if (currentIndex >= map.capacity) { map.removeStashIndex(currentIndex); nextIndex = currentIndex - 1; findNextIndex(); } else { map.keyTable[currentIndex] = null; } currentIndex = -1; map.size--; } } static public class Entries<K> extends MapIterator<K> implements Iterable<Entry<K>>, Iterator<Entry<K>> { private Entry<K> entry = new Entry(); public Entries (ObjectIntMap<K> map) { super(map); } /** Note the same entry instance is returned each time this method is called. 
*/ public Entry<K> next () { if (!hasNext) throw new NoSuchElementException(); if (!valid) throw new RuntimeException("#iterator() cannot be used nested."); K[] keyTable = map.keyTable; entry.key = keyTable[nextIndex]; entry.value = map.valueTable[nextIndex]; currentIndex = nextIndex; findNextIndex(); return entry; } public boolean hasNext () { if (!valid) throw new RuntimeException("#iterator() cannot be used nested."); return hasNext; } public Entries<K> iterator () { return this; } public void remove () { super.remove(); } } static public class Values extends MapIterator<Object> { public Values (ObjectIntMap<?> map) { super((ObjectIntMap<Object>)map); } public boolean hasNext () { if (!valid) throw new RuntimeException("#iterator() cannot be used nested."); return hasNext; } public int next () { if (!hasNext) throw new NoSuchElementException(); if (!valid) throw new RuntimeException("#iterator() cannot be used nested."); int value = map.valueTable[nextIndex]; currentIndex = nextIndex; findNextIndex(); return value; } /** Returns a new array containing the remaining values. */ public IntArray toArray () { IntArray array = new IntArray(true, map.size); while (hasNext) array.add(next()); return array; } } static public class Keys<K> extends MapIterator<K> implements Iterable<K>, Iterator<K> { public Keys (ObjectIntMap<K> map) { super((ObjectIntMap<K>)map); } public boolean hasNext () { if (!valid) throw new RuntimeException("#iterator() cannot be used nested."); return hasNext; } public K next () { if (!hasNext) throw new NoSuchElementException(); if (!valid) throw new RuntimeException("#iterator() cannot be used nested."); K key = map.keyTable[nextIndex]; currentIndex = nextIndex; findNextIndex(); return key; } public Keys<K> iterator () { return this; } /** Returns a new array containing the remaining keys. 
*/ public Array<K> toArray () { Array array = new Array(true, map.size); while (hasNext) array.add(next()); return array; } /** Adds the remaining keys to the array. */ public Array<K> toArray (Array<K> array) { while (hasNext) array.add(next()); return array; } public void remove () { super.remove(); } } }
/* * Copyright (c) 2004, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package sun.tools.jconsole; import java.awt.*; import java.awt.event.*; import javax.swing.*; import javax.swing.border.*; import javax.swing.plaf.*; import javax.swing.plaf.basic.BasicGraphicsUtils; import static javax.swing.SwingConstants.*; import static sun.tools.jconsole.JConsole.*; @SuppressWarnings("serial") public class BorderedComponent extends JPanel implements ActionListener { JButton moreOrLessButton; String valueLabelStr; JLabel label; JComponent comp; boolean collapsed = false; private Icon collapseIcon; private Icon expandIcon; private static Image getImage(String name) { Toolkit tk = Toolkit.getDefaultToolkit(); name = "resources/" + name + ".png"; return tk.getImage(BorderedComponent.class.getResource(name)); } public BorderedComponent(String text) { this(text, null, false); } public BorderedComponent(String text, JComponent comp) { this(text, comp, false); } public BorderedComponent(String text, JComponent comp, boolean collapsible) { super(null); this.comp = comp; // Only add border if text is not null if (text != null) { TitledBorder border; if (collapsible) { final JLabel textLabel = new JLabel(text); JPanel borderLabel = new JPanel(new FlowLayout(FlowLayout.LEFT, 2, 0)) { public int getBaseline(int w, int h) { Dimension dim = textLabel.getPreferredSize(); return textLabel.getBaseline(dim.width, dim.height) + textLabel.getY(); } }; borderLabel.add(textLabel); border = new LabeledBorder(borderLabel); textLabel.setForeground(border.getTitleColor()); if (IS_WIN) { collapseIcon = new ImageIcon(getImage("collapse-winlf")); expandIcon = new ImageIcon(getImage("expand-winlf")); } else { collapseIcon = new ArrowIcon(SOUTH, textLabel); expandIcon = new ArrowIcon(EAST, textLabel); } moreOrLessButton = new JButton(collapseIcon); moreOrLessButton.setContentAreaFilled(false); moreOrLessButton.setBorderPainted(false); moreOrLessButton.setMargin(new Insets(0, 0, 0, 0)); moreOrLessButton.addActionListener(this); String toolTip = 
Messages.BORDERED_COMPONENT_MORE_OR_LESS_BUTTON_TOOLTIP; moreOrLessButton.setToolTipText(toolTip); borderLabel.add(moreOrLessButton); borderLabel.setSize(borderLabel.getPreferredSize()); add(borderLabel); } else { border = new TitledBorder(text); } setBorder(new CompoundBorder(new FocusBorder(this), border)); } else { setBorder(new FocusBorder(this)); } if (comp != null) { add(comp); } } public void setComponent(JComponent comp) { if (this.comp != null) { remove(this.comp); } this.comp = comp; if (!collapsed) { LayoutManager lm = getLayout(); if (lm instanceof BorderLayout) { add(comp, BorderLayout.CENTER); } else { add(comp); } } revalidate(); } public void setValueLabel(String str) { this.valueLabelStr = str; if (label != null) { label.setText(Resources.format(Messages.CURRENT_VALUE, valueLabelStr)); } } public void actionPerformed(ActionEvent ev) { if (collapsed) { if (label != null) { remove(label); } add(comp); moreOrLessButton.setIcon(collapseIcon); } else { remove(comp); if (valueLabelStr != null) { if (label == null) { label = new JLabel(Resources.format(Messages.CURRENT_VALUE, valueLabelStr)); } add(label); } moreOrLessButton.setIcon(expandIcon); } collapsed = !collapsed; JComponent container = (JComponent)getParent(); if (container != null && container.getLayout() instanceof VariableGridLayout) { ((VariableGridLayout)container.getLayout()).setFillRow(this, !collapsed); container.revalidate(); } } public Dimension getMinimumSize() { if (getLayout() != null) { // A layout manager has been set, so delegate to it return super.getMinimumSize(); } if (moreOrLessButton != null) { Dimension d = moreOrLessButton.getMinimumSize(); Insets i = getInsets(); d.width += i.left + i.right; d.height += i.top + i.bottom; return d; } else { return super.getMinimumSize(); } } public void doLayout() { if (getLayout() != null) { // A layout manager has been set, so delegate to it super.doLayout(); return; } Dimension d = getSize(); Insets i = getInsets(); if (collapsed) { if 
(label != null) { Dimension p = label.getPreferredSize(); label.setBounds(i.left, i.top + (d.height - i.top - i.bottom - p.height) / 2, p.width, p.height); } } else { if (comp != null) { comp.setBounds(i.left, i.top, d.width - i.left - i.right, d.height - i.top - i.bottom); } } } private static class ArrowIcon implements Icon { private int direction; private JLabel textLabel; public ArrowIcon(int direction, JLabel textLabel) { this.direction = direction; this.textLabel = textLabel; } public void paintIcon(Component c, Graphics g, int x, int y) { int w = getIconWidth(); int h = w; Polygon p = new Polygon(); switch (direction) { case EAST: p.addPoint(x + 2, y); p.addPoint(x + w - 2, y + h / 2); p.addPoint(x + 2, y + h - 1); break; case SOUTH: p.addPoint(x, y + 2); p.addPoint(x + w / 2, y + h - 2); p.addPoint(x + w - 1, y + 2); break; } g.fillPolygon(p); } public int getIconWidth() { return getIconHeight(); } public int getIconHeight() { Graphics g = textLabel.getGraphics(); if (g != null) { int h = g.getFontMetrics(textLabel.getFont()).getAscent() * 6/10; if (h % 2 == 0) { h += 1; // Make it odd } return h; } else { return 7; } } } /** * A subclass of <code>TitledBorder</code> which implements an arbitrary border * with the addition of a JComponent (JLabel, JPanel, etc) in the * default position. * <p> * If the border property value is not * specified in the constructor or by invoking the appropriate * set method, the property value will be defined by the current * look and feel, using the following property name in the * Defaults Table: * <ul> * <li>&quot;TitledBorder.border&quot; * </ul> */ protected static class LabeledBorder extends TitledBorder { protected JComponent label; private Point compLoc = new Point(); /** * Creates a LabeledBorder instance. * * @param label the label the border should display */ public LabeledBorder(JComponent label) { this(null, label); } /** * Creates a LabeledBorder instance with the specified border * and an empty label. 
* * @param border the border */ public LabeledBorder(Border border) { this(border, null); } /** * Creates a LabeledBorder instance with the specified border and * label. * * @param border the border * @param label the label the border should display */ public LabeledBorder(Border border, JComponent label) { super(border); this.label = label; if (label instanceof JLabel && label.getForeground() instanceof ColorUIResource) { label.setForeground(getTitleColor()); } } /** * Paints the border for the specified component with the * specified position and size. * @param c the component for which this border is being painted * @param g the paint graphics * @param x the x position of the painted border * @param y the y position of the painted border * @param width the width of the painted border * @param height the height of the painted border */ public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) { Border border = getBorder(); if (label == null) { if (border != null) { border.paintBorder(c, g, x, y, width, height); } return; } Rectangle grooveRect = new Rectangle(x + EDGE_SPACING, y + EDGE_SPACING, width - (EDGE_SPACING * 2), height - (EDGE_SPACING * 2)); Dimension labelDim = label.getPreferredSize(); int baseline = label.getBaseline(labelDim.width, labelDim.height); int ascent = Math.max(0, baseline); int descent = labelDim.height - ascent; int diff; Insets insets; if (border != null) { insets = border.getBorderInsets(c); } else { insets = new Insets(0, 0, 0, 0); } diff = Math.max(0, ascent/2 + TEXT_SPACING - EDGE_SPACING); grooveRect.y += diff; grooveRect.height -= diff; compLoc.y = grooveRect.y + insets.top/2 - (ascent + descent) / 2 - 1; int justification; if (c.getComponentOrientation().isLeftToRight()) { justification = LEFT; } else { justification = RIGHT; } switch (justification) { case LEFT: compLoc.x = grooveRect.x + TEXT_INSET_H + insets.left; break; case RIGHT: compLoc.x = (grooveRect.x + grooveRect.width - (labelDim.width + 
TEXT_INSET_H + insets.right)); break; } // If title is positioned in middle of border AND its fontsize // is greater than the border's thickness, we'll need to paint // the border in sections to leave space for the component's background // to show through the title. // if (border != null) { if (grooveRect.y > compLoc.y - ascent) { Rectangle clipRect = new Rectangle(); // save original clip Rectangle saveClip = g.getClipBounds(); // paint strip left of text clipRect.setBounds(saveClip); if (computeIntersection(clipRect, x, y, compLoc.x-1-x, height)) { g.setClip(clipRect); border.paintBorder(c, g, grooveRect.x, grooveRect.y, grooveRect.width, grooveRect.height); } // paint strip right of text clipRect.setBounds(saveClip); if (computeIntersection(clipRect, compLoc.x+ labelDim.width +1, y, x+width-(compLoc.x+ labelDim.width +1), height)) { g.setClip(clipRect); border.paintBorder(c, g, grooveRect.x, grooveRect.y, grooveRect.width, grooveRect.height); } // paint strip below text clipRect.setBounds(saveClip); if (computeIntersection(clipRect, compLoc.x - 1, compLoc.y + ascent + descent, labelDim.width + 2, y + height - compLoc.y - ascent - descent)) { g.setClip(clipRect); border.paintBorder(c, g, grooveRect.x, grooveRect.y, grooveRect.width, grooveRect.height); } // restore clip g.setClip(saveClip); } else { border.paintBorder(c, g, grooveRect.x, grooveRect.y, grooveRect.width, grooveRect.height); } label.setLocation(compLoc); label.setSize(labelDim); } } /** * Reinitialize the insets parameter with this Border's current Insets. * @param c the component for which this border insets value applies * @param insets the object to be reinitialized */ public Insets getBorderInsets(Component c, Insets insets) { Border border = getBorder(); if (border != null) { if (border instanceof AbstractBorder) { ((AbstractBorder)border).getBorderInsets(c, insets); } else { // Can't reuse border insets because the Border interface // can't be enhanced. 
Insets i = border.getBorderInsets(c); insets.top = i.top; insets.right = i.right; insets.bottom = i.bottom; insets.left = i.left; } } else { insets.left = insets.top = insets.right = insets.bottom = 0; } insets.left += EDGE_SPACING + TEXT_SPACING; insets.right += EDGE_SPACING + TEXT_SPACING; insets.top += EDGE_SPACING + TEXT_SPACING; insets.bottom += EDGE_SPACING + TEXT_SPACING; if (c == null || label == null) { return insets; } insets.top += label.getHeight(); return insets; } /** * Returns the label of the labeled border. */ public JComponent getLabel() { return label; } /** * Sets the title of the titled border. * param title the title for the border */ public void setLabel(JComponent label) { this.label = label; } /** * Returns the minimum dimensions this border requires * in order to fully display the border and title. * @param c the component where this border will be drawn */ public Dimension getMinimumSize(Component c) { Insets insets = getBorderInsets(c); Dimension minSize = new Dimension(insets.right + insets.left, insets.top + insets.bottom); minSize.width += label.getWidth(); return minSize; } private static boolean computeIntersection(Rectangle dest, int rx, int ry, int rw, int rh) { int x1 = Math.max(rx, dest.x); int x2 = Math.min(rx + rw, dest.x + dest.width); int y1 = Math.max(ry, dest.y); int y2 = Math.min(ry + rh, dest.y + dest.height); dest.x = x1; dest.y = y1; dest.width = x2 - x1; dest.height = y2 - y1; if (dest.width <= 0 || dest.height <= 0) { return false; } return true; } } protected static class FocusBorder extends AbstractBorder implements FocusListener { private Component comp; private Color focusColor; private boolean focusLostTemporarily = false; public FocusBorder(Component comp) { this.comp = comp; comp.addFocusListener(this); // This is the best guess for a L&F specific color focusColor = UIManager.getColor("TabbedPane.focus"); } public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) { if 
(comp.hasFocus() || focusLostTemporarily) { Color color = g.getColor(); g.setColor(focusColor); BasicGraphicsUtils.drawDashedRect(g, x, y, width, height); g.setColor(color); } } public Insets getBorderInsets(Component c, Insets insets) { insets.set(2, 2, 2, 2); return insets; } public void focusGained(FocusEvent e) { comp.repaint(); } public void focusLost(FocusEvent e) { // We will still paint focus even if lost temporarily focusLostTemporarily = e.isTemporary(); if (!focusLostTemporarily) { comp.repaint(); } } } }
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.auth.ldap;

import com.google.common.base.Throwables;
import com.google.common.cache.Cache;
import com.google.common.collect.ImmutableSet;
import com.google.gerrit.common.data.ParameterizedString;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.server.account.AccountException;
import com.google.gerrit.server.account.AuthenticationFailedException;
import com.google.gerrit.server.auth.NoSuchUserException;
import com.google.gerrit.server.config.ConfigUtil;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.util.ssl.BlindSSLSocketFactory;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import javax.naming.CompositeName;
import javax.naming.Context;
import javax.naming.Name;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.PartialResultException;
import javax.naming.directory.Attribute;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.net.ssl.SSLSocketFactory;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.eclipse.jgit.lib.Config;

/**
 * Shared LDAP plumbing for the LDAP auth backend: opens JNDI directory
 * contexts, authenticates user DNs, lazily discovers the directory schema,
 * and expands (possibly nested) group memberships into Gerrit group UUIDs.
 */
@Singleton
class Helper {
  /** Prefix used to build AccountGroup.UUID values from LDAP group DNs. */
  static final String LDAP_UUID = "ldap:";

  // Memoizes group DN -> immediate parent group DNs, so nested-group
  // expansion does not hit the directory repeatedly for the same group.
  private final Cache<String, ImmutableSet<String>> parentGroups;
  private final Config config;
  private final String server;
  private final String username;
  private final String password;
  private final String referral;
  private final boolean sslVerify;
  private final String authentication;
  // Lazily initialized via double-checked locking in getSchema();
  // volatile makes the fully-constructed instance safely visible.
  private volatile LdapSchema ldapSchema;
  // Timeouts kept as millisecond Strings because the JNDI provider
  // expects its timeout properties as String values.
  private final String readTimeoutMillis;
  private final String connectTimeoutMillis;
  private final boolean useConnectionPooling;
  private final boolean groupsVisibleToAll;

  @Inject
  Helper(
      @GerritServerConfig Config config,
      @Named(LdapModule.PARENT_GROUPS_CACHE) Cache<String, ImmutableSet<String>> parentGroups) {
    this.config = config;
    this.server = LdapRealm.optional(config, "server");
    this.username = LdapRealm.optional(config, "username");
    this.password = LdapRealm.optional(config, "password", "");
    this.referral = LdapRealm.optional(config, "referral", "ignore");
    this.sslVerify = config.getBoolean("ldap", "sslverify", true);
    this.groupsVisibleToAll = config.getBoolean("ldap", "groupsVisibleToAll", false);
    this.authentication = LdapRealm.optional(config, "authentication", "simple");
    String readTimeout = LdapRealm.optional(config, "readTimeout");
    if (readTimeout != null) {
      readTimeoutMillis =
          Long.toString(ConfigUtil.getTimeUnit(readTimeout, 0, TimeUnit.MILLISECONDS));
    } else {
      readTimeoutMillis = null;
    }
    String connectTimeout = LdapRealm.optional(config, "connectTimeout");
    if (connectTimeout != null) {
      connectTimeoutMillis =
          Long.toString(ConfigUtil.getTimeUnit(connectTimeout, 0, TimeUnit.MILLISECONDS));
    } else {
      connectTimeoutMillis = null;
    }
    this.parentGroups = parentGroups;
    this.useConnectionPooling = LdapRealm.optional(config, "useConnectionPooling", false);
  }

  // Builds the base JNDI environment shared by all context types
  // (service binds, Kerberos binds, and per-user authentication binds).
  private Properties createContextProperties() {
    final Properties env = new Properties();
    env.put(Context.INITIAL_CONTEXT_FACTORY, LdapRealm.LDAP);
    env.put(Context.PROVIDER_URL, server);
    if (server.startsWith("ldaps:") && !sslVerify) {
      // ldaps with certificate verification disabled: install a
      // trust-everything SSL socket factory.
      Class<? extends SSLSocketFactory> factory = BlindSSLSocketFactory.class;
      env.put("java.naming.ldap.factory.socket", factory.getName());
    }
    if (readTimeoutMillis != null) {
      env.put("com.sun.jndi.ldap.read.timeout", readTimeoutMillis);
    }
    if (connectTimeoutMillis != null) {
      env.put("com.sun.jndi.ldap.connect.timeout", connectTimeoutMillis);
    }
    if (useConnectionPooling) {
      env.put("com.sun.jndi.ldap.connect.pool", "true");
    }
    return env;
  }

  /**
   * Opens a directory context using the configured service credentials,
   * a Kerberos (GSSAPI) login, or an anonymous bind when no username is
   * configured.
   */
  DirContext open() throws NamingException, LoginException {
    final Properties env = createContextProperties();
    env.put(Context.SECURITY_AUTHENTICATION, authentication);
    env.put(Context.REFERRAL, referral);
    if ("GSSAPI".equals(authentication)) {
      return kerberosOpen(env);
    }
    if (username != null) {
      env.put(Context.SECURITY_PRINCIPAL, username);
      env.put(Context.SECURITY_CREDENTIALS, password);
    }
    return new InitialDirContext(env);
  }

  // Opens a context inside a Kerberos login session; a "KerberosLogin"
  // JAAS configuration entry must exist.
  private DirContext kerberosOpen(Properties env) throws LoginException, NamingException {
    LoginContext ctx = new LoginContext("KerberosLogin");
    ctx.login();
    Subject subject = ctx.getSubject();
    try {
      return Subject.doAs(
          subject,
          new PrivilegedExceptionAction<DirContext>() {
            @Override
            public DirContext run() throws NamingException {
              return new InitialDirContext(env);
            }
          });
    } catch (PrivilegedActionException e) {
      Throwables.throwIfInstanceOf(e.getException(), NamingException.class);
      Throwables.throwIfInstanceOf(e.getException(), RuntimeException.class);
      // Only reached when the wrapped exception is neither a NamingException
      // nor a RuntimeException; callers must then tolerate a null context.
      LdapRealm.log.warn("Internal error", e.getException());
      return null;
    } finally {
      ctx.logout();
    }
  }

  /**
   * Verifies a user's password by binding to the directory as {@code dn}.
   *
   * <p>NOTE(review): with "simple" authentication an empty password turns
   * the bind into an anonymous/unauthenticated bind that many servers
   * accept (RFC 4513 section 5.1), which would make this method "succeed"
   * without checking any credential — confirm that callers reject empty
   * passwords before reaching this method.
   *
   * @throws AccountException if the bind is rejected by the server.
   */
  DirContext authenticate(String dn, String password) throws AccountException {
    final Properties env = createContextProperties();
    env.put(Context.SECURITY_AUTHENTICATION, "simple");
    env.put(Context.SECURITY_PRINCIPAL, dn);
    env.put(Context.SECURITY_CREDENTIALS, password);
    env.put(Context.REFERRAL, referral);
    try {
      return new InitialDirContext(env);
    } catch (NamingException e) {
      throw new AuthenticationFailedException("Incorrect username or password", e);
    }
  }

  // Lazily discovers the LDAP schema once per process; classic
  // double-checked locking over the volatile ldapSchema field.
  LdapSchema getSchema(DirContext ctx) {
    if (ldapSchema == null) {
      synchronized (this) {
        if (ldapSchema == null) {
          ldapSchema = new LdapSchema(ctx);
        }
      }
    }
    return ldapSchema;
  }

  /**
   * Finds exactly one account matching {@code username}.
   *
   * @param fetchMemberOf when true (and the server type supports a
   *     member-of attribute) use the query variant that also fetches
   *     group membership attributes.
   * @throws AccountException if more than one account matches.
   * @throws NoSuchUserException if no account matches any query.
   */
  LdapQuery.Result findAccount(
      Helper.LdapSchema schema, DirContext ctx, String username, boolean fetchMemberOf)
      throws NamingException, AccountException {
    final HashMap<String, String> params = new HashMap<>();
    params.put(LdapRealm.USERNAME, username);

    List<LdapQuery> accountQueryList;
    if (fetchMemberOf && schema.type.accountMemberField() != null) {
      accountQueryList = schema.accountWithMemberOfQueryList;
    } else {
      accountQueryList = schema.accountQueryList;
    }

    for (LdapQuery accountQuery : accountQueryList) {
      List<LdapQuery.Result> res = accountQuery.query(ctx, params);
      if (res.size() == 1) {
        return res.get(0);
      } else if (res.size() > 1) {
        throw new AccountException("Duplicate users: " + username);
      }
    }
    throw new NoSuchUserException(username);
  }

  /**
   * Resolves the set of Gerrit group UUIDs the user belongs to, combining
   * explicit group-member queries with the account's member-of attribute,
   * and expanding nested groups transitively.
   *
   * @param account may be null; it is looked up on demand. An unknown or
   *     ambiguous user yields the empty set rather than an error.
   */
  Set<AccountGroup.UUID> queryForGroups(
      final DirContext ctx, String username, LdapQuery.Result account) throws NamingException {
    final LdapSchema schema = getSchema(ctx);
    final Set<String> groupDNs = new HashSet<>();

    if (!schema.groupMemberQueryList.isEmpty()) {
      final HashMap<String, String> params = new HashMap<>();
      if (account == null) {
        try {
          account = findAccount(schema, ctx, username, false);
        } catch (AccountException e) {
          // Unknown/duplicate user: treat as belonging to no groups.
          return Collections.emptySet();
        }
      }
      // All group-member queries share the same parameter names; seed the
      // substitution map from the first query's parameter list.
      for (String name : schema.groupMemberQueryList.get(0).getParameters()) {
        params.put(name, account.get(name));
      }
      params.put(LdapRealm.USERNAME, username);
      for (LdapQuery groupMemberQuery : schema.groupMemberQueryList) {
        for (LdapQuery.Result r : groupMemberQuery.query(ctx, params)) {
          recursivelyExpandGroups(groupDNs, schema, ctx, r.getDN());
        }
      }
    }

    if (schema.accountMemberField != null) {
      // Re-fetch the account if we do not yet have its member-of attribute.
      if (account == null || account.getAll(schema.accountMemberField) == null) {
        try {
          account = findAccount(schema, ctx, username, true);
        } catch (AccountException e) {
          return Collections.emptySet();
        }
      }
      final Attribute groupAtt = account.getAll(schema.accountMemberField);
      if (groupAtt != null) {
        final NamingEnumeration<?> groups = groupAtt.getAll();
        try {
          while (groups.hasMore()) {
            final String nextDN = (String) groups.next();
            recursivelyExpandGroups(groupDNs, schema, ctx, nextDN);
          }
        } catch (PartialResultException e) {
          // Ignored: referral chasing is disabled, partial results are fine.
        }
      }
    }

    final Set<AccountGroup.UUID> actual = new HashSet<>();
    for (String dn : groupDNs) {
      actual.add(new AccountGroup.UUID(LDAP_UUID + dn));
    }
    if (actual.isEmpty()) {
      return Collections.emptySet();
    }
    return ImmutableSet.copyOf(actual);
  }

  // Adds groupDN to groupDNs and, when nested-group expansion is enabled,
  // recursively adds all (transitive) parent groups. The set's add()
  // doubling as a visited-check prevents infinite recursion on membership
  // cycles; parent lookups are memoized in the parentGroups cache.
  private void recursivelyExpandGroups(
      final Set<String> groupDNs,
      final LdapSchema schema,
      final DirContext ctx,
      final String groupDN) {
    if (groupDNs.add(groupDN)
        && schema.accountMemberField != null
        && schema.accountMemberExpandGroups) {
      ImmutableSet<String> cachedParentsDNs = parentGroups.getIfPresent(groupDN);
      if (cachedParentsDNs == null) {
        // Recursively identify the groups it is a member of.
        ImmutableSet.Builder<String> dns = ImmutableSet.builder();
        try {
          // CompositeName avoids JNDI re-parsing special characters in
          // the DN as name components.
          final Name compositeGroupName = new CompositeName().add(groupDN);
          final Attribute in =
              ctx.getAttributes(compositeGroupName, schema.accountMemberFieldArray)
                  .get(schema.accountMemberField);
          if (in != null) {
            final NamingEnumeration<?> groups = in.getAll();
            try {
              while (groups.hasMore()) {
                dns.add((String) groups.next());
              }
            } catch (PartialResultException e) {
              // Ignored
            }
          }
        } catch (NamingException e) {
          LdapRealm.log.warn("Could not find group " + groupDN, e);
        }
        cachedParentsDNs = dns.build();
        parentGroups.put(groupDN, cachedParentsDNs);
      }
      for (String dn : cachedParentsDNs) {
        recursivelyExpandGroups(groupDNs, schema, ctx, dn);
      }
    }
  }

  /** Whether LDAP groups should be marked visible to all users. */
  public boolean groupsVisibleToAll() {
    return this.groupsVisibleToAll;
  }

  /**
   * Snapshot of the configured and discovered LDAP schema: server type,
   * account/group name patterns, and the pre-built queries derived from
   * them. Built once by getSchema().
   */
  class LdapSchema {
    final LdapType type;
    final ParameterizedString accountFullName;
    final ParameterizedString accountEmailAddress;
    final ParameterizedString accountSshUserName;
    final String accountMemberField;
    final boolean accountMemberExpandGroups;
    final String[] accountMemberFieldArray;
    final List<LdapQuery> accountQueryList;
    final List<LdapQuery> accountWithMemberOfQueryList;
    final List<String> groupBases;
    final SearchScope groupScope;
    final ParameterizedString groupPattern;
    final ParameterizedString groupName;
    final List<LdapQuery> groupMemberQueryList;

    LdapSchema(DirContext ctx) {
      type = discoverLdapType(ctx);
      groupMemberQueryList = new ArrayList<>();
      accountQueryList = new ArrayList<>();
      accountWithMemberOfQueryList = new ArrayList<>();
      final Set<String> accountAtts = new HashSet<>();

      // Group query
      //
      groupBases = LdapRealm.optionalList(config, "groupBase");
      groupScope = LdapRealm.scope(config, "groupScope");
      groupPattern = LdapRealm.paramString(config, "groupPattern", type.groupPattern());
      groupName = LdapRealm.paramString(config, "groupName", type.groupName());
      final String groupMemberPattern =
          LdapRealm.optdef(config, "groupMemberPattern", type.groupMemberPattern());

      for (String groupBase : groupBases) {
        if (groupMemberPattern != null) {
          final LdapQuery groupMemberQuery =
              new LdapQuery(
                  groupBase,
                  groupScope,
                  new ParameterizedString(groupMemberPattern),
                  Collections.<String>emptySet());
          if (groupMemberQuery.getParameters().isEmpty()) {
            throw new IllegalArgumentException("No variables in ldap.groupMemberPattern");
          }
          // Any account attribute referenced by the pattern must be
          // fetched alongside the account so it can be substituted later.
          for (String name : groupMemberQuery.getParameters()) {
            accountAtts.add(name);
          }
          groupMemberQueryList.add(groupMemberQuery);
        }
      }

      // Account query
      //
      accountFullName = LdapRealm.paramString(config, "accountFullName", type.accountFullName());
      if (accountFullName != null) {
        accountAtts.addAll(accountFullName.getParameterNames());
      }
      accountEmailAddress =
          LdapRealm.paramString(config, "accountEmailAddress", type.accountEmailAddress());
      if (accountEmailAddress != null) {
        accountAtts.addAll(accountEmailAddress.getParameterNames());
      }
      accountSshUserName =
          LdapRealm.paramString(config, "accountSshUserName", type.accountSshUserName());
      if (accountSshUserName != null) {
        accountAtts.addAll(accountSshUserName.getParameterNames());
      }
      accountMemberField =
          LdapRealm.optdef(config, "accountMemberField", type.accountMemberField());
      if (accountMemberField != null) {
        accountMemberFieldArray = new String[] {accountMemberField};
      } else {
        accountMemberFieldArray = null;
      }
      accountMemberExpandGroups =
          LdapRealm.optional(config, "accountMemberExpandGroups", type.accountMemberExpandGroups());

      final SearchScope accountScope = LdapRealm.scope(config, "accountScope");
      final String accountPattern =
          LdapRealm.reqdef(config, "accountPattern", type.accountPattern());

      Set<String> accountWithMemberOfAtts;
      if (accountMemberField != null) {
        // The member-of variant fetches the membership attribute too.
        accountWithMemberOfAtts = new HashSet<>(accountAtts);
        accountWithMemberOfAtts.add(accountMemberField);
      } else {
        accountWithMemberOfAtts = null;
      }
      for (String accountBase : LdapRealm.requiredList(config, "accountBase")) {
        LdapQuery accountQuery =
            new LdapQuery(
                accountBase, accountScope, new ParameterizedString(accountPattern), accountAtts);
        if (accountQuery.getParameters().isEmpty()) {
          throw new IllegalArgumentException("No variables in ldap.accountPattern");
        }
        accountQueryList.add(accountQuery);

        if (accountWithMemberOfAtts != null) {
          LdapQuery accountWithMemberOfQuery =
              new LdapQuery(
                  accountBase,
                  accountScope,
                  new ParameterizedString(accountPattern),
                  accountWithMemberOfAtts);
          accountWithMemberOfQueryList.add(accountWithMemberOfQuery);
        }
      }
    }

    // Probes the server to guess its type; falls back to RFC 2307 when
    // the probe fails.
    LdapType discoverLdapType(DirContext ctx) {
      try {
        return LdapType.guessType(ctx);
      } catch (NamingException e) {
        LdapRealm.log.warn(
            "Cannot discover type of LDAP server at "
                + server
                + ", assuming the server is RFC 2307 compliant.",
            e);
        return LdapType.RFC_2307;
      }
    }
  }
}
/*
 * (c) 2015 CenturyLink. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.centurylink.cloud.sdk.core.client;

/**
 * @author Ilya Drabenia
 */

import com.centurylink.cloud.sdk.core.client.retry.ClcRetryStrategy;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.HttpClient;
import org.apache.http.client.params.HttpClientParams;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.AllowAllHostnameVerifier;
import org.apache.http.conn.ssl.BrowserCompatHostnameVerifier;
import org.apache.http.conn.ssl.StrictHostnameVerifier;
import org.apache.http.conn.ssl.X509HostnameVerifier;
import org.apache.http.impl.client.AutoRetryHttpClient;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.BasicClientConnectionManager;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.jboss.resteasy.client.jaxrs.ClientHttpEngine;
import org.jboss.resteasy.client.jaxrs.ResteasyClient;
import org.jboss.resteasy.client.jaxrs.engines.ApacheHttpClient4Engine;
import org.jboss.resteasy.client.jaxrs.engines.PassthroughTrustManager;
import org.jboss.resteasy.client.jaxrs.internal.ClientConfiguration;
import org.jboss.resteasy.plugins.providers.RegisterBuiltin;
import org.jboss.resteasy.spi.ResteasyProviderFactory;

import javax.net.ssl.*;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.Configuration;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.security.KeyStore;
import java.security.SecureRandom;
import java.security.cert.X509Certificate;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

/**
 * Abstraction for creating Clients. Allows SSL configuration. Uses Apache Http Client under
 * the covers. If used with other ClientHttpEngines though, all configuration options are ignored.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
// NOTE: "deprecation" is the valid javac key; the previous value
// ("deprecated") is not recognized and suppressed nothing. The legacy
// Apache HttpClient 4.2 connection APIs used below are deprecated.
@SuppressWarnings("deprecation")
public class SdkClientBuilder extends ClientBuilder {
    /** Retry count used when {@link #maxRetries(int)} was never called. */
    private static final int DEFAULT_MAX_RETRIES = 3;
    /** Interval between retries, in milliseconds. */
    private static final int RETRY_INTERVAL_MS = 1000;

    public enum HostnameVerificationPolicy {
        /**
         * Hostname verification is not done on the server's certificate
         */
        ANY,
        /**
         * Allows wildcards in subdomain names i.e. *.foo.com
         */
        WILDCARD,
        /**
         * CN must match hostname connecting to
         */
        STRICT
    }

    protected KeyStore truststore;
    protected KeyStore clientKeyStore;
    protected String clientPrivateKeyPassword;
    protected boolean disableTrustManager;
    protected HostnameVerificationPolicy policy = HostnameVerificationPolicy.WILDCARD;
    protected ResteasyProviderFactory providerFactory;
    protected ExecutorService asyncExecutor;
    protected SSLContext sslContext;
    protected Map<String, Object> properties = new HashMap<String, Object>();
    protected ClientHttpEngine httpEngine;
    protected int connectionPoolSize;
    protected int maxPooledPerRoute = 0;
    protected long connectionTTL = -1;
    protected TimeUnit connectionTTLUnit = TimeUnit.MILLISECONDS;
    protected long socketTimeout = -1;
    protected TimeUnit socketTimeoutUnits = TimeUnit.MILLISECONDS;
    protected long establishConnectionTimeout = -1;
    protected TimeUnit establishConnectionTimeoutUnits = TimeUnit.MILLISECONDS;
    protected int connectionCheckoutTimeoutMs = -1;
    protected HostnameVerifier verifier = null;
    protected HttpHost defaultProxy;
    protected int responseBufferSize;
    protected Integer maxRetries;
    protected UsernamePasswordCredentials proxyCredentials;

    /**
     * Changing the providerFactory will wipe clean any registered components or properties.
     *
     * @param providerFactory the provider factory
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder providerFactory(ResteasyProviderFactory providerFactory) {
        this.providerFactory = providerFactory;
        return this;
    }

    /**
     * Executor to use to run AsyncInvoker invocations
     *
     * @param asyncExecutor the executor service
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder asyncExecutor(ExecutorService asyncExecutor) {
        this.asyncExecutor = asyncExecutor;
        return this;
    }

    /**
     * If there is a connection pool, set the time to live in the pool.
     *
     * @param ttl the time to live
     * @param unit the time unit
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder connectionTTL(long ttl, TimeUnit unit) {
        this.connectionTTL = ttl;
        this.connectionTTLUnit = unit;
        return this;
    }

    /**
     * Socket inactivity timeout
     *
     * @param timeout the inactivity timeout
     * @param unit the time unit
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder socketTimeout(long timeout, TimeUnit unit) {
        this.socketTimeout = timeout;
        this.socketTimeoutUnits = unit;
        return this;
    }

    /**
     * When trying to make an initial socket connection, what is the timeout?
     *
     * @param timeout the timeout
     * @param unit the time unit
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder establishConnectionTimeout(long timeout, TimeUnit unit) {
        this.establishConnectionTimeout = timeout;
        this.establishConnectionTimeoutUnits = unit;
        return this;
    }

    /**
     * If connection pooling enabled, how many connections to pool per url?
     *
     * @param maxPooledPerRoute the maximum number of connections per pool
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder maxPooledPerRoute(int maxPooledPerRoute) {
        this.maxPooledPerRoute = maxPooledPerRoute;
        return this;
    }

    /**
     * Number of automatic request retries performed by the default engine.
     * Defaults to {@value #DEFAULT_MAX_RETRIES} when not set.
     *
     * @param maxRetries the maximum retry count
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder maxRetries(int maxRetries) {
        this.maxRetries = maxRetries;
        return this;
    }

    /**
     * Configures an HTTP proxy and, optionally, its credentials in one call.
     * A null {@code proxyHost} leaves the builder untouched.
     *
     * @param proxyHost the proxy hostname (may be null)
     * @param proxyPort the proxy port
     * @param proxyScheme the proxy scheme
     * @param proxyUsername the proxy user (may be null)
     * @param proxyPassword the proxy password
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder proxyConfig(String proxyHost, int proxyPort, String proxyScheme,
                                        String proxyUsername, String proxyPassword) {
        if (proxyHost != null) {
            defaultProxy(proxyHost, proxyPort, proxyScheme);
            if (proxyUsername != null) {
                proxyCredentials(proxyUsername, proxyPassword);
            }
        }
        return this;
    }

    /**
     * If connection pooling is enabled, how long will we wait to get a connection?
     * @param timeout the timeout
     * @param unit the units the timeout is in
     * @return this builder
     */
    public SdkClientBuilder connectionCheckoutTimeout(long timeout, TimeUnit unit) {
        this.connectionCheckoutTimeoutMs = (int) TimeUnit.MILLISECONDS.convert(timeout, unit);
        return this;
    }

    /**
     * Number of connections allowed to pool
     *
     * @param connectionPoolSize the number of connections
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder connectionPoolSize(int connectionPoolSize) {
        this.connectionPoolSize = connectionPoolSize;
        return this;
    }

    /**
     * Response stream is wrapped in a BufferedInputStream. Default is 8192. Value of 0 will not wrap it.
     * Value of -1 will use a SelfExpandingBufferedInputStream
     *
     * @param size the response buffer size
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder responseBufferSize(int size) {
        this.responseBufferSize = size;
        return this;
    }

    /**
     * Disable trust management and hostname verification. <i>NOTE</i> this is a security
     * hole, so only set this option if you cannot or do not want to verify the identity of the
     * host you are communicating with.
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder disableTrustManager() {
        this.disableTrustManager = true;
        return this;
    }

    /**
     * SSL policy used to verify hostnames
     *
     * @param policy the hostname verification policy
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder hostnameVerification(HostnameVerificationPolicy policy) {
        this.policy = policy;
        return this;
    }

    /**
     * Negates all ssl and connection specific configuration
     *
     * @param httpEngine the client http engine
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder httpEngine(ClientHttpEngine httpEngine) {
        this.httpEngine = httpEngine;
        return this;
    }

    @Override
    public SdkClientBuilder sslContext(SSLContext sslContext) {
        this.sslContext = sslContext;
        return this;
    }

    @Override
    public SdkClientBuilder trustStore(KeyStore truststore) {
        this.truststore = truststore;
        return this;
    }

    @Override
    public SdkClientBuilder keyStore(KeyStore keyStore, String password) {
        this.clientKeyStore = keyStore;
        this.clientPrivateKeyPassword = password;
        return this;
    }

    @Override
    public SdkClientBuilder keyStore(KeyStore keyStore, char[] password) {
        this.clientKeyStore = keyStore;
        // NOTE(review): converting to String defeats the purpose of the
        // char[] overload (the password can no longer be zeroed), but the
        // String field is what the underlying SSLSocketFactory API accepts.
        this.clientPrivateKeyPassword = new String(password);
        return this;
    }

    @Override
    public SdkClientBuilder property(String name, Object value) {
        getProviderFactory().property(name, value);
        return this;
    }

    /**
     * Specify a default proxy. Default port and schema will be used
     *
     * @param hostname the proxy hostname
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder defaultProxy(String hostname) {
        return defaultProxy(hostname, -1, null);
    }

    /**
     * Specify a default proxy host and port. Default schema will be used
     *
     * @param hostname the proxy hostname
     * @param port the proxy port number
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder defaultProxy(String hostname, int port) {
        return defaultProxy(hostname, port, null);
    }

    /**
     * Specify default proxy.
     *
     * @param hostname the proxy hostname
     * @param port the proxy port number
     * @param scheme the proxy scheme
     * @return current instance of SDK client builder
     */
    public SdkClientBuilder defaultProxy(String hostname, int port, final String scheme) {
        this.defaultProxy = new HttpHost(hostname, port, scheme);
        return this;
    }

    /**
     * Specify proxy credentials.
     * @param user user
     * @param password password
     * @return current client builder
     */
    public SdkClientBuilder proxyCredentials(String user, String password) {
        this.proxyCredentials = new UsernamePasswordCredentials(user, password);
        return this;
    }

    protected ResteasyProviderFactory getProviderFactory() {
        if (providerFactory == null) {
            // create a new one
            providerFactory = new ResteasyProviderFactory();
            RegisterBuiltin.register(providerFactory);
        }
        return providerFactory;
    }

    /**
     * Builds the ResteasyClient, creating a default engine and a fallback
     * executor when none were supplied.
     */
    @Override
    public ResteasyClient build() {
        ClientConfiguration config = new ClientConfiguration(getProviderFactory());
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            config.property(entry.getKey(), entry.getValue());
        }

        ExecutorService executor = asyncExecutor;
        // When we create the executor ourselves the client must shut it down.
        boolean cleanupExecutor = false;
        if (executor == null) {
            cleanupExecutor = true;
            executor = Executors.newFixedThreadPool(10);
        }

        ClientHttpEngine engine = httpEngine;
        if (engine == null) {
            engine = initDefaultEngine();
        }

        try {
            // The target constructor is not public; invoke it reflectively.
            Constructor<ResteasyClient> constructor = ResteasyClient.class.getDeclaredConstructor(
                ClientHttpEngine.class, ExecutorService.class, boolean.class, ClientConfiguration.class);
            constructor.setAccessible(true);
            return constructor.newInstance(engine, executor, cleanupExecutor, config);
        } catch (NoSuchMethodException | InstantiationException | IllegalAccessException
                | InvocationTargetException ex) {
            throw new ClcClientException(ex);
        }
    }

    /**
     * Adapts a plain {@link HostnameVerifier} to Apache's
     * {@link X509HostnameVerifier}; only the SSLSocket-based path is
     * implemented.
     */
    static class VerifierWrapper implements X509HostnameVerifier {
        protected HostnameVerifier verifier;

        VerifierWrapper(HostnameVerifier verifier) {
            this.verifier = verifier;
        }

        @Override
        public void verify(String host, SSLSocket ssl) throws IOException {
            if (!verifier.verify(host, ssl.getSession())) {
                throw new SSLException("Hostname verification failure");
            }
        }

        @Override
        public void verify(String host, X509Certificate cert) throws SSLException {
            throw new SSLException("This verification path not implemented");
        }

        @Override
        public void verify(String host, String[] cns, String[] subjectAlts) throws SSLException {
            throw new SSLException("This verification path not implemented");
        }

        @Override
        public boolean verify(String s, SSLSession sslSession) {
            return verifier.verify(s, sslSession);
        }
    }

    /**
     * Creates the default Apache HttpClient 4 based engine, applying the
     * configured SSL context/keystores, hostname verification policy,
     * connection pooling, timeouts, proxy credentials and retry strategy.
     */
    protected ClientHttpEngine initDefaultEngine() {
        HttpClient httpClient;

        X509HostnameVerifier verifier = initHostnameVerifier();

        try {
            org.apache.http.conn.ssl.SSLSocketFactory sslsf;
            SSLContext theContext = sslContext;
            if (disableTrustManager) {
                // Explicitly requested insecure mode: trust everything and
                // skip hostname verification.
                theContext = SSLContext.getInstance("SSL");
                theContext.init(null, new TrustManager[]{new PassthroughTrustManager()}, new SecureRandom());
                verifier = new AllowAllHostnameVerifier();
                sslsf = new org.apache.http.conn.ssl.SSLSocketFactory(theContext, verifier);
            } else if (theContext != null) {
                sslsf = new org.apache.http.conn.ssl.SSLSocketFactory(theContext, verifier);
            } else if (clientKeyStore != null || truststore != null) {
                sslsf = new org.apache.http.conn.ssl.SSLSocketFactory(
                    org.apache.http.conn.ssl.SSLSocketFactory.TLS,
                    clientKeyStore, clientPrivateKeyPassword, truststore, null, verifier);
            } else {
                final SSLContext tlsContext =
                    SSLContext.getInstance(org.apache.http.conn.ssl.SSLSocketFactory.TLS);
                tlsContext.init(null, null, null);
                sslsf = new org.apache.http.conn.ssl.SSLSocketFactory(tlsContext, verifier);
            }

            SchemeRegistry registry = new SchemeRegistry();
            registry.register(new Scheme("http", 80, PlainSocketFactory.getSocketFactory()));
            Scheme httpsScheme = new Scheme("https", 443, sslsf);
            registry.register(httpsScheme);

            DefaultHttpClient defaultClient =
                new DefaultHttpClient(initClientConnectionManager(registry), initHttpParams());
            if (proxyCredentials != null) {
                BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
                credentialsProvider.setCredentials(
                    new AuthScope(defaultProxy.getHostName(), defaultProxy.getPort()),
                    proxyCredentials
                );
                defaultClient.setCredentialsProvider(credentialsProvider);
            }
            // FIX: honor the maxRetries configured via maxRetries(int);
            // previously the value was always hard-coded to 3.
            int retries = maxRetries != null ? maxRetries : DEFAULT_MAX_RETRIES;
            httpClient = new AutoRetryHttpClient(defaultClient,
                new ClcRetryStrategy(retries, RETRY_INTERVAL_MS));

            ApacheHttpClient4Engine engine = new ApacheHttpClient4Engine(httpClient, true);
            engine.setResponseBufferSize(responseBufferSize);
            engine.setHostnameVerifier(verifier);
            // this may be null.  We can't really support this with Apache Client.
            engine.setSslContext(theContext);
            engine.setDefaultProxy(defaultProxy);
            return engine;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    // Maps the configured HostnameVerificationPolicy (or an explicit
    // HostnameVerifier) onto Apache's X509HostnameVerifier types.
    private X509HostnameVerifier initHostnameVerifier() {
        X509HostnameVerifier verifier = null;
        if (this.verifier != null) {
            verifier = new VerifierWrapper(this.verifier);
        } else {
            switch (policy) {
                case ANY:
                    verifier = new AllowAllHostnameVerifier();
                    break;
                case WILDCARD:
                    verifier = new BrowserCompatHostnameVerifier();
                    break;
                case STRICT:
                    verifier = new StrictHostnameVerifier();
                    break;
            }
        }
        return verifier;
    }

    // Pooling manager when a pool size was configured, otherwise a simple
    // single-connection manager.
    private ClientConnectionManager initClientConnectionManager(SchemeRegistry registry) {
        ClientConnectionManager cm;
        if (connectionPoolSize > 0) {
            PoolingClientConnectionManager tcm =
                new PoolingClientConnectionManager(registry, connectionTTL, connectionTTLUnit);
            tcm.setMaxTotal(connectionPoolSize);
            if (maxPooledPerRoute == 0) {
                maxPooledPerRoute = connectionPoolSize;
            }
            tcm.setDefaultMaxPerRoute(maxPooledPerRoute);
            cm = tcm;
        } else {
            cm = new BasicClientConnectionManager(registry);
        }
        return cm;
    }

    // Translates the configured timeouts into Apache HttpParams; negative
    // values mean "leave the library default".
    private BasicHttpParams initHttpParams() {
        BasicHttpParams params = new BasicHttpParams();
        if (socketTimeout > -1) {
            HttpConnectionParams.setSoTimeout(params, (int) socketTimeoutUnits.toMillis(socketTimeout));
        }
        if (establishConnectionTimeout > -1) {
            HttpConnectionParams.setConnectionTimeout(params,
                (int) establishConnectionTimeoutUnits.toMillis(establishConnectionTimeout));
        }
        if (connectionCheckoutTimeoutMs > -1) {
            HttpClientParams.setConnectionManagerTimeout(params, connectionCheckoutTimeoutMs);
        }
        return params;
    }

    @Override
    public SdkClientBuilder hostnameVerifier(HostnameVerifier verifier) {
        this.verifier = verifier;
        return this;
    }

    @Override
    public Configuration getConfiguration() {
        return getProviderFactory().getConfiguration();
    }

    @Override
    public SdkClientBuilder register(Class<?> componentClass) {
        getProviderFactory().register(componentClass);
        return this;
    }

    @Override
    public SdkClientBuilder register(Class<?> componentClass, int priority) {
        getProviderFactory().register(componentClass, priority);
        return this;
    }

    @Override
    public SdkClientBuilder register(Class<?> componentClass, Class<?>... contracts) {
        getProviderFactory().register(componentClass, contracts);
        return this;
    }

    @Override
    public SdkClientBuilder register(Class<?> componentClass, Map<Class<?>, Integer> contracts) {
        getProviderFactory().register(componentClass, contracts);
        return this;
    }

    @Override
    public SdkClientBuilder register(Object component) {
        getProviderFactory().register(component);
        return this;
    }

    @Override
    public SdkClientBuilder register(Object component, int priority) {
        getProviderFactory().register(component, priority);
        return this;
    }

    @Override
    public SdkClientBuilder register(Object component, Class<?>... contracts) {
        getProviderFactory().register(component, contracts);
        return this;
    }

    @Override
    public SdkClientBuilder register(Object component, Map<Class<?>, Integer> contracts) {
        getProviderFactory().register(component, contracts);
        return this;
    }

    /**
     * Replaces the provider factory and re-registers every class and
     * instance found in the given configuration.
     */
    @Override
    public SdkClientBuilder withConfig(Configuration config) {
        providerFactory = new ResteasyProviderFactory();
        providerFactory.setProperties(config.getProperties());
        for (Class<?> clazz : config.getClasses()) {
            Map<Class<?>, Integer> contracts = config.getContracts(clazz);
            try {
                register(clazz, contracts);
            } catch (RuntimeException e) {
                throw new RuntimeException("failed on registering class: " + clazz.getName(), e);
            }
        }
        for (Object obj : config.getInstances()) {
            Map<Class<?>, Integer> contracts = config.getContracts(obj.getClass());
            register(obj, contracts);
        }
        return this;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.coprocessor;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.Region.RowLock;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.util.EnvironmentEdgeManager;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.schema.types.PBoolean;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.Sequence;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.util.ByteUtil;
import org.apache.phoenix.util.KeyValueUtil;
import org.apache.phoenix.util.MetaDataUtil;
import org.apache.phoenix.util.SequenceUtil;
import org.apache.phoenix.util.ServerUtil;

import com.google.common.collect.Lists;

/**
 * Region observer coprocessor for sequence operations:
 * 1) For creating a sequence, as checkAndPut does not allow us to scope the
 *    Get done for the check with a TimeRange.
 * 2) For incrementing a sequence, as increment does not a) allow us to set the
 *    timestamp of the key value being incremented and b) recognize when the key
 *    value being incremented does not exist
 * 3) For deleting a sequence, as checkAndDelete does not allow us to scope
 *    the Get done for the check with a TimeRange.
 *
 * @since 3.0.0
 */
public class SequenceRegionObserver extends BaseRegionObserver {
    /** Attribute on the client mutation naming the sequence meta operation (ordinal of {@link Sequence.MetaOp}). */
    public static final String OPERATION_ATTRIB = "SEQUENCE_OPERATION";
    /** Attribute carrying the upper bound of the time range the server-side Get should use. */
    public static final String MAX_TIMERANGE_ATTRIB = "MAX_TIMERANGE";
    /** Attribute carrying the client's expected current value when returning a sequence. */
    public static final String CURRENT_VALUE_ATTRIB = "CURRENT_VALUE";
    private static final byte[] SUCCESS_VALUE = PInteger.INSTANCE.toBytes(Integer.valueOf(Sequence.SUCCESS));

    /**
     * Builds a single-cell Result whose value is the Phoenix-encoded error code,
     * used to signal a sequence error back to the client through the normal
     * Result channel.
     *
     * @param row sequence row key
     * @param timestamp timestamp to stamp on the returned cell
     * @param errorCode SQLExceptionCode error code to encode
     * @return a Result containing exactly one cell holding the encoded error code
     */
    private static Result getErrorResult(byte[] row, long timestamp, int errorCode) {
        byte[] errorCodeBuf = new byte[PInteger.INSTANCE.getByteSize()];
        PInteger.INSTANCE.getCodec().encodeInt(errorCode, errorCodeBuf, 0);
        return Result.create(Collections.singletonList(
                (Cell) KeyValueUtil.newKeyValue(row,
                        PhoenixDatabaseMetaData.SEQUENCE_FAMILY_BYTES,
                        QueryConstants.EMPTY_COLUMN_BYTES, timestamp, errorCodeBuf)));
    }

    /**
     * Acquires the row lock for {@code key} and records it in {@code locks} so the
     * caller can release all acquired locks in a finally block.
     *
     * @throws IOException if the lock cannot be acquired
     */
    private static void acquireLock(Region region, byte[] key, List<RowLock> locks)
            throws IOException {
        RowLock rowLock = region.getRowLock(key, true);
        if (rowLock == null) {
            throw new IOException("Failed to acquire lock on " + Bytes.toStringBinary(key));
        }
        locks.add(rowLock);
    }

    /**
     * Use PreIncrement hook of BaseRegionObserver to overcome deficiencies in Increment
     * implementation (HBASE-10254):
     * 1) Lack of recognition and identification of when the key value to increment doesn't exist
     * 2) Lack of the ability to set the timestamp of the updated key value.
     * Works the same as existing region.increment(), except assumes there is a single column to
     * increment and uses Phoenix LONG encoding.
     *
     * @since 3.0.0
     */
    @Override
    public Result preIncrement(final ObserverContext<RegionCoprocessorEnvironment> e,
            final Increment increment) throws IOException {
        RegionCoprocessorEnvironment env = e.getEnvironment();
        // We need to set this to prevent region.increment from being called
        e.bypass();
        e.complete();
        Region region = env.getRegion();
        byte[] row = increment.getRow();
        List<RowLock> locks = Lists.newArrayList();
        TimeRange tr = increment.getTimeRange();
        region.startRegionOperation();
        try {
            acquireLock(region, row, locks);
            try {
                long maxTimestamp = tr.getMax();
                boolean validateOnly = true;
                Get get = new Get(row);
                get.setTimeRange(tr.getMin(), tr.getMax());
                // The client encodes a ValueOp ordinal in each cell value; the read is
                // validate-only when every cell requests VALIDATE_SEQUENCE.
                for (Map.Entry<byte[], List<Cell>> entry : increment.getFamilyCellMap().entrySet()) {
                    byte[] cf = entry.getKey();
                    for (Cell cq : entry.getValue()) {
                        long value = Bytes.toLong(cq.getValueArray(), cq.getValueOffset());
                        get.addColumn(cf, CellUtil.cloneQualifier(cq));
                        validateOnly &= (Sequence.ValueOp.VALIDATE_SEQUENCE.ordinal() == value);
                    }
                }
                Result result = region.get(get);
                if (result.isEmpty()) {
                    return getErrorResult(row, maxTimestamp,
                            SQLExceptionCode.SEQUENCE_UNDEFINED.getErrorCode());
                }
                if (validateOnly) {
                    return result;
                }
                KeyValue currentValueKV = Sequence.getCurrentValueKV(result);
                KeyValue incrementByKV = Sequence.getIncrementByKV(result);
                KeyValue cacheSizeKV = Sequence.getCacheSizeKV(result);
                long currentValue = PLong.INSTANCE.getCodec().decodeLong(
                        currentValueKV.getValueArray(), currentValueKV.getValueOffset(), SortOrder.getDefault());
                long incrementBy = PLong.INSTANCE.getCodec().decodeLong(
                        incrementByKV.getValueArray(), incrementByKV.getValueOffset(), SortOrder.getDefault());
                long cacheSize = PLong.INSTANCE.getCodec().decodeLong(
                        cacheSizeKV.getValueArray(), cacheSizeKV.getValueOffset(), SortOrder.getDefault());
                // Hold timestamp constant for sequences, so that clients always only see the latest
                // value regardless of when they connect.
                long timestamp = currentValueKV.getTimestamp();
                Put put = new Put(row, timestamp);

                int numIncrementKVs = increment.getFamilyCellMap()
                        .get(PhoenixDatabaseMetaData.SEQUENCE_FAMILY_BYTES).size();
                // creates the list of KeyValues used for the Result that will be returned
                List<Cell> cells = Sequence.getCells(result, numIncrementKVs);

                // if client is 3.0/4.0 preserve the old behavior (older clients won't have newer
                // columns present in the increment)
                if (numIncrementKVs != Sequence.NUM_SEQUENCE_KEY_VALUES) {
                    currentValue += incrementBy * cacheSize;
                    // Hold timestamp constant for sequences, so that clients always only see the
                    // latest value regardless of when they connect.
                    KeyValue newCurrentValueKV = createKeyValue(row,
                            PhoenixDatabaseMetaData.CURRENT_VALUE_BYTES, currentValue, timestamp);
                    put.add(newCurrentValueKV);
                    Sequence.replaceCurrentValueKV(cells, newCurrentValueKV);
                } else {
                    KeyValue cycleKV = Sequence.getCycleKV(result);
                    KeyValue limitReachedKV = Sequence.getLimitReachedKV(result);
                    KeyValue minValueKV = Sequence.getMinValueKV(result);
                    KeyValue maxValueKV = Sequence.getMaxValueKV(result);

                    boolean increasingSeq = incrementBy > 0;

                    // if the minValue, maxValue, cycle and limitReached is null this sequence has
                    // been upgraded from a lower version. Set minValue, maxValue, cycle and
                    // limitReached to Long.MIN_VALUE, Long.MAX_VALUE, true and false respectively
                    // in order to maintain existing behavior and also update the KeyValues on the
                    // server
                    boolean limitReached;
                    if (limitReachedKV == null) {
                        limitReached = false;
                        KeyValue newLimitReachedKV = createKeyValue(row,
                                PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG_BYTES, limitReached, timestamp);
                        put.add(newLimitReachedKV);
                        Sequence.replaceLimitReachedKV(cells, newLimitReachedKV);
                    } else {
                        limitReached = (Boolean) PBoolean.INSTANCE.toObject(
                                limitReachedKV.getValueArray(), limitReachedKV.getValueOffset(),
                                limitReachedKV.getValueLength());
                    }
                    long minValue;
                    if (minValueKV == null) {
                        minValue = Long.MIN_VALUE;
                        KeyValue newMinValueKV = createKeyValue(row,
                                PhoenixDatabaseMetaData.MIN_VALUE_BYTES, minValue, timestamp);
                        put.add(newMinValueKV);
                        Sequence.replaceMinValueKV(cells, newMinValueKV);
                    } else {
                        minValue = PLong.INSTANCE.getCodec().decodeLong(
                                minValueKV.getValueArray(), minValueKV.getValueOffset(), SortOrder.getDefault());
                    }
                    long maxValue;
                    if (maxValueKV == null) {
                        maxValue = Long.MAX_VALUE;
                        KeyValue newMaxValueKV = createKeyValue(row,
                                PhoenixDatabaseMetaData.MAX_VALUE_BYTES, maxValue, timestamp);
                        put.add(newMaxValueKV);
                        Sequence.replaceMaxValueKV(cells, newMaxValueKV);
                    } else {
                        maxValue = PLong.INSTANCE.getCodec().decodeLong(
                                maxValueKV.getValueArray(), maxValueKV.getValueOffset(), SortOrder.getDefault());
                    }
                    boolean cycle;
                    if (cycleKV == null) {
                        cycle = false;
                        KeyValue newCycleKV = createKeyValue(row,
                                PhoenixDatabaseMetaData.CYCLE_FLAG_BYTES, cycle, timestamp);
                        put.add(newCycleKV);
                        Sequence.replaceCycleValueKV(cells, newCycleKV);
                    } else {
                        cycle = (Boolean) PBoolean.INSTANCE.toObject(
                                cycleKV.getValueArray(), cycleKV.getValueOffset(), cycleKV.getValueLength());
                    }

                    // return if we have run out of sequence values
                    if (limitReached) {
                        if (cycle) {
                            // reset currentValue of the Sequence row to minValue/maxValue
                            currentValue = increasingSeq ? minValue : maxValue;
                        } else {
                            SQLExceptionCode code = increasingSeq
                                    ? SQLExceptionCode.SEQUENCE_VAL_REACHED_MAX_VALUE
                                    : SQLExceptionCode.SEQUENCE_VAL_REACHED_MIN_VALUE;
                            return getErrorResult(row, maxTimestamp, code.getErrorCode());
                        }
                    }

                    // check if the limit was reached
                    limitReached = SequenceUtil.checkIfLimitReached(currentValue, minValue,
                            maxValue, incrementBy, cacheSize);
                    // update currentValue
                    currentValue += incrementBy * cacheSize;
                    // update the currentValue of the Result row
                    KeyValue newCurrentValueKV = createKeyValue(row,
                            PhoenixDatabaseMetaData.CURRENT_VALUE_BYTES, currentValue, timestamp);
                    Sequence.replaceCurrentValueKV(cells, newCurrentValueKV);
                    put.add(newCurrentValueKV);
                    // persist the (possibly updated) limit-reached flag so that no values beyond
                    // the min/max bound can be handed out later.
                    // NOTE(review): unlike the other columns above, the returned `cells` list is
                    // not updated with this new limitReached value — confirm whether
                    // Sequence.replaceLimitReachedKV(cells, newLimitReachedKV) should be called
                    // here as well.
                    KeyValue newLimitReachedKV = createKeyValue(row,
                            PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG_BYTES, limitReached, timestamp);
                    put.add(newLimitReachedKV);
                }
                // update the KeyValues on the server
                Mutation[] mutations = new Mutation[] { put };
                region.batchMutate(mutations, HConstants.NO_NONCE, HConstants.NO_NONCE);
                // return a Result with the updated KeyValues
                return Result.create(cells);
            } finally {
                region.releaseRowLocks(locks);
            }
        } catch (Throwable t) {
            ServerUtil.throwIOException("Increment of sequence " + Bytes.toStringBinary(row), t);
            return null; // Impossible
        } finally {
            region.closeRegionOperation();
        }
    }

    /**
     * Creates a new KeyValue for a long value in the sequence column family.
     *
     * @param key row key used while creating the KeyValue
     * @param cqBytes column qualifier of the KeyValue
     * @param value long value to Phoenix-encode into the cell
     * @param timestamp timestamp to stamp on the cell
     * @return the KeyValue that was created
     */
    KeyValue createKeyValue(byte[] key, byte[] cqBytes, long value, long timestamp) {
        byte[] valueBuffer = new byte[PLong.INSTANCE.getByteSize()];
        PLong.INSTANCE.getCodec().encodeLong(value, valueBuffer, 0);
        return KeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SEQUENCE_FAMILY_BYTES,
                cqBytes, timestamp, valueBuffer);
    }

    /**
     * Creates a new KeyValue for a boolean value in the sequence column family.
     *
     * @param key row key used while creating the KeyValue
     * @param cqBytes column qualifier of the KeyValue
     * @param value boolean value, encoded as Phoenix TRUE/FALSE bytes
     * @param timestamp timestamp to stamp on the cell
     * @return the KeyValue that was created
     */
    private KeyValue createKeyValue(byte[] key, byte[] cqBytes, boolean value, long timestamp) {
        return KeyValueUtil.newKeyValue(key, PhoenixDatabaseMetaData.SEQUENCE_FAMILY_BYTES,
                cqBytes, timestamp, value ? PDataType.TRUE_BYTES : PDataType.FALSE_BYTES);
    }

    /**
     * Override the preAppend for checkAndPut and checkAndDelete, as we need the ability to
     * a) set the TimeRange for the Get being done and
     * b) return something back to the client to indicate success/failure
     */
    @SuppressWarnings("deprecation")
    @Override
    public Result preAppend(final ObserverContext<RegionCoprocessorEnvironment> e,
            final Append append) throws IOException {
        byte[] opBuf = append.getAttribute(OPERATION_ATTRIB);
        if (opBuf == null) {
            // Not a Phoenix sequence operation; let the normal append proceed.
            return null;
        }
        Sequence.MetaOp op = Sequence.MetaOp.values()[opBuf[0]];
        Cell keyValue = append.getFamilyCellMap().values().iterator().next().iterator().next();

        long clientTimestamp = HConstants.LATEST_TIMESTAMP;
        long minGetTimestamp = MetaDataProtocol.MIN_TABLE_TIMESTAMP;
        long maxGetTimestamp = HConstants.LATEST_TIMESTAMP;
        boolean hadClientTimestamp;
        byte[] clientTimestampBuf = null;
        if (op == Sequence.MetaOp.RETURN_SEQUENCE) {
            // When returning sequences, this allows us to send the expected timestamp
            // of the sequence to make sure we don't reset any other sequence
            hadClientTimestamp = true;
            clientTimestamp = minGetTimestamp = keyValue.getTimestamp();
            maxGetTimestamp = minGetTimestamp + 1;
        } else {
            clientTimestampBuf = append.getAttribute(MAX_TIMERANGE_ATTRIB);
            if (clientTimestampBuf != null) {
                clientTimestamp = maxGetTimestamp = Bytes.toLong(clientTimestampBuf);
            }
            hadClientTimestamp = (clientTimestamp != HConstants.LATEST_TIMESTAMP);
            if (hadClientTimestamp) {
                // Prevent race condition of creating two sequences at the same timestamp
                // by looking for a sequence at or after the timestamp at which it'll be
                // created.
                if (op == Sequence.MetaOp.CREATE_SEQUENCE) {
                    maxGetTimestamp = clientTimestamp + 1;
                }
            } else {
                clientTimestamp = maxGetTimestamp = EnvironmentEdgeManager.currentTimeMillis();
                clientTimestampBuf = Bytes.toBytes(clientTimestamp);
            }
        }

        RegionCoprocessorEnvironment env = e.getEnvironment();
        // We need to set this to prevent region.append from being called
        e.bypass();
        e.complete();
        Region region = env.getRegion();
        byte[] row = append.getRow();
        List<RowLock> locks = Lists.newArrayList();
        region.startRegionOperation();
        try {
            acquireLock(region, row, locks);
            try {
                byte[] family = CellUtil.cloneFamily(keyValue);
                byte[] qualifier = CellUtil.cloneQualifier(keyValue);

                Get get = new Get(row);
                get.setTimeRange(minGetTimestamp, maxGetTimestamp);
                get.addColumn(family, qualifier);
                Result result = region.get(get);
                if (result.isEmpty()) {
                    if (op == Sequence.MetaOp.DROP_SEQUENCE || op == Sequence.MetaOp.RETURN_SEQUENCE) {
                        return getErrorResult(row, clientTimestamp,
                                SQLExceptionCode.SEQUENCE_UNDEFINED.getErrorCode());
                    }
                } else {
                    if (op == Sequence.MetaOp.CREATE_SEQUENCE) {
                        return getErrorResult(row, clientTimestamp,
                                SQLExceptionCode.SEQUENCE_ALREADY_EXIST.getErrorCode());
                    }
                }
                Mutation m = null;
                switch (op) {
                case RETURN_SEQUENCE:
                    KeyValue currentValueKV = result.raw()[0];
                    long expectedValue = PLong.INSTANCE.getCodec().decodeLong(
                            append.getAttribute(CURRENT_VALUE_ATTRIB), 0, SortOrder.getDefault());
                    long value = PLong.INSTANCE.getCodec().decodeLong(
                            currentValueKV.getValueArray(), currentValueKV.getValueOffset(),
                            SortOrder.getDefault());
                    // Timestamp should match exactly, or we may have the wrong sequence
                    if (expectedValue != value || currentValueKV.getTimestamp() != clientTimestamp) {
                        return Result.create(Collections.singletonList(
                                (Cell) KeyValueUtil.newKeyValue(row,
                                        PhoenixDatabaseMetaData.SEQUENCE_FAMILY_BYTES,
                                        QueryConstants.EMPTY_COLUMN_BYTES,
                                        currentValueKV.getTimestamp(), ByteUtil.EMPTY_BYTE_ARRAY)));
                    }
                    m = new Put(row, currentValueKV.getTimestamp());
                    m.getFamilyCellMap().putAll(append.getFamilyCellMap());
                    break;
                case DROP_SEQUENCE:
                    m = new Delete(row, clientTimestamp);
                    break;
                case CREATE_SEQUENCE:
                    m = new Put(row, clientTimestamp);
                    m.getFamilyCellMap().putAll(append.getFamilyCellMap());
                    break;
                }
                if (!hadClientTimestamp) {
                    // Server assigned the timestamp; stamp every cell of the mutation with it.
                    for (List<Cell> kvs : m.getFamilyCellMap().values()) {
                        for (Cell kv : kvs) {
                            ((KeyValue) kv).updateLatestStamp(clientTimestampBuf);
                        }
                    }
                }
                Mutation[] mutations = new Mutation[] { m };
                region.batchMutate(mutations, HConstants.NO_NONCE, HConstants.NO_NONCE);
                long serverTimestamp = MetaDataUtil.getClientTimeStamp(m);
                // Return result with single KeyValue. The only piece of information
                // the client cares about is the timestamp, which is the timestamp of
                // when the mutation was actually performed (useful in the case where
                // the server assigned the timestamp).
                return Result.create(Collections.singletonList(
                        (Cell) KeyValueUtil.newKeyValue(row,
                                PhoenixDatabaseMetaData.SEQUENCE_FAMILY_BYTES,
                                QueryConstants.EMPTY_COLUMN_BYTES, serverTimestamp, SUCCESS_VALUE)));
            } finally {
                region.releaseRowLocks(locks);
            }
        } catch (Throwable t) {
            // Fixed copy-pasted message: this path handles create/drop/return, not increments.
            ServerUtil.throwIOException("Sequence operation on " + Bytes.toStringBinary(row), t);
            return null; // Impossible
        } finally {
            region.closeRegionOperation();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package arq;

import static org.apache.jena.atlas.logging.LogCtl.setLogging;

import java.net.URL;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.io.FileInputStream;

import org.apache.jena.rdf.model.*;
import org.apache.jena.sparql.util.Closure;

/**
 * A program which read two RDF models and provides a basic triple level diff
 *
 * <p>
 * This program will read two RDF models, in a variety of languages, and compare
 * them providing a basic triple level diff output. Since blank nodes are a
 * complicating factor diffs for blank node containing portions of the graph are
 * reported in terms of sub-graphs rather than individual triples.
 * </p>
 * <p>
 * Input can be read either from a URL or from a file. The program writes its
 * results to the standard output stream and sets its exit code to 0 if the
 * models are equal, to 1 if they are not and to -1 if it encounters an error.
 * </p>
 *
 * <pre>
 * java jena.rdfdiff model1 model2 lang1? lang2? base1? base2?
 * </pre>
 */
public class rdfdiff extends java.lang.Object {

    static {
        setLogging();
    }

    /**
     * Entry point: parses arguments, reads both models and prints the diff.
     *
     * @param args
     *            the command line arguments
     */
    public static void main(String... args) {
        if (args.length < 2 || args.length > 6) {
            usage();
            System.exit(-1);
        }

        String in1 = args[0];
        String in2 = args[1];
        String lang1 = "RDF/XML";
        if (args.length >= 3) {
            lang1 = args[2];
        }
        String lang2 = "N-TRIPLE";
        if (args.length >= 4) {
            lang2 = args[3];
        }
        String base1 = null;
        if (args.length >= 5) {
            base1 = args[4];
        }
        String base2 = base1;
        if (args.length >= 6) {
            base2 = args[5];
        }

        //System.out.println(in1 + " " + in2 + " " + lang1 + " " + lang2 + " " + base1 + " " + base2);
        try {
            Model m1 = ModelFactory.createDefaultModel();
            Model m2 = ModelFactory.createDefaultModel();

            read(m1, in1, lang1, base1);
            read(m2, in2, lang2, base2);

            if (m1.isIsomorphicWith(m2)) {
                System.out.println("models are equal");
                System.out.println();
                System.exit(0);
            } else {
                System.out.println("models are unequal");
                System.out.println();

                if (m1.size() != m2.size()) {
                    System.out.println(String.format("< %,d triples", m1.size()));
                    System.out.println(String.format("> %,d triples", m2.size()));
                }

                // Calculate differences.
                // Ground (blank-node-free) statements are diffed directly; statements
                // involving blank nodes are grouped into sub-graphs keyed by blank
                // node id and compared via graph isomorphism below.
                Map<AnonId, Model> m1SubGraphs = new HashMap<>();
                StmtIterator iter = m1.listStatements();
                while (iter.hasNext()) {
                    Statement stmt = iter.next();
                    if (stmt.asTriple().isConcrete()) {
                        if (!m2.contains(stmt)) {
                            System.out.print("< ");
                            System.out.println(stmt.toString());
                        }
                    } else {
                        // Handle blank nodes via sub-graphs
                        addToSubGraph(stmt, m1SubGraphs);
                    }
                }
                Map<AnonId, Model> m2SubGraphs = new HashMap<>();
                iter = m2.listStatements();
                while (iter.hasNext()) {
                    Statement stmt = iter.next();
                    if (stmt.asTriple().isConcrete()) {
                        if (!m1.contains(stmt)) {
                            System.out.print("> ");
                            System.out.println(stmt.toString());
                        }
                    } else {
                        // Handle blank nodes via sub-graphs
                        addToSubGraph(stmt, m2SubGraphs);
                    }
                }

                // Compute sub-graph differences
                // Reduce to sets (several blank node ids may map to the same sub-graph)
                Set<Model> m1SubGraphSet = new TreeSet<>(new ModelReferenceComparator());
                m1SubGraphSet.addAll(m1SubGraphs.values());
                Set<Model> m2SubGraphSet = new TreeSet<>(new ModelReferenceComparator());
                m2SubGraphSet.addAll(m2SubGraphs.values());

                if (m1SubGraphSet.size() != m2SubGraphSet.size()) {
                    System.out.println("< " + m1SubGraphs.size() + " sub-graphs");
                    System.out.println("> " + m2SubGraphs.size() + " sub-graphs");
                }

                if (m1SubGraphSet.size() > 0) {
                    diffSubGraphs(m1SubGraphSet, m2SubGraphSet, "< ");
                }
                if (m2SubGraphSet.size() > 0) {
                    diffSubGraphs(m2SubGraphSet, m1SubGraphSet, "> ");
                }
                System.exit(1);
            }
        } catch (Exception e) {
            System.err.println("Unhandled exception:");
            System.err.println("    " + e.toString());
            System.exit(-1);
        }
    }

    /**
     * Reports sub-graphs from {@code m1SubGraphSet} that have no isomorphic
     * counterpart in {@code m2SubGraphSet}, printing each unmatched statement
     * with the given prefix. Matched candidates are removed from
     * {@code m2SubGraphSet} so they cannot be matched twice.
     */
    private static void diffSubGraphs(Set<Model> m1SubGraphSet, Set<Model> m2SubGraphSet, String prefix) {
        for (Model subGraph : m1SubGraphSet) {
            // Find candidate matches (sub-graphs of identical size)
            List<Model> candidates = new ArrayList<>();
            for (Model subGraphCandidate : m2SubGraphSet) {
                if (subGraph.size() == subGraphCandidate.size()) {
                    // BUGFIX: previously added subGraph itself here, so the candidate
                    // list never contained the models from the other set and
                    // matching/removal operated on the wrong graphs.
                    candidates.add(subGraphCandidate);
                }
            }

            if (candidates.size() == 0) {
                // No match
                printNonMatchingSubGraph(prefix, subGraph);
            } else if (candidates.size() == 1) {
                // Precisely 1 candidate
                if (!subGraph.isIsomorphicWith(candidates.get(0))) {
                    printNonMatchingSubGraph(prefix, subGraph);
                } else {
                    m2SubGraphSet.remove(candidates.get(0));
                }
            } else {
                // Multiple candidates
                boolean matched = false;
                for (Model subGraphCandidate : candidates) {
                    if (subGraph.isIsomorphicWith(subGraphCandidate)) {
                        // Found a match
                        matched = true;
                        m2SubGraphSet.remove(subGraphCandidate);
                        break;
                    }
                }
                if (!matched) {
                    // Didn't find a match
                    printNonMatchingSubGraph(prefix, subGraph);
                }
            }
        }
    }

    /** Prints every statement of an unmatched sub-graph, one per line, with the diff prefix. */
    private static void printNonMatchingSubGraph(String prefix, Model subGraph) {
        StmtIterator sIter = subGraph.listStatements();
        while (sIter.hasNext()) {
            System.out.print(prefix);
            System.out.println(sIter.next().toString());
        }
    }

    /**
     * Adds the blank-node-containing statement to the sub-graph map: builds the
     * blank node closure for the statement (if not already built) and registers
     * that sub-graph under every blank node id it mentions.
     *
     * @throws IllegalStateException if a blank node id is already associated
     *             with a different sub-graph
     */
    private static void addToSubGraph(Statement stmt, Map<AnonId, Model> subGraphs) {
        // Here we take a copy of the IDs mentioned directly by this statement
        Set<AnonId> ids = new HashSet<>();
        addToIdList(stmt, ids);

        Model subGraph = null;
        for (AnonId id : ids) {
            if (!subGraphs.containsKey(id)) {
                subGraph = Closure.closure(stmt);
                subGraph.add(stmt);
                break;
            }
        }
        // May already have built the sub-graph that includes this statement
        if (subGraph == null)
            return;

        // Find any further IDs that occur in the sub-graph
        StmtIterator sIter = subGraph.listStatements();
        while (sIter.hasNext()) {
            addToIdList(sIter.next(), ids);
        }

        // Associate the sub-graph with all mentioned blank node IDs
        for (AnonId id : ids) {
            if (subGraphs.containsKey(id))
                throw new IllegalStateException(String.format("ID %s occurs in multiple sub-graphs", id));
            subGraphs.put(id, subGraph);
        }
    }

    /** Collects the blank node ids of the statement's subject and object (if any) into {@code ids}. */
    private static void addToIdList(Statement stmt, Set<AnonId> ids) {
        if (stmt.getSubject().isAnon()) {
            ids.add(stmt.getSubject().getId());
        }
        if (stmt.getObject().isAnon()) {
            ids.add(stmt.getObject().asResource().getId());
        }
    }

    /** Prints command line usage to standard error. */
    protected static void usage() {
        System.err.println("usage:");
        System.err.println("    java jena.rdfdiff source1 source2 [lang1 [lang2 [base1 [base2]]]]");
        System.err.println();
        System.err.println("    source1 and source2 can be URL's or filenames");
        System.err.println("    lang1 and lang2 can take values:");
        System.err.println("      RDF/XML");
        System.err.println("      N-TRIPLE");
        System.err.println("      TTL");
        System.err.println("    lang1 defaults to RDF/XML, lang2 to N-TRIPLE");
        System.err.println("    base1 and base2 are URIs");
        System.err.println("    base1 defaults to null");
        System.err.println("    base2 defaults to base1");
        System.err.println("    If no base URIs are specified Jena determines the base URI based on the input source");
        System.err.println();
    }

    /**
     * Reads a model from {@code in}, treating it as a URL when it parses as one
     * and as a file name otherwise.
     */
    protected static void read(Model model, String in, String lang, String base)
            throws java.io.FileNotFoundException {
        try {
            // Constructed only to test whether `in` is a well-formed URL;
            // MalformedURLException routes us to the file-based branch.
            new URL(in);
            model.read(in, base, lang);
        } catch (java.net.MalformedURLException e) {
            model.read(new FileInputStream(in), base, lang);
        }
    }

    /**
     * Orders models by identity hash code so distinct model instances can live
     * in a TreeSet.
     * NOTE(review): identityHashCode is not guaranteed unique — two distinct
     * models that collide compare as 0 and one would be silently dropped from
     * the set; confirm whether this edge case matters for expected input sizes.
     */
    private static class ModelReferenceComparator implements Comparator<Model> {
        @Override
        public int compare(Model o1, Model o2) {
            if (o1 == o2)
                return 0;
            int h1 = System.identityHashCode(o1);
            int h2 = System.identityHashCode(o2);

            if (h1 == h2)
                return 0;
            return h1 < h2 ? -1 : 1;
        }
    }
}
// Copyright (C) 2009 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.query.change; import static com.google.gerrit.server.ApprovalsUtil.sortApprovals; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.SetMultimap; import com.google.gerrit.common.data.SubmitRecord; import com.google.gerrit.common.data.SubmitTypeRecord; import com.google.gerrit.reviewdb.client.Account; import com.google.gerrit.reviewdb.client.Change; import com.google.gerrit.reviewdb.client.ChangeMessage; import com.google.gerrit.reviewdb.client.Patch; import com.google.gerrit.reviewdb.client.PatchLineComment; import com.google.gerrit.reviewdb.client.PatchSet; import com.google.gerrit.reviewdb.client.PatchSetApproval; import com.google.gerrit.reviewdb.server.ReviewDb; import com.google.gerrit.server.ApprovalsUtil; import com.google.gerrit.server.ChangeMessagesUtil; import com.google.gerrit.server.CurrentUser; import com.google.gerrit.server.IdentifiedUser; import com.google.gerrit.server.PatchLineCommentsUtil; import com.google.gerrit.server.change.MergeabilityCache; import com.google.gerrit.server.git.GitRepositoryManager; import com.google.gerrit.server.git.MergeUtil; import com.google.gerrit.server.notedb.ChangeNotes; import 
com.google.gerrit.server.notedb.NotesMigration; import com.google.gerrit.server.notedb.ReviewerState; import com.google.gerrit.server.patch.PatchList; import com.google.gerrit.server.patch.PatchListCache; import com.google.gerrit.server.patch.PatchListEntry; import com.google.gerrit.server.patch.PatchListNotAvailableException; import com.google.gerrit.server.project.ChangeControl; import com.google.gerrit.server.project.NoSuchChangeException; import com.google.gerrit.server.project.ProjectCache; import com.google.gerrit.server.project.SubmitRuleEvaluator; import com.google.gwtorm.server.OrmException; import com.google.gwtorm.server.ResultSet; import com.google.inject.assistedinject.Assisted; import com.google.inject.assistedinject.AssistedInject; import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.RepositoryNotFoundException; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.FooterLine; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevWalk; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; public class ChangeData { public static List<Change> asChanges(List<ChangeData> changeDatas) throws OrmException { List<Change> result = new ArrayList<>(changeDatas.size()); for (ChangeData cd : changeDatas) { result.add(cd.change()); } return result; } public static Map<Change.Id, ChangeData> asMap(List<ChangeData> changes) { Map<Change.Id, ChangeData> result = Maps.newHashMapWithExpectedSize(changes.size()); for (ChangeData cd : changes) { result.put(cd.getId(), cd); } return result; } public static void ensureChangeLoaded(Iterable<ChangeData> changes) throws OrmException { Map<Change.Id, ChangeData> missing = Maps.newHashMap(); 
for (ChangeData cd : changes) { if (cd.change == null) { missing.put(cd.getId(), cd); } } if (!missing.isEmpty()) { ChangeData first = missing.values().iterator().next(); if (!first.notesMigration.readChanges()) { ReviewDb db = missing.values().iterator().next().db; for (Change change : db.changes().get(missing.keySet())) { missing.get(change.getId()).change = change; } } else { for (ChangeData cd : missing.values()) { cd.change(); } } } } public static void ensureAllPatchSetsLoaded(Iterable<ChangeData> changes) throws OrmException { for (ChangeData cd : changes) { cd.patches(); } } public static void ensureCurrentPatchSetLoaded(Iterable<ChangeData> changes) throws OrmException { Map<PatchSet.Id, ChangeData> missing = Maps.newHashMap(); for (ChangeData cd : changes) { if (cd.currentPatchSet == null && cd.patches == null) { missing.put(cd.change().currentPatchSetId(), cd); } } if (!missing.isEmpty()) { ReviewDb db = missing.values().iterator().next().db; for (PatchSet ps : db.patchSets().get(missing.keySet())) { ChangeData cd = missing.get(ps.getId()); cd.currentPatchSet = ps; } } } public static void ensureCurrentApprovalsLoaded(Iterable<ChangeData> changes) throws OrmException { List<ResultSet<PatchSetApproval>> pending = Lists.newArrayList(); for (ChangeData cd : changes) { if (!cd.notesMigration.readChanges()) { if (cd.currentApprovals == null) { pending.add(cd.db.patchSetApprovals() .byPatchSet(cd.change().currentPatchSetId())); } } else { cd.currentApprovals(); } } if (!pending.isEmpty()) { int idx = 0; for (ChangeData cd : changes) { if (cd.currentApprovals == null) { cd.currentApprovals = sortApprovals(pending.get(idx++)); } } } } public interface Factory { ChangeData create(ReviewDb db, Change.Id id); ChangeData create(ReviewDb db, Change c); ChangeData create(ReviewDb db, ChangeControl c); } /** * Create an instance for testing only. * <p> * Attempting to lazy load data will fail with NPEs. * * @param id change ID * @return instance for testing. 
*/
// Test-only factory: builds a ChangeData with no injected dependencies.
// Only legacyId and currentPatchSet are populated; most accessors that
// touch the database or Git will NPE if called on the result.
static ChangeData createForTest(Change.Id id, int currentPatchSetId) {
  ChangeData cd = new ChangeData(null, null, null, null, null, null, null,
      null, null, null, null, null, null, id);
  cd.currentPatchSet = new PatchSet(new PatchSet.Id(id, currentPatchSetId));
  return cd;
}

// Injected collaborators (all final; null only in the test factory above).
private final ReviewDb db;
private final GitRepositoryManager repoManager;
private final ChangeControl.GenericFactory changeControlFactory;
private final IdentifiedUser.GenericFactory userFactory;
private final ProjectCache projectCache;
private final MergeUtil.Factory mergeUtilFactory;
private final ChangeNotes.Factory notesFactory;
private final ApprovalsUtil approvalsUtil;
private final ChangeMessagesUtil cmUtil;
private final PatchLineCommentsUtil plcUtil;
private final PatchListCache patchListCache;
private final NotesMigration notesMigration;
private final MergeabilityCache mergeabilityCache;
private final Change.Id legacyId;

// Lazily loaded / cached state. A null field means "not loaded yet";
// the corresponding accessor loads it on first use and caches it.
private ChangeDataSource returnedBySource;
private Change change;
private ChangeNotes notes;
private String commitMessage;
private List<FooterLine> commitFooters;
private PatchSet currentPatchSet;
private Collection<PatchSet> patches;
private ListMultimap<PatchSet.Id, PatchSetApproval> allApprovals;
private List<PatchSetApproval> currentApprovals;
// File lists keyed by patch set number (the Integer from getPatchSetId()).
private Map<Integer, List<String>> files = new HashMap<>();
private Collection<PatchLineComment> publishedComments;
private CurrentUser visibleTo;
private ChangeControl changeControl;
private List<ChangeMessage> messages;
private List<SubmitRecord> submitRecords;
private ChangedLines changedLines;
private Boolean mergeable;

/** Constructs an unloaded instance identified only by the change ID. */
@AssistedInject
private ChangeData(
    GitRepositoryManager repoManager,
    ChangeControl.GenericFactory changeControlFactory,
    IdentifiedUser.GenericFactory userFactory,
    ProjectCache projectCache,
    MergeUtil.Factory mergeUtilFactory,
    ChangeNotes.Factory notesFactory,
    ApprovalsUtil approvalsUtil,
    ChangeMessagesUtil cmUtil,
    PatchLineCommentsUtil plcUtil,
    PatchListCache patchListCache,
    NotesMigration notesMigration,
    MergeabilityCache mergeabilityCache,
    @Assisted ReviewDb db,
    @Assisted Change.Id id) {
  this.db = db;
  this.repoManager = repoManager;
  this.changeControlFactory = changeControlFactory;
  this.userFactory = userFactory;
  this.projectCache = projectCache;
  this.mergeUtilFactory = mergeUtilFactory;
  this.notesFactory = notesFactory;
  this.approvalsUtil = approvalsUtil;
  this.cmUtil = cmUtil;
  this.plcUtil = plcUtil;
  this.patchListCache = patchListCache;
  this.notesMigration = notesMigration;
  this.mergeabilityCache = mergeabilityCache;
  legacyId = id;
}

/** Constructs an instance seeded with an already-loaded Change. */
@AssistedInject
private ChangeData(
    GitRepositoryManager repoManager,
    ChangeControl.GenericFactory changeControlFactory,
    IdentifiedUser.GenericFactory userFactory,
    ProjectCache projectCache,
    MergeUtil.Factory mergeUtilFactory,
    ChangeNotes.Factory notesFactory,
    ApprovalsUtil approvalsUtil,
    ChangeMessagesUtil cmUtil,
    PatchLineCommentsUtil plcUtil,
    PatchListCache patchListCache,
    NotesMigration notesMigration,
    MergeabilityCache mergeabilityCache,
    @Assisted ReviewDb db,
    @Assisted Change c) {
  this.db = db;
  this.repoManager = repoManager;
  this.changeControlFactory = changeControlFactory;
  this.userFactory = userFactory;
  this.projectCache = projectCache;
  this.mergeUtilFactory = mergeUtilFactory;
  this.notesFactory = notesFactory;
  this.approvalsUtil = approvalsUtil;
  this.cmUtil = cmUtil;
  this.plcUtil = plcUtil;
  this.patchListCache = patchListCache;
  this.notesMigration = notesMigration;
  this.mergeabilityCache = mergeabilityCache;
  legacyId = c.getId();
  change = c;
}

/**
 * Constructs an instance seeded with a ChangeControl, which also supplies
 * the Change, its notes, and the pre-resolved access control.
 */
@AssistedInject
private ChangeData(
    GitRepositoryManager repoManager,
    ChangeControl.GenericFactory changeControlFactory,
    IdentifiedUser.GenericFactory userFactory,
    ProjectCache projectCache,
    MergeUtil.Factory mergeUtilFactory,
    ChangeNotes.Factory notesFactory,
    ApprovalsUtil approvalsUtil,
    ChangeMessagesUtil cmUtil,
    PatchLineCommentsUtil plcUtil,
    PatchListCache patchListCache,
    NotesMigration notesMigration,
    MergeabilityCache mergeabilityCache,
    @Assisted ReviewDb db,
    @Assisted ChangeControl c) {
  this.db = db;
  this.repoManager = repoManager;
  this.changeControlFactory = changeControlFactory;
  this.userFactory = userFactory;
  this.projectCache = projectCache;
  this.mergeUtilFactory = mergeUtilFactory;
  this.notesFactory = notesFactory;
  this.approvalsUtil = approvalsUtil;
  this.cmUtil = cmUtil;
  this.plcUtil = plcUtil;
  this.patchListCache = patchListCache;
  this.notesMigration = notesMigration;
  this.mergeabilityCache = mergeabilityCache;
  legacyId = c.getChange().getId();
  change = c.getChange();
  changeControl = c;
  notes = c.getNotes();
}

/** @return the database handle this instance reads from. */
public ReviewDb db() {
  return db;
}

// Identity comparison is intentional: a source marks the instances it
// produced via cacheFromSource() so callers can tell them apart.
public boolean isFromSource(ChangeDataSource s) {
  return s == returnedBySource;
}

public void cacheFromSource(ChangeDataSource s) {
  returnedBySource = s;
}

// Pre-populates the file-path cache for the current patch set, e.g. when
// the caller already has the list and wants to avoid a patch-list lookup.
public void setCurrentFilePaths(List<String> filePaths) throws OrmException {
  PatchSet ps = currentPatchSet();
  if (ps != null) {
    files.put(ps.getPatchSetId(), ImmutableList.copyOf(filePaths));
  }
}

/** @return file paths of the current patch set, or null if the change or patch set is missing. */
public List<String> currentFilePaths() throws OrmException {
  PatchSet ps = currentPatchSet();
  if (ps == null) {
    return null;
  }
  return filePaths(currentPatchSet);
}

/**
 * Returns the sorted file paths touched by the given patch set, cached per
 * patch set number. The commit-message pseudo-file is excluded; renames
 * contribute both the old and new name. If the patch list cannot be
 * computed, an empty list is cached and returned (best-effort, not an
 * error).
 */
public List<String> filePaths(PatchSet ps) throws OrmException {
  if (!files.containsKey(ps.getPatchSetId())) {
    Change c = change();
    if (c == null) {
      return null;
    }
    PatchList p;
    try {
      p = patchListCache.get(c, ps);
    } catch (PatchListNotAvailableException e) {
      // Cache the failure as an empty list so we do not retry on every call.
      List<String> emptyFileList = Collections.emptyList();
      files.put(ps.getPatchSetId(), emptyFileList);
      return emptyFileList;
    }
    List<String> r = new ArrayList<>(p.getPatches().size());
    for (PatchListEntry e : p.getPatches()) {
      if (Patch.COMMIT_MSG.equals(e.getNewName())) {
        continue;
      }
      switch (e.getChangeType()) {
        case ADDED:
        case MODIFIED:
        case DELETED:
        case COPIED:
        case REWRITE:
          r.add(e.getNewName());
          break;
        case RENAMED:
          // A rename affects both paths.
          r.add(e.getOldName());
          r.add(e.getNewName());
          break;
      }
    }
    Collections.sort(r);
    files.put(ps.getPatchSetId(), Collections.unmodifiableList(r));
  }
  return files.get(ps.getPatchSetId());
}

/**
 * Returns insertion/deletion counts for the current patch set, or null if
 * the change, patch set, or patch list is unavailable. Unlike filePaths(),
 * a missing patch list is NOT cached here; the lookup is retried next call.
 */
public ChangedLines changedLines() throws OrmException {
  if (changedLines == null) {
    Change c = change();
    if (c == null) {
      return null;
    }
    PatchSet ps = currentPatchSet();
    if (ps == null) {
      return null;
    }
    PatchList p;
    try {
      p = patchListCache.get(c, ps);
    } catch (PatchListNotAvailableException e) {
      return null;
    }
    changedLines = new ChangedLines(p.getInsertions(), p.getDeletions());
  }
  return changedLines;
}

public void setChangedLines(int insertions, int deletions) {
  changedLines = new ChangedLines(insertions, deletions);
}

public Change.Id getId() {
  return legacyId;
}

// Fast-path visibility check: only true if cacheVisibleTo() was called
// with a control for this exact user instance.
boolean fastIsVisibleTo(CurrentUser user) {
  return visibleTo == user;
}

public boolean hasChangeControl() {
  return changeControl != null;
}

/**
 * Returns the ChangeControl for the change owner, creating and caching it
 * on first use. NoSuchChangeException is wrapped as OrmException so
 * callers only deal with one checked type.
 */
public ChangeControl changeControl() throws OrmException {
  if (changeControl == null) {
    Change c = change();
    try {
      changeControl = changeControlFactory.controlFor(c, userFactory.create(c.getOwner()));
    } catch (NoSuchChangeException e) {
      throw new OrmException(e);
    }
  }
  return changeControl;
}

void cacheVisibleTo(ChangeControl ctl) {
  visibleTo = ctl.getCurrentUser();
  changeControl = ctl;
}

/** @return the Change, loading it from the database on first access; may be null if deleted. */
public Change change() throws OrmException {
  if (change == null) {
    reloadChange();
  }
  return change;
}

/** Forces a re-read of the Change row, replacing any cached copy. */
public Change reloadChange() throws OrmException {
  change = db.changes().get(legacyId);
  return change;
}

public ChangeNotes notes() throws OrmException {
  if (notes == null) {
    notes = notesFactory.create(change());
  }
  return notes;
}

/**
 * Returns the patch set matching the change's currentPatchSetId(), or null
 * if the change is missing or no patch set matches.
 */
public PatchSet currentPatchSet() throws OrmException {
  if (currentPatchSet == null) {
    Change c = change();
    if (c == null) {
      return null;
    }
    for (PatchSet p : patches()) {
      if (p.getId().equals(c.currentPatchSetId())) {
        currentPatchSet = p;
        return p;
      }
    }
  }
  return currentPatchSet;
}

/**
 * Returns approvals on the current patch set; an empty (never null) list
 * if the change itself cannot be loaded.
 */
public List<PatchSetApproval> currentApprovals() throws OrmException {
  if (currentApprovals == null) {
    Change c = change();
    if (c == null) {
      currentApprovals = Collections.emptyList();
    } else {
      currentApprovals = ImmutableList.copyOf(approvalsUtil.byPatchSet(
          db, changeControl(), c.currentPatchSetId()));
    }
  }
  return currentApprovals;
}

public void setCurrentApprovals(List<PatchSetApproval> approvals) {
  currentApprovals = approvals;
}

/** @return full commit message of the current patch set, or null if it cannot be loaded. */
public String commitMessage() throws IOException, OrmException {
  if (commitMessage == null) {
    if (!loadCommitData()) {
      return null;
    }
  }
  return commitMessage;
}

/** @return footer lines of the current patch set's commit, or null if it cannot be loaded. */
public List<FooterLine> commitFooters() throws IOException, OrmException {
  if (commitFooters == null) {
    if (!loadCommitData()) {
      return null;
    }
  }
  return commitFooters;
}

// Loads commitMessage and commitFooters together from the current patch
// set's commit via JGit. Returns false when the patch set row is missing.
// NOTE: walk.release() (rather than close()) dates this to an older JGit.
private boolean loadCommitData() throws OrmException, RepositoryNotFoundException, IOException,
    MissingObjectException, IncorrectObjectTypeException {
  PatchSet.Id psId = change().currentPatchSetId();
  PatchSet ps = db.patchSets().get(psId);
  if (ps == null) {
    return false;
  }
  String sha1 = ps.getRevision().get();
  Repository repo = repoManager.openRepository(change().getProject());
  try {
    RevWalk walk = new RevWalk(repo);
    try {
      RevCommit c = walk.parseCommit(ObjectId.fromString(sha1));
      commitMessage = c.getFullMessage();
      commitFooters = c.getFooterLines();
    } finally {
      walk.release();
    }
  } finally {
    repo.close();
  }
  return true;
}

/**
 * @return patches for the change.
 * @throws OrmException an error occurred reading the database.
 */
public Collection<PatchSet> patches() throws OrmException {
  if (patches == null) {
    patches = db.patchSets().byChange(legacyId).toList();
  }
  return patches;
}

/**
 * @return patch with the given ID, or null if it does not exist.
 * @throws OrmException an error occurred reading the database.
 */
public PatchSet patch(PatchSet.Id psId) throws OrmException {
  // Fast path: the current patch set is usually the one asked for.
  if (currentPatchSet != null && currentPatchSet.getId().equals(psId)) {
    return currentPatchSet;
  }
  for (PatchSet ps : patches()) {
    if (ps.getId().equals(psId)) {
      return ps;
    }
  }
  return null;
}

/**
 * @return all patch set approvals for the change, keyed by ID, ordered by
 *     timestamp within each patch set.
 * @throws OrmException an error occurred reading the database.
 */
public ListMultimap<PatchSet.Id, PatchSetApproval> approvals() throws OrmException {
  if (allApprovals == null) {
    allApprovals = approvalsUtil.byChange(db, notes());
  }
  return allApprovals;
}

/** @return reviewers grouped by state, derived from all approvals. */
public SetMultimap<ReviewerState, Account.Id> reviewers() throws OrmException {
  return approvalsUtil.getReviewers(notes(), approvals().values());
}

public Collection<PatchLineComment> publishedComments() throws OrmException {
  if (publishedComments == null) {
    publishedComments = plcUtil.publishedByChange(db, notes());
  }
  return publishedComments;
}

public List<ChangeMessage> messages() throws OrmException {
  if (messages == null) {
    messages = cmUtil.byChange(db, notes());
  }
  return messages;
}

public void setSubmitRecords(List<SubmitRecord> records) {
  submitRecords = records;
}

public List<SubmitRecord> getSubmitRecords() {
  return submitRecords;
}

public void setMergeable(boolean mergeable) {
  this.mergeable = mergeable;
}

/**
 * Returns whether the current patch set merges cleanly into its
 * destination branch, consulting the mergeability cache; null if the
 * change or patch set is missing. Merged changes are trivially mergeable.
 *
 * @throws OrmException on database errors, submit-rule evaluation
 *     failures, or wrapped I/O errors from the repository.
 */
public Boolean isMergeable() throws OrmException {
  if (mergeable == null) {
    Change c = change();
    if (c == null) {
      return null;
    }
    if (c.getStatus() == Change.Status.MERGED) {
      mergeable = true;
    } else {
      PatchSet ps = currentPatchSet();
      if (ps == null) {
        return null;
      }
      Repository repo = null;
      try {
        repo = repoManager.openRepository(c.getProject());
        Ref ref = repo.getRef(c.getDest().get());
        SubmitTypeRecord rec = new SubmitRuleEvaluator(this)
            .getSubmitType();
        if (rec.status != SubmitTypeRecord.Status.OK) {
          throw new OrmException(
              "Error in mergeability check: " + rec.errorMessage);
        }
        // The merge strategy is part of the cache key: a different
        // strategy can yield a different mergeability result.
        String mergeStrategy = mergeUtilFactory
            .create(projectCache.get(c.getProject()))
            .mergeStrategyName();
        mergeable = mergeabilityCache.get(
            ObjectId.fromString(ps.getRevision().get()),
            ref, rec.type, mergeStrategy, c.getDest(), repo, db);
      } catch (IOException e) {
        throw new OrmException(e);
      } finally {
        if (repo != null) {
          repo.close();
        }
      }
    }
  }
  return mergeable;
}

@Override
public String toString() {
  MoreObjects.ToStringHelper h = MoreObjects.toStringHelper(this);
  if (change != null) {
    h.addValue(change);
  } else {
    h.addValue(legacyId);
  }
  return h.toString();
}

/** Simple immutable pair of insertion and deletion line counts. */
public static class ChangedLines {
  public final int insertions;
  public final int deletions;

  ChangedLines(int insertions, int deletions) {
    this.insertions = insertions;
    this.deletions = deletions;
  }
}
}
package com.fasterxml.jackson.databind.deser;

import java.io.IOException;
import java.lang.annotation.Annotation;

import com.fasterxml.jackson.core.*;
import com.fasterxml.jackson.core.util.InternCache;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.deser.impl.NullProvider;
import com.fasterxml.jackson.databind.introspect.AnnotatedMember;
import com.fasterxml.jackson.databind.introspect.BeanPropertyDefinition;
import com.fasterxml.jackson.databind.jsontype.TypeDeserializer;
import com.fasterxml.jackson.databind.util.Annotations;
import com.fasterxml.jackson.databind.util.ViewMatcher;

/**
 * Base class for deserializable properties of a bean: contains
 * both type and name definitions, and reflection-based set functionality.
 * Concrete sub-classes implement details, so that field- and
 * setter-backed properties, as well as a few more esoteric variations,
 * can be handled.
 */
public abstract class SettableBeanProperty
    implements BeanProperty,
        java.io.Serializable
{
    private static final long serialVersionUID = -1026580169193933453L;

    /**
     * Logical name of the property (often but not always derived
     * from the setter method name)
     */
    protected final String _propName;

    /**
     * Base type for property; may be a supertype of actual value.
     */
    protected final JavaType _type;

    /**
     * Class that contains this property (either class that declares
     * the property or one of its subclasses), class that is
     * deserialized using deserializer that contains this property.
     */
    // transient: Annotations are not serialized with the property.
    protected final transient Annotations _contextAnnotations;

    /**
     * Deserializer used for handling property value.
     */
    protected JsonDeserializer<Object> _valueDeserializer;

    /**
     * If value will contain type information (to support
     * polymorphic handling), this is the type deserializer
     * used to handle type resolution.
     */
    protected TypeDeserializer _valueTypeDeserializer;

    /**
     * Object used to figure out value to be used when 'null' literal is encountered in JSON.
     * For most types simply Java null, but for primitive types must
     * be a non-null value (like Integer.valueOf(0) for int).
     */
    protected NullProvider _nullProvider;

    /**
     * If property represents a managed (forward) reference
     * (see [JACKSON-235]), we will need name of reference for
     * later linking.
     */
    protected String _managedReferenceName;

    /**
     * Helper object used for checking whether this property is to
     * be included in the active view, if property is view-specific;
     * null otherwise.
     */
    protected ViewMatcher _viewMatcher;

    /**
     * Index of property (within all property of a bean); assigned
     * when all properties have been collected. Order of entries
     * is arbitrary, but once indexes are assigned they are not
     * changed.
     */
    protected int _propertyIndex = -1;

    /*
    /**********************************************************
    /* Life-cycle (construct & configure)
    /**********************************************************
     */

    protected SettableBeanProperty(BeanPropertyDefinition propDef,
            JavaType type, TypeDeserializer typeDeser, Annotations contextAnnotations)
    {
        // Delegates to the String-name constructor below.
        this(propDef.getName(), type, typeDeser, contextAnnotations);
    }

    protected SettableBeanProperty(String propName, JavaType type,
            TypeDeserializer typeDeser, Annotations contextAnnotations)
    {
        /* 09-Jan-2009, tatu: Intern()ing makes sense since Jackson parsed
         *   field names are (usually) interned too, hence lookups will be faster.
         */
        // 23-Oct-2009, tatu: should this be disabled wrt [JACKSON-180]?
        /* Probably need not, given that namespace of field/method names
         * is not unbounded, unlike potential JSON names.
         */
        if (propName == null || propName.length() == 0) {
            _propName = "";
        } else {
            _propName = InternCache.instance.intern(propName);
        }
        _type = type;
        _contextAnnotations = contextAnnotations;
        _viewMatcher = null;
        // 30-Jan-2012, tatu: Important: contextualize TypeDeserializer now...
        if (typeDeser != null) {
            typeDeser = typeDeser.forProperty(this);
        }
        _valueTypeDeserializer = typeDeser;
    }

    /**
     * Basic copy-constructor for sub-classes to use.
     */
    protected SettableBeanProperty(SettableBeanProperty src)
    {
        _propName = src._propName;
        _type = src._type;
        _contextAnnotations = src._contextAnnotations;
        _valueDeserializer = src._valueDeserializer;
        _valueTypeDeserializer = src._valueTypeDeserializer;
        _nullProvider = src._nullProvider;
        _managedReferenceName = src._managedReferenceName;
        _propertyIndex = src._propertyIndex;
        _viewMatcher = src._viewMatcher;
    }

    /**
     * Copy-with-deserializer-change constructor for sub-classes to use.
     */
    @SuppressWarnings("unchecked")
    protected SettableBeanProperty(SettableBeanProperty src, JsonDeserializer<?> deser)
    {
        _propName = src._propName;
        _type = src._type;
        _contextAnnotations = src._contextAnnotations;
        _valueTypeDeserializer = src._valueTypeDeserializer;
        _managedReferenceName = src._managedReferenceName;
        _propertyIndex = src._propertyIndex;
        _valueDeserializer = (JsonDeserializer<Object>) deser;
        // The null provider is derived from the new deserializer, not copied:
        // it must agree with the deserializer's notion of a null value.
        if (deser == null) {
            _nullProvider = null;
        } else {
            Object nvl = deser.getNullValue();
            _nullProvider = (nvl == null) ? null : new NullProvider(_type, nvl);
        }
        _viewMatcher = src._viewMatcher;
    }

    /**
     * Copy-with-deserializer-change constructor for sub-classes to use.
     */
    protected SettableBeanProperty(SettableBeanProperty src, String newName)
    {
        _propName = newName;
        _type = src._type;
        _contextAnnotations = src._contextAnnotations;
        _valueDeserializer = src._valueDeserializer;
        _valueTypeDeserializer = src._valueTypeDeserializer;
        _nullProvider = src._nullProvider;
        _managedReferenceName = src._managedReferenceName;
        _propertyIndex = src._propertyIndex;
        _viewMatcher = src._viewMatcher;
    }

    /**
     * Fluent factory method for constructing and returning a new instance
     * with specified value deserializer.
     * Note that this method should NOT change configuration of this instance.
     *
     * @param deser Deserializer to assign to the new property instance
     *
     * @return Newly constructed instance, if value deserializer differs from the
     *   one used for this instance; or 'this' if not.
     */
    public abstract SettableBeanProperty withValueDeserializer(JsonDeserializer<?> deser);

    /**
     * Fluent factory method for constructing and returning a new instance
     * with specified property name.
     * Note that this method should NOT change configuration of this instance.
     *
     * @param newName Name to use for the new instance.
     *
     * @return Newly constructed instance, if property name differs from the
     *   one used for this instance; or 'this' if not.
     */
    public abstract SettableBeanProperty withName(String newName);

    public void setManagedReferenceName(String n) {
        _managedReferenceName = n;
    }

    public void setViews(Class<?>[] views) {
        if (views == null) {
            _viewMatcher = null;
        } else {
            _viewMatcher = ViewMatcher.construct(views);
        }
    }

    /**
     * Method used to assign index for property.
     */
    public void assignIndex(int index) {
        // An index may be assigned exactly once; reassignment is a bug.
        if (_propertyIndex != -1) {
            throw new IllegalStateException("Property '"+getName()+"' already had index ("+_propertyIndex+"), trying to assign "+index);
        }
        _propertyIndex = index;
    }

    /*
    /**********************************************************
    /* BeanProperty impl
    /**********************************************************
     */

    // @Override
    public final String getName() {
        return _propName;
    }

    // @Override
    public JavaType getType() { return _type; }

    public abstract <A extends Annotation> A getAnnotation(Class<A> acls);

    // @Override
    public abstract AnnotatedMember getMember();

    public <A extends Annotation> A getContextAnnotation(Class<A> acls) {
        return _contextAnnotations.get(acls);
    }

    /*
    /**********************************************************
    /* Accessors
    /**********************************************************
     */

    protected final Class<?> getDeclaringClass() {
        return getMember().getDeclaringClass();
    }

    public String getManagedReferenceName() {
        return _managedReferenceName;
    }

    public boolean hasValueDeserializer() { return (_valueDeserializer != null); }

    public boolean hasValueTypeDeserializer() { return (_valueTypeDeserializer != null); }

    public JsonDeserializer<Object> getValueDeserializer() { return _valueDeserializer; }

    public TypeDeserializer getValueTypeDeserializer() { return _valueTypeDeserializer; }

    // No matcher means the property is not view-specific and is always visible.
    public boolean visibleInView(Class<?> activeView) {
        return (_viewMatcher == null) || _viewMatcher.isVisibleForView(activeView);
    }

    public boolean hasViews() { return _viewMatcher != null; }

    /**
     * Method for accessing unique index of this property; indexes are
     * assigned once all properties of a {@link BeanDeserializer} have
     * been collected.
     *
     * @return Index of this property
     */
    public int getPropertyIndex() { return _propertyIndex; }

    /**
     * Method for accessing index of the creator property: for other
     * types of properties will simply return -1.
     *
     * @since 2.1
     */
    public int getCreatorIndex() { return -1; }

    /**
     * Accessor for id of injectable value, if this bean property supports
     * value injection.
     */
    public Object getInjectableValueId() { return null; }

    /*
    /**********************************************************
    /* Public API
    /**********************************************************
     */

    /**
     * Method called to deserialize appropriate value, given parser (and
     * context), and set it using appropriate mechanism.
     * Pre-condition is that passed parser must point to the first token
     * that should be consumed to produce the value (the only value for
     * scalars, multiple for Objects and Arrays).
     */
    public abstract void deserializeAndSet(JsonParser jp, DeserializationContext ctxt,
            Object instance)
        throws IOException, JsonProcessingException;

    /**
     * Alternative to {@link #deserializeAndSet} that returns
     * either return value of setter method called (if one is),
     * or null to indicate that no return value is available.
     * Mostly used to support Builder style deserialization.
     *
     * @since 2.0
     */
    public abstract Object deserializeSetAndReturn(JsonParser jp,
            DeserializationContext ctxt, Object instance)
        throws IOException, JsonProcessingException;

    /**
     * Method called to assign given value to this property, on
     * specified Object.
     *<p>
     * Note: this is an optional operation, not supported by all
     * implementations, creator-backed properties for example do not
     * support this method.
     */
    public abstract void set(Object instance, Object value)
        throws IOException;

    /**
     * Method called to assign given value to this property, on
     * specified Object, and return whatever delegating accessor
     * returned (if anything)
     *<p>
     * Note: this is an optional operation, not supported by all
     * implementations, creator-backed properties for example do not
     * support this method.
     *
     * @since 2.0
     */
    public abstract Object setAndReturn(Object instance, Object value)
        throws IOException;

    /**
     * This method is needed by some specialized bean deserializers,
     * and also called by some {@link #deserializeAndSet} implementations.
     *<p>
     * Pre-condition is that passed parser must point to the first token
     * that should be consumed to produce the value (the only value for
     * scalars, multiple for Objects and Arrays).
     *<p>
     * Note that this method is final for performance reasons: to override
     * functionality you must override other methods that call this method;
     * this method should also not be called directly unless you really know
     * what you are doing (and probably not even then).
     */
    public final Object deserialize(JsonParser jp, DeserializationContext ctxt)
        throws IOException, JsonProcessingException
    {
        JsonToken t = jp.getCurrentToken();
        if (t == JsonToken.VALUE_NULL) {
            // Null token: delegate to the provider (primitives need a real value).
            return (_nullProvider == null) ? null : _nullProvider.nullValue(ctxt);
        }
        if (_valueTypeDeserializer != null) {
            // Polymorphic value: let the type deserializer resolve the subtype.
            return _valueDeserializer.deserializeWithType(jp, ctxt, _valueTypeDeserializer);
        }
        return _valueDeserializer.deserialize(jp, ctxt);
    }

    /*
    /**********************************************************
    /* Helper methods
    /**********************************************************
     */

    /**
     * Method that takes in exception of any type, and casts or wraps it
     * to an IOException or its subclass.
     */
    protected void _throwAsIOE(Exception e, Object value)
        throws IOException
    {
        if (e instanceof IllegalArgumentException) {
            // Type-mismatch case: build a descriptive mapping error.
            String actType = (value == null) ? "[NULL]" : value.getClass().getName();
            StringBuilder msg = new StringBuilder("Problem deserializing property '").append(getName());
            msg.append("' (expected type: ").append(getType());
            msg.append("; actual type: ").append(actType).append(")");
            String origMsg = e.getMessage();
            if (origMsg != null) {
                msg.append(", problem: ").append(origMsg);
            } else {
                msg.append(" (no error message provided)");
            }
            throw new JsonMappingException(msg.toString(), null, e);
        }
        _throwAsIOE(e);
    }

    // Declared to return IOException so callers can write "throw _throwAsIOE(e);",
    // but this method always throws and never actually returns.
    protected IOException _throwAsIOE(Exception e)
        throws IOException
    {
        if (e instanceof IOException) {
            throw (IOException) e;
        }
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        }
        // let's wrap the innermost problem
        Throwable th = e;
        while (th.getCause() != null) {
            th = th.getCause();
        }
        throw new JsonMappingException(th.getMessage(), null, th);
    }

    @Override public String toString() { return "[property '"+getName()+"']"; }
}
/* =========================================================== * JFreeChart : a free chart library for the Java(tm) platform * =========================================================== * * (C) Copyright 2000-2007, by Object Refinery Limited and Contributors. * * Project Info: http://www.jfree.org/jfreechart/index.html * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, * USA. * * [Java is a trademark or registered trademark of Sun Microsystems, Inc. * in the United States and other countries.] * * --------------------- * LookupPaintScale.java * --------------------- * (C) Copyright 2006, 2007, by Object Refinery Limited. 
* * Original Author: David Gilbert (for Object Refinery Limited); * Contributor(s): -; * * $Id: LookupPaintScale.java,v 1.1.2.1 2007/01/31 14:15:16 mungady Exp $ * * Changes * ------- * 05-Jul-2006 : Version 1 (DG); * 31-Jan-2007 : Fixed serialization support (DG); * */ package org.jfree.chart.renderer; import java.awt.Color; import java.awt.Paint; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.List; import org.jfree.io.SerialUtilities; import org.jfree.util.PaintUtilities; import org.jfree.util.PublicCloneable; /** * A paint scale that uses a lookup table to associate paint instances * with data value ranges. * * @since 1.0.4 */ public class LookupPaintScale implements PaintScale, PublicCloneable, Serializable { /** * Stores the paint for a value. */ class PaintItem implements Serializable { /** The value. */ Number value; /** The paint. */ transient Paint paint; /** * Creates a new instance. * * @param value the value. * @param paint the paint. */ public PaintItem(Number value, Paint paint) { this.value = value; this.paint = paint; } /** * Tests this item for equality with an arbitrary object. * * @param obj the object (<code>null</code> permitted). * * @return A boolean. */ public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof PaintItem)) { return false; } PaintItem that = (PaintItem) obj; if (!this.value.equals(that.value)) { return false; } if (!PaintUtilities.equal(this.paint, that.paint)) { return false; } return true; } /** * Provides serialization support. * * @param stream the output stream. * * @throws IOException if there is an I/O error. */ private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); SerialUtilities.writePaint(this.paint, stream); } /** * Provides serialization support. * * @param stream the input stream. * * @throws IOException if there is an I/O error. 
* @throws ClassNotFoundException if there is a classpath problem. */ private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); this.paint = SerialUtilities.readPaint(stream); } } /** The lower bound. */ private double lowerBound; /** The upper bound. */ private double upperBound; /** The default paint. */ private transient Paint defaultPaint; /** The lookup table. */ private List lookupTable; /** * Creates a new paint scale. */ public LookupPaintScale() { this(0.0, 1.0, Color.lightGray); } /** * Creates a new paint scale with the specified default paint. * * @param lowerBound the lower bound. * @param upperBound the upper bound. * @param defaultPaint the default paint (<code>null</code> not * permitted). */ public LookupPaintScale(double lowerBound, double upperBound, Paint defaultPaint) { if (lowerBound >= upperBound) { throw new IllegalArgumentException( "Requires lowerBound < upperBound."); } if (defaultPaint == null) { throw new IllegalArgumentException("Null 'paint' argument."); } this.defaultPaint = defaultPaint; this.lookupTable = new java.util.ArrayList(); } /** * Returns the default paint (never <code>null</code>). * * @return The default paint. */ public Paint getDefaultPaint() { return this.defaultPaint; } /** * Returns the lower bound. * * @return The lower bound. */ public double getLowerBound() { return this.lowerBound; } /** * Returns the upper bound. * * @return The upper bound. */ public double getUpperBound() { return this.upperBound; } /** * Adds an entry to the lookup table. * * @param n the data value. * @param p the paint. */ public void add(Number n, Paint p) { this.lookupTable.add(new PaintItem(n, p)); } /** * Returns the paint associated with the specified value. * * @param value the value. * * @return The paint. 
*/ public Paint getPaint(double value) { Paint result = defaultPaint; int index = this.lookupTable.size(); boolean done = false; while (index > 0 && !done) { PaintItem item = (PaintItem) lookupTable.get(--index); if (value >= item.value.doubleValue()) { result = item.paint; done = true; } } return result; } /** * Tests this instance for equality with an arbitrary object. * * @param obj the object (<code>null</code> permitted). * * @return A boolean. */ public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof LookupPaintScale)) { return false; } LookupPaintScale that = (LookupPaintScale) obj; if (this.lowerBound != that.lowerBound) { return false; } if (this.upperBound != that.upperBound) { return false; } if (!PaintUtilities.equal(this.defaultPaint, that.defaultPaint)) { return false; } if (!this.lookupTable.equals(that.lookupTable)) { return false; } return true; } /** * Returns a clone of the instance. * * @return A clone. * * @throws CloneNotSupportedException if there is a problem cloning the * instance. */ public Object clone() throws CloneNotSupportedException { return super.clone(); } /** * Provides serialization support. * * @param stream the output stream. * * @throws IOException if there is an I/O error. */ private void writeObject(ObjectOutputStream stream) throws IOException { stream.defaultWriteObject(); SerialUtilities.writePaint(this.defaultPaint, stream); } /** * Provides serialization support. * * @param stream the input stream. * * @throws IOException if there is an I/O error. * @throws ClassNotFoundException if there is a classpath problem. */ private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException { stream.defaultReadObject(); this.defaultPaint = SerialUtilities.readPaint(stream); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.azurebfs.services; import java.io.IOException; import java.lang.reflect.Field; import java.util.Map; import java.util.Random; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.azurebfs.AbfsConfiguration; import org.apache.hadoop.fs.azurebfs.AbstractAbfsIntegrationTest; import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystem; import org.apache.hadoop.fs.azurebfs.AzureBlobFileSystemStore; import org.apache.hadoop.fs.azurebfs.utils.TracingContext; import org.junit.Test; import static org.apache.hadoop.fs.azurebfs.constants.FileSystemConfigurations.ONE_MB; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; public class ITestAbfsInputStream extends AbstractAbfsIntegrationTest { protected static final int HUNDRED = 100; public ITestAbfsInputStream() throws Exception { } @Test public void testWithNoOptimization() throws Exception { for (int i 
= 2; i <= 7; i++) { int fileSize = i * ONE_MB; final AzureBlobFileSystem fs = getFileSystem(false, false, fileSize); String fileName = methodName.getMethodName() + i; byte[] fileContent = getRandomBytesArray(fileSize); Path testFilePath = createFileWithContent(fs, fileName, fileContent); testWithNoOptimization(fs, testFilePath, HUNDRED, fileContent); } } protected void testWithNoOptimization(final FileSystem fs, final Path testFilePath, final int seekPos, final byte[] fileContent) throws IOException { FSDataInputStream iStream = fs.open(testFilePath); try { AbfsInputStream abfsInputStream = (AbfsInputStream) iStream .getWrappedStream(); iStream = new FSDataInputStream(abfsInputStream); seek(iStream, seekPos); long totalBytesRead = 0; int length = HUNDRED * HUNDRED; do { byte[] buffer = new byte[length]; int bytesRead = iStream.read(buffer, 0, length); totalBytesRead += bytesRead; if ((totalBytesRead + seekPos) >= fileContent.length) { length = (fileContent.length - seekPos) % length; } assertEquals(length, bytesRead); assertContentReadCorrectly(fileContent, (int) (seekPos + totalBytesRead - length), length, buffer); assertTrue(abfsInputStream.getFCursor() >= seekPos + totalBytesRead); assertTrue(abfsInputStream.getFCursorAfterLastRead() >= seekPos + totalBytesRead); assertTrue(abfsInputStream.getBCursor() >= totalBytesRead % abfsInputStream.getBufferSize()); assertTrue(abfsInputStream.getLimit() >= totalBytesRead % abfsInputStream.getBufferSize()); } while (totalBytesRead + seekPos < fileContent.length); } finally { iStream.close(); } } @Test public void testExceptionInOptimization() throws Exception { for (int i = 2; i <= 7; i++) { int fileSize = i * ONE_MB; final AzureBlobFileSystem fs = getFileSystem(true, true, fileSize); String fileName = methodName.getMethodName() + i; byte[] fileContent = getRandomBytesArray(fileSize); Path testFilePath = createFileWithContent(fs, fileName, fileContent); testExceptionInOptimization(fs, testFilePath, fileSize - HUNDRED, 
fileSize / 4, fileContent); } } private void testExceptionInOptimization(final FileSystem fs, final Path testFilePath, final int seekPos, final int length, final byte[] fileContent) throws IOException { FSDataInputStream iStream = fs.open(testFilePath); try { AbfsInputStream abfsInputStream = (AbfsInputStream) iStream .getWrappedStream(); abfsInputStream = spy(abfsInputStream); doThrow(new IOException()) .doCallRealMethod() .when(abfsInputStream) .readRemote(anyLong(), any(), anyInt(), anyInt(), any(TracingContext.class)); iStream = new FSDataInputStream(abfsInputStream); verifyBeforeSeek(abfsInputStream); seek(iStream, seekPos); byte[] buffer = new byte[length]; int bytesRead = iStream.read(buffer, 0, length); long actualLength = length; if (seekPos + length > fileContent.length) { long delta = seekPos + length - fileContent.length; actualLength = length - delta; } assertEquals(bytesRead, actualLength); assertContentReadCorrectly(fileContent, seekPos, (int) actualLength, buffer); assertEquals(fileContent.length, abfsInputStream.getFCursor()); assertEquals(fileContent.length, abfsInputStream.getFCursorAfterLastRead()); assertEquals(actualLength, abfsInputStream.getBCursor()); assertTrue(abfsInputStream.getLimit() >= actualLength); } finally { iStream.close(); } } protected AzureBlobFileSystem getFileSystem(boolean readSmallFilesCompletely) throws IOException { final AzureBlobFileSystem fs = getFileSystem(); getAbfsStore(fs).getAbfsConfiguration() .setReadSmallFilesCompletely(readSmallFilesCompletely); return fs; } private AzureBlobFileSystem getFileSystem(boolean optimizeFooterRead, boolean readSmallFileCompletely, int fileSize) throws IOException { final AzureBlobFileSystem fs = getFileSystem(); getAbfsStore(fs).getAbfsConfiguration() .setOptimizeFooterRead(optimizeFooterRead); if (fileSize <= getAbfsStore(fs).getAbfsConfiguration() .getReadBufferSize()) { getAbfsStore(fs).getAbfsConfiguration() .setReadSmallFilesCompletely(readSmallFileCompletely); } return fs; } 
  /**
   * Returns {@code length} random bytes.
   *
   * @param length number of bytes to generate
   * @return a freshly filled random byte array
   */
  protected byte[] getRandomBytesArray(int length) {
    final byte[] b = new byte[length];
    new Random().nextBytes(b);
    return b;
  }

  /**
   * Creates a file with the given content and returns its path.
   *
   * @param fs          file system to create the file on
   * @param fileName    name of the file (resolved via {@code path})
   * @param fileContent bytes to write
   * @return path of the created file
   * @throws IOException if the file cannot be written
   */
  protected Path createFileWithContent(FileSystem fs, String fileName,
      byte[] fileContent) throws IOException {
    Path testFilePath = path(fileName);
    try (FSDataOutputStream oStream = fs.create(testFilePath)) {
      oStream.write(fileContent);
      oStream.flush();
    }
    return testFilePath;
  }

  /**
   * Extracts the private {@code abfsStore} field from the file system via
   * reflection so tests can tweak its {@code AbfsConfiguration}.
   *
   * @param fs file system to inspect
   * @return the backing {@link AzureBlobFileSystemStore}
   * @throws NoSuchFieldException   if the field name changes
   * @throws IllegalAccessException if reflective access is denied
   */
  protected AzureBlobFileSystemStore getAbfsStore(FileSystem fs)
      throws NoSuchFieldException, IllegalAccessException {
    AzureBlobFileSystem abfs = (AzureBlobFileSystem) fs;
    Field abfsStoreField = AzureBlobFileSystem.class
        .getDeclaredField("abfsStore");
    abfsStoreField.setAccessible(true);
    return (AzureBlobFileSystemStore) abfsStoreField.get(abfs);
  }

  /**
   * Reads the private {@code abfsCounters} field via reflection and
   * returns a snapshot of the instrumentation counters.
   *
   * @param fs file system to inspect
   * @return counter name to value map
   * @throws NoSuchFieldException   if the field name changes
   * @throws IllegalAccessException if reflective access is denied
   */
  protected Map<String, Long> getInstrumentationMap(FileSystem fs)
      throws NoSuchFieldException, IllegalAccessException {
    AzureBlobFileSystem abfs = (AzureBlobFileSystem) fs;
    Field abfsCountersField = AzureBlobFileSystem.class
        .getDeclaredField("abfsCounters");
    abfsCountersField.setAccessible(true);
    AbfsCounters abfsCounters = (AbfsCounters) abfsCountersField.get(abfs);
    return abfsCounters.toMap();
  }

  /**
   * Asserts that {@code contentRead[0..len)} equals
   * {@code actualFileContent[from..from+len)}.
   *
   * @param actualFileContent the full expected file content
   * @param from              offset into the expected content
   * @param len               number of bytes to compare
   * @param contentRead       bytes actually read
   */
  protected void assertContentReadCorrectly(byte[] actualFileContent, int from,
      int len, byte[] contentRead) {
    for (int i = 0; i < len; i++) {
      assertEquals(contentRead[i], actualFileContent[i + from]);
    }
  }

  /**
   * Asserts that the first buffer-size bytes of the two arrays differ in
   * at least one position.
   */
  protected void assertBuffersAreNotEqual(byte[] actualContent,
      byte[] contentRead, AbfsConfiguration conf) {
    assertBufferEquality(actualContent, contentRead, conf, false);
  }

  /**
   * Asserts that the first buffer-size bytes of the two arrays are
   * identical.
   */
  protected void assertBuffersAreEqual(byte[] actualContent,
      byte[] contentRead, AbfsConfiguration conf) {
    assertBufferEquality(actualContent, contentRead, conf, true);
  }

  /**
   * Compares the two arrays over {@code min(contentLength, readBufferSize)}
   * bytes and asserts full match or at least one mismatch depending on
   * {@code assertEqual}.
   */
  private void assertBufferEquality(byte[] actualContent, byte[] contentRead,
      AbfsConfiguration conf, boolean assertEqual) {
    int bufferSize = conf.getReadBufferSize();
    int actualContentSize = actualContent.length;
    // Only the portion that fits in one read buffer is compared.
    int n = (actualContentSize < bufferSize) ? actualContentSize : bufferSize;
    int matches = 0;
    for (int i = 0; i < n; i++) {
      if (actualContent[i] == contentRead[i]) {
        matches++;
      }
    }
    if (assertEqual) {
      assertEquals(n, matches);
    } else {
      assertNotEquals(n, matches);
    }
  }

  /**
   * Seeks the stream to {@code seekPos}, asserting the pristine cursor
   * state before the seek and the lazy-seek state afterwards.
   *
   * @param iStream stream under test
   * @param seekPos target position
   * @throws IOException if the seek fails
   */
  protected void seek(FSDataInputStream iStream, long seekPos)
      throws IOException {
    AbfsInputStream abfsInputStream =
        (AbfsInputStream) iStream.getWrappedStream();
    verifyBeforeSeek(abfsInputStream);
    iStream.seek(seekPos);
    verifyAfterSeek(abfsInputStream, seekPos);
  }

  /**
   * Asserts the cursor state of a freshly opened stream: nothing has been
   * read (file cursor 0, last-read cursor -1) and the buffer is empty.
   */
  private void verifyBeforeSeek(AbfsInputStream abfsInputStream) {
    assertEquals(0, abfsInputStream.getFCursor());
    assertEquals(-1, abfsInputStream.getFCursorAfterLastRead());
    assertEquals(0, abfsInputStream.getLimit());
    assertEquals(0, abfsInputStream.getBCursor());
  }

  /**
   * Asserts the cursor state after a seek: position has moved but no
   * remote read has happened yet (last-read cursor still -1, buffer still
   * empty) — i.e. the seek is lazy.
   */
  private void verifyAfterSeek(AbfsInputStream abfsInputStream, long seekPos)
      throws IOException {
    assertEquals(seekPos, abfsInputStream.getPos());
    assertEquals(-1, abfsInputStream.getFCursorAfterLastRead());
    assertEquals(0, abfsInputStream.getLimit());
    assertEquals(0, abfsInputStream.getBCursor());
  }
}
/* * Copyright (c) 2012-present, salesforce.com, inc. * All rights reserved. * Redistribution and use of this software in source and binary forms, with or * without modification, are permitted provided that the following conditions * are met: * - Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * - Neither the name of salesforce.com, inc. nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission of salesforce.com, inc. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ package com.salesforce.androidsdk.config; import java.io.IOException; import java.util.Arrays; import java.util.Scanner; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import android.content.Context; import android.content.res.Resources; import android.text.TextUtils; import com.salesforce.androidsdk.R; import com.salesforce.androidsdk.app.SalesforceSDKManager; import com.salesforce.androidsdk.config.RuntimeConfig.ConfigKey; /** * Class encapsulating the application configuration (consumer key, oauth scopes, refresh behavior). * * @author wmathurin */ public class BootConfig { // We expect a assets/www/bootconfig.json file to be provided by hybrid apps. private static final String HYBRID_BOOTCONFIG_PATH = "www" + System.getProperty("file.separator") + "bootconfig.json"; // bootconfig.json should contain a map with the following keys. private static final String REMOTE_ACCESS_CONSUMER_KEY = "remoteAccessConsumerKey"; private static final String OAUTH_REDIRECT_URI = "oauthRedirectURI"; private static final String OAUTH_SCOPES = "oauthScopes"; private static final String IS_LOCAL = "isLocal"; private static final String START_PAGE = "startPage"; private static final String ERROR_PAGE = "errorPage"; private static final String SHOULD_AUTHENTICATE = "shouldAuthenticate"; private static final String ATTEMPT_OFFLINE_LOAD = "attemptOfflineLoad"; private static final String PUSH_NOTIFICATION_CLIENT_ID = "androidPushNotificationClientId"; // Default for optional configs. 
private static final boolean DEFAULT_SHOULD_AUTHENTICATE = true; private static final boolean DEFAULT_ATTEMPT_OFFLINE_LOAD = true; private String remoteAccessConsumerKey; private String oauthRedirectURI; private String[] oauthScopes; private boolean isLocal; private String startPage; private String errorPage; private boolean shouldAuthenticate; private boolean attemptOfflineLoad; private String pushNotificationClientId; private static BootConfig INSTANCE = null; /** * Method to (build and) get the singleton instance. * * @param ctx Context. * @return BootConfig instance. */ public static BootConfig getBootConfig(Context ctx) { if (INSTANCE == null) { INSTANCE = new BootConfig(); if (SalesforceSDKManager.getInstance().isHybrid()) { INSTANCE.readFromJSON(ctx); } else { INSTANCE.readFromXML(ctx); } INSTANCE.readFromRuntimeConfig(ctx); } return INSTANCE; } /** * Use runtime configurations (from MDM provider) if any * @param ctx */ private void readFromRuntimeConfig(Context ctx) { RuntimeConfig runtimeConfig = RuntimeConfig.getRuntimeConfig(ctx); String mdmRemoteAccessConsumeKey = runtimeConfig.getString(ConfigKey.ManagedAppOAuthID); String mdmOauthRedirectURI = runtimeConfig.getString(ConfigKey.ManagedAppCallbackURL); if (!TextUtils.isEmpty(mdmRemoteAccessConsumeKey)) { remoteAccessConsumerKey = mdmRemoteAccessConsumeKey; } if (!TextUtils.isEmpty(mdmOauthRedirectURI)) { oauthRedirectURI = mdmOauthRedirectURI; } } /** * @return boot config as JSONObject * @throws JSONException */ public JSONObject asJSON() throws JSONException { JSONObject config = new JSONObject(); config.put(REMOTE_ACCESS_CONSUMER_KEY, remoteAccessConsumerKey); config.put(OAUTH_REDIRECT_URI, oauthRedirectURI); config.put(OAUTH_SCOPES, new JSONArray(Arrays.asList(oauthScopes))); config.put(IS_LOCAL, isLocal); config.put(START_PAGE, startPage); config.put(ERROR_PAGE, errorPage); if (!TextUtils.isEmpty(pushNotificationClientId)) { config.put(PUSH_NOTIFICATION_CLIENT_ID, pushNotificationClientId); } 
config.put(SHOULD_AUTHENTICATE, shouldAuthenticate); config.put(ATTEMPT_OFFLINE_LOAD, attemptOfflineLoad); return config; } /** * Initializes this BootConfig object by reading the content of bootconfig.json. * * @param ctx Context. */ private void readFromJSON(Context ctx) { final String jsonStr = readBootConfigFile(ctx); parseBootConfigStr(jsonStr); } /** * Initializes this BootConfig object by reading the config from XML. * * @param ctx Context. */ private void readFromXML(Context ctx) { final Resources res = ctx.getResources(); remoteAccessConsumerKey = res.getString(R.string.remoteAccessConsumerKey); oauthRedirectURI = res.getString(R.string.oauthRedirectURI); oauthScopes = res.getStringArray(R.array.oauthScopes); pushNotificationClientId = res.getString(R.string.androidPushNotificationClientId); } /** * Reads the contents of the boot config file. * * @param ctx Context. * @return String content of bootconfig.json. */ private String readBootConfigFile(Context ctx) { Scanner scanner = null; try { scanner = new Scanner(ctx.getAssets().open(HYBRID_BOOTCONFIG_PATH)); // Good trick to get a string from a stream (http://weblogs.java.net/blog/pat/archive/2004/10/stupid_scanner_1.html). return scanner.useDelimiter("\\A").next(); } catch (IOException e) { throw new BootConfigException("Failed to open " + HYBRID_BOOTCONFIG_PATH, e); } finally { if (scanner != null) { scanner.close(); } } } /** * Initializes this BootConfig object by parsing a JSON string. * * @param jsonStr JSON string. */ private void parseBootConfigStr(String jsonStr) { try { final JSONObject config = new JSONObject(jsonStr); // Required fields. 
remoteAccessConsumerKey = config.getString(REMOTE_ACCESS_CONSUMER_KEY); oauthRedirectURI = config.getString(OAUTH_REDIRECT_URI); final JSONArray jsonScopes = config.getJSONArray(OAUTH_SCOPES); oauthScopes = new String[jsonScopes.length()]; for (int i = 0; i < oauthScopes.length; i++) { oauthScopes[i] = jsonScopes.getString(i); } isLocal = config.getBoolean(IS_LOCAL); startPage = config.getString(START_PAGE); errorPage = config.getString(ERROR_PAGE); // Optional fields. pushNotificationClientId = config.optString(PUSH_NOTIFICATION_CLIENT_ID); shouldAuthenticate = config.optBoolean(SHOULD_AUTHENTICATE, DEFAULT_SHOULD_AUTHENTICATE); attemptOfflineLoad = config.optBoolean(ATTEMPT_OFFLINE_LOAD, DEFAULT_ATTEMPT_OFFLINE_LOAD); } catch (JSONException e) { throw new BootConfigException("Failed to parse " + HYBRID_BOOTCONFIG_PATH, e); } } /** * Returns the consumer key value specified for your remote access object or connected app. * * @return Consumer key value specified for your remote access object or connected app. */ public String getRemoteAccessConsumerKey() { return remoteAccessConsumerKey; } /** * Returns the redirect URI value specified for your remote access object or connected app. * * @return Redirect URI value specified for your remote access object or connected app. */ public String getOauthRedirectURI() { return oauthRedirectURI; } /** * Returns the authorization/access scope(s) that the application needs to ask for at login. * @return Authorization/access scope(s) that the application needs to ask for at login. */ public String[] getOauthScopes() { return oauthScopes; } /** * Returns if the start page is local or a VF page. * * @return True - if start page is in assets/www, False - if it's a VF page. */ public boolean isLocal() { return isLocal; } /** * Returns the path to the start page (local or remote). * Example: index.html or /apex/basicpage. * * @return Path to start page (local or remote). 
*/ public String getStartPage() { return startPage; } /** * Returns the path to the local error page. * * @return Path to local error page. */ public String getErrorPage() { return errorPage; } /** * Returns whether the app should go through login flow the first time or not. * * @return True - if the app should go through login flow, False - otherwise. */ public boolean shouldAuthenticate() { return shouldAuthenticate; } /** * Returns whether the app should attempt to load cached content when offline. * * @return True - if the app should attempt to load cached content, False - otherwise. */ public boolean attemptOfflineLoad() { return attemptOfflineLoad; } /** * Returns the push notification client ID. * * @return Push notification client ID. */ public String getPushNotificationClientId() { return pushNotificationClientId; } /** * Exception thrown for all bootconfig parsing errors. */ static public class BootConfigException extends RuntimeException { private static final long serialVersionUID = 1L; public BootConfigException(String msg, Throwable cause) { super(msg, cause); } } }
/* * Copyright 2002-2007,2009 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.opensymphony.xwork2.validator; import com.opensymphony.xwork2.ActionInvocation; import com.opensymphony.xwork2.ActionProxy; import com.opensymphony.xwork2.Validateable; import com.opensymphony.xwork2.inject.Inject; import com.opensymphony.xwork2.interceptor.MethodFilterInterceptor; import com.opensymphony.xwork2.interceptor.PrefixMethodInvocationUtil; import com.opensymphony.xwork2.util.logging.Logger; import com.opensymphony.xwork2.util.logging.LoggerFactory; /** * <!-- START SNIPPET: description --> * * This interceptor runs the action through the standard validation framework, which in turn checks the action against * any validation rules (found in files such as <i>ActionClass-validation.xml</i>) and adds field-level and action-level * error messages (provided that the action implements {@link com.opensymphony.xwork2.ValidationAware}). This interceptor * is often one of the last (or second to last) interceptors applied in a stack, as it assumes that all values have * already been set on the action. * * <p/>This interceptor does nothing if the name of the method being invoked is specified in the <b>excludeMethods</b> * parameter. <b>excludeMethods</b> accepts a comma-delimited list of method names. 
For example, requests to
 * <b>foo!input.action</b> and <b>foo!back.action</b> will be skipped by this interceptor if you set the
 * <b>excludeMethods</b> parameter to "input, back".
 *
 * <p/> The workflow of the action request does not change due to this interceptor. Rather,
 * this interceptor is often used in conjunction with the <b>workflow</b> interceptor.
 *
 * <p/>
 *
 * <b>NOTE:</b> As this interceptor extends MethodFilterInterceptor, it is capable of
 * deciding if it is applicable only to selective methods in the action class. See
 * <code>MethodFilterInterceptor</code> for more info.
 *
 * <!-- END SNIPPET: description -->
 *
 * <p/> <u>Interceptor parameters:</u>
 *
 * <!-- START SNIPPET: parameters -->
 *
 * <ul>
 *
 * <li>alwaysInvokeValidate - Defaults to true. If true validate() method will always
 * be invoked, otherwise it will not.</li>
 *
 * <li>programmatic - Defaults to true. If true and the action is Validateable call validate(),
 * and any method that starts with "validate".
 * </li>
 *
 * <li>declarative - Defaults to true. Perform validation based on xml or annotations.</li>
 *
 * </ul>
 *
 * <!-- END SNIPPET: parameters -->
 *
 * <p/> <u>Extending the interceptor:</u>
 *
 * <p/>
 *
 * <!-- START SNIPPET: extending -->
 *
 * There are no known extension points for this interceptor.
* * <!-- END SNIPPET: extending --> * * <p/> <u>Example code:</u> * * <pre> * <!-- START SNIPPET: example --> * * &lt;action name="someAction" class="com.examples.SomeAction"&gt; * &lt;interceptor-ref name="params"/&gt; * &lt;interceptor-ref name="validation"/&gt; * &lt;interceptor-ref name="workflow"/&gt; * &lt;result name="success"&gt;good_result.ftl&lt;/result&gt; * &lt;/action&gt; * * &lt;-- in the following case myMethod of the action class will not * get validated --&gt; * &lt;action name="someAction" class="com.examples.SomeAction"&gt; * &lt;interceptor-ref name="params"/&gt; * &lt;interceptor-ref name="validation"&gt; * &lt;param name="excludeMethods"&gt;myMethod&lt;/param&gt; * &lt;/interceptor-ref&gt; * &lt;interceptor-ref name="workflow"/&gt; * &lt;result name="success"&gt;good_result.ftl&lt;/result&gt; * &lt;/action&gt; * * &lt;-- in the following case only annotated methods of the action class will * be validated --&gt; * &lt;action name="someAction" class="com.examples.SomeAction"&gt; * &lt;interceptor-ref name="params"/&gt; * &lt;interceptor-ref name="validation"&gt; * &lt;param name="validateAnnotatedMethodOnly"&gt;true&lt;/param&gt; * &lt;/interceptor-ref&gt; * &lt;interceptor-ref name="workflow"/&gt; * &lt;result name="success"&gt;good_result.ftl&lt;/result&gt; * &lt;/action&gt; * * * <!-- END SNIPPET: example --> * </pre> * * @author Jason Carreira * @author Rainer Hermanns * @author <a href='mailto:the_mindstorm[at]evolva[dot]ro'>Alexandru Popescu</a> * @see ActionValidatorManager * @see com.opensymphony.xwork2.interceptor.DefaultWorkflowInterceptor */ public class ValidationInterceptor extends MethodFilterInterceptor { private boolean validateAnnotatedMethodOnly; private ActionValidatorManager actionValidatorManager; private static final Logger LOG = LoggerFactory.getLogger(ValidationInterceptor.class); private final static String VALIDATE_PREFIX = "validate"; private final static String ALT_VALIDATE_PREFIX = "validateDo"; private boolean 
alwaysInvokeValidate = true; private boolean programmatic = true; private boolean declarative = true; @Inject public void setActionValidatorManager(ActionValidatorManager mgr) { this.actionValidatorManager = mgr; } /** * Determines if {@link Validateable}'s <code>validate()</code> should be called, * as well as methods whose name that start with "validate". Defaults to "true". * * @param programmatic <tt>true</tt> then <code>validate()</code> is invoked. */ public void setProgrammatic(boolean programmatic) { this.programmatic = programmatic; } /** * Determines if validation based on annotations or xml should be performed. Defaults * to "true". * * @param declarative <tt>true</tt> then perform validation based on annotations or xml. */ public void setDeclarative(boolean declarative) { this.declarative = declarative; } /** * Determines if {@link Validateable}'s <code>validate()</code> should always * be invoked. Default to "true". * * @param alwaysInvokeValidate <tt>true</tt> then <code>validate()</code> is always invoked. */ public void setAlwaysInvokeValidate(String alwaysInvokeValidate) { this.alwaysInvokeValidate = Boolean.parseBoolean(alwaysInvokeValidate); } /** * Gets if <code>validate()</code> should always be called or only per annotated method. * * @return <tt>true</tt> to only validate per annotated method, otherwise <tt>false</tt> to always validate. */ public boolean isValidateAnnotatedMethodOnly() { return validateAnnotatedMethodOnly; } /** * Determine if <code>validate()</code> should always be called or only per annotated method. * Default to <tt>false</tt>. * * @param validateAnnotatedMethodOnly <tt>true</tt> to only validate per annotated method, otherwise <tt>false</tt> to always validate. 
*/ public void setValidateAnnotatedMethodOnly(boolean validateAnnotatedMethodOnly) { this.validateAnnotatedMethodOnly = validateAnnotatedMethodOnly; } /** * Gets the current action and its context and delegates to {@link ActionValidatorManager} proper validate method. * * @param invocation the execution state of the Action. * @throws Exception if an error occurs validating the action. */ protected void doBeforeInvocation(ActionInvocation invocation) throws Exception { Object action = invocation.getAction(); ActionProxy proxy = invocation.getProxy(); //the action name has to be from the url, otherwise validators that use aliases, like //MyActio-someaction-validator.xml will not be found, see WW-3194 //UPDATE: see WW-3753 String context = this.getValidationContext(proxy); String method = proxy.getMethod(); if (log.isDebugEnabled()) { log.debug("Validating " + invocation.getProxy().getNamespace() + "/" + invocation.getProxy().getActionName() + " with method "+ method +"."); } if (declarative) { if (validateAnnotatedMethodOnly) { actionValidatorManager.validate(action, context, method); } else { actionValidatorManager.validate(action, context); } } if (action instanceof Validateable && programmatic) { // keep exception that might occured in validateXXX or validateDoXXX Exception exception = null; Validateable validateable = (Validateable) action; if (LOG.isDebugEnabled()) { LOG.debug("Invoking validate() on action "+validateable); } try { PrefixMethodInvocationUtil.invokePrefixMethod( invocation, new String[] { VALIDATE_PREFIX, ALT_VALIDATE_PREFIX }); } catch(Exception e) { // If any exception occurred while doing reflection, we want // validate() to be executed if (LOG.isWarnEnabled()) { LOG.warn("an exception occured while executing the prefix method", e); } exception = e; } if (alwaysInvokeValidate) { validateable.validate(); } if (exception != null) { // rethrow if something is wrong while doing validateXXX / validateDoXXX throw exception; } } } @Override protected 
String doIntercept(ActionInvocation invocation) throws Exception { doBeforeInvocation(invocation); return invocation.invoke(); } /** * Returns the context that will be used by the * {@link ActionValidatorManager} to associate the action invocation with * the appropriate {@link ValidatorConfig ValidatorConfigs}. * <p> * The context returned is used in the pattern * <i>ActionClass-context-validation.xml</i> * <p> * The default context is the action name from the URL, but the method can * be overridden to implement custom contexts. * <p> * This can be useful in cases in which a single action and a single model * require vastly different validation based on some condition. * * @return the Context */ protected String getValidationContext(ActionProxy proxy) { // This method created for WW-3753 return proxy.getActionName(); } }
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2000, 2010 Oracle and/or its affiliates.  All rights reserved.
 *
 */

package com.sleepycat.collections;

import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;

import com.sleepycat.bind.EntityBinding;
import com.sleepycat.bind.EntryBinding;
import com.sleepycat.je.Database;
/* <!-- begin JE only --> */
import com.sleepycat.je.EnvironmentFailureException; // for javadoc
import com.sleepycat.je.OperationFailureException; // for javadoc
/* <!-- end JE only --> */
import com.sleepycat.je.OperationStatus;
import com.sleepycat.util.RuntimeExceptionWrapper;
import com.sleepycat.util.keyrange.KeyRangeException;

/**
 * A Map view of a {@link Database}.
 *
 * <p>In addition to the standard Map methods, this class provides the
 * following methods for stored maps only.  Note that the use of these methods
 * is not compatible with the standard Java collections interface.</p>
 * <ul>
 * <li>{@link #duplicates}</li>
 * <li>{@link #duplicatesMap}</li>
 * <li>{@link #append}</li>
 * </ul>
 *
 * @author Mark Hayes
 */
public class StoredMap<K, V> extends StoredContainer
    implements ConcurrentMap<K, V> {

    // Derived collection views, assigned eagerly by initView() during
    // construction rather than lazily on first access.
    private StoredKeySet<K> keySet;
    private StoredEntrySet<K, V> entrySet;
    private StoredValueSet<V> valueSet;

    /**
     * Creates a map view of a {@link Database}.
     *
     * @param database is the Database underlying the new collection.
     *
     * @param keyBinding is the binding used to translate between key buffers
     * and key objects.
     *
     * @param valueBinding is the binding used to translate between value
     * buffers and value objects.
     *
     * @param writeAllowed is true to create a read-write collection or false
     * to create a read-only collection.
     *
     * @throws IllegalArgumentException if formats are not consistently
     * defined or a parameter is invalid.
     *
     * @throws RuntimeExceptionWrapper if a checked exception is thrown,
     * including a {@code DatabaseException} on BDB (C edition).
     */
    public StoredMap(Database database,
                     EntryBinding<K> keyBinding,
                     EntryBinding<V> valueBinding,
                     boolean writeAllowed) {
        // No entity binding, no key assigner for this basic view.
        super(new DataView(database, keyBinding, valueBinding, null,
                           writeAllowed, null));
        initView();
    }

    /**
     * Creates a map view of a {@link Database} with a {@link
     * PrimaryKeyAssigner}.  Writing is allowed for the created map.
     *
     * @param database is the Database underlying the new collection.
     *
     * @param keyBinding is the binding used to translate between key buffers
     * and key objects.
     *
     * @param valueBinding is the binding used to translate between value
     * buffers and value objects.
     *
     * @param keyAssigner is used by the {@link #append} method to assign
     * primary keys.
     *
     * @throws IllegalArgumentException if formats are not consistently
     * defined or a parameter is invalid.
     *
     * @throws RuntimeExceptionWrapper if a checked exception is thrown,
     * including a {@code DatabaseException} on BDB (C edition).
     */
    public StoredMap(Database database,
                     EntryBinding<K> keyBinding,
                     EntryBinding<V> valueBinding,
                     PrimaryKeyAssigner keyAssigner) {
        // writeAllowed is implicitly true when a key assigner is supplied.
        super(new DataView(database, keyBinding, valueBinding, null,
                           true, keyAssigner));
        initView();
    }

    /**
     * Creates a map entity view of a {@link Database}.
     *
     * @param database is the Database underlying the new collection.
     *
     * @param keyBinding is the binding used to translate between key buffers
     * and key objects.
     *
     * @param valueEntityBinding is the binding used to translate between
     * key/value buffers and entity value objects.
     *
     * @param writeAllowed is true to create a read-write collection or false
     * to create a read-only collection.
     *
     * @throws IllegalArgumentException if formats are not consistently
     * defined or a parameter is invalid.
     *
     * @throws RuntimeExceptionWrapper if a checked exception is thrown,
     * including a {@code DatabaseException} on BDB (C edition).
     */
    public StoredMap(Database database,
                     EntryBinding<K> keyBinding,
                     EntityBinding<V> valueEntityBinding,
                     boolean writeAllowed) {
        // Entity binding replaces the plain value binding here.
        super(new DataView(database, keyBinding, null, valueEntityBinding,
                           writeAllowed, null));
        initView();
    }

    /**
     * Creates a map entity view of a {@link Database} with a {@link
     * PrimaryKeyAssigner}.  Writing is allowed for the created map.
     *
     * @param database is the Database underlying the new collection.
     *
     * @param keyBinding is the binding used to translate between key buffers
     * and key objects.
     *
     * @param valueEntityBinding is the binding used to translate between
     * key/value buffers and entity value objects.
     *
     * @param keyAssigner is used by the {@link #append} method to assign
     * primary keys.
     *
     * @throws IllegalArgumentException if formats are not consistently
     * defined or a parameter is invalid.
     *
     * @throws RuntimeExceptionWrapper if a checked exception is thrown,
     * including a {@code DatabaseException} on BDB (C edition).
     */
    public StoredMap(Database database,
                     EntryBinding<K> keyBinding,
                     EntityBinding<V> valueEntityBinding,
                     PrimaryKeyAssigner keyAssigner) {
        super(new DataView(database, keyBinding, null, valueEntityBinding,
                           true, keyAssigner));
        initView();
    }

    // Package-private constructor used when deriving a map from an
    // existing DataView (e.g. submaps / cloned views).
    StoredMap(DataView view) {
        super(view);
        initView();
    }

    /**
     * Override this method to initialize view-dependent fields.
     */
    void initAfterClone() {
        initView();
    }

    /**
     * The keySet, entrySet and valueSet are created during Map construction
     * rather than lazily when requested (as done with the java.util.Map
     * implementations).  This is done to avoid synchronization every time they
     * are requested.  Since they are requested often but a StoredMap is
     * created infrequently, this gives the best performance.  The additional
     * views are small objects and are cheap to construct.
     */
    private void initView() {

        /* entrySet — sorted variant when the view supports key ranges. */
        if (areKeyRangesAllowed()) {
            entrySet = new StoredSortedEntrySet<K, V>(view);
        } else {
            entrySet = new StoredEntrySet<K, V>(view);
        }

        /* keySet */
        DataView newView = view.keySetView();
        if (areKeyRangesAllowed()) {
            keySet = new StoredSortedKeySet<K>(newView);
        } else {
            keySet = new StoredKeySet<K>(newView);
        }

        /* valueSet — sorted only when a key can be derived from a value. */
        newView = view.valueSetView();
        if (areKeyRangesAllowed() && newView.canDeriveKeyFromValue()) {
            valueSet = new StoredSortedValueSet<V>(newView);
        } else {
            valueSet = new StoredValueSet<V>(newView);
        }
    }

    /**
     * Returns the value to which this map maps the specified key.  If
     * duplicates are allowed, this method returns the first duplicate, in the
     * order in which duplicates are configured, that maps to the specified
     * key.
     *
     * This method conforms to the {@link Map#get} interface.
     *
     * <!-- begin JE only -->
     * @throws OperationFailureException if one of the <a
     * href="../je/OperationFailureException.html#readFailures">Read Operation
     * Failures</a> occurs.
     *
     * @throws EnvironmentFailureException if an unexpected, internal or
     * environment-wide failure occurs.
     * <!-- end JE only -->
     *
     * @throws RuntimeExceptionWrapper if a checked exception is thrown,
     * including a {@code DatabaseException} on BDB (C edition).
     */
    public V get(Object key) {
        return (V) getValue(key);
    }

    /**
     * Associates the specified value with the specified key in this map
     * (optional operation).  If duplicates are allowed and the specified key
     * is already mapped to a value, this method appends the new duplicate
     * after the existing duplicates.  This method conforms to the {@link
     * Map#put} interface.
     *
     * <p>The key parameter may be null if an entity binding is used and the
     * key will be derived from the value (entity) parameter.  If an entity
     * binding is used and the key parameter is non-null, then the key
     * parameter must be equal to the key derived from the value parameter.</p>
     *
     * @return the previous value associated with specified key, or null if
     * there was no mapping for the key or if duplicates are allowed.
     *
     * <!-- begin JE only -->
     * @throws OperationFailureException if one of the <a
     * href="../je/OperationFailureException.html#writeFailures">Write
     * Operation Failures</a> occurs.
     *
     * @throws EnvironmentFailureException if an unexpected, internal or
     * environment-wide failure occurs.
     * <!-- end JE only -->
     *
     * @throws UnsupportedOperationException if the collection is indexed, or
     * if the collection is read-only.
     *
     * @throws IllegalArgumentException if an entity value binding is used and
     * the primary key of the value given is different than the existing stored
     * primary key.
     *
     * @throws RuntimeExceptionWrapper if a checked exception is thrown,
     * including a {@code DatabaseException} on BDB (C edition).
     */
    public V put(K key, V value) {
        return (V) putKeyValue(key, value);
    }

    /**
     * Appends a given value returning the newly assigned key.  If a {@link
     * PrimaryKeyAssigner} is associated with Store for this map, it will be
     * used to assigned the returned key.  Otherwise the Store must be a QUEUE
     * or RECNO database and the next available record number is assigned as
     * the key.  This method does not exist in the standard {@link Map}
     * interface.
     *
     * <p>Note that for the JE product, QUEUE and RECNO databases are not
     * supported, and therefore a PrimaryKeyAssigner must be associated with
     * the map in order to call this method.</p>
     *
     * @param value the value to be appended.
     *
     * @return the assigned key.
     *
     * <!-- begin JE only -->
     * @throws OperationFailureException if one of the <a
     * href="../je/OperationFailureException.html#writeFailures">Write
     * Operation Failures</a> occurs.
     *
     * @throws EnvironmentFailureException if an unexpected, internal or
     * environment-wide failure occurs.
     * <!-- end JE only -->
     *
     * @throws UnsupportedOperationException if the collection is indexed, or
     * if the collection is read-only, or if the Store has no {@link
     * PrimaryKeyAssigner} and is not a QUEUE or RECNO database.
     *
     * @throws RuntimeExceptionWrapper if a checked exception is thrown,
     * including a {@code DatabaseException} on BDB (C edition).
     */
    public K append(V value) {

        boolean doAutoCommit = beginAutoCommit();
        try {
            // The assigned key is returned through this one-element array.
            Object[] key = new Object[1];
            view.append(value, key, null);
            commitAutoCommit(doAutoCommit);
            return (K) key[0];
        } catch (Exception e) {
            throw handleException(e, doAutoCommit);
        }
    }

    /**
     * Removes the mapping for this key from this map if present (optional
     * operation).  If duplicates are allowed, this method removes all
     * duplicates for the given key.  This method conforms to the {@link
     * Map#remove} interface.
     *
     * <!-- begin JE only -->
     * @throws OperationFailureException if one of the <a
     * href="../je/OperationFailureException.html#writeFailures">Write
     * Operation Failures</a> occurs.
     *
     * @throws EnvironmentFailureException if an unexpected, internal or
     * environment-wide failure occurs.
     * <!-- end JE only -->
     *
     * @throws UnsupportedOperationException if the collection is read-only.
     *
     * @throws RuntimeExceptionWrapper if a checked exception is thrown,
     * including a {@code DatabaseException} on BDB (C edition).
     */
    public V remove(Object key) {
        // removeKey reports the previously stored value through oldVal[0].
        Object[] oldVal = new Object[1];
        removeKey(key, oldVal);
        return (V) oldVal[0];
    }

    /**
     * If the specified key is not already associated with a value, associate
     * it with the given value.  This method conforms to the {@link
     * ConcurrentMap#putIfAbsent} interface.
     *
     * <!-- begin JE only -->
     * @throws OperationFailureException if one of the <a
     * href="../je/OperationFailureException.html#writeFailures">Write
     * Operation Failures</a> occurs.
     *
     * @throws EnvironmentFailureException if an unexpected, internal or
     * environment-wide failure occurs.
* <!-- end JE only --> * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public V putIfAbsent(K key, V value) { DataCursor cursor = null; boolean doAutoCommit = beginAutoCommit(); try { cursor = new DataCursor(view, true); V oldValue; while (true) { OperationStatus status = cursor.putNoOverwrite(key, value, false /*useCurrentKey*/); if (status == OperationStatus.SUCCESS) { /* We inserted the key. Return null. */ oldValue = null; break; } else { status = cursor.getSearchKey(key, null /*value*/, false /*lockForWrite*/); if (status == OperationStatus.SUCCESS) { /* The key is present. Return the current value. */ oldValue = (V) cursor.getCurrentValue(); break; } else { /* * If Serializable isolation is not configured, another * thread can delete the record after our attempt to * insert it failed above. Loop back and try again. */ continue; } } } closeCursor(cursor); commitAutoCommit(doAutoCommit); return oldValue; } catch (Exception e) { closeCursor(cursor); throw handleException(e, doAutoCommit); } } /** * Remove entry for key only if currently mapped to given value. This * method conforms to the {@link ConcurrentMap#remove(Object,Object)} * interface. * * <!-- begin JE only --> * @throws OperationFailureException if one of the <a * href="../je/OperationFailureException.html#writeFailures">Write * Operation Failures</a> occurs. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * <!-- end JE only --> * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). 
*/ public boolean remove(Object key, Object value) { DataCursor cursor = null; boolean doAutoCommit = beginAutoCommit(); try { cursor = new DataCursor(view, true, key); OperationStatus status = cursor.getFirst(true /*lockForWrite*/); boolean removed; if (status == OperationStatus.SUCCESS && cursor.getCurrentValue().equals(value)) { cursor.delete(); removed = true; } else { removed = false; } closeCursor(cursor); commitAutoCommit(doAutoCommit); return removed; } catch (Exception e) { closeCursor(cursor); throw handleException(e, doAutoCommit); } } /** * Replace entry for key only if currently mapped to some value. This * method conforms to the {@link ConcurrentMap#replace(Object,Object)} * interface. * * <!-- begin JE only --> * @throws OperationFailureException if one of the <a * href="../je/OperationFailureException.html#writeFailures">Write * Operation Failures</a> occurs. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * <!-- end JE only --> * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public V replace(K key, V value) { DataCursor cursor = null; boolean doAutoCommit = beginAutoCommit(); try { cursor = new DataCursor(view, true, key); OperationStatus status = cursor.getFirst(true /*lockForWrite*/); V oldValue; if (status == OperationStatus.SUCCESS) { oldValue = (V) cursor.getCurrentValue(); cursor.putCurrent(value); } else { oldValue = null; } closeCursor(cursor); commitAutoCommit(doAutoCommit); return oldValue; } catch (Exception e) { closeCursor(cursor); throw handleException(e, doAutoCommit); } } /** * Replace entry for key only if currently mapped to given value. This * method conforms to the {@link * ConcurrentMap#replace(Object,Object,Object)} interface. 
* * <!-- begin JE only --> * @throws OperationFailureException if one of the <a * href="../je/OperationFailureException.html#writeFailures">Write * Operation Failures</a> occurs. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * <!-- end JE only --> * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public boolean replace(K key, V oldValue, V newValue) { DataCursor cursor = null; boolean doAutoCommit = beginAutoCommit(); try { cursor = new DataCursor(view, true, key); OperationStatus status = cursor.getFirst(true /*lockForWrite*/); boolean replaced; if (status == OperationStatus.SUCCESS && cursor.getCurrentValue().equals(oldValue)) { cursor.putCurrent(newValue); replaced = true; } else { replaced = false; } closeCursor(cursor); commitAutoCommit(doAutoCommit); return replaced; } catch (Exception e) { closeCursor(cursor); throw handleException(e, doAutoCommit); } } /** * Returns true if this map contains the specified key. This method * conforms to the {@link Map#containsKey} interface. * * <!-- begin JE only --> * @throws OperationFailureException if one of the <a * href="../je/OperationFailureException.html#readFailures">Read Operation * Failures</a> occurs. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * <!-- end JE only --> * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public boolean containsKey(Object key) { return super.containsKey(key); } /** * Returns true if this map contains the specified value. When an entity * binding is used, this method returns whether the map contains the * primary key and value mapping of the entity. This method conforms to * the {@link Map#containsValue} interface. 
* * <!-- begin JE only --> * @throws OperationFailureException if one of the <a * href="../je/OperationFailureException.html#readFailures">Read Operation * Failures</a> occurs. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * <!-- end JE only --> * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public boolean containsValue(Object value) { return super.containsValue(value); } /** * Copies all of the mappings from the specified map to this map (optional * operation). When duplicates are allowed, the mappings in the specified * map are effectively appended to the existing mappings in this map, that * is no previously existing mappings in this map are replaced. This * method conforms to the {@link Map#putAll} interface. * * <!-- begin JE only --> * @throws OperationFailureException if one of the <a * href="../je/OperationFailureException.html#writeFailures">Write * Operation Failures</a> occurs. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * <!-- end JE only --> * * @throws UnsupportedOperationException if the collection is read-only, or * if the collection is indexed. * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public void putAll(Map<? extends K, ? extends V> map) { boolean doAutoCommit = beginAutoCommit(); Iterator i = null; try { Collection coll = map.entrySet(); i = storedOrExternalIterator(coll); while (i.hasNext()) { Map.Entry entry = (Map.Entry) i.next(); putKeyValue(entry.getKey(), entry.getValue()); } StoredIterator.close(i); commitAutoCommit(doAutoCommit); } catch (Exception e) { StoredIterator.close(i); throw handleException(e, doAutoCommit); } } /** * Returns a set view of the keys contained in this map. 
A {@link * java.util.SortedSet} is returned if the map supports key ranges. The * returned collection will be read-only if the map is read-only. This * method conforms to the {@link Map#keySet()} interface. * * <p>Note that the return value is a StoredCollection and must be treated * as such; for example, its iterators must be explicitly closed.</p> * * @return a {@link StoredKeySet} or a {@link StoredSortedKeySet} for this * map. * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). * * @see #areKeyRangesAllowed * @see #isWriteAllowed */ public Set<K> keySet() { return keySet; } /** * Returns a set view of the mappings contained in this map. A {@link * java.util.SortedSet} is returned if the map supports key ranges. The * returned collection will be read-only if the map is read-only. This * method conforms to the {@link Map#entrySet()} interface. * * <p>Note that the return value is a StoredCollection and must be treated * as such; for example, its iterators must be explicitly closed.</p> * * @return a {@link StoredEntrySet} or a {@link StoredSortedEntrySet} for * this map. * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). * * @see #areKeyRangesAllowed * @see #isWriteAllowed */ public Set<Map.Entry<K, V>> entrySet() { return entrySet; } /** * Returns a collection view of the values contained in this map. A {@link * java.util.SortedSet} is returned if the map supports key ranges and the * value/entity binding can be used to derive the map's key from its * value/entity object. The returned collection will be read-only if the * map is read-only. This method conforms to the {@link Map#values()} * interface. 
* * <p>Note that the return value is a StoredCollection and must be treated * as such; for example, its iterators must be explicitly closed.</p> * * @return a {@link StoredValueSet} or a {@link StoredSortedValueSet} for * this map. * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). * * @see #areKeyRangesAllowed * @see #isWriteAllowed */ public Collection<V> values() { return valueSet; } /** * Returns a new collection containing the values mapped to the given key * in this map. This collection's iterator() method is particularly useful * for iterating over the duplicates for a given key, since this is not * supported by the standard Map interface. This method does not exist in * the standard {@link Map} interface. * * <p>If no mapping for the given key is present, an empty collection is * returned. If duplicates are not allowed, at most a single value will be * in the collection returned. If duplicates are allowed, the returned * collection's add() method may be used to add values for the given * key.</p> * * @param key is the key for which values are to be returned. * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public Collection<V> duplicates(K key) { try { DataView newView = view.valueSetView(key); return new StoredValueSet(newView); } catch (KeyRangeException e) { return Collections.EMPTY_SET; } catch (Exception e) { throw StoredContainer.convertException(e); } } /** * Returns a new map from primary key to value for the subset of records * having a given secondary key (duplicates). This method does not exist * in the standard {@link Map} interface. * * <p>If no mapping for the given key is present, an empty collection is * returned. If duplicates are not allowed, at most a single value will be * in the collection returned. 
If duplicates are allowed, the returned * collection's add() method may be used to add values for the given * key.</p> * * @param secondaryKey is the secondary key for which duplicates values * will be represented by the returned map. * * @param primaryKeyBinding is the binding used for keys in the returned * map. * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public <PK> Map<PK, V> duplicatesMap(K secondaryKey, EntryBinding primaryKeyBinding) { try { DataView newView = view.duplicatesView(secondaryKey, primaryKeyBinding); if (isOrdered()) { return new StoredSortedMap(newView); } else { return new StoredMap(newView); } } catch (Exception e) { throw StoredContainer.convertException(e); } } /** * Compares the specified object with this map for equality. A value * comparison is performed by this method and the stored values are * compared rather than calling the equals() method of each element. This * method conforms to the {@link Map#equals} interface. * * <!-- begin JE only --> * @throws OperationFailureException if one of the <a * href="../je/OperationFailureException.html#readFailures">Read Operation * Failures</a> occurs. * * @throws EnvironmentFailureException if an unexpected, internal or * environment-wide failure occurs. * <!-- end JE only --> * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public boolean equals(Object other) { if (other instanceof Map) { return entrySet().equals(((Map) other).entrySet()); } else { return false; } } /* * Add this in to keep FindBugs from whining at us about implementing * equals(), but not hashCode(). */ public int hashCode() { return super.hashCode(); } // Inherit javadoc public int size() { return values().size(); } /** * Converts the map to a string representation for debugging. 
WARNING: All * mappings will be converted to strings and returned and therefore the * returned string may be very large. * * @return the string representation. * * @throws RuntimeExceptionWrapper if a checked exception is thrown, * including a {@code DatabaseException} on BDB (C edition). */ public String toString() { return entrySet().toString(); } }
/*
 * Copyright 2002-2008 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.beans.factory.config;

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.PropertyValue;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringValueResolver;

/**
 * Visitor class for traversing {@link BeanDefinition} objects, in particular
 * the property values and constructor argument values contained in them,
 * resolving bean metadata values.
 *
 * <p>Used by {@link PropertyPlaceholderConfigurer} to parse all String values
 * contained in a BeanDefinition, resolving any placeholders found.
 *
 * @author Juergen Hoeller
 * @since 1.2
 * @see BeanDefinition
 * @see BeanDefinition#getPropertyValues
 * @see BeanDefinition#getConstructorArgumentValues
 * @see PropertyPlaceholderConfigurer
 */
public class BeanDefinitionVisitor {

	private StringValueResolver valueResolver;

	/**
	 * Create a new BeanDefinitionVisitor, applying the specified
	 * value resolver to all bean metadata values.
	 * @param valueResolver the StringValueResolver to apply
	 */
	public BeanDefinitionVisitor(StringValueResolver valueResolver) {
		Assert.notNull(valueResolver, "StringValueResolver must not be null");
		this.valueResolver = valueResolver;
	}

	/**
	 * Create a new BeanDefinitionVisitor for subclassing.
	 * Subclasses need to override the {@link #resolveStringValue} method.
	 */
	protected BeanDefinitionVisitor() {
	}


	/**
	 * Traverse the given BeanDefinition object and the MutablePropertyValues
	 * and ConstructorArgumentValues contained in them.
	 * @param beanDefinition the BeanDefinition object to traverse
	 * @see #resolveStringValue(String)
	 */
	public void visitBeanDefinition(BeanDefinition beanDefinition) {
		visitParentName(beanDefinition);
		visitBeanClassName(beanDefinition);
		visitFactoryBeanName(beanDefinition);
		visitFactoryMethodName(beanDefinition);
		visitScope(beanDefinition);
		visitPropertyValues(beanDefinition.getPropertyValues());
		ConstructorArgumentValues cas = beanDefinition.getConstructorArgumentValues();
		visitIndexedArgumentValues(cas.getIndexedArgumentValues());
		visitGenericArgumentValues(cas.getGenericArgumentValues());
	}

	/**
	 * Resolve the parent bean name, updating it only when resolution
	 * actually changed the value (avoids needless metadata writes).
	 */
	protected void visitParentName(BeanDefinition beanDefinition) {
		String parentName = beanDefinition.getParentName();
		if (parentName != null) {
			String resolvedName = resolveStringValue(parentName);
			if (!parentName.equals(resolvedName)) {
				beanDefinition.setParentName(resolvedName);
			}
		}
	}

	/**
	 * Resolve the bean class name, updating it only on change.
	 */
	protected void visitBeanClassName(BeanDefinition beanDefinition) {
		String beanClassName = beanDefinition.getBeanClassName();
		if (beanClassName != null) {
			String resolvedName = resolveStringValue(beanClassName);
			if (!beanClassName.equals(resolvedName)) {
				beanDefinition.setBeanClassName(resolvedName);
			}
		}
	}

	/**
	 * Resolve the factory bean name, updating it only on change.
	 */
	protected void visitFactoryBeanName(BeanDefinition beanDefinition) {
		String factoryBeanName = beanDefinition.getFactoryBeanName();
		if (factoryBeanName != null) {
			String resolvedName = resolveStringValue(factoryBeanName);
			if (!factoryBeanName.equals(resolvedName)) {
				beanDefinition.setFactoryBeanName(resolvedName);
			}
		}
	}

	/**
	 * Resolve the factory method name, updating it only on change.
	 */
	protected void visitFactoryMethodName(BeanDefinition beanDefinition) {
		// Fixed: previously read getFactoryBeanName() here, which resolved the
		// factory *bean* name and stored it as the factory *method* name.
		String factoryMethodName = beanDefinition.getFactoryMethodName();
		if (factoryMethodName != null) {
			String resolvedName = resolveStringValue(factoryMethodName);
			if (!factoryMethodName.equals(resolvedName)) {
				beanDefinition.setFactoryMethodName(resolvedName);
			}
		}
	}

	/**
	 * Resolve the scope name, updating it only on change.
	 */
	protected void visitScope(BeanDefinition beanDefinition) {
		String scope = beanDefinition.getScope();
		if (scope != null) {
			String resolvedScope = resolveStringValue(scope);
			if (!scope.equals(resolvedScope)) {
				beanDefinition.setScope(resolvedScope);
			}
		}
	}

	/**
	 * Resolve each property value, re-adding a property only when its
	 * resolved value differs from the original.
	 */
	protected void visitPropertyValues(MutablePropertyValues pvs) {
		PropertyValue[] pvArray = pvs.getPropertyValues();
		for (int i = 0; i < pvArray.length; i++) {
			PropertyValue pv = pvArray[i];
			Object newVal = resolveValue(pv.getValue());
			if (!ObjectUtils.nullSafeEquals(newVal, pv.getValue())) {
				pvs.addPropertyValue(pv.getName(), newVal);
			}
		}
	}

	/**
	 * Resolve each indexed constructor argument value in place.
	 * @param ias Map with Integer indexes as keys and ValueHolders as values
	 */
	protected void visitIndexedArgumentValues(Map ias) {
		for (Iterator it = ias.values().iterator(); it.hasNext();) {
			ConstructorArgumentValues.ValueHolder valueHolder =
					(ConstructorArgumentValues.ValueHolder) it.next();
			Object newVal = resolveValue(valueHolder.getValue());
			if (!ObjectUtils.nullSafeEquals(newVal, valueHolder.getValue())) {
				valueHolder.setValue(newVal);
			}
		}
	}

	/**
	 * Resolve each generic constructor argument value in place.
	 * @param gas List of ValueHolders
	 */
	protected void visitGenericArgumentValues(List gas) {
		for (Iterator it = gas.iterator(); it.hasNext();) {
			ConstructorArgumentValues.ValueHolder valueHolder =
					(ConstructorArgumentValues.ValueHolder) it.next();
			Object newVal = resolveValue(valueHolder.getValue());
			if (!ObjectUtils.nullSafeEquals(newVal, valueHolder.getValue())) {
				valueHolder.setValue(newVal);
			}
		}
	}

	/**
	 * Recursively resolve the given metadata value. Nested bean definitions,
	 * references, collections and typed/plain Strings are each handled;
	 * anything else is returned unchanged.
	 * @param value the metadata value to resolve
	 * @return the resolved value (may be the same instance, mutated in place,
	 * or a replacement object for references and plain Strings)
	 */
	protected Object resolveValue(Object value) {
		if (value instanceof BeanDefinition) {
			visitBeanDefinition((BeanDefinition) value);
		}
		else if (value instanceof BeanDefinitionHolder) {
			visitBeanDefinition(((BeanDefinitionHolder) value).getBeanDefinition());
		}
		else if (value instanceof RuntimeBeanReference) {
			RuntimeBeanReference ref = (RuntimeBeanReference) value;
			String newBeanName = resolveStringValue(ref.getBeanName());
			if (!newBeanName.equals(ref.getBeanName())) {
				// References are immutable: return a fresh reference instead.
				return new RuntimeBeanReference(newBeanName);
			}
		}
		else if (value instanceof List) {
			visitList((List) value);
		}
		else if (value instanceof Set) {
			visitSet((Set) value);
		}
		else if (value instanceof Map) {
			visitMap((Map) value);
		}
		else if (value instanceof TypedStringValue) {
			TypedStringValue typedStringValue = (TypedStringValue) value;
			String stringValue = typedStringValue.getValue();
			if (stringValue != null) {
				String visitedString = resolveStringValue(stringValue);
				typedStringValue.setValue(visitedString);
			}
		}
		else if (value instanceof String) {
			return resolveStringValue((String) value);
		}
		return value;
	}

	/**
	 * Resolve every element of the given list in place.
	 */
	protected void visitList(List listVal) {
		for (int i = 0; i < listVal.size(); i++) {
			Object elem = listVal.get(i);
			Object newVal = resolveValue(elem);
			if (!ObjectUtils.nullSafeEquals(newVal, elem)) {
				listVal.set(i, newVal);
			}
		}
	}

	/**
	 * Resolve every element of the given set. The set is rebuilt only when
	 * an element (or its hash code) actually changed, because mutating an
	 * element in place can corrupt its position in a hash-based set.
	 */
	protected void visitSet(Set setVal) {
		Set newContent = new LinkedHashSet();
		boolean entriesModified = false;
		for (Iterator it = setVal.iterator(); it.hasNext();) {
			Object elem = it.next();
			int elemHash = (elem != null ? elem.hashCode() : 0);
			Object newVal = resolveValue(elem);
			int newValHash = (newVal != null ? newVal.hashCode() : 0);
			newContent.add(newVal);
			entriesModified = entriesModified || (newVal != elem || newValHash != elemHash);
		}
		if (entriesModified) {
			setVal.clear();
			setVal.addAll(newContent);
		}
	}

	/**
	 * Resolve every key and value of the given map. As with {@link #visitSet},
	 * the map is rebuilt only when a key or value (or a key's hash code)
	 * actually changed.
	 */
	protected void visitMap(Map mapVal) {
		Map newContent = new LinkedHashMap();
		boolean entriesModified = false;
		for (Iterator it = mapVal.entrySet().iterator(); it.hasNext();) {
			Map.Entry entry = (Map.Entry) it.next();
			Object key = entry.getKey();
			int keyHash = (key != null ? key.hashCode() : 0);
			Object newKey = resolveValue(key);
			int newKeyHash = (newKey != null ? newKey.hashCode() : 0);
			Object val = entry.getValue();
			Object newVal = resolveValue(val);
			newContent.put(newKey, newVal);
			entriesModified = entriesModified || (newVal != val || newKey != key || newKeyHash != keyHash);
		}
		if (entriesModified) {
			mapVal.clear();
			mapVal.putAll(newContent);
		}
	}

	/**
	 * Resolve the given String value, for example parsing placeholders.
	 * @param strVal the original String value
	 * @return the resolved String value
	 */
	protected String resolveStringValue(String strVal) {
		if (this.valueResolver == null) {
			throw new IllegalStateException("No StringValueResolver specified - pass a resolver " +
					"object into the constructor or override the 'resolveStringValue' method");
		}
		return this.valueResolver.resolveStringValue(strVal);
	}

}
/*
 * Copyright 2012-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.config;

import static java.lang.Integer.parseInt;

import com.facebook.buck.io.file.MorePaths;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.parser.BuildTargetParseException;
import com.facebook.buck.parser.BuildTargetParser;
import com.facebook.buck.parser.BuildTargetPatternParser;
import com.facebook.buck.rules.CellPathResolver;
import com.facebook.buck.rules.DefaultBuildTargetSourcePath;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.RuleKeyDiagnosticsMode;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.util.Ansi;
import com.facebook.buck.util.AnsiEnvironmentChecking;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.PatternAndMessage;
import com.facebook.buck.util.cache.FileHashCacheMode;
import com.facebook.buck.util.config.Config;
import com.facebook.buck.util.environment.Architecture;
import com.facebook.buck.util.environment.Platform;
import com.facebook.buck.util.network.hostname.HostnameFetching;
import com.facebook.infer.annotation.PropagatesNullable;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Objects;
import com.google.common.base.Predicates;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.net.URI;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Pattern;

/** Structured representation of data read from a {@code .buckconfig} file. */
public class BuckConfig implements ConfigPathGetter {

  private static final String ALIAS_SECTION_HEADER = "alias";
  private static final String TEST_SECTION_HEADER = "test";

  private static final Float DEFAULT_THREAD_CORE_RATIO = Float.valueOf(1.0F);

  /**
   * This pattern is designed so that a fully-qualified build target cannot be a valid alias name
   * and vice-versa.
   */
  private static final Pattern ALIAS_PATTERN = Pattern.compile("[a-zA-Z_-][a-zA-Z0-9_-]*");

  // Config keys whose change should NOT force a daemon restart (see static init below).
  private static final ImmutableMap<String, ImmutableSet<String>>
      IGNORE_FIELDS_FOR_DAEMON_RESTART;

  private final CellPathResolver cellPathResolver;

  private final Architecture architecture;

  private final Config config;

  // Resolved alias -> build target mapping; built once in the constructor.
  private final ImmutableSetMultimap<String, BuildTarget> aliasToBuildTargetMap;

  private final ProjectFilesystem projectFilesystem;

  private final Platform platform;

  private final ImmutableMap<String, String> environment;

  // Lazily instantiates and caches ConfigView wrappers keyed by view class.
  private final ConfigViewCache<BuckConfig> viewCache = new ConfigViewCache<>(this);

  static {
    ImmutableMap.Builder<String, ImmutableSet<String>> ignoreFieldsForDaemonRestartBuilder =
        ImmutableMap.builder();
    ignoreFieldsForDaemonRestartBuilder.put(
        "apple", ImmutableSet.of("generate_header_symlink_tree_only"));
    ignoreFieldsForDaemonRestartBuilder.put("build", ImmutableSet.of("threads"));
    ignoreFieldsForDaemonRestartBuilder.put(
        "cache",
        ImmutableSet.of("dir", "dir_mode", "http_mode", "http_url", "mode", "slb_server_pool"));
    ignoreFieldsForDaemonRestartBuilder.put(
        "client", ImmutableSet.of("id", "skip-action-graph-cache"));
    ignoreFieldsForDaemonRestartBuilder.put(
        "log",
        ImmutableSet.of(
            "chrome_trace_generation", "compress_traces", "max_traces", "public_announcements"));
    ignoreFieldsForDaemonRestartBuilder.put("project", ImmutableSet.of("ide_prompt"));
    IGNORE_FIELDS_FOR_DAEMON_RESTART = ignoreFieldsForDaemonRestartBuilder.build();
  }

  public BuckConfig(
      Config config,
      ProjectFilesystem projectFilesystem,
      Architecture architecture,
      Platform platform,
      ImmutableMap<String, String> environment,
      CellPathResolver cellPathResolver) {
    this.cellPathResolver = cellPathResolver;
    this.config = config;
    this.projectFilesystem = projectFilesystem;
    this.architecture = architecture;

    // We could create this Map on demand; however, in practice, it is almost always needed when
    // BuckConfig is needed because CommandLineBuildTargetNormalizer needs it.
    this.aliasToBuildTargetMap =
        createAliasToBuildTargetMap(this.getEntriesForSection(ALIAS_SECTION_HEADER));

    this.platform = platform;
    this.environment = environment;
  }

  /** Returns a clone of the current config with a the argument CellPathResolver. */
  public BuckConfig withCellPathResolver(CellPathResolver resolver) {
    return new BuckConfig(config, projectFilesystem, architecture, platform, environment, resolver);
  }

  /**
   * Get a {@link ConfigView} of this config.
   *
   * @param cls Class of the config view.
   * @param <T> Type of the config view.
   */
  public <T extends ConfigView<BuckConfig>> T getView(final Class<T> cls) {
    return viewCache.getView(cls);
  }

  /**
   * @return whether {@code aliasName} conforms to the pattern for a valid alias name. This does not
   *     indicate whether it is an alias that maps to a build target in a BuckConfig.
   */
  private static boolean isValidAliasName(String aliasName) {
    return ALIAS_PATTERN.matcher(aliasName).matches();
  }

  public static void validateAliasName(String aliasName) throws HumanReadableException {
    validateAgainstAlias(aliasName, "Alias");
  }

  public static void validateLabelName(String aliasName) throws HumanReadableException {
    validateAgainstAlias(aliasName, "Label");
  }

  // Shared validation for alias/label names; fieldName is used only in error messages.
  private static void validateAgainstAlias(String aliasName, String fieldName) {
    if (isValidAliasName(aliasName)) {
      return;
    }

    if (aliasName.isEmpty()) {
      throw new HumanReadableException("%s cannot be the empty string.", fieldName);
    }

    throw new HumanReadableException("Not a valid %s: %s.", fieldName.toLowerCase(), aliasName);
  }

  public Architecture getArchitecture() {
    return architecture;
  }

  /** @return entries of the given section, or an empty map if the section is absent. */
  public ImmutableMap<String, String> getEntriesForSection(String section) {
    ImmutableMap<String, String> entries = config.get(section);
    if (entries != null) {
      return entries;
    } else {
      return ImmutableMap.of();
    }
  }

  public ImmutableList<String> getMessageOfTheDay() {
    return getListWithoutComments("project", "motd");
  }

  public ImmutableList<String> getListWithoutComments(String section, String field) {
    return config.getListWithoutComments(section, field);
  }

  public ImmutableList<String> getListWithoutComments(
      String section, String field, char splitChar) {
    return config.getListWithoutComments(section, field, splitChar);
  }

  public CellPathResolver getCellPathResolver() {
    return cellPathResolver;
  }

  public Optional<ImmutableList<String>> getOptionalListWithoutComments(
      String section, String field) {
    return config.getOptionalListWithoutComments(section, field);
  }

  public Optional<ImmutableList<String>> getOptionalListWithoutComments(
      String section, String field, char splitChar) {
    return config.getOptionalListWithoutComments(section, field, splitChar);
  }

  /**
   * @return the list of paths for the given section/field, each converted via {@code convertPath}
   *     (resolving cell-relative paths when {@code resolve} is set), if the field is present.
   */
  public Optional<ImmutableList<Path>> getOptionalPathList(
      String section, String field, boolean resolve) {
    Optional<ImmutableList<String>> rawPaths =
        config.getOptionalListWithoutComments(section, field);

    if (rawPaths.isPresent()) {
      ImmutableList<Path> paths =
          rawPaths
              .get()
              .stream()
              .map(
                  input ->
                      convertPath(
                          input,
                          resolve,
                          String.format(
                              "Error in %s.%s: Cell-relative path not found: ", section, field)))
              .collect(ImmutableList.toImmutableList());
      return Optional.of(paths);
    }

    return Optional.empty();
  }

  /**
   * Expands a possibly-flavored alias (e.g. {@code foo#flavor}) into the fully-qualified names of
   * the targets it maps to, re-attaching the flavor suffix to each.
   */
  public ImmutableSet<String> getBuildTargetForAliasAsString(String possiblyFlavoredAlias) {
    String[] parts = possiblyFlavoredAlias.split("#", 2);
    String unflavoredAlias = parts[0];
    ImmutableSet<BuildTarget> buildTargets = getBuildTargetsForAlias(unflavoredAlias);
    if (buildTargets.isEmpty()) {
      return ImmutableSet.of();
    }
    String suffix = parts.length == 2 ? "#" + parts[1] : "";
    return buildTargets
        .stream()
        .map(buildTarget -> buildTarget.getFullyQualifiedName() + suffix)
        .collect(ImmutableSet.toImmutableSet());
  }

  public ImmutableSet<BuildTarget> getBuildTargetsForAlias(String unflavoredAlias) {
    return aliasToBuildTargetMap.get(unflavoredAlias);
  }

  public BuildTarget getBuildTargetForFullyQualifiedTarget(String target) {
    return BuildTargetParser.INSTANCE.parse(
        target, BuildTargetPatternParser.fullyQualified(), getCellPathResolver());
  }

  /**
   * Parses a list config value where each entry may be either an alias or a fully-qualified
   * target; aliases are expanded to their mapped targets.
   */
  public ImmutableList<BuildTarget> getBuildTargetList(String section, String key) {
    ImmutableList<String> targetsToForce = getListWithoutComments(section, key);
    if (targetsToForce.size() == 0) {
      return ImmutableList.of();
    }
    ImmutableList.Builder<BuildTarget> targets = new ImmutableList.Builder<>();
    for (String targetOrAlias : targetsToForce) {
      Set<String> expandedAlias = getBuildTargetForAliasAsString(targetOrAlias);
      if (expandedAlias.isEmpty()) {
        targets.add(getBuildTargetForFullyQualifiedTarget(targetOrAlias));
      } else {
        for (String target : expandedAlias) {
          targets.add(getBuildTargetForFullyQualifiedTarget(target));
        }
      }
    }
    return targets.build();
  }

  /** @return the parsed BuildTarget in the given section and field, if set. */
  public Optional<BuildTarget> getBuildTarget(String section, String field) {
    Optional<String> target = getValue(section, field);
    return target.isPresent()
        ? Optional.of(getBuildTargetForFullyQualifiedTarget(target.get()))
        : Optional.empty();
  }

  /**
   * @return the parsed BuildTarget in the given section and field, if set and a valid build target.
   *     <p>This is useful if you use getTool to get the target, if any, but allow filesystem
   *     references.
   */
  public Optional<BuildTarget> getMaybeBuildTarget(String section, String field) {
    Optional<String> value = getValue(section, field);
    if (!value.isPresent()) {
      return Optional.empty();
    }
    try {
      return Optional.of(getBuildTargetForFullyQualifiedTarget(value.get()));
    } catch (BuildTargetParseException e) {
      // Unparseable values are treated as "not a build target" rather than an error.
      return Optional.empty();
    }
  }

  /** @return the parsed BuildTarget in the given section and field. */
  public BuildTarget getRequiredBuildTarget(String section, String field) {
    Optional<BuildTarget> target = getBuildTarget(section, field);
    return getOrThrow(section, field, target);
  }

  public <T extends Enum<T>> Optional<T> getEnum(String section, String field, Class<T> clazz) {
    return config.getEnum(section, field, clazz);
  }

  /**
   * @return a {@link SourcePath} identified by a @{link BuildTarget} or {@link Path} reference by
   *     the given section:field, if set.
   */
  public Optional<SourcePath> getSourcePath(String section, String field) {
    Optional<String> value = getValue(section, field);
    if (!value.isPresent()) {
      return Optional.empty();
    }
    try {
      // Prefer interpreting the value as a build target; fall back to a path.
      BuildTarget target = getBuildTargetForFullyQualifiedTarget(value.get());
      return Optional.of(DefaultBuildTargetSourcePath.of(target));
    } catch (BuildTargetParseException e) {
      return Optional.of(
          PathSourcePath.of(
              projectFilesystem,
              checkPathExists(
                  value.get(),
                  String.format("Overridden %s:%s path not found: ", section, field))));
    }
  }

  /** @return a {@link SourcePath} identified by a {@link Path}. */
  public PathSourcePath getPathSourcePath(@PropagatesNullable Path path) {
    if (path == null) {
      return null;
    }
    if (path.isAbsolute()) {
      return PathSourcePath.of(projectFilesystem, path);
    }
    return PathSourcePath.of(
        projectFilesystem,
        checkPathExists(
            path.toString(),
            String.format(
                "Failed to transform Path %s to Source Path because path was not found.", path)));
  }

  /**
   * In a {@link BuckConfig}, an alias can either refer to a fully-qualified build target, or an
   * alias defined earlier in the {@code alias} section. The mapping produced by this method
   * reflects the result of resolving all aliases as values in the {@code alias} section.
   */
  private ImmutableSetMultimap<String, BuildTarget> createAliasToBuildTargetMap(
      ImmutableMap<String, String> rawAliasMap) {
    // We use a LinkedHashMap rather than an ImmutableMap.Builder because we want both (1) order to
    // be preserved, and (2) the ability to inspect the Map while building it up.
    SetMultimap<String, BuildTarget> aliasToBuildTarget = LinkedHashMultimap.create();

    for (Map.Entry<String, String> aliasEntry : rawAliasMap.entrySet()) {
      String alias = aliasEntry.getKey();
      validateAliasName(alias);

      // Determine whether the mapping is to a build target or to an alias.
      List<String> values = Splitter.on(' ').splitToList(aliasEntry.getValue());
      for (String value : values) {
        Set<BuildTarget> buildTargets;
        if (isValidAliasName(value)) {
          buildTargets = aliasToBuildTarget.get(value);
          if (buildTargets.isEmpty()) {
            throw new HumanReadableException("No alias for: %s.", value);
          }
        } else if (value.isEmpty()) {
          continue;
        } else {
          // Here we parse the alias values with a BuildTargetParser to be strict. We could be
          // looser and just grab everything between "//" and ":" and assume it's a valid base path.
          buildTargets =
              ImmutableSet.of(
                  BuildTargetParser.INSTANCE.parse(
                      value, BuildTargetPatternParser.fullyQualified(), getCellPathResolver()));
        }
        aliasToBuildTarget.putAll(alias, buildTargets);
      }
    }
    return ImmutableSetMultimap.copyOf(aliasToBuildTarget);
  }

  /**
   * Create a map of {@link BuildTarget} base paths to aliases. Note that there may be more than one
   * alias to a base path, so the first one listed in the .buckconfig will be chosen.
*/ public ImmutableMap<Path, String> getBasePathToAliasMap() { ImmutableMap<String, String> aliases = config.get(ALIAS_SECTION_HEADER); if (aliases == null) { return ImmutableMap.of(); } // Build up the Map with an ordinary HashMap because we need to be able to check whether the Map // already contains the key before inserting. Map<Path, String> basePathToAlias = new HashMap<>(); for (Map.Entry<String, BuildTarget> entry : aliasToBuildTargetMap.entries()) { String alias = entry.getKey(); BuildTarget buildTarget = entry.getValue(); Path basePath = buildTarget.getBasePath(); if (!basePathToAlias.containsKey(basePath)) { basePathToAlias.put(basePath, alias); } } return ImmutableMap.copyOf(basePathToAlias); } public ImmutableMultimap<String, BuildTarget> getAliases() { return this.aliasToBuildTargetMap; } public long getDefaultTestTimeoutMillis() { return Long.parseLong(getValue("test", "timeout").orElse("0")); } public boolean isParallelExternalTestSpecComputationEnabled() { return getBooleanValue( TEST_SECTION_HEADER, "parallel_external_test_spec_computation_enabled", false); } private static final String LOG_SECTION = "log"; public boolean isPublicAnnouncementsEnabled() { return getBooleanValue(LOG_SECTION, "public_announcements", true); } public boolean isProcessTrackerEnabled() { return getBooleanValue(LOG_SECTION, "process_tracker_enabled", true); } public boolean isProcessTrackerDeepEnabled() { return getBooleanValue(LOG_SECTION, "process_tracker_deep_enabled", false); } public boolean isRuleKeyLoggerEnabled() { return getBooleanValue(LOG_SECTION, "rule_key_logger_enabled", false); } public RuleKeyDiagnosticsMode getRuleKeyDiagnosticsMode() { return getEnum(LOG_SECTION, "rule_key_diagnostics_mode", RuleKeyDiagnosticsMode.class) .orElse(RuleKeyDiagnosticsMode.NEVER); } public boolean isMachineReadableLoggerEnabled() { return getBooleanValue(LOG_SECTION, "machine_readable_logger_enabled", true); } public boolean isBuckConfigLocalWarningEnabled() { return 
getBooleanValue(LOG_SECTION, "buckconfig_local_warning_enabled", false); } public ProjectTestsMode xcodeProjectTestsMode() { return getEnum("project", "xcode_project_tests_mode", ProjectTestsMode.class) .orElse(ProjectTestsMode.WITH_TESTS); } public boolean getRestartAdbOnFailure() { return Boolean.parseBoolean(getValue("adb", "adb_restart_on_failure").orElse("true")); } public ImmutableList<String> getAdbRapidInstallTypes() { return getListWithoutComments("adb", "rapid_install_types_beta"); } public boolean getMultiInstallMode() { return getBooleanValue("adb", "multi_install_mode", false); } public boolean getFlushEventsBeforeExit() { return getBooleanValue("daemon", "flush_events_before_exit", false); } public ImmutableSet<String> getListenerJars() { return ImmutableSet.copyOf(getListWithoutComments("extensions", "listeners")); } /** Return Strings so as to avoid a dependency on {@link com.facebook.buck.cli.LabelSelector}! */ public ImmutableList<String> getDefaultRawExcludedLabelSelectors() { return getListWithoutComments("test", "excluded_labels"); } /** * Create an Ansi object appropriate for the current output. First respect the user's preferences, * if set. Next, respect any default provided by the caller. (This is used by buckd to tell the * daemon about the client's terminal.) Finally, allow the Ansi class to autodetect whether the * current output is a tty. * * @param defaultColor Default value provided by the caller (e.g. 
the client of buckd) */ public Ansi createAnsi(Optional<String> defaultColor) { String color = getValue("color", "ui").map(Optional::of).orElse(defaultColor).orElse("auto"); switch (color) { case "false": case "never": return Ansi.withoutTty(); case "true": case "always": return Ansi.forceTty(); case "auto": default: return new Ansi( AnsiEnvironmentChecking.environmentSupportsAnsiEscapes(platform, environment)); } } public Path resolvePathThatMayBeOutsideTheProjectFilesystem(@PropagatesNullable Path path) { if (path == null) { return path; } return resolveNonNullPathOutsideTheProjectFilesystem(path); } public Path resolveNonNullPathOutsideTheProjectFilesystem(Path path) { if (path.isAbsolute()) { return getPathFromVfs(path); } Path expandedPath = MorePaths.expandHomeDir(path); return projectFilesystem.resolve(expandedPath); } public String getLocalhost() { try { return HostnameFetching.getHostname(); } catch (IOException e) { return "<unknown>"; } } public Platform getPlatform() { return platform; } public boolean isActionGraphCheckingEnabled() { return getBooleanValue("cache", "action_graph_cache_check_enabled", false); } public int getMaxActionGraphCacheEntries() { return getInteger("cache", "max_action_graph_cache_entries").orElse(1); } public IncrementalActionGraphMode getIncrementalActionGraphMode() { return getEnum("cache", "incremental_action_graph", IncrementalActionGraphMode.class) .orElse(IncrementalActionGraphMode.DEFAULT); } public int getMaxActionGraphNodeCacheEntries() { return getInteger("cache", "max_action_graph_node_cache_entries").orElse(10000); } public Optional<String> getRepository() { return config.get("cache", "repository"); } /** * Whether Buck should use Buck binary hash or git commit id as the core key in all rule keys. * * <p>The binary hash reflects the code that can affect the content of artifacts. * * <p>By default git commit id is used as the core key. 
* * @return <code>True</code> if binary hash should be used as the core key */ public boolean useBuckBinaryHash() { return getBooleanValue("cache", "use_buck_binary_hash", false); } public Optional<ImmutableSet<PatternAndMessage>> getUnexpectedFlavorsMessages() { ImmutableMap<String, String> entries = config.get("unknown_flavors_messages"); if (!entries.isEmpty()) { Set<PatternAndMessage> patternAndMessages = new HashSet<>(); for (Map.Entry<String, String> entry : entries.entrySet()) { patternAndMessages.add( PatternAndMessage.of(Pattern.compile(entry.getKey()), entry.getValue())); } return Optional.of(ImmutableSet.copyOf(patternAndMessages)); } return Optional.empty(); } public boolean hasUserDefinedValue(String sectionName, String propertyName) { return config.get(sectionName).containsKey(propertyName); } public Optional<ImmutableMap<String, String>> getSection(String sectionName) { ImmutableMap<String, String> values = config.get(sectionName); return values.isEmpty() ? Optional.empty() : Optional.of(values); } /** * @return the string value for the config settings, where present empty values are {@code * Optional.empty()}. */ public Optional<String> getValue(String sectionName, String propertyName) { return config.getValue(sectionName, propertyName); } /** * @return the string value for the config settings, where present empty values are {@code * Optional[]}. 
*/ public Optional<String> getRawValue(String sectionName, String propertyName) { return config.get(sectionName, propertyName); } public Optional<Integer> getInteger(String sectionName, String propertyName) { return config.getInteger(sectionName, propertyName); } public Optional<Long> getLong(String sectionName, String propertyName) { return config.getLong(sectionName, propertyName); } public Optional<Float> getFloat(String sectionName, String propertyName) { return config.getFloat(sectionName, propertyName); } public Optional<Boolean> getBoolean(String sectionName, String propertyName) { return config.getBoolean(sectionName, propertyName); } public boolean getBooleanValue(String sectionName, String propertyName, boolean defaultValue) { return config.getBooleanValue(sectionName, propertyName, defaultValue); } public Optional<URI> getUrl(String section, String field) { return config.getUrl(section, field); } public ImmutableMap<String, String> getMap(String section, String field) { return config.getMap(section, field); } public <T> T getOrThrow(String section, String field, Optional<T> value) { if (!value.isPresent()) { throw new HumanReadableException( String.format(".buckconfig: %s:%s must be set", section, field)); } return value.get(); } // This is a hack. A cleaner approach would be to expose a narrow view of the config to any code // that affects the state cached by the Daemon. 
public boolean equalsForDaemonRestart(BuckConfig other) { return this.config.equalsIgnoring(other.config, IGNORE_FIELDS_FOR_DAEMON_RESTART); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } else if (!(obj instanceof BuckConfig)) { return false; } BuckConfig that = (BuckConfig) obj; return Objects.equal(this.config, that.config); } @Override public String toString() { return String.format("%s (config=%s)", super.toString(), config); } @Override public int hashCode() { return Objects.hashCode(config); } public ImmutableMap<String, String> getEnvironment() { return environment; } public String[] getEnv(String propertyName, String separator) { String value = getEnvironment().get(propertyName); if (value == null) { value = ""; } return value.split(separator); } /** @return the local cache directory */ public String getLocalCacheDirectory(String dirCacheName) { return getValue(dirCacheName, "dir") .orElse(projectFilesystem.getBuckPaths().getCacheDir().toString()); } public int getKeySeed() { return parseInt(getValue("cache", "key_seed").orElse("0")); } /** @return the path for the given section and property. */ @Override public Optional<Path> getPath(String sectionName, String name) { return getPath(sectionName, name, true); } public Path getRequiredPath(String section, String field) { Optional<Path> path = getPath(section, field); return getOrThrow(section, field, path); } public String getClientId() { return getValue("client", "id").orElse("buck"); } /** * @return whether the current invocation of Buck should skip the Action Graph cache, leaving the * cached Action Graph in memory for the next request and creating a fresh Action Graph for * the current request (which will be garbage-collected when the current request is complete). * Commonly, a one-off request, like from a linter, will specify this option so that it does * not invalidate the primary in-memory Action Graph that the user is likely relying on for * fast iterative builds. 
*/ public boolean isSkipActionGraphCache() { return getBooleanValue("client", "skip-action-graph-cache", false); } /** @return the number of threads Buck should use. */ public int getNumThreads() { return getNumThreads(getDefaultMaximumNumberOfThreads()); } /** * @return the number of threads Buck should use for testing. This will use the build * parallelization settings if not configured. */ public int getNumTestThreads() { double ratio = config.getFloat(TEST_SECTION_HEADER, "thread_utilization_ratio").orElse(1.0F); if (ratio <= 0.0F) { throw new HumanReadableException( "thread_utilization_ratio must be greater than zero (was " + ratio + ")"); } return (int) Math.ceil(ratio * getNumThreads()); } /** @return the number of threads to be used for the scheduled executor thread pool. */ public int getNumThreadsForSchedulerPool() { return config.getLong("build", "scheduler_threads").orElse((long) 2).intValue(); } /** @return the maximum size of files input based rule keys will be willing to hash. 
*/ public long getBuildInputRuleKeyFileSizeLimit() { return config.getLong("build", "input_rule_key_file_size_limit").orElse(Long.MAX_VALUE); } public int getDefaultMaximumNumberOfThreads() { return getDefaultMaximumNumberOfThreads(Runtime.getRuntime().availableProcessors()); } @VisibleForTesting int getDefaultMaximumNumberOfThreads(int detectedProcessorCount) { double ratio = config.getFloat("build", "thread_core_ratio").orElse(DEFAULT_THREAD_CORE_RATIO); if (ratio <= 0.0F) { throw new HumanReadableException( "thread_core_ratio must be greater than zero (was " + ratio + ")"); } int scaledValue = (int) Math.ceil(ratio * detectedProcessorCount); int threadLimit = detectedProcessorCount; Optional<Long> reservedCores = getNumberOfReservedCores(); if (reservedCores.isPresent()) { threadLimit -= reservedCores.get(); } if (scaledValue > threadLimit) { scaledValue = threadLimit; } Optional<Long> minThreads = getThreadCoreRatioMinThreads(); if (minThreads.isPresent()) { scaledValue = Math.max(scaledValue, minThreads.get().intValue()); } Optional<Long> maxThreads = getThreadCoreRatioMaxThreads(); if (maxThreads.isPresent()) { long maxThreadsValue = maxThreads.get(); if (minThreads.isPresent() && minThreads.get() > maxThreadsValue) { throw new HumanReadableException( "thread_core_ratio_max_cores must be larger than thread_core_ratio_min_cores"); } if (maxThreadsValue > threadLimit) { throw new HumanReadableException( "thread_core_ratio_max_cores is larger than thread_core_ratio_reserved_cores allows"); } scaledValue = Math.min(scaledValue, (int) maxThreadsValue); } if (scaledValue <= 0) { throw new HumanReadableException( "Configuration resulted in an invalid number of build threads (" + scaledValue + ")."); } return scaledValue; } private Optional<Long> getNumberOfReservedCores() { Optional<Long> reservedCores = config.getLong("build", "thread_core_ratio_reserved_cores"); if (reservedCores.isPresent() && reservedCores.get() < 0) { throw new 
HumanReadableException("thread_core_ratio_reserved_cores must be larger than zero"); } return reservedCores; } private Optional<Long> getThreadCoreRatioMaxThreads() { Optional<Long> maxThreads = config.getLong("build", "thread_core_ratio_max_threads"); if (maxThreads.isPresent() && maxThreads.get() < 0) { throw new HumanReadableException("thread_core_ratio_max_threads must be larger than zero"); } return maxThreads; } private Optional<Long> getThreadCoreRatioMinThreads() { Optional<Long> minThreads = config.getLong("build", "thread_core_ratio_min_threads"); if (minThreads.isPresent() && minThreads.get() <= 0) { throw new HumanReadableException("thread_core_ratio_min_threads must be larger than zero"); } return minThreads; } /** * @return the number of threads Buck should use or the specified defaultValue if it is not set. */ public int getNumThreads(int defaultValue) { return config.getLong("build", "threads").orElse((long) defaultValue).intValue(); } public Optional<ImmutableList<String>> getAllowedJavaSpecificationVersions() { return getOptionalListWithoutComments("project", "allowed_java_specification_versions"); } public long getCountersFirstFlushIntervalMillis() { return config.getLong("counters", "first_flush_interval_millis").orElse(5000L); } public long getCountersFlushIntervalMillis() { return config.getLong("counters", "flush_interval_millis").orElse(30000L); } public Optional<Path> getPath(String sectionName, String name, boolean isCellRootRelative) { Optional<String> pathString = getValue(sectionName, name); return pathString.isPresent() ? Optional.of( convertPathWithError( pathString.get(), isCellRootRelative, String.format("Overridden %s:%s path not found: ", sectionName, name))) : Optional.empty(); } /** * Return a {@link Path} from the underlying {@link java.nio.file.FileSystem} implementation. 
This * allows to safely call {@link Path#resolve(Path)} and similar calls without exceptions caused by * mis-matched underlying filesystem implementations causing grief. This is particularly useful * for those times where we're using (eg) JimFs for our testing. */ private Path getPathFromVfs(String path) { return projectFilesystem.getPath(path); } private Path getPathFromVfs(Path path) { return projectFilesystem.getPath(path.toString()); } private Path convertPathWithError(String pathString, boolean isCellRootRelative, String error) { return isCellRootRelative ? checkPathExistsAndResolve(pathString, error) : getPathFromVfs(pathString); } private Path convertPath(String pathString, boolean resolve, String error) { return resolve ? checkPathExistsAndResolve(pathString, error) : checkPathExists(pathString, error); } public Path checkPathExistsAndResolve(String pathString, String errorMsg) { return projectFilesystem.getPathForRelativePath(checkPathExists(pathString, errorMsg)); } private Path checkPathExists(String pathString, String errorMsg) { Path path = getPathFromVfs(pathString); if (projectFilesystem.exists(path)) { return path; } throw new HumanReadableException(errorMsg + path); } public ImmutableSet<String> getSections() { return config.getSectionToEntries().keySet(); } public ImmutableMap<String, ImmutableMap<String, String>> getRawConfigForParser() { ImmutableMap<String, ImmutableMap<String, String>> rawSections = config.getSectionToEntries(); // If the raw config doesn't have sections which have ignored fields, then just return it as-is. ImmutableSet<String> sectionsWithIgnoredFields = IGNORE_FIELDS_FOR_DAEMON_RESTART.keySet(); if (Sets.intersection(rawSections.keySet(), sectionsWithIgnoredFields).isEmpty()) { return rawSections; } // Otherwise, iterate through the config to do finer-grain filtering. 
ImmutableMap.Builder<String, ImmutableMap<String, String>> filtered = ImmutableMap.builder(); for (Map.Entry<String, ImmutableMap<String, String>> sectionEnt : rawSections.entrySet()) { String sectionName = sectionEnt.getKey(); // If this section doesn't have a corresponding ignored section, then just add it as-is. if (!sectionsWithIgnoredFields.contains(sectionName)) { filtered.put(sectionEnt); continue; } // If none of this section's entries are ignored, then add it as-is. ImmutableMap<String, String> fields = sectionEnt.getValue(); ImmutableSet<String> ignoredFieldNames = IGNORE_FIELDS_FOR_DAEMON_RESTART.getOrDefault(sectionName, ImmutableSet.of()); if (Sets.intersection(fields.keySet(), ignoredFieldNames).isEmpty()) { filtered.put(sectionEnt); continue; } // Otherwise, filter out the ignored fields. ImmutableMap<String, String> remainingKeys = ImmutableMap.copyOf(Maps.filterKeys(fields, Predicates.not(ignoredFieldNames::contains))); if (!remainingKeys.isEmpty()) { filtered.put(sectionName, remainingKeys); } } return filtered.build(); } public Optional<ImmutableList<String>> getExternalTestRunner() { Optional<String> value = getValue("test", "external_runner"); if (!value.isPresent()) { return Optional.empty(); } return Optional.of(ImmutableList.copyOf(Splitter.on(' ').splitToList(value.get()))); } /** * @return whether to symlink the default output location (`buck-out`) to the user-provided * override for compatibility. */ public boolean getBuckOutCompatLink() { return getBooleanValue("project", "buck_out_compat_link", false); } /** @return whether to enabled versions on build/test command. */ public boolean getBuildVersions() { return getBooleanValue("build", "versions", false); } /** @return whether to enabled versions on targets command. */ public boolean getTargetsVersions() { return getBooleanValue("targets", "versions", false); } /** @return whether to enable caching of rule key calculations between builds. 
*/ public boolean getRuleKeyCaching() { return getBooleanValue("build", "rule_key_caching", false); } public ImmutableList<String> getCleanAdditionalPaths() { return getListWithoutComments("clean", "additional_paths"); } public ImmutableList<String> getCleanExcludedCaches() { return getListWithoutComments("clean", "excluded_dir_caches"); } /** @return whether to enable new file hash cache engine. */ public FileHashCacheMode getFileHashCacheMode() { return getEnum("build", "file_hash_cache_mode", FileHashCacheMode.class) .orElse(FileHashCacheMode.DEFAULT); } /** Whether to parallelize action graph creation. */ public ActionGraphParallelizationMode getActionGraphParallelizationMode() { return getEnum("build", "action_graph_parallelization", ActionGraphParallelizationMode.class) .orElse(ActionGraphParallelizationMode.DEFAULT); } public Config getConfig() { return config; } public boolean isLogBuildIdToConsoleEnabled() { return getBooleanValue("log", "log_build_id_to_console_enabled", false); } /** Whether to create symlinks of build output in buck-out/last. */ public boolean createBuildOutputSymLinksEnabled() { return getBooleanValue("build", "create_build_output_symlinks_enabled", false); } public boolean isEmbeddedCellBuckOutEnabled() { return getBooleanValue("project", "embedded_cell_buck_out_enabled", false); } /** Whether to instrument the action graph and record performance */ public boolean getShouldInstrumentActionGraph() { return getBooleanValue("instrumentation", "action_graph", false); } public Optional<String> getPathToBuildPrehookScript() { return getValue("build", "prehook_script"); } /** The timeout to apply to entire test rules. */ public Optional<Long> getDefaultTestRuleTimeoutMs() { return config.getLong(TEST_SECTION_HEADER, "rule_timeout"); } }
/* * Copyright 2010 The Greplin Bloom Filter Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.greplin.bloomfilter; import java.io.IOException; import java.io.RandomAccessFile; import java.util.Arrays; /** * Represents the metadata associated with a serialized bloom filter. * <p/> * The old header format was: * 4 bytes for the number of hash Fns * 4 bytes for the total file size (data + metadata) in bits * That header format always assumed 4-bit buckets * <p/> * The header format is now as follows: * 8 bytes of zeroes (to distinguish this format from the old one) * 3 bytes for the 'magic word' which is: 0xB1 0xF1 0xCA * 1 byte for the header version (currently 2 - implicitly 1 in the old header format) * 4 bytes for header length in bytes (currently 32 bytes) * 4 bytes for real-size in bytes (total size of data + metadata) * 4 bytes for the number of hash fns * 4 bytes for the number of counting-bits in each bucket * 4 bytes of 0 padding to make the whole header 32-bytes even * The new format has the first two bytes as '0', which the old format will never have - so we can safely identify * which is which. If we detect the old format, we can safely assume there are four bits per bucket. 
* This is a little convoluted, but it's the safest way to guarantee backwards compatibility with the old format */ class BloomMetadata { private static final int INT_SIZE = 4; private static final int BITS_IN_BYTE = 8; private static final byte VERSION = 2; private static final byte[] MAGIC_WORD = {(byte) 0xB1, (byte) 0xF1, (byte) 0xCA}; private static final int EXPECTED_HEADER_BYTES = 32; private final byte version; private final int headerLength; private final int totalLength; private final int hashFns; private final BucketSize bucketSize; private final int maxCountInBucket; private final int bucketsPerByte; private final int bucketCount; public static BloomMetadata readHeader(RandomAccessFile buffer) throws IOException { final int firstInt = buffer.readInt(); final int secondInt = buffer.readInt(); if (firstInt == 0 && secondInt == 0) { return readNewStyleHeader(buffer); } else { return readOldStyleHeader(buffer, firstInt, secondInt); } } public static BloomMetadata createNew(final int buckets, final int hashFns, final BucketSize countBits) throws IOException { return new BloomMetadata(null, VERSION, EXPECTED_HEADER_BYTES, EXPECTED_HEADER_BYTES + bytes(buckets * countBits.getBits()), hashFns, countBits); } public static BloomMetadata createNewWithLength(final int totalLength, final int hashFns, final BucketSize countBits) throws IOException { return new BloomMetadata(null, VERSION, EXPECTED_HEADER_BYTES, totalLength, hashFns, countBits); } private BloomMetadata(RandomAccessFile file, byte version, int headerLength, int totalLength, int hashFns, BucketSize bucketSize) throws IOException { this.version = version; this.headerLength = headerLength; this.totalLength = totalLength; this.hashFns = hashFns; this.bucketSize = bucketSize; this.maxCountInBucket = (1 << this.bucketSize.getBits()) - 1; this.bucketsPerByte = BITS_IN_BYTE / this.bucketSize.getBits(); this.bucketCount = (this.totalLength - this.headerLength) * this.bucketsPerByte; if (hashFns <= 0) { throw new 
InvalidBloomFilter("Invalid number of hashFns (" + hashFns + " bytes)"); } if (this.totalLength < this.headerLength) { throw new InvalidBloomFilter("Impossibly short size (" + totalLength + " bytes)"); } if (file != null && file.length() != totalLength) { throw new InvalidBloomFilter("Expected a file length of " + totalLength + " but only got " + file.length()); } } private static int bytes(int bits) { return bits / BITS_IN_BYTE + (bits % BITS_IN_BYTE == 0 ? 0 : 1); } private static BloomMetadata readOldStyleHeader(RandomAccessFile file, int hashFns, int realSize) throws IOException { return new BloomMetadata(file, (byte) 1, 2 * INT_SIZE, bytes(realSize), hashFns, BucketSize.FOUR); } private static BloomMetadata readNewStyleHeader(RandomAccessFile buffer) throws IOException { // verify the magic word is present and intact final byte[] shouldBeMagicWord = new byte[MAGIC_WORD.length]; buffer.read(shouldBeMagicWord); if (!Arrays.equals(MAGIC_WORD, shouldBeMagicWord)) { throw new InvalidBloomFilter("Invalid Magic Word " + Arrays.toString(shouldBeMagicWord)); } // verify the version is correct final byte version = buffer.readByte(); if (!(version == VERSION)) { throw new InvalidBloomFilter("Unrecognized version (" + version + ")"); } final int headerLen = buffer.readInt(); if (headerLen != EXPECTED_HEADER_BYTES) { throw new InvalidBloomFilter("Unexpected header length (" + headerLen + " bytes)"); } final int realSize = buffer.readInt(); final int hashFns = buffer.readInt(); final int bucketSizeInt = buffer.readInt(); final BucketSize bucketSize = BucketSize.getBucketSize(bucketSizeInt); if (bucketSize == null) { throw new InvalidBloomFilter("Invalid bucketSize (" + bucketSize + " bytes)"); } if (buffer.readInt() != 0) { throw new InvalidBloomFilter("Invalid end padding"); } return new BloomMetadata(buffer, version, headerLen, realSize, hashFns, bucketSize); } public byte getVersion() { return version; } public int getHeaderLength() { return headerLength; } public int 
getTotalLength() { return totalLength; } public int getHashFns() { return hashFns; } public BucketSize getBucketSize() { return bucketSize; } public int getMaxCountInBucket() { return maxCountInBucket; } public int getBucketsPerByte() { return bucketsPerByte; } public int getBucketCount() { return bucketCount; } public void writeToFile(RandomAccessFile file) throws IOException { if (getVersion() == 1) { assert getHeaderLength() == 2 * INT_SIZE; file.writeInt(getHashFns()); file.writeInt(getTotalLength() / BITS_IN_BYTE); } else { assert getVersion() == VERSION; file.writeInt(0); // 4 bytes file.writeInt(0); // 8 bytes file.write(MAGIC_WORD); // 11 bytes file.writeByte(VERSION); // 12 bytes file.writeInt(EXPECTED_HEADER_BYTES); // 16 bytes file.writeInt(getTotalLength()); // 20 bytes file.writeInt(getHashFns()); // 24 bytes file.writeInt(getBucketSize().getBits()); // 28 bytes file.writeInt(0); // 32 bytes } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapred; import java.io.IOException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.LocalDirAllocator; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.MRConfig; /** * Manipulate the working area for the transient store for maps and reduces. * * This class is used by map and reduce tasks to identify the directories that * they need to write to/read from for intermediate files. The callers of * these methods are from child space. 
*/ @InterfaceAudience.Private @InterfaceStability.Unstable public class YarnOutputFiles extends MapOutputFile { private JobConf conf; private static final String JOB_OUTPUT_DIR = "output"; private static final String SPILL_FILE_PATTERN = "%s_spill_%d.out"; private static final String SPILL_INDEX_FILE_PATTERN = SPILL_FILE_PATTERN + ".index"; public YarnOutputFiles() { } // assume configured to $localdir/usercache/$user/appcache/$appId private LocalDirAllocator lDirAlloc = new LocalDirAllocator(MRConfig.LOCAL_DIR); private Path getAttemptOutputDir() { return new Path(JOB_OUTPUT_DIR, conf.get(JobContext.TASK_ATTEMPT_ID)); } /** * Return the path to local map output file created earlier * * @return path * @throws IOException */ public Path getOutputFile() throws IOException { Path attemptOutput = new Path(getAttemptOutputDir(), MAP_OUTPUT_FILENAME_STRING); return lDirAlloc.getLocalPathToRead(attemptOutput.toString(), conf); } /** * Create a local map output file name. * * @param size the size of the file * @return path * @throws IOException */ public Path getOutputFileForWrite(long size) throws IOException { Path attemptOutput = new Path(getAttemptOutputDir(), MAP_OUTPUT_FILENAME_STRING); return lDirAlloc.getLocalPathForWrite(attemptOutput.toString(), size, conf); } /** * Create a local map output file name on the same volume. 
*/ public Path getOutputFileForWriteInVolume(Path existing) { Path outputDir = new Path(existing.getParent(), JOB_OUTPUT_DIR); Path attemptOutputDir = new Path(outputDir, conf.get(JobContext.TASK_ATTEMPT_ID)); return new Path(attemptOutputDir, MAP_OUTPUT_FILENAME_STRING); } /** * Return the path to a local map output index file created earlier * * @return path * @throws IOException */ public Path getOutputIndexFile() throws IOException { Path attemptIndexOutput = new Path(getAttemptOutputDir(), MAP_OUTPUT_FILENAME_STRING + MAP_OUTPUT_INDEX_SUFFIX_STRING); return lDirAlloc.getLocalPathToRead(attemptIndexOutput.toString(), conf); } /** * Create a local map output index file name. * * @param size the size of the file * @return path * @throws IOException */ public Path getOutputIndexFileForWrite(long size) throws IOException { Path attemptIndexOutput = new Path(getAttemptOutputDir(), MAP_OUTPUT_FILENAME_STRING + MAP_OUTPUT_INDEX_SUFFIX_STRING); return lDirAlloc.getLocalPathForWrite(attemptIndexOutput.toString(), size, conf); } /** * Create a local map output index file name on the same volume. */ public Path getOutputIndexFileForWriteInVolume(Path existing) { Path outputDir = new Path(existing.getParent(), JOB_OUTPUT_DIR); Path attemptOutputDir = new Path(outputDir, conf.get(JobContext.TASK_ATTEMPT_ID)); return new Path(attemptOutputDir, MAP_OUTPUT_FILENAME_STRING + MAP_OUTPUT_INDEX_SUFFIX_STRING); } /** * Return a local map spill file created earlier. * * @param spillNumber the number * @return path * @throws IOException */ public Path getSpillFile(int spillNumber) throws IOException { return lDirAlloc.getLocalPathToRead( String.format(SPILL_FILE_PATTERN, conf.get(JobContext.TASK_ATTEMPT_ID), spillNumber), conf); } /** * Create a local map spill file name. 
* * @param spillNumber the number * @param size the size of the file * @return path * @throws IOException */ public Path getSpillFileForWrite(int spillNumber, long size) throws IOException { return lDirAlloc.getLocalPathForWrite( String.format(SPILL_FILE_PATTERN, conf.get(JobContext.TASK_ATTEMPT_ID), spillNumber), size, conf); } /** * Return a local map spill index file created earlier * * @param spillNumber the number * @return path * @throws IOException */ public Path getSpillIndexFile(int spillNumber) throws IOException { return lDirAlloc.getLocalPathToRead( String.format(SPILL_INDEX_FILE_PATTERN, conf.get(JobContext.TASK_ATTEMPT_ID), spillNumber), conf); } /** * Create a local map spill index file name. * * @param spillNumber the number * @param size the size of the file * @return path * @throws IOException */ public Path getSpillIndexFileForWrite(int spillNumber, long size) throws IOException { return lDirAlloc.getLocalPathForWrite( String.format(SPILL_INDEX_FILE_PATTERN, conf.get(JobContext.TASK_ATTEMPT_ID), spillNumber), size, conf); } /** * Return a local reduce input file created earlier * * @param mapId a map task id * @return path * @throws IOException */ public Path getInputFile(int mapId) throws IOException { throw new UnsupportedOperationException("Incompatible with LocalRunner"); } /** * Create a local reduce input file name. * * @param mapId a map task id * @param size the size of the file * @return path * @throws IOException */ public Path getInputFileForWrite(org.apache.hadoop.mapreduce.TaskID mapId, long size) throws IOException { return lDirAlloc.getLocalPathForWrite(String.format( REDUCE_INPUT_FILE_FORMAT_STRING, getAttemptOutputDir().toString(), mapId.getId()), size, conf); } /** Removes all of the files related to a task. 
*/ public void removeAll() throws IOException { throw new UnsupportedOperationException("Incompatible with LocalRunner"); } @Override public void setConf(Configuration conf) { if (conf instanceof JobConf) { this.conf = (JobConf) conf; } else { this.conf = new JobConf(conf); } } @Override public Configuration getConf() { return conf; } }
package com.spm.android.activity;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.List;
import java.util.Set;
import android.app.Activity;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.View.OnLongClickListener;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.TextView;
import com.spm.R;
import com.spm.android.activity.ProductsAdapter.ProductViewHolder;
import com.spm.android.common.adapter.BaseHolderArrayAdapter;
import com.spm.domain.Client;
import com.spm.domain.Product;

/**
 * List adapter that renders one row per {@link Product} with +/- quantity
 * controls, a selection checkbox and a unit price derived from the client's
 * price list (1..5) and a global discount percentage.
 *
 * @author Agustin Sgarlata
 */
public class ProductsAdapter extends BaseHolderArrayAdapter<Product, ProductViewHolder> {

	// Products currently ticked by the user; shared with (and mutated for) the caller.
	private Set<Product> selectedProducts;
	// Discount percentage (expected 0-100) applied to every list price.
	private Double dto;
	// Client whose price list decides which of the five product prices applies.
	private Client client;
	// Label owned by the hosting activity where the running total is rendered.
	TextView totales;
	// Running total of prices added/removed via the +/- buttons. NOTE(review):
	// it is never recomputed from selectedProducts, so it can drift from the
	// real total of the selection.
	Double precioTotal = 0.0;

	/**
	 * @param context the hosting activity
	 * @param products products to display
	 * @param selectedProducts shared set collecting the user's selection
	 * @param dto discount percentage applied to list prices
	 * @param client client whose price list is used
	 * @param totales view where the running total is rendered
	 */
	public ProductsAdapter(Activity context, List<Product> products, Set<Product> selectedProducts, Double dto,
			Client client, TextView totales) {
		super(context, products, R.layout.product_row);
		this.selectedProducts = selectedProducts;
		this.dto = dto;
		this.client = client;
		this.totales = totales;
	}

	/**
	 * Resolves the unit price of a product from the client's price list:
	 * lists 2..5 map to prices 2..5, anything else falls back to price 1.
	 * A missing (null) price is treated as 0.
	 */
	private Double resolvePrice(Product product) {
		Long priceList = client.getPriceList();
		Double price;
		if (priceList.compareTo(2L) == 0) {
			price = product.getPrice2();
		} else if (priceList.compareTo(3L) == 0) {
			price = product.getPrice3();
		} else if (priceList.compareTo(4L) == 0) {
			price = product.getPrice4();
		} else if (priceList.compareTo(5L) == 0) {
			price = product.getPrice5();
		} else {
			price = product.getPrice1();
		}
		return price != null ? price : 0.0;
	}

	/**
	 * Binds one product row: name (currently the product id), discounted
	 * price, checkbox state and the +/- quantity listeners.
	 *
	 * @see com.spm.android.common.adapter.BaseHolderArrayAdapter#fillHolderFromItem(java.lang.Object,
	 *      java.lang.Object)
	 */
	@Override
	protected void fillHolderFromItem(final Product product, final ProductViewHolder holder) {
		// The product id is shown instead of its name (legacy behaviour kept).
		// holder.productName.setText(product.getName());
		holder.productName.setText(product.getId().toString());

		final Double priceDto = resolvePrice(product) * (1 - (dto / 100));
		BigDecimal priceDtoBig = new BigDecimal(priceDto.toString());
		priceDtoBig = priceDtoBig.setScale(2, RoundingMode.HALF_UP);
		holder.productPrice.setText("$ " + priceDtoBig.toString());

		if (isCheckeable(product)) {
			// We need to null the previous listener to prevent it from running
			// with the former user.
			holder.checked.setOnCheckedChangeListener(null);
			holder.checked.setChecked(selectedProducts.contains(product));
			holder.quantity.setText(String.valueOf(product.getQuantity()));
			holder.checked.setOnCheckedChangeListener(new OnCheckedChangeListener() {

				@Override
				public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
					if (isChecked) {
						selectedProducts.add(product);
					} else {
						selectedProducts.remove(product);
					}
					onSelectionChanged();
				}
			});
			// NOTE(review): recycled rows hidden by the GONE branch below are
			// never made visible again; presumably all rows are checkeable in
			// practice -- confirm before re-enabling the next line.
			// holder.checked.setVisibility(View.VISIBLE);
		} else {
			holder.checked.setVisibility(View.GONE);
		}

		holder.minus.setOnClickListener(new OnClickListener() {

			@Override
			public void onClick(View v) {
				Integer quantity = Integer.valueOf(holder.quantity.getText().toString());
				if (quantity.intValue() > 0) {
					quantity--;
					product.setQuantity(quantity);
					holder.quantity.setText(quantity.toString());
				}
				if (quantity.intValue() == 0) {
					// Reaching zero deselects the product.
					holder.checked.setChecked(false);
					selectedProducts.remove(product);
				}
				precioTotal -= priceDto;
				BigDecimal precioTotalBig = new BigDecimal(precioTotal.toString());
				precioTotalBig = precioTotalBig.setScale(2, RoundingMode.HALF_UP);
				totales.setText("Total en $: " + precioTotalBig.toString());
			}
		});
		holder.minus.setOnLongClickListener(new OnLongClickListener() {

			@Override
			public boolean onLongClick(View v) {
				Integer quantity = Integer.valueOf(holder.quantity.getText().toString());
				if (quantity.intValue() > 0) {
					quantity--;
					product.setQuantity(quantity);
					holder.quantity.setText(quantity.toString());
				}
				return false;
			}
		});
		// TODO: this is known not to work correctly (original note: "no sirve
		// asi, corregir") -- the total changes on every click even when the
		// quantity was clamped, so it can drift.
		holder.plus.setOnClickListener(new OnClickListener() {

			@Override
			public void onClick(View v) {
				Integer quantity = Integer.valueOf(holder.quantity.getText().toString());
				quantity++;
				product.setQuantity(quantity);
				holder.quantity.setText(quantity.toString());
				if (quantity.intValue() > 0) {
					holder.checked.setChecked(true);
					selectedProducts.add(product);
				}
				precioTotal += priceDto;
				BigDecimal precioTotalBig = new BigDecimal(precioTotal.toString());
				precioTotalBig = precioTotalBig.setScale(2, RoundingMode.HALF_UP);
				totales.setText("Total en $: " + precioTotalBig.toString());
			}
		});
	}

	/** Hook for subclasses; by default every product can be (de)selected. */
	protected Boolean isCheckeable(Product product) {
		return true;
	}

	/** Hook invoked whenever the checkbox selection changes. */
	protected void onSelectionChanged() {
		// Do Nothing
	}

	/**
	 * @see com.spm.android.common.adapter.BaseHolderArrayAdapter#createViewHolderFromConvertView(android.view.View)
	 */
	@Override
	protected ProductViewHolder createViewHolderFromConvertView(View convertView) {
		ProductViewHolder holder = new ProductViewHolder();
		holder.productName = findView(convertView, R.id.productName);
		holder.minus = findView(convertView, R.id.minus);
		holder.quantity = findView(convertView, R.id.quantity);
		holder.plus = findView(convertView, R.id.plus);
		holder.productPrice = findView(convertView, R.id.productPrice);
		holder.checked = findView(convertView, R.id.check);
		return holder;
	}

	/** View cache for one product row. */
	public static class ProductViewHolder {

		private TextView productName;
		private TextView minus;
		private TextView quantity;
		private TextView plus;
		private TextView productPrice;
		private CheckBox checked;
	}
}
package net.nemerosa.ontrack.service.security;

import net.nemerosa.ontrack.model.Ack;
import net.nemerosa.ontrack.model.exceptions.AccountDefaultAdminCannotDeleteException;
import net.nemerosa.ontrack.model.exceptions.AccountDefaultAdminCannotUpdateNameException;
import net.nemerosa.ontrack.model.security.*;
import net.nemerosa.ontrack.model.structure.Entity;
import net.nemerosa.ontrack.model.structure.ID;
import net.nemerosa.ontrack.model.structure.NameDescription;
import net.nemerosa.ontrack.model.structure.Project;
import net.nemerosa.ontrack.repository.AccountGroupRepository;
import net.nemerosa.ontrack.repository.AccountRepository;
import net.nemerosa.ontrack.repository.RoleRepository;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Transactional service managing accounts, account groups and their global
 * and per-project permissions. Persistence is delegated to the repositories;
 * every management entry point performs an access check through the
 * {@code SecurityService} before touching data.
 */
@Service
@Transactional
public class AccountServiceImpl implements AccountService {

    private final RoleRepository roleRepository;
    private final RolesService rolesService;
    private final AccountRepository accountRepository;
    private final AccountGroupRepository accountGroupRepository;
    private final SecurityService securityService;
    private final AuthenticationSourceService authenticationSourceService;
    private final PasswordEncoder passwordEncoder;

    // Optional contributors that can add extra groups to an authenticated
    // account; stays empty unless injected via setAccountGroupContributors.
    private Collection<AccountGroupContributor> accountGroupContributors = Collections.emptyList();

    @Autowired
    public AccountServiceImpl(
            RoleRepository roleRepository,
            RolesService rolesService,
            AccountRepository accountRepository,
            AccountGroupRepository accountGroupRepository,
            SecurityService securityService,
            AuthenticationSourceService authenticationSourceService,
            PasswordEncoder passwordEncoder) {
        this.roleRepository = roleRepository;
        this.rolesService = rolesService;
        this.accountRepository = accountRepository;
        this.accountGroupRepository = accountGroupRepository;
        this.securityService = securityService;
        this.authenticationSourceService = authenticationSourceService;
        this.passwordEncoder = passwordEncoder;
    }

    // required = false: the application context may define no contributor at all.
    @Autowired(required = false)
    public void setAccountGroupContributors(Collection<AccountGroupContributor> accountGroupContributors) {
        this.accountGroupContributors = accountGroupContributors;
    }

    /**
     * Decorates a raw authenticated account with its full ACL: global role,
     * project roles, repository groups and contributed groups (each group
     * itself ACL-decorated), then locks the result.
     */
    @Override
    public Account withACL(AuthenticatedAccount raw) {
        return raw.getAccount()
                // Global role
                .withGlobalRole(
                        roleRepository.findGlobalRoleByAccount(raw.getAccount().id()).flatMap(rolesService::getGlobalRole)
                )
                // Project roles
                .withProjectRoles(
                        roleRepository.findProjectRoleAssociationsByAccount(raw.getAccount().id(), rolesService::getProjectRoleAssociation)
                )
                // Groups from the repository
                .withGroups(
                        accountGroupRepository.findByAccount(raw.getAccount().id()).stream()
                                .map(this::groupWithACL)
                                .collect(Collectors.toList())
                )
                // Group contributions
                .withGroups(
                        accountGroupContributors.stream()
                                .flatMap(accountGroupContributor -> accountGroupContributor.collectGroups(raw).stream())
                                .map(this::groupWithACL)
                                .collect(Collectors.toList())
                )
                // OK
                .lock();
    }

    @Override
    public List<Account> getAccounts() {
        securityService.checkGlobalFunction(AccountManagement.class);
        return accountRepository.findAll(authenticationSourceService::getAuthenticationSource)
                .stream()
                .map(account -> account.withGroups(accountGroupRepository.findByAccount(account.id())))
                .collect(Collectors.toList());
    }

    // Creates a built-in account ("password" authentication source mode) and
    // stores the encoded password alongside it.
    @Override
    public Account create(AccountInput input) {
        Account account = create(
                input,
                "password"
        );
        accountRepository.setPassword(account.id(), passwordEncoder.encode(input.getPassword()));
        return account;
    }

    @Override
    public Account create(AccountInput input, String authenticationSourceMode) {
        securityService.checkGlobalFunction(AccountManagement.class);
        // Creates the account
        Account account = Account.of(
                input.getName(),
                input.getFullName(),
                input.getEmail(),
                SecurityRole.USER,
                authenticationSourceService.getAuthenticationSource(authenticationSourceMode)
        );
        // Saves it
        account = accountRepository.newAccount(account);
        // Account groups
        accountGroupRepository.linkAccountToGroups(account.id(), input.getGroups());
        // OK
        return account;
    }

    @Override
    public Optional<Account> findUserByNameAndSource(String username, AuthenticationSourceProvider sourceProvider) {
        securityService.checkGlobalFunction(AccountManagement.class);
        return accountRepository.findUserByNameAndSource(username, sourceProvider);
    }

    /**
     * Updates an account; the built-in admin account cannot be renamed.
     * Returns a freshly re-read account so group links are up to date.
     */
    @Override
    public Account updateAccount(ID accountId, AccountInput input) {
        securityService.checkGlobalFunction(AccountManagement.class);
        // Gets the existing account
        Account account = getAccount(accountId);
        // Checks if default admin
        if (account.isDefaultAdmin() && !StringUtils.equals(account.getName(), input.getName())) {
            throw new AccountDefaultAdminCannotUpdateNameException();
        }
        // Updates it
        account = account.update(input);
        // Saves it
        accountRepository.saveAccount(account);
        // Updating the password?
        if (StringUtils.isNotBlank(input.getPassword())) {
            accountRepository.setPassword(accountId.getValue(), passwordEncoder.encode(input.getPassword()));
        }
        // Account groups
        accountGroupRepository.linkAccountToGroups(account.id(), input.getGroups());
        // OK
        return getAccount(accountId);
    }

    @Override
    public Ack deleteAccount(ID accountId) {
        // Security check
        securityService.checkGlobalFunction(AccountManagement.class);
        // Check the `admin` account
        if (getAccount(accountId).isDefaultAdmin()) {
            throw new AccountDefaultAdminCannotDeleteException();
        }
        // Deletion
        return accountRepository.deleteAccount(accountId);
    }

    @Override
    public List<AccountGroup> getAccountGroups() {
        securityService.checkGlobalFunction(AccountGroupManagement.class);
        return accountGroupRepository.findAll();
    }

    @Override
    public AccountGroup createGroup(NameDescription nameDescription) {
        securityService.checkGlobalFunction(AccountGroupManagement.class);
        // Creates the account group
        AccountGroup group = AccountGroup.of(nameDescription.getName(), nameDescription.getDescription());
        // Saves it
        return accountGroupRepository.newAccountGroup(group);
    }

    @Override
    public AccountGroup getAccountGroup(ID groupId) {
        securityService.checkGlobalFunction(AccountGroupManagement.class);
        return accountGroupRepository.getById(groupId);
    }

    @Override
    public AccountGroup updateGroup(ID groupId, NameDescription input) {
        securityService.checkGlobalFunction(AccountGroupManagement.class);
        AccountGroup group = getAccountGroup(groupId).update(input);
        accountGroupRepository.update(group);
        return group;
    }

    @Override
    public Ack deleteGroup(ID groupId) {
        securityService.checkGlobalFunction(AccountGroupManagement.class);
        return accountGroupRepository.delete(groupId);
    }

    /**
     * All groups, each flagged with whether the given account (if set)
     * belongs to it.
     */
    @Override
    public List<AccountGroupSelection> getAccountGroupsForSelection(ID accountId) {
        // Account groups or none
        Set<Integer> accountGroupIds = accountId.ifSet(accountGroupRepository::findByAccount)
                .orElse(Collections.emptyList())
                .stream()
                .map(Entity::id)
                .collect(Collectors.toSet());
        // Collection of groups with the selection
        return getAccountGroups().stream()
                .map(group -> AccountGroupSelection.of(group, accountGroupIds.contains(group.id())))
                .collect(Collectors.toList())
                ;
    }

    @Override
    public Collection<PermissionTarget> searchPermissionTargets(String token) {
        securityService.checkGlobalFunction(AccountManagement.class);
        List<PermissionTarget> targets = new ArrayList<>();
        // Users first
        targets.addAll(
                accountRepository.findByNameToken(token, authenticationSourceService::getAuthenticationSource)
                        .stream()
                        .map(Account::asPermissionTarget)
                        .collect(Collectors.toList())
        );
        // ... then groups
        targets.addAll(
                accountGroupRepository.findByNameToken(token)
                        .stream()
                        .map(AccountGroup::asPermissionTarget)
                        .collect(Collectors.toList())
        );
        // OK
        return targets;
    }

    @Override
    public Ack saveGlobalPermission(PermissionTargetType type, int id, PermissionInput input) {
        switch (type) {
            case ACCOUNT:
                securityService.checkGlobalFunction(AccountManagement.class);
                return roleRepository.saveGlobalRoleForAccount(id, input.getRole());
            case GROUP:
                securityService.checkGlobalFunction(AccountGroupManagement.class);
                return roleRepository.saveGlobalRoleForGroup(id, input.getRole());
            default:
                return Ack.NOK;
        }
    }

    @Override
    public Collection<GlobalPermission> getGlobalPermissions() {
        Collection<GlobalPermission> permissions = new ArrayList<>();
        // Users first
        permissions.addAll(
                accountRepository.findAll(authenticationSourceService::getAuthenticationSource)
                        .stream()
                        .map(this::getGlobalPermission)
                        .filter(Optional::isPresent)
                        .map(Optional::get)
                        .collect(Collectors.toList())
        );
        // ... then groups
        permissions.addAll(
                accountGroupRepository.findAll()
                        .stream()
                        .map(this::getGroupGlobalPermission)
                        .filter(Optional::isPresent)
                        .map(Optional::get)
                        .collect(Collectors.toList())
        );
        // OK
        return permissions;
    }

    @Override
    public Ack deleteGlobalPermission(PermissionTargetType type, int id) {
        switch (type) {
            case ACCOUNT:
                securityService.checkGlobalFunction(AccountManagement.class);
                return roleRepository.deleteGlobalRoleForAccount(id);
            case GROUP:
                securityService.checkGlobalFunction(AccountGroupManagement.class);
                return roleRepository.deleteGlobalRoleForGroup(id);
            default:
                return Ack.NOK;
        }
    }

    @Override
    public Collection<ProjectPermission> getProjectPermissions(ID projectId) {
        securityService.checkProjectFunction(projectId.getValue(), ProjectAuthorisationMgt.class);
        Collection<ProjectPermission> permissions = new ArrayList<>();
        // Users first
        permissions.addAll(
                accountRepository.findAll(authenticationSourceService::getAuthenticationSource)
                        .stream()
                        .map(account -> getProjectPermission(projectId, account))
                        .filter(Optional::isPresent)
                        .map(Optional::get)
                        .collect(Collectors.toList())
        );
        // ... then groups
        permissions.addAll(
                accountGroupRepository.findAll()
                        .stream()
                        .map(accountGroup -> getGroupProjectPermission(projectId, accountGroup))
                        .filter(Optional::isPresent)
                        .map(Optional::get)
                        .collect(Collectors.toList())
        );
        // OK
        return permissions;
    }

    @Override
    public Ack saveProjectPermission(ID projectId, PermissionTargetType type, int id, PermissionInput input) {
        securityService.checkProjectFunction(projectId.getValue(), ProjectAuthorisationMgt.class);
        switch (type) {
            case ACCOUNT:
                return roleRepository.saveProjectRoleForAccount(projectId.getValue(), id, input.getRole());
            case GROUP:
                return roleRepository.saveProjectRoleForGroup(projectId.getValue(), id, input.getRole());
            default:
                return Ack.NOK;
        }
    }

    @Override
    public Ack deleteProjectPermission(ID projectId, PermissionTargetType type, int id) {
        securityService.checkProjectFunction(projectId.getValue(), ProjectAuthorisationMgt.class);
        switch (type) {
            case ACCOUNT:
                return roleRepository.deleteProjectRoleForAccount(projectId.getValue(), id);
            case GROUP:
                return roleRepository.deleteProjectRoleForGroup(projectId.getValue(), id);
            default:
                return Ack.NOK;
        }
    }

    /**
     * Project roles of an account, restricted to the projects on which the
     * caller is granted authorisation management.
     */
    @Override
    public Collection<ProjectRoleAssociation> getProjectPermissionsForAccount(Account account) {
        return roleRepository.findProjectRoleAssociationsByAccount(
                account.id(),
                rolesService::getProjectRoleAssociation
        )
                .stream()
                // Filter by authorisation
                .filter(projectRoleAssociation -> securityService.isProjectFunctionGranted(
                        projectRoleAssociation.getProjectId(),
                        ProjectAuthorisationMgt.class
                ))
                // OK
                .collect(Collectors.toList());
    }

    @Override
    public Optional<GlobalRole> getGlobalRoleForAccount(Account account) {
        return roleRepository.findGlobalRoleByAccount(account.id())
                .flatMap(rolesService::getGlobalRole);
    }

    @Override
    public List<Account> getAccountsForGroup(AccountGroup accountGroup) {
        return accountRepository.getAccountsForGroup(accountGroup, authenticationSourceService::getAuthenticationSource);
    }

    @Override
    public Optional<GlobalRole> getGlobalRoleForAccountGroup(AccountGroup group) {
        return roleRepository.findGlobalRoleByGroup(group.id())
                .flatMap(rolesService::getGlobalRole);
    }

    /**
     * Project roles of a group, restricted to the projects on which the
     * caller is granted authorisation management.
     */
    @Override
    public Collection<ProjectRoleAssociation> getProjectPermissionsForAccountGroup(AccountGroup group) {
        return roleRepository.findProjectRoleAssociationsByGroup(
                group.id(),
                rolesService::getProjectRoleAssociation
        )
                .stream()
                // Filter by authorisation
                .filter(projectRoleAssociation -> securityService.isProjectFunctionGranted(
                        projectRoleAssociation.getProjectId(),
                        ProjectAuthorisationMgt.class
                ))
                // OK
                .collect(Collectors.toList());
    }

    @Override
    public Collection<AccountGroup> findAccountGroupsByGlobalRole(GlobalRole globalRole) {
        return roleRepository.findAccountGroupsByGlobalRole(globalRole, this::getAccountGroup);
    }

    @Override
    public Collection<Account> findAccountsByGlobalRole(GlobalRole globalRole) {
        return roleRepository.findAccountsByGlobalRole(globalRole, this::getAccount);
    }

    @Override
    public Collection<AccountGroup> findAccountGroupsByProjectRole(Project project, ProjectRole projectRole) {
        return roleRepository.findAccountGroupsByProjectRole(project, projectRole, this::getAccountGroup);
    }

    @Override
    public Collection<Account> findAccountsByProjectRole(Project project, ProjectRole projectRole) {
        return roleRepository.findAccountsByProjectRole(project, projectRole, this::getAccount);
    }

    // Project permission for a group, or empty when it has no role on the project.
    private Optional<ProjectPermission> getGroupProjectPermission(ID projectId, AccountGroup accountGroup) {
        Optional<ProjectRoleAssociation> roleAssociationOptional = roleRepository.findProjectRoleAssociationsByGroup(
                accountGroup.id(),
                projectId.getValue(),
                rolesService::getProjectRoleAssociation
        );
        if (roleAssociationOptional.isPresent()) {
            return Optional.of(
                    new ProjectPermission(
                            projectId,
                            accountGroup.asPermissionTarget(),
                            roleAssociationOptional.get().getProjectRole()
                    )
            );
        } else {
            return Optional.empty();
        }
    }

    // Project permission for an account, or empty when it has no role on the project.
    private Optional<ProjectPermission> getProjectPermission(ID projectId, Account account) {
        Optional<ProjectRoleAssociation> roleAssociationOptional = roleRepository.findProjectRoleAssociationsByAccount(
                account.id(),
                projectId.getValue(),
                rolesService::getProjectRoleAssociation
        );
        if (roleAssociationOptional.isPresent()) {
            return Optional.of(
                    new ProjectPermission(
                            projectId,
                            account.asPermissionTarget(),
                            roleAssociationOptional.get().getProjectRole()
                    )
            );
        } else {
            return Optional.empty();
        }
    }

    // Global permission of a group; empty when it has no resolvable global role.
    private Optional<GlobalPermission> getGroupGlobalPermission(AccountGroup group) {
        Optional<String> roleId = roleRepository.findGlobalRoleByGroup(group.id());
        if (roleId.isPresent()) {
            Optional<GlobalRole> globalRole = rolesService.getGlobalRole(roleId.get());
            if (globalRole.isPresent()) {
                return Optional.of(
                        new GlobalPermission(
                                group.asPermissionTarget(),
                                globalRole.get()
                        )
                );
            }
        }
        return Optional.empty();
    }

    // Global permission of an account; empty when it has no resolvable global role.
    private Optional<GlobalPermission> getGlobalPermission(Account account) {
        Optional<String> roleId = roleRepository.findGlobalRoleByAccount(account.id());
        if (roleId.isPresent()) {
            Optional<GlobalRole> globalRole = rolesService.getGlobalRole(roleId.get());
            if (globalRole.isPresent()) {
                return Optional.of(
                        new GlobalPermission(
                                account.asPermissionTarget(),
                                globalRole.get()
                        )
                );
            }
        }
        return Optional.empty();
    }

    @Override
    public Account getAccount(ID accountId) {
        securityService.checkGlobalFunction(AccountManagement.class);
        return accountRepository.getAccount(accountId, authenticationSourceService::getAuthenticationSource)
                .withGroups(accountGroupRepository.findByAccount(accountId.getValue()));
    }

    // Decorates a group with its global role and project roles, then locks it.
    protected AccountGroup groupWithACL(AccountGroup group) {
        return group
                // Global role
                .withGlobalRole(
                        roleRepository.findGlobalRoleByGroup(group.id()).flatMap(rolesService::getGlobalRole)
                )
                // Project roles
                .withProjectRoles(
                        roleRepository.findProjectRoleAssociationsByGroup(group.id(), rolesService::getProjectRoleAssociation)
                )
                // OK
                .lock();
    }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.yamlinput; import java.util.List; import org.apache.commons.vfs2.FileObject; import org.pentaho.di.core.Const; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.ResultFile; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.fileinput.FileInputList; import org.pentaho.di.core.row.RowDataUtil; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; /** * Read YAML files, parse them and convert them to rows and writes these to one or more output streams. 
 *
 * @author Samatar
 * @since 20-06-2007
 */
public class YamlInput extends BaseStep implements StepInterface {
  private static Class<?> PKG = YamlInputMeta.class; // for i18n purposes, needed by Translator2!!

  private YamlInputMeta meta;
  private YamlInputData data;

  public YamlInput( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }

  // Fails the step when required input files are missing or not readable.
  private void handleMissingFiles() throws KettleException {
    List<FileObject> nonExistantFiles = data.files.getNonExistantFiles();
    if ( nonExistantFiles.size() != 0 ) {
      String message = FileInputList.getRequiredFilesDescription( nonExistantFiles );
      logError( BaseMessages.getString( PKG, "YamlInput.Log.RequiredFilesTitle" ), BaseMessages.getString(
        PKG, "YamlInput.Log.RequiredFiles", message ) );
      throw new KettleException( BaseMessages.getString( PKG, "YamlInput.Log.RequiredFilesMissing", message ) );
    }
    List<FileObject> nonAccessibleFiles = data.files.getNonAccessibleFiles();
    if ( nonAccessibleFiles.size() != 0 ) {
      String message = FileInputList.getRequiredFilesDescription( nonAccessibleFiles );
      logError( BaseMessages.getString( PKG, "YamlInput.Log.RequiredFilesTitle" ), BaseMessages.getString(
        PKG, "YamlInput.Log.RequiredNotAccessibleFiles", message ) );
      throw new KettleException( BaseMessages.getString(
        PKG, "YamlInput.Log.RequiredNotAccessibleFilesMissing", message ) );
    }
  }

  // Reads the next incoming row (field mode), loads its YAML value into
  // data.yaml, and returns false when the input stream is exhausted or on error.
  private boolean readNextString() {
    try {
      data.readrow = getRow(); // Grab another row ...
      if ( data.readrow == null ) {
        // finished processing!
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "YamlInput.Log.FinishedProcessing" ) );
        }
        return false;
      }
      if ( first ) {
        // One-time initialisation: output row meta and the YAML field index.
        first = false;
        data.outputRowMeta = getInputRowMeta().clone();
        // Get total previous fields
        data.totalPreviousFields = data.outputRowMeta.size();
        data.totalOutFields = data.totalPreviousFields + data.nrInputFields;
        meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
        // Check is Yaml field is provided
        if ( Utils.isEmpty( meta.getYamlField() ) ) {
          logError( BaseMessages.getString( PKG, "YamlInput.Log.NoField" ) );
          throw new KettleException( BaseMessages.getString( PKG, "YamlInput.Log.NoField" ) );
        }
        // cache the position of the field
        data.indexOfYamlField = getInputRowMeta().indexOfValue( meta.getYamlField() );
        if ( data.indexOfYamlField < 0 ) {
          // The field is unreachable !
          logError( BaseMessages.getString( PKG, "YamlInput.Log.ErrorFindingField", meta.getYamlField() ) );
          throw new KettleException( BaseMessages.getString( PKG, "YamlInput.Exception.CouldnotFindField", meta
            .getYamlField() ) );
        }
      }
      // get field value
      String Fieldvalue = getInputRowMeta().getString( data.readrow, data.indexOfYamlField );
      getLinesInput();
      if ( log.isDetailed() ) {
        logDetailed( BaseMessages.getString( PKG, "YamlInput.Log.YAMLStream", meta.getYamlField(), Fieldvalue ) );
      }
      if ( meta.getIsAFile() ) {
        // source is a file.
        data.yaml = new YamlReader();
        data.yaml.loadFile( KettleVFS.getFileObject( Fieldvalue, getTransMeta() ) );
        addFileToResultFilesname( data.yaml.getFile() );
      } else {
        // The field value itself is the YAML document.
        data.yaml = new YamlReader();
        data.yaml.loadString( Fieldvalue );
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "YamlInput.Log.UnexpectedError", e.toString() ) );
      stopAll();
      logError( Const.getStackTracker( e ) );
      setErrors( 1 );
      return false;
    }
    return true;
  }

  // Registers the processed file in the transformation's result files,
  // when the step is configured to do so.
  private void addFileToResultFilesname( FileObject file ) throws Exception {
    if ( meta.addResultFile() ) {
      // Add this to the result file names...
      ResultFile resultFile =
        new ResultFile( ResultFile.FILE_TYPE_GENERAL, file, getTransMeta().getName(), getStepname() );
      resultFile.setComment( BaseMessages.getString( PKG, "YamlInput.Log.FileAddedResult" ) );
      addResultFile( resultFile );
    }
  }

  // Advances to the next file of the list (file mode) and loads it into
  // data.yaml; returns false when there is no file left or on error.
  // Empty files are skipped recursively when isIgnoreEmptyFile() is set.
  private boolean openNextFile() {
    try {
      if ( data.filenr >= data.files.nrOfFiles() ) {
        // finished processing!
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "YamlInput.Log.FinishedProcessing" ) );
        }
        return false;
      }
      // Get file to process from list
      data.file = data.files.getFile( data.filenr );
      // Move file pointer ahead!
      data.filenr++;
      if ( meta.isIgnoreEmptyFile() && data.file.getContent().getSize() == 0 ) {
        if ( isBasic() ) {
          logBasic( BaseMessages.getString( PKG, "YamlInput.Error.FileSizeZero", data.file.getName() ) );
        }
        // Let's open the next file
        openNextFile();
      } else {
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "YamlInput.Log.OpeningFile", data.file.toString() ) );
        }
        // We have a file
        // define a Yaml reader and load file
        data.yaml = new YamlReader();
        data.yaml.loadFile( data.file );
        addFileToResultFilesname( data.file );
        if ( isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "YamlInput.Log.FileOpened", data.file.toString() ) );
        }
      }
    } catch ( Exception e ) {
      logError( BaseMessages.getString( PKG, "YamlInput.Log.UnableToOpenFile", "" + data.filenr, data.file
        .toString(), e.toString() ) );
      stopAll();
      setErrors( 1 );
      logError( Const.getStackTracker( e ) );
      return false;
    }
    return true;
  }

  /**
   * Main row-processing loop entry point: lazily initialises the file list
   * and output meta on the first call (file mode), then emits one row per
   * call until the input is exhausted or the row limit is reached.
   */
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    if ( first && !meta.isInFields() ) {
      first = false;
      data.files = meta.getFiles( this );
      if ( !meta.isdoNotFailIfNoFile() && data.files.nrOfFiles() == 0 ) {
        throw new KettleException( BaseMessages.getString( PKG, "YamlInput.Log.NoFiles" ) );
      }
      handleMissingFiles();
      // Create the output row meta-data
      data.outputRowMeta = new RowMeta();
      data.totalPreviousFields = 0;
      data.totalOutFields = data.totalPreviousFields + data.nrInputFields;
      meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
      data.totalOutStreamFields = data.outputRowMeta.size();
    }
    // Grab a row
    Object[] r = getOneRow();
    if ( r == null ) {
      setOutputDone(); // signal end to receiver(s)
      return false; // end of data or error.
    }
    if ( log.isRowLevel() ) {
      logRowlevel( BaseMessages.getString( PKG, "YamlInput.Log.ReadRow", data.outputRowMeta.getString( r ) ) );
    }
    incrementLinesOutput();
    data.rownr++;
    putRow( data.outputRowMeta, r ); // copy row to output rowset(s);
    if ( meta.getRowLimit() > 0 && data.rownr > meta.getRowLimit() ) {
      // limit has been reached: stop now.
      setOutputDone();
      return false;
    }
    return true;
  }

  // Fetches the next output row, transparently moving on to the next file
  // (file mode) or the next incoming YAML value (field mode) as needed.
  // Returns null when there is nothing left to read.
  private Object[] getOneRow() throws KettleException {
    Object[] row = null;
    boolean rowAvailable = false;
    boolean fileOpened = false;
    if ( !meta.isInFields() ) {
      while ( data.file == null || ( data.file != null && !fileOpened && !rowAvailable ) ) {
        if ( data.file != null ) {
          // We have opened a file
          // read one row
          row = getRowData();
          if ( row == null ) {
            // No row extracted
            // let's see for the next file
            if ( !openNextFile() ) {
              return null;
            }
            fileOpened = true;
          } else {
            // We had extracted one row
            rowAvailable = true;
          }
        } else {
          // First time we get there
          // we have to open a new file
          if ( !openNextFile() ) {
            return null;
          }
          fileOpened = true;
        }
      }
    } else {
      while ( data.readrow == null || ( data.readrow != null && !fileOpened && !rowAvailable ) ) {
        if ( data.readrow != null ) {
          // We have red the incoming Yaml value
          // let's get one row
          row = getRowData();
          if ( row == null ) {
            // No row.. reader next row
            if ( !readNextString() ) {
              return null;
            }
            fileOpened = true;
          } else {
            // We have returned one row
            rowAvailable = true;
          }
        } else {
          // First time we get there
          // We have to parse incoming Yaml value
          if ( !readNextString() ) {
            return null;
          }
          fileOpened = true;
        }
        if ( data.readrow == null ) {
          return null;
        }
      }
    }
    if ( !rowAvailable ) {
      row = getRowData();
    }
    return row;
  }

  // Extracts one row from the current YAML reader and pads it with the
  // incoming fields (field mode) or resizes it to the output width (file mode).
  private Object[] getRowData() throws KettleException {
    // Build an empty row based on the meta-data
    Object[] outputRowData = null;
    try {
      // Create new row...
      outputRowData = data.yaml.getRow( data.rowMeta );
      if ( outputRowData == null ) {
        return null;
      }
      if ( data.readrow != null ) {
        // Field mode: append the extracted values after the incoming fields.
        outputRowData = RowDataUtil.addRowData( data.readrow, data.totalPreviousFields, outputRowData );
      } else {
        outputRowData = RowDataUtil.resizeArray( outputRowData, data.totalOutStreamFields );
      }
      int rowIndex = data.totalOutFields;
      // See if we need to add the filename to the row...
      if ( meta.includeFilename() && !Utils.isEmpty( meta.getFilenameField() ) ) {
        outputRowData[rowIndex++] = KettleVFS.getFilename( data.file );
      }
      // See if we need to add the row number to the row...
if ( meta.includeRowNumber() && !Utils.isEmpty( meta.getRowNumberField() ) ) { outputRowData[rowIndex++] = new Long( data.rownr ); } } catch ( Exception e ) { boolean sendToErrorRow = false; String errorMessage = null; if ( getStepMeta().isDoingErrorHandling() ) { sendToErrorRow = true; errorMessage = e.toString(); } else { logError( BaseMessages.getString( PKG, "YamlInput.ErrorInStepRunning", e.toString() ) ); setErrors( 1 ); stopAll(); logError( Const.getStackTracker( e ) ); setOutputDone(); // signal end to receiver(s) } if ( sendToErrorRow ) { // Simply add this row to the error row putError( getInputRowMeta(), outputRowData, 1, errorMessage, null, "YamlInput001" ); } } return outputRowData; } public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (YamlInputMeta) smi; data = (YamlInputData) sdi; if ( super.init( smi, sdi ) ) { data.rownr = 1L; data.nrInputFields = meta.getInputFields().length; data.rowMeta = new RowMeta(); for ( int i = 0; i < data.nrInputFields; i++ ) { YamlInputField field = meta.getInputFields()[i]; String path = environmentSubstitute( field.getPath() ); try { ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta( path, field.getType() ); valueMeta.setTrimType( field.getTrimType() ); data.rowMeta.addValueMeta( valueMeta ); } catch ( Exception e ) { log.logError( "Unable to create value meta", e ); return false; } } return true; } return false; } public void dispose( StepMetaInterface smi, StepDataInterface sdi ) { meta = (YamlInputMeta) smi; data = (YamlInputData) sdi; if ( data.yaml != null ) { try { data.yaml.close(); } catch ( Exception e ) { // Ignore } } if ( data.file != null ) { try { data.file.close(); } catch ( Exception e ) { // Ignore } } super.dispose( smi, sdi ); } }