// NOTE(review): the three lines that originally appeared here ("gt stringclasses 1",
// a "value | context" column header, and a markdown table rule) are dataset-extraction
// artifacts from the corpus dump, not part of any Java source file.
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.config.impl;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.ClientNetworkConfig;
import com.hazelcast.client.config.ClientSecurityConfig;
import com.hazelcast.client.config.ClientUserCodeDeploymentConfig;
import com.hazelcast.config.ClassFilter;
import com.hazelcast.config.DiscoveryConfig;
import com.hazelcast.config.GlobalSerializerConfig;
import com.hazelcast.config.ListenerConfig;
import com.hazelcast.config.PersistentMemoryConfig;
import com.hazelcast.config.PersistentMemoryDirectoryConfig;
import com.hazelcast.config.SerializationConfig;
import com.hazelcast.config.SerializerConfig;
import com.hazelcast.config.security.JaasAuthenticationConfig;
import com.hazelcast.config.security.RealmConfig;
import com.hazelcast.config.security.TokenIdentityConfig;
import com.hazelcast.internal.util.StringUtil;
import com.hazelcast.internal.yaml.YamlMapping;
import com.hazelcast.internal.yaml.YamlNode;
import com.hazelcast.internal.yaml.YamlScalar;
import org.w3c.dom.Node;
import java.nio.ByteOrder;
import java.util.Map;
import java.util.Properties;
import static com.hazelcast.config.security.TokenEncoding.getTokenEncoding;
import static com.hazelcast.internal.config.DomConfigHelper.childElements;
import static com.hazelcast.internal.config.DomConfigHelper.cleanNodeName;
import static com.hazelcast.internal.config.DomConfigHelper.getBooleanValue;
import static com.hazelcast.internal.config.DomConfigHelper.getIntegerValue;
import static com.hazelcast.internal.config.yaml.W3cDomUtil.getWrappedYamlMapping;
import static com.hazelcast.internal.yaml.YamlUtil.asScalar;
/**
 * YAML-specific specialization of {@code ClientDomConfigProcessor}.
 *
 * <p>The YAML client configuration is traversed through a W3C DOM adapter (see
 * {@code getWrappedYamlMapping}), so most overrides here differ from the XML
 * processor by reading values from child elements/scalars rather than XML
 * attributes, and by treating sequence entries as repeated child nodes.
 *
 * <p>NOTE(review): helpers such as {@code matches}, {@code getTextContent},
 * {@code getAttribute} and {@code getNamedItemNode} are inherited from the
 * superclass, which is not visible in this file; their exact matching rules
 * (e.g. strict vs. lenient name comparison) are assumed, not verified here.
 */
public class YamlClientDomConfigProcessor extends ClientDomConfigProcessor {
// Delegates to the superclass with the YAML-flavored query-cache helper.
public YamlClientDomConfigProcessor(boolean domLevel3, ClientConfig clientConfig) {
super(domLevel3, clientConfig, new QueryCacheYamlConfigBuilderHelper());
}
// Same as above, but propagates the 'strict' flag to both the superclass and
// the query-cache helper so lenient parsing stays consistent end to end.
public YamlClientDomConfigProcessor(boolean domLevel3, ClientConfig clientConfig, boolean strict) {
super(domLevel3, clientConfig, new QueryCacheYamlConfigBuilderHelper(strict), strict);
}
// In YAML the cluster members are a sequence; each child scalar is one address.
@Override
protected void handleClusterMembers(Node node, ClientNetworkConfig clientNetworkConfig) {
for (Node child : childElements(node)) {
clientNetworkConfig.addAddress(getTextContent(child));
}
}
// Each sequence entry is one outbound port definition (e.g. a port or range string).
@Override
protected void handleOutboundPorts(Node child, ClientNetworkConfig clientNetworkConfig) {
for (Node n : childElements(child)) {
String value = getTextContent(n);
clientNetworkConfig.addOutboundPortDefinition(value);
}
}
// Builds a SerializationConfig by dispatching on each child mapping key.
// Unrecognized keys are silently ignored (no else branch) — presumably the
// strict/lenient handling happens in matches(); TODO confirm.
@Override
@SuppressWarnings({"checkstyle:cyclomaticcomplexity"})
protected SerializationConfig parseSerialization(final Node node) {
SerializationConfig serializationConfig = new SerializationConfig();
for (Node child : childElements(node)) {
final String name = cleanNodeName(child);
if (matches("portable-version", name)) {
serializationConfig.setPortableVersion(getIntegerValue(name, getTextContent(child)));
} else if (matches("check-class-def-errors", name)) {
serializationConfig.setCheckClassDefErrors(getBooleanValue(getTextContent(child)));
} else if (matches("use-native-byte-order", name)) {
serializationConfig.setUseNativeByteOrder(getBooleanValue(getTextContent(child)));
} else if (matches("byte-order", name)) {
// Only the exact toString() forms ("BIG_ENDIAN"/"LITTLE_ENDIAN") are
// recognized; anything else silently falls back to BIG_ENDIAN below.
ByteOrder byteOrder = null;
if (ByteOrder.BIG_ENDIAN.toString().equals(getTextContent(child))) {
byteOrder = ByteOrder.BIG_ENDIAN;
} else if (ByteOrder.LITTLE_ENDIAN.toString().equals(getTextContent(child))) {
byteOrder = ByteOrder.LITTLE_ENDIAN;
}
serializationConfig.setByteOrder(byteOrder != null ? byteOrder : ByteOrder.BIG_ENDIAN);
} else if (matches("enable-compression", name)) {
serializationConfig.setEnableCompression(getBooleanValue(getTextContent(child)));
} else if (matches("enable-shared-object", name)) {
serializationConfig.setEnableSharedObject(getBooleanValue(getTextContent(child)));
} else if (matches("allow-unsafe", name)) {
serializationConfig.setAllowUnsafe(getBooleanValue(getTextContent(child)));
} else if (matches("allow-override-default-serializers", name)) {
serializationConfig.setAllowOverrideDefaultSerializers(getBooleanValue(getTextContent(child)));
} else if (matches("data-serializable-factories", name)) {
fillDataSerializableFactories(child, serializationConfig);
} else if (matches("portable-factories", name)) {
fillPortableFactories(child, serializationConfig);
} else if (matches("serializers", name)) {
fillSerializers(child, serializationConfig);
} else if (matches("global-serializer", name)) {
fillGlobalSerializer(child, serializationConfig);
} else if (matches("java-serialization-filter", name)) {
fillJavaSerializationFilter(child, serializationConfig);
}
}
return serializationConfig;
}
// In the YAML mapping the class name is exposed as a "class-name" attribute
// on the wrapped node rather than as text content.
@Override
protected String parseCustomLoadBalancerClassName(Node node) {
return getAttribute(node, "class-name");
}
// Reads the global serializer's class name and optional
// override-java-serialization flag (absent => false).
private void fillGlobalSerializer(Node child, SerializationConfig serializationConfig) {
GlobalSerializerConfig globalSerializerConfig = new GlobalSerializerConfig();
String attrClassName = getAttribute(child, "class-name");
String attrOverrideJavaSerialization = getAttribute(child, "override-java-serialization");
boolean overrideJavaSerialization =
attrOverrideJavaSerialization != null && getBooleanValue(attrOverrideJavaSerialization.trim());
globalSerializerConfig.setClassName(attrClassName);
globalSerializerConfig.setOverrideJavaSerialization(overrideJavaSerialization);
serializationConfig.setGlobalSerializerConfig(globalSerializerConfig);
}
// One SerializerConfig per sequence entry; "type-class" is the serialized type,
// "class-name" the serializer implementation. Neither is validated here.
@Override
protected void fillSerializers(Node node, SerializationConfig serializationConfig) {
for (Node child : childElements(node)) {
SerializerConfig serializerConfig = new SerializerConfig();
final String typeClassName = getAttribute(child, "type-class");
final String className = getAttribute(child, "class-name");
serializerConfig.setTypeClassName(typeClassName);
serializerConfig.setClassName(className);
serializationConfig.addSerializerConfig(serializerConfig);
}
}
// Registers DataSerializable factories keyed by numeric factory-id.
// Both attributes are mandatory; a non-numeric factory-id propagates
// NumberFormatException from Integer.parseInt.
@Override
protected void fillDataSerializableFactories(Node node, SerializationConfig serializationConfig) {
for (Node child : childElements(node)) {
final Node factoryIdNode = getNamedItemNode(child, "factory-id");
final Node classNameNode = getNamedItemNode(child, "class-name");
if (factoryIdNode == null) {
throw new IllegalArgumentException(
"'factory-id' attribute of 'data-serializable-factory' is required!");
}
if (classNameNode == null) {
throw new IllegalArgumentException(
"'class-name' attribute of 'data-serializable-factory' is required!");
}
int factoryId = Integer.parseInt(getTextContent(factoryIdNode));
String className = getTextContent(classNameNode);
serializationConfig.addDataSerializableFactoryClass(factoryId, className);
}
}
// Same contract as fillDataSerializableFactories, but for Portable factories.
@Override
protected void fillPortableFactories(Node node, SerializationConfig serializationConfig) {
for (Node child : childElements(node)) {
final Node factoryIdNode = getNamedItemNode(child, "factory-id");
final Node classNameNode = getNamedItemNode(child, "class-name");
if (factoryIdNode == null) {
throw new IllegalArgumentException("'factory-id' attribute of 'portable-factory' is required!");
}
if (classNameNode == null) {
throw new IllegalArgumentException("'class-name' attribute of 'portable-factory' is required!");
}
int factoryId = Integer.parseInt(getTextContent(factoryIdNode));
String className = getTextContent(classNameNode);
serializationConfig.addPortableFactoryClass(factoryId, className);
}
}
// Parses the java-serialization-filter lists: each of "class"/"package"/"prefix"
// is a YAML sequence whose entries are added individually.
@Override
protected ClassFilter parseClassFilterList(Node node) {
ClassFilter list = new ClassFilter();
for (Node typeNode : childElements(node)) {
final String name = cleanNodeName(typeNode);
if (matches("class", name)) {
for (Node classNode : childElements(typeNode)) {
list.addClasses(getTextContent(classNode));
}
} else if (matches("package", name)) {
for (Node packageNode : childElements(typeNode)) {
list.addPackages(getTextContent(packageNode));
}
} else if (matches("prefix", name)) {
for (Node prefixNode : childElements(typeNode)) {
list.addPrefixes(getTextContent(prefixNode));
}
}
}
return list;
}
// "classnames" and "jarpaths" are sequences of scalars; each entry is added
// as a deployable class name or jar path respectively.
@Override
protected void handleUserCodeDeploymentNode(ClientUserCodeDeploymentConfig userCodeDeploymentConfig, Node child) {
String childNodeName = cleanNodeName(child);
if (matches("classnames", childNodeName)) {
for (Node classNameNode : childElements(child)) {
userCodeDeploymentConfig.addClass(getTextContent(classNameNode));
}
} else if (matches("jarpaths", childNodeName)) {
for (Node jarPathNode : childElements(child)) {
userCodeDeploymentConfig.addJar(getTextContent(jarPathNode));
}
}
}
// Each listener entry is just a listener class name.
@Override
protected void handleListeners(Node node) {
for (Node child : childElements(node)) {
String className = getTextContent(child);
clientConfig.addListenerConfig(new ListenerConfig(className));
}
}
// The following three overrides adapt XML-style single-node handling to YAML
// mappings: each child of the mapping is one named config entry, delegated to
// the inherited per-node handler.
@Override
protected void handleNearCache(Node node) {
for (Node child : childElements(node)) {
handleNearCacheNode(child);
}
}
@Override
protected void handleReliableTopic(Node node) {
for (Node child : childElements(node)) {
handleReliableTopicNode(child);
}
}
@Override
protected void handleFlakeIdGenerator(Node node) {
for (Node child : childElements(node)) {
handleFlakeIdGeneratorNode(child);
}
}
// YAML proxy factories need no extra unwrapping; forward directly.
@Override
protected void handleProxyFactoryNode(Node child) {
handleProxyFactory(child);
}
// In the YAML DOM adapter the node name itself is the config entry's name
// (XML uses a "name" attribute instead).
@Override
protected String getName(Node node) {
return node.getNodeName();
}
// Properties are a YAML mapping: each child scalar's name is the key and its
// stringified value is the property value.
@Override
protected void fillProperties(Node node, Map<String, Comparable> properties) {
YamlMapping propertiesMapping = getWrappedYamlMapping(node);
for (YamlNode propNode : propertiesMapping.children()) {
YamlScalar propScalar = asScalar(propNode);
String key = propScalar.nodeName();
String value = propScalar.nodeValue().toString();
properties.put(key, value);
}
}
// Identical logic to the Map overload, but targeting java.util.Properties.
@Override
protected void fillProperties(Node node, Properties properties) {
YamlMapping propertiesMapping = getWrappedYamlMapping(node);
for (YamlNode propNode : propertiesMapping.children()) {
YamlScalar propScalar = asScalar(propNode);
String key = propScalar.nodeName();
String value = propScalar.nodeValue().toString();
properties.put(key, value);
}
}
// Mutates the network config's existing DiscoveryConfig in place.
@Override
protected void handleDiscoveryStrategies(Node node, ClientNetworkConfig clientNetworkConfig) {
DiscoveryConfig discoveryConfig = clientNetworkConfig.getDiscoveryConfig();
for (Node child : childElements(node)) {
String name = cleanNodeName(child);
if (matches("discovery-strategies", name)) {
handleDiscoveryStrategiesNode(child, discoveryConfig);
} else if (matches("node-filter", name)) {
handleDiscoveryNodeFilter(child, discoveryConfig);
}
}
}
// Each child is one discovery strategy definition.
private void handleDiscoveryStrategiesNode(Node node, DiscoveryConfig discoveryConfig) {
for (Node child : childElements(node)) {
handleDiscoveryStrategy(child, discoveryConfig);
}
}
// Token identity: "encoding" selects the TokenEncoding, "value" is the token.
@Override
protected void handleTokenIdentity(ClientSecurityConfig clientSecurityConfig, Node node) {
clientSecurityConfig.setTokenIdentityConfig(new TokenIdentityConfig(
getTokenEncoding(getAttribute(node, "encoding")), getAttribute(node, "value")));
}
// Each child mapping is one security realm.
@Override
protected void handleRealms(Node node, ClientSecurityConfig clientSecurityConfig) {
for (Node child : childElements(node)) {
handleRealm(child, clientSecurityConfig);
}
}
// JAAS: every child is one login-module definition, parsed by the inherited helper.
@Override
protected void handleJaasAuthentication(RealmConfig realmConfig, Node node) {
JaasAuthenticationConfig jaasAuthenticationConfig = new JaasAuthenticationConfig();
for (Node child : childElements(node)) {
jaasAuthenticationConfig.addLoginModuleConfig(handleLoginModule(child));
}
realmConfig.setJaasAuthenticationConfig(jaasAuthenticationConfig);
}
// Persistent-memory directory entry: "directory" is required by the config
// object; "numa-node" is optional and only parsed when non-blank.
@Override
protected void handlePersistentMemoryDirectory(PersistentMemoryConfig persistentMemoryConfig, Node dirNode) {
String directory = getTextContent(getNamedItemNode(dirNode, "directory"));
String numaNodeIdStr = getTextContent(getNamedItemNode(dirNode, "numa-node"));
if (!StringUtil.isNullOrEmptyAfterTrim(numaNodeIdStr)) {
int numaNodeId = getIntegerValue("numa-node", numaNodeIdStr);
persistentMemoryConfig.addDirectoryConfig(new PersistentMemoryDirectoryConfig(directory, numaNodeId));
} else {
persistentMemoryConfig.addDirectoryConfig(new PersistentMemoryDirectoryConfig(directory));
}
}
}
// ==== corpus file boundary (extraction artifact; originally a markdown table cell "| |") ====
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.data.conversion;
import org.apache.flink.annotation.Internal;
import org.apache.flink.table.api.TableException;
import org.apache.flink.table.catalog.DataTypeFactory;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.runtime.generated.CompileUtils;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.StructuredType;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;
import static org.apache.flink.table.types.extraction.ExtractionUtils.getStructuredField;
import static org.apache.flink.table.types.extraction.ExtractionUtils.getStructuredFieldGetter;
import static org.apache.flink.table.types.extraction.ExtractionUtils.getStructuredFieldSetter;
import static org.apache.flink.table.types.extraction.ExtractionUtils.hasInvokableConstructor;
import static org.apache.flink.table.types.extraction.ExtractionUtils.isStructuredFieldDirectlyReadable;
import static org.apache.flink.table.types.extraction.ExtractionUtils.isStructuredFieldDirectlyWritable;
import static org.apache.flink.table.types.extraction.ExtractionUtils.primitiveToWrapper;
import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldNames;
/** Converter for {@link StructuredType} of its implementation class. */
@Internal
@SuppressWarnings("unchecked")
public class StructuredObjectConverter<T> implements DataStructureConverter<RowData, T> {
private static final long serialVersionUID = 1L;
// Per-field converters between internal and external representations,
// positionally aligned with the structured type's fields.
private final DataStructureConverter<Object, Object>[] fieldConverters;
// Per-field accessors for reading values out of a RowData, same positional order.
private final RowData.FieldGetter[] fieldGetters;
// Class name and source of the generated converter; kept so the converter can
// be (re)compiled in open() — note generatedConverter below is transient.
private final String generatedName;
private final String generatedCode;
// Compiled at open() time; transient so a deserialized instance recompiles
// against its own ClassLoader instead of shipping a Class across the wire.
private transient DataStructureConverter<RowData, T> generatedConverter;
private StructuredObjectConverter(
DataStructureConverter<Object, Object>[] fieldConverters,
RowData.FieldGetter[] fieldGetters,
String generatedName,
String generatedCode) {
this.fieldConverters = fieldConverters;
this.fieldGetters = fieldGetters;
this.generatedName = generatedName;
this.generatedCode = generatedCode;
}
// Opens all child converters, compiles the generated converter class with the
// given ClassLoader, instantiates it reflectively via its
// (FieldGetter[], DataStructureConverter[]) constructor, and opens it too.
// Any failure (compile, reflection, ...) is wrapped in a TableException.
@Override
public void open(ClassLoader classLoader) {
for (DataStructureConverter<Object, Object> fieldConverter : fieldConverters) {
fieldConverter.open(classLoader);
}
try {
final Class<?> compiledConverter =
CompileUtils.compile(classLoader, generatedName, generatedCode);
generatedConverter =
(DataStructureConverter<RowData, T>)
compiledConverter
.getConstructor(
RowData.FieldGetter[].class,
DataStructureConverter[].class)
.newInstance(fieldGetters, fieldConverters);
} catch (Throwable t) {
throw new TableException("Error while generating structured type converter.", t);
}
generatedConverter.open(classLoader);
}
// Both conversion directions simply delegate to the generated converter;
// open() must have been called first or generatedConverter is null.
@Override
public RowData toInternal(T external) {
return generatedConverter.toInternal(external);
}
@Override
public T toExternal(RowData internal) {
return generatedConverter.toExternal(internal);
}
// --------------------------------------------------------------------------------------------
// Factory method
// --------------------------------------------------------------------------------------------
// Monotonic counter used to give every generated converter class a unique name
// within this JVM (see createOrError).
private static final AtomicInteger nextUniqueClassId = new AtomicInteger();
// Public entry point: wraps any creation failure in a TableException that
// names the offending data type.
public static StructuredObjectConverter<?> create(DataType dataType) {
try {
return createOrError(dataType);
} catch (Throwable t) {
throw new TableException(
String.format("Could not create converter for structured type '%s'.", dataType),
t);
}
}
/**
 * Creates a {@link DataStructureConverter} for the given structured type.
 *
 * <p>Note: We do not perform validation if data type and structured type implementation match.
 * This must have been done earlier in the {@link DataTypeFactory}.
 */
@SuppressWarnings("RedundantCast")
private static StructuredObjectConverter<?> createOrError(DataType dataType) {
// One converter, one field getter and one conversion class per child field,
// all in field-declaration order.
final List<DataType> fields = dataType.getChildren();
final DataStructureConverter<Object, Object>[] fieldConverters =
fields.stream()
.map(
dt ->
(DataStructureConverter<Object, Object>)
DataStructureConverters.getConverter(dt))
.toArray(DataStructureConverter[]::new);
final RowData.FieldGetter[] fieldGetters =
IntStream.range(0, fields.size())
.mapToObj(
pos ->
RowData.createFieldGetter(
fields.get(pos).getLogicalType(), pos))
.toArray(RowData.FieldGetter[]::new);
final Class<?>[] fieldClasses =
fields.stream().map(DataType::getConversionClass).toArray(Class[]::new);
final StructuredType structuredType = (StructuredType) dataType.getLogicalType();
final Class<?> implementationClass =
structuredType.getImplementationClass().orElseThrow(IllegalStateException::new);
// Dots are replaced by '$' so the generated name is a valid simple class name;
// the unique id avoids clashes when the same type is converted repeatedly.
final int uniqueClassId = nextUniqueClassId.getAndIncrement();
final String converterName =
String.format(
"%s$%s$Converter",
implementationClass.getName().replace('.', '$'), uniqueClassId);
final String converterCode =
generateCode(
converterName,
implementationClass,
getFieldNames(structuredType).toArray(new String[0]),
fieldClasses);
return new StructuredObjectConverter<>(
fieldConverters, fieldGetters, converterName, converterCode);
}
// Emits the Java source of the converter class compiled in open(). The class
// has exactly the reflective constructor open() expects. toExternal() prefers
// an all-args constructor when one is invokable, otherwise falls back to a
// no-args constructor plus per-field setter/field writes.
private static String generateCode(
String converterName, Class<?> clazz, String[] fieldNames, Class<?>[] fieldClasses) {
final int fieldCount = fieldClasses.length;
final StringBuilder sb = new StringBuilder();
// we ignore checkstyle here for readability and preserving indention
line(
sb,
"public class ",
converterName,
" implements ",
DataStructureConverter.class,
" {");
line(sb, " private final ", RowData.FieldGetter.class, "[] fieldGetters;");
line(sb, " private final ", DataStructureConverter.class, "[] fieldConverters;");
line(
sb,
" public ",
converterName,
"(",
RowData.FieldGetter.class,
"[] fieldGetters, ",
DataStructureConverter.class,
"[] fieldConverters) {");
line(sb, " this.fieldGetters = fieldGetters;");
line(sb, " this.fieldConverters = fieldConverters;");
line(sb, " }");
line(sb, " public ", Object.class, " toInternal(", Object.class, " o) {");
line(sb, " final ", clazz, " external = (", clazz, ") o;");
line(
sb,
" final ",
GenericRowData.class,
" genericRow = new ",
GenericRowData.class,
"(",
fieldCount,
");");
for (int pos = 0; pos < fieldCount; pos++) {
line(sb, " ", getterExpr(clazz, pos, fieldNames[pos], fieldClasses[pos]), ";");
}
line(sb, " return genericRow;");
line(sb, " }");
line(sb, " public ", Object.class, " toExternal(", Object.class, " o) {");
line(sb, " final ", RowData.class, " internal = (", RowData.class, ") o;");
if (hasInvokableConstructor(clazz, fieldClasses)) {
line(sb, " final ", clazz, " structured = new ", clazz, "(");
for (int pos = 0; pos < fieldCount; pos++) {
line(
sb,
" ",
parameterExpr(pos, fieldClasses[pos]),
(pos < fieldCount - 1) ? ", " : "");
}
line(sb, " );");
} else {
line(sb, " final ", clazz, " structured = new ", clazz, "();");
for (int pos = 0; pos < fieldCount; pos++) {
line(sb, " ", setterExpr(clazz, pos, fieldNames[pos]), ";");
}
}
line(sb, " return structured;");
line(sb, " }");
line(sb, "}");
return sb.toString();
}
// Generates one "genericRow.setField(pos, fieldConverters[pos].toInternalOrNull(...))"
// statement, reading the field either directly or through its getter.
private static String getterExpr(
Class<?> implementationClass, int pos, String fieldName, Class<?> fieldClass) {
final Field field = getStructuredField(implementationClass, fieldName);
String accessExpr;
if (isStructuredFieldDirectlyReadable(field)) {
// field is accessible without getter
accessExpr = expr("external.", field.getName(), "");
} else {
// field is accessible with a getter
final Method getter =
getStructuredFieldGetter(implementationClass, field)
.orElseThrow(
() ->
fieldNotReadableException(
implementationClass, fieldName));
accessExpr = expr("external.", getter.getName(), "()");
}
accessExpr = castExpr(accessExpr, fieldClass);
return expr(
"genericRow.setField(",
pos,
", fieldConverters[",
pos,
"].toInternalOrNull(",
accessExpr,
"))");
}
private static IllegalStateException fieldNotReadableException(
Class<?> implementationClass, String fieldName) {
return new IllegalStateException(
String.format(
"Could not find a getter for field '%s' in class '%s'. "
+ "Make sure that the field is readable (via public visibility or getter).",
fieldName, implementationClass.getName()));
}
private static IllegalStateException fieldNotWritableException(
Class<?> implementationClass, String fieldName) {
return new IllegalStateException(
String.format(
"Could not find a setter for field '%s' in class '%s'. "
+ "Make sure that the field is writable (via public visibility, "
+ "setter, or full constructor).",
fieldName, implementationClass.getName()));
}
// Generates one constructor argument: convert internal -> external, then cast
// to the declared conversion class so Janino resolves the constructor overload.
private static String parameterExpr(int pos, Class<?> fieldClass) {
final String conversionExpr =
expr(
"fieldConverters[",
pos,
"].toExternalOrNull(fieldGetters[",
pos,
"].getFieldOrNull(internal))");
return castExpr(conversionExpr, fieldClass);
}
// Generates one field assignment or setter call for the no-args-constructor path.
private static String setterExpr(Class<?> implementationClass, int pos, String fieldName) {
final Field field = getStructuredField(implementationClass, fieldName);
final String conversionExpr =
expr(
"fieldConverters[",
pos,
"].toExternalOrNull(fieldGetters[",
pos,
"].getFieldOrNull(internal))");
if (isStructuredFieldDirectlyWritable(field)) {
// field is accessible without setter
return expr(
"structured.",
field.getName(),
" = ",
castExpr(conversionExpr, field.getType()));
} else {
// field is accessible with a setter
final Method setter =
getStructuredFieldSetter(implementationClass, field)
.orElseThrow(
() ->
fieldNotWritableException(
implementationClass, fieldName));
return expr(
"structured.",
setter.getName(),
"(",
castExpr(conversionExpr, setter.getParameterTypes()[0]),
")");
}
}
private static String castExpr(String expr, Class<?> clazz) {
// help Janino to box primitive types and fix missing generics
return expr("((", primitiveToWrapper(clazz), ") ", expr, ")");
}
// Concatenates parts into a single-line expression; Class parts are rendered
// via getCanonicalName() so the generated source references them by full name.
private static String expr(Object... parts) {
final StringBuilder sb = new StringBuilder();
for (Object part : parts) {
if (part instanceof Class) {
sb.append(((Class<?>) part).getCanonicalName());
} else {
sb.append(part);
}
}
return sb.toString();
}
// Same as expr(), but appends the result to sb and terminates the line.
private static void line(StringBuilder sb, Object... parts) {
for (Object part : parts) {
if (part instanceof Class) {
sb.append(((Class<?>) part).getCanonicalName());
} else {
sb.append(part);
}
}
sb.append("\n");
}
}
// ==== corpus file boundary (extraction artifact; originally a markdown table cell "| |") ====
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.rel.hint;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterRule;
import org.apache.calcite.util.Litmus;
import org.apache.calcite.util.trace.CalciteTrace;
import com.google.common.collect.ImmutableMap;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static java.util.Objects.requireNonNull;
/**
* A collection of {@link HintStrategy}s.
*
* <p>Every hint must register a {@link HintStrategy} into the collection.
* With a hint strategies mapping, the hint strategy table is used as a tool
* to decide i) if the given hint was registered; ii) which hints are suitable for the rel with
* a given hints collection; iii) if the hint options are valid.
*
* <p>The hint strategy table is immutable. To create one, use
* {@link #builder()}.
*
* <p>Match of hint name is case insensitive.
*
* @see HintPredicate
*/
public class HintStrategyTable {
//~ Static fields/initializers ---------------------------------------------
/** Empty strategies. */
public static final HintStrategyTable EMPTY =
new HintStrategyTable(ImmutableMap.of(), HintErrorLogger.INSTANCE);
//~ Instance fields --------------------------------------------------------
/** Mapping from hint name to {@link HintStrategy}. */
private final Map<Key, HintStrategy> strategies;
/** Handler for the hint error. */
private final Litmus errorHandler;
private HintStrategyTable(Map<Key, HintStrategy> strategies, Litmus litmus) {
this.strategies = ImmutableMap.copyOf(strategies);
this.errorHandler = litmus;
}
//~ Methods ----------------------------------------------------------------
/**
* Applies this {@link HintStrategyTable} hint strategies to the given relational
* expression and the {@code hints}.
*
* @param hints Hints that may attach to the {@code rel}
* @param rel Relational expression
* @return A hint list that can be attached to the {@code rel}
*/
public List<RelHint> apply(List<RelHint> hints, RelNode rel) {
return hints.stream()
.filter(relHint -> canApply(relHint, rel))
.collect(Collectors.toList());
}
private boolean canApply(RelHint hint, RelNode rel) {
final Key key = Key.of(hint.hintName);
assert this.strategies.containsKey(key) : "hint " + hint.hintName + " must be present";
return this.strategies.get(key).predicate.apply(hint, rel);
}
/**
* Checks if the given hint is valid.
*
* @param hint The hint
*/
public boolean validateHint(RelHint hint) {
final Key key = Key.of(hint.hintName);
boolean hintExists = this.errorHandler.check(
this.strategies.containsKey(key),
"Hint: {} should be registered in the {}",
hint.hintName,
this.getClass().getSimpleName());
if (!hintExists) {
return false;
}
final HintStrategy strategy = strategies.get(key);
if (strategy != null && strategy.hintOptionChecker != null) {
return strategy.hintOptionChecker.checkOptions(hint, this.errorHandler);
}
return true;
}
/** Returns whether the {@code hintable} has hints that imply
* the given {@code rule} should be excluded. */
public boolean isRuleExcluded(Hintable hintable, RelOptRule rule) {
final List<RelHint> hints = hintable.getHints();
if (hints.size() == 0) {
return false;
}
for (RelHint hint : hints) {
final Key key = Key.of(hint.hintName);
assert this.strategies.containsKey(key) : "hint " + hint.hintName + " must be present";
final HintStrategy strategy = strategies.get(key);
if (strategy.excludedRules.contains(rule)) {
return isDesiredConversionPossible(strategy.converterRules, hintable);
}
}
return false;
}
/** Returns whether the {@code hintable} has hints that imply
* the given {@code hintable} can make conversion successfully. */
private static boolean isDesiredConversionPossible(
Set<ConverterRule> converterRules,
Hintable hintable) {
// If no converter rules are specified, we assume the conversion is possible.
return converterRules.size() == 0
|| converterRules.stream()
.anyMatch(converterRule -> converterRule.convert((RelNode) hintable) != null);
}
/**
* Returns a {@code HintStrategyTable} builder.
*/
public static Builder builder() {
return new Builder();
}
//~ Inner Class ------------------------------------------------------------
/**
* Key used to keep the strategies which ignores the case sensitivity.
*/
private static class Key {
private final String name;
private Key(String name) {
this.name = name;
}
static Key of(String name) {
return new Key(name.toLowerCase(Locale.ROOT));
}
@Override public boolean equals(@Nullable Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Key key = (Key) o;
return name.equals(key.name);
}
@Override public int hashCode() {
return this.name.hashCode();
}
}
/**
* Builder for {@code HintStrategyTable}.
*/
public static class Builder {
private final Map<Key, HintStrategy> strategies = new HashMap<>();
private Litmus errorHandler = HintErrorLogger.INSTANCE;
public Builder hintStrategy(String hintName, HintPredicate strategy) {
this.strategies.put(Key.of(hintName),
HintStrategy.builder(requireNonNull(strategy, "HintPredicate")).build());
return this;
}
public Builder hintStrategy(String hintName, HintStrategy entry) {
this.strategies.put(Key.of(hintName), requireNonNull(entry, "HintStrategy"));
return this;
}
/**
* Sets an error handler to customize the hints error handling.
*
* <p>The default behavior is to log warnings.
*
* @param errorHandler The handler
*/
public Builder errorHandler(Litmus errorHandler) {
this.errorHandler = errorHandler;
return this;
}
public HintStrategyTable build() {
return new HintStrategyTable(
this.strategies,
this.errorHandler);
}
}
/** Implementation of {@link org.apache.calcite.util.Litmus} that returns
* a status code, it logs warnings for fail check and does not throw. */
public static class HintErrorLogger implements Litmus {
private static final Logger LOGGER = CalciteTrace.PARSER_LOGGER;
public static final HintErrorLogger INSTANCE = new HintErrorLogger();
@Override public boolean fail(@Nullable String message, @Nullable Object... args) {
LOGGER.warn(requireNonNull(message, "message"), args);
return false;
}
@Override public boolean succeed() {
return true;
}
@Override public boolean check(boolean condition, @Nullable String message,
@Nullable Object... args) {
if (condition) {
return succeed();
} else {
return fail(message, args);
}
}
}
}
// ==== corpus file boundary (extraction artifact; originally a markdown table cell "| |") ====
package org.zstack.network.service.virtualrouter;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.zstack.appliancevm.*;
import org.zstack.appliancevm.ApplianceVmConstant.Params;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.db.Q;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.defer.Deferred;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.timeout.ApiTimeoutManager;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.network.l2.L2NetworkGetVniExtensionPoint;
import org.zstack.header.network.l2.L2NetworkVO;
import org.zstack.header.network.l2.L2NetworkVO_;
import org.zstack.header.network.l3.*;
import org.zstack.header.network.service.VirtualRouterAfterAttachNicExtensionPoint;
import org.zstack.header.network.service.VirtualRouterAfterDetachNicExtensionPoint;
import org.zstack.header.network.service.VirtualRouterBeforeDetachNicExtensionPoint;
import org.zstack.header.rest.JsonAsyncRESTCallback;
import org.zstack.header.rest.RESTFacade;
import org.zstack.header.vm.*;
import org.zstack.network.service.virtualrouter.VirtualRouterCommands.PingCmd;
import org.zstack.network.service.virtualrouter.VirtualRouterCommands.PingRsp;
import org.zstack.network.service.virtualrouter.VirtualRouterConstant.Param;
import org.zstack.network.service.virtualrouter.ha.VirtualRouterHaBackend;
import java.util.*;
import static org.zstack.core.Platform.operr;
import static org.zstack.core.Platform.inerr;
import static org.zstack.network.service.virtualrouter.VirtualRouterNicMetaData.ADDITIONAL_PUBLIC_NIC_MASK;
import static org.zstack.network.service.virtualrouter.VirtualRouterNicMetaData.GUEST_NIC_MASK;
/**
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class VirtualRouter extends ApplianceVmBase {
// Permit the reconnect operations only while the VM is in the Running state.
static {
    allowedOperations.addState(VmInstanceState.Running, APIReconnectVirtualRouterMsg.class.getName());
    allowedOperations.addState(VmInstanceState.Running, ReconnectVirtualRouterVmMsg.class.getName());
}

@Autowired
protected VirtualRouterManager vrMgr;
@Autowired
protected RESTFacade restf;
@Autowired
protected ErrorFacade errf;
@Autowired
protected ApiTimeoutManager apiTimeoutManager;
@Autowired
protected VirtualRouterHaBackend haBackend;

// Inventory view of this virtual router. Only the VirtualRouterVmVO
// constructor populates it — NOTE(review): it stays null when the
// ApplianceVmVO constructor is used; confirm how callers obtain instances.
protected VirtualRouterVmInventory vr;

/**
 * Wraps a generic appliance VM record; {@link #vr} is left unset.
 */
public VirtualRouter(ApplianceVmVO vo) {
    super(vo);
}

/**
 * Wraps a virtual router VM record and caches its inventory in {@link #vr}.
 */
public VirtualRouter(VirtualRouterVmVO vo) {
    super(vo);
    vr = new VirtualRouterVmInventory(vo);
}

/** Returns the inventory of this VM as a {@code VirtualRouterVmInventory}. */
@Override
protected VmInstanceInventory getSelfInventory() {
    return VirtualRouterVmInventory.valueOf(getSelf());
}

// The lifecycle flow chains below are delegated to the VirtualRouterManager
// so they can be assembled and extended in one central place.
@Override
protected List<Flow> getPostCreateFlows() {
    return vrMgr.getPostCreateFlows();
}

@Override
protected List<Flow> getPostStartFlows() {
    return vrMgr.getPostStartFlows();
}

@Override
protected List<Flow> getPostStopFlows() {
    return vrMgr.getPostStopFlows();
}

@Override
protected List<Flow> getPostRebootFlows() {
    return vrMgr.getPostRebootFlows();
}

@Override
protected List<Flow> getPostDestroyFlows() {
    return vrMgr.getPostDestroyFlows();
}

@Override
protected List<Flow> getPostMigrateFlows() {
    return vrMgr.getPostMigrateFlows();
}

/** Flow chain executed by {@link #reconnect}; supplied by the manager. */
protected FlowChain getReconnectChain() {
    return vrMgr.getReconnectFlowChain();
}
// Dispatches API messages handled specifically by the virtual router;
// everything else falls through to the appliance VM base class.
@Override
protected void handleApiMessage(APIMessage msg) {
    if (msg instanceof APIReconnectVirtualRouterMsg) {
        handle((APIReconnectVirtualRouterMsg) msg);
    } else {
        super.handleApiMessage(msg);
    }
}

// Dispatches internal (non-API) messages specific to the virtual router.
@Override
protected void handleLocalMessage(Message msg) {
    if (msg instanceof VirtualRouterAsyncHttpCallMsg) {
        handle((VirtualRouterAsyncHttpCallMsg) msg);
    } else if (msg instanceof ReconnectVirtualRouterVmMsg) {
        handle((ReconnectVirtualRouterVmMsg) msg);
    } else if (msg instanceof PingVirtualRouterVmMsg) {
        handle((PingVirtualRouterVmMsg) msg);
    } else {
        super.handleLocalMessage(msg);
    }
}
/**
 * Pings the virtual router agent over HTTP to determine connectivity.
 *
 * <p>The reply tells the caller whether the router is connected and whether a
 * reconnect should be issued. The task is serialized on the router's sync
 * thread so it cannot race lifecycle operations.
 */
private void handle(final PingVirtualRouterVmMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final PingVirtualRouterVmReply reply = new PingVirtualRouterVmReply();
            // Don't ping (and don't request a reconnect) when the VM is not
            // running or a reconnect is already in progress (status Connecting).
            if (VmInstanceState.Running != self.getState() || ApplianceVmStatus.Connecting == getSelf().getStatus()) {
                reply.setDoReconnect(false);
                bus.reply(msg, reply);
                chain.next();
                return;
            }

            PingCmd cmd = new PingCmd();
            cmd.setUuid(self.getUuid());
            restf.asyncJsonPost(buildUrl(vr.getManagementNic().getIp(), VirtualRouterConstant.VR_PING), cmd, new JsonAsyncRESTCallback<PingRsp>(msg, chain) {
                @Override
                public void fail(ErrorCode err) {
                    reply.setDoReconnect(true);
                    reply.setConnected(false);
                    // BUG FIX: the original logged reply.getError(), which is never
                    // set on this path and always printed "null"; log the actual
                    // failure cause instead.
                    logger.warn(String.format("failed to ping the virtual router vm[uuid:%s], %s. We will reconnect it soon", self.getUuid(), err));
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void success(PingRsp ret) {
                    reply.setDoReconnect(true);
                    if (!ret.isSuccess()) {
                        logger.warn(String.format("failed to ping the virtual router vm[uuid:%s], %s. We will reconnect it soon", self.getUuid(), ret.getError()));
                        reply.setConnected(false);
                    } else {
                        // The agent echoes the uuid it was started with; a mismatch
                        // means the agent restarted and must be reconfigured.
                        boolean connected = self.getUuid().equals(ret.getUuid());
                        if (!connected) {
                            logger.warn(String.format("the uuid reported by the virtual router vm[uuid:%s] agent does not match, it's probably caused by the agent restart. We will issue a reconnect soon", self.getUuid()));
                        } else {
                            connected = ApplianceVmStatus.Connected == getSelf().getStatus();
                        }

                        reply.setConnected(connected);
                        reply.setHaStatus(ret.getHaStatus());
                        // Propagate the agent's self-reported health as canonical events.
                        if ((!ret.getHealthy()) && (ret.getHealthDetail() != null)) {
                            fireServiceUnhealthyCanonicalEvent(inerr("virtual router %s unhealthy, detail %s", getSelf().getUuid(), ret.getHealthDetail()));
                        } else {
                            fireServicehealthyCanonicalEvent();
                        }
                    }

                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public Class<PingRsp> getReturnClass() {
                    return PingRsp.class;
                }
            });
        }

        @Override
        public String getName() {
            return "ping-virtual-router";
        }
    });
}
/**
 * Handles an internal reconnect request: validates the VM state, then runs
 * the reconnect flow chain, serialized on the router's sync thread.
 */
private void handle(final ReconnectVirtualRouterVmMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final ReconnectVirtualRouterVmReply reply = new ReconnectVirtualRouterVmReply();
            // Reload from DB: the VO may have changed while queued.
            refreshVO();
            ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR);
            if (allowed != null) {
                reply.setError(allowed);
                bus.reply(msg, reply);
                chain.next();
                return;
            }

            reconnect(new Completion(msg, chain) {
                @Override
                public void success() {
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    reply.setError(errorCode);
                    bus.reply(msg, reply);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("reconnect-virtual-router-%s", self.getUuid());
        }
    });
}

/** Builds the agent URL for the given management IP and path; delegates to the manager. */
protected String buildUrl(String mgmtIp, String path) {
    return vrMgr.buildUrl(mgmtIp, path);
}
/**
 * Forwards an arbitrary command to the virtual router agent over HTTP and
 * relays the raw JSON response back to the sender.
 *
 * <p>Commands are serialized on a dedicated "<syncThreadName>-commands" queue
 * whose parallelism is controlled by the manager (see getSyncLevel).
 */
private void handle(final VirtualRouterAsyncHttpCallMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return String.format("%s-commands", syncThreadName);
        }

        @Override
        public void run(final SyncTaskChain chain) {
            refreshVO();
            final VirtualRouterAsyncHttpCallReply reply = new VirtualRouterAsyncHttpCallReply();
            // Optional pre-flight checks requested by the sender: the VM must be
            // running and the agent connected before the call can be made.
            // NOTE(review): these throws happen inside the chain task without
            // calling chain.next(); verify the task framework releases the
            // queue on exception.
            if (msg.isCheckStatus() && getSelf().getState() != VmInstanceState.Running) {
                throw new OperationFailureException(operr("the virtual router[name:%s, uuid:%s, current state:%s] is not running," +
                        "and cannot perform required operation. Please retry your operation later once it is running", self.getName(), self.getUuid(), self.getState()));
            }

            if (msg.isCheckStatus() && getSelf().getStatus() != ApplianceVmStatus.Connected) {
                throw new OperationFailureException(operr("virtual router[uuid:%s] is in status of %s that cannot make http call to %s",
                        self.getUuid(), getSelf().getStatus(), msg.getPath()));
            }

            if (vr.getManagementNic() == null) {
                throw new OperationFailureException(operr("virtual router[uuid:%s] has no management nic that cannot make http call to %s",
                        self.getUuid(), msg.getPath()));
            }

            restf.asyncJsonPost(buildUrl(vr.getManagementNic().getIp(), msg.getPath()), msg.getCommand(), new JsonAsyncRESTCallback<LinkedHashMap>(msg, chain) {
                @Override
                public void fail(ErrorCode err) {
                    reply.setError(err);
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void success(LinkedHashMap ret) {
                    // The raw map is returned; callers deserialize it via castReply/toResponse.
                    reply.setResponse(ret);
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public Class<LinkedHashMap> getReturnClass() {
                    return LinkedHashMap.class;
                }
            });
        }

        @Override
        protected int getSyncLevel() {
            return vrMgr.getParallelismDegree(self.getUuid());
        }

        @Override
        public String getName() {
            return getSyncSignature();
        }
    });
}
/**
 * Handles the API reconnect request. Mirrors the internal reconnect handler
 * but publishes an API event instead of replying to a message.
 */
private void handle(final APIReconnectVirtualRouterMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final APIReconnectVirtualRouterEvent evt = new APIReconnectVirtualRouterEvent(msg.getId());
            // Reload from DB: the VO may have changed while queued.
            refreshVO();
            ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR);
            if (allowed != null) {
                evt.setError(allowed);
                bus.publish(evt);
                chain.next();
                return;
            }

            reconnect(new Completion(msg, chain) {
                @Override
                public void success() {
                    evt.setInventory((ApplianceVmInventory) getSelfInventory());
                    bus.publish(evt);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    evt.setError(errorCode);
                    bus.publish(evt);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("reconnect-virtual-router-%s", self.getUuid());
        }
    });
}
/**
 * Runs the reconnect flow chain for this router.
 *
 * <p>The status is first moved to Connecting; on success it becomes Connected
 * and the VO is reloaded; on rollback it is set to Disconnected and a
 * disconnected canonical event is fired. If the router was Connected before
 * and the chain fails, a disconnected canonical event is also fired.
 */
private void reconnect(final Completion completion) {
    // Remembered so a failure only fires the disconnect event when the router
    // was actually connected beforehand.
    ApplianceVmStatus oldStatus = getSelf().getStatus();

    FlowChain chain = getReconnectChain();
    chain.setName(String.format("reconnect-virtual-router-%s", self.getUuid()));
    // Seed the chain context consumed by the manager-supplied flows.
    chain.getData().put(VirtualRouterConstant.Param.VR.toString(), vr);
    chain.getData().put(Param.IS_RECONNECT.toString(), Boolean.TRUE.toString());
    chain.getData().put(Params.isReconnect.toString(), Boolean.TRUE.toString());
    chain.getData().put(Params.managementNicIp.toString(), vr.getManagementNic().getIp());
    chain.getData().put(Params.applianceVmUuid.toString(), self.getUuid());

    // Pass along the firewall rules of this appliance so they can be re-applied.
    SimpleQuery<ApplianceVmFirewallRuleVO> q = dbf.createQuery(ApplianceVmFirewallRuleVO.class);
    q.add(ApplianceVmFirewallRuleVO_.applianceVmUuid, Op.EQ, getSelf().getUuid());
    List<ApplianceVmFirewallRuleVO> vos = q.list();
    List<ApplianceVmFirewallRuleInventory> rules = ApplianceVmFirewallRuleInventory.valueOf(vos);
    chain.getData().put(ApplianceVmConstant.Params.applianceVmFirewallRules.toString(), rules);

    chain.insert(new Flow() {
        String __name__ = "change-appliancevm-status-to-connecting";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            changeApplianceVmStatus(ApplianceVmStatus.Connecting);
            trigger.next();
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            changeApplianceVmStatus(ApplianceVmStatus.Disconnected);
            fireDisconnectedCanonicalEvent(operr("appliance vm %s reconnect failed",
                    getSelf().getUuid()));
            trigger.rollback();
        }
    }).then(new NoRollbackFlow() {
        String __name__ = "change-appliancevm-status-to-connected";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            changeApplianceVmStatus(ApplianceVmStatus.Connected);
            trigger.next();
        }
    }).done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            // Pick up status/state changes made by the flows.
            self = dbf.reload(self);
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            if (oldStatus == ApplianceVmStatus.Connected) {
                fireDisconnectedCanonicalEvent(errCode);
            }
            completion.fail(errCode);
        }
    }).start();
}
/**
 * Flow that pushes the newly attached nic's configuration to the router agent
 * (VR_CONFIGURE_NIC_PATH). No rollback: the companion flow/agent is expected
 * to clean up on chain failure.
 *
 * <p>NOTE(review): class name violates UpperCamelCase; kept for compatibility
 * with existing references.
 */
private class virtualRouterAfterAttachNicFlow extends NoRollbackFlow {
    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInventory = (VmNicInventory) data.get(Param.VR_NIC);
        L3NetworkVO l3NetworkVO = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, nicInventory.getL3NetworkUuid()).find();

        // Describe the nic for the agent: L3 addressing plus L2 topology info.
        VirtualRouterCommands.ConfigureNicCmd cmd = new VirtualRouterCommands.ConfigureNicCmd();
        VirtualRouterCommands.NicInfo info = new VirtualRouterCommands.NicInfo();
        info.setIp(nicInventory.getIp());
        info.setDefaultRoute(false);
        info.setGateway(nicInventory.getGateway());
        info.setMac(nicInventory.getMac());
        info.setNetmask(nicInventory.getNetmask());
        L2NetworkVO l2NetworkVO = Q.New(L2NetworkVO.class).eq(L2NetworkVO_.uuid, l3NetworkVO.getL2NetworkUuid()).find();
        info.setCategory(l3NetworkVO.getCategory().toString());
        info.setL2type(l2NetworkVO.getType());
        info.setPhysicalInterface(l2NetworkVO.getPhysicalInterface());
        // Ask the matching L2 extension (e.g. vxlan) for the vni, if any.
        for (L2NetworkGetVniExtensionPoint ext : pluginRgty.getExtensionList(L2NetworkGetVniExtensionPoint.class)) {
            if (ext.getL2NetworkVniType().equals(l2NetworkVO.getType())) {
                info.setVni(ext.getL2NetworkVni(l2NetworkVO.getUuid()));
            }
        }
        cmd.setNics(Arrays.asList(info));

        VirtualRouterAsyncHttpCallMsg cmsg = new VirtualRouterAsyncHttpCallMsg();
        cmsg.setCommand(cmd);
        cmsg.setPath(VirtualRouterConstant.VR_CONFIGURE_NIC_PATH);
        cmsg.setVmInstanceUuid(vr.getUuid());
        cmsg.setCheckStatus(true);
        bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, vr.getUuid());
        bus.send(cmsg, new CloudBusCallBack(trigger) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    trigger.fail(reply.getError());
                    return;
                }

                VirtualRouterAsyncHttpCallReply re = reply.castReply();
                VirtualRouterCommands.ConfigureNicRsp rsp = re.toResponse(VirtualRouterCommands.ConfigureNicRsp.class);
                if (rsp.isSuccess()) {
                    logger.debug(String.format("successfully add nic[ip:%s, mac:%s] to virtual router vm[uuid:%s, ip:%s]",
                            info.getIp(), info.getMac(), vr.getUuid(), vr.getManagementNic().getIp()));
                    trigger.next();
                } else {
                    ErrorCode err = operr("unable to add nic[ip:%s, mac:%s] to virtual router vm[uuid:%s ip:%s], because %s",
                            info.getIp(), info.getMac(), vr.getUuid(), vr.getManagementNic().getIp(), rsp.getError());
                    trigger.fail(err);
                }
            }
        });
    }
}
/**
 * Flow that lets every VirtualRouterAfterAttachNicExtensionPoint apply its
 * network service to the newly attached nic, one extension at a time
 * (recursively walking the iterator so each async call completes before the
 * next starts). Rollback walks the extensions the same way.
 *
 * <p>NOTE(review): class name violates UpperCamelCase; kept for compatibility
 * with existing references.
 */
private class virtualRouterApplyServicesAfterAttachNicFlow implements Flow {
    String __name__ = "virtualRouter-apply-services-afterAttachNic";

    // Applies the remaining extensions in sequence; completes when the
    // iterator is exhausted, fails fast on the first extension error.
    private void virtualRouterApplyServicesAfterAttachNic(Iterator<VirtualRouterAfterAttachNicExtensionPoint> it, VmNicInventory nicInv, Completion completion){
        if (!it.hasNext()) {
            completion.success();
            return;
        }

        VirtualRouterAfterAttachNicExtensionPoint ext = it.next();
        ext.afterAttachNic(nicInv, new Completion(completion) {
            @Override
            public void success() {
                virtualRouterApplyServicesAfterAttachNic(it, nicInv, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC);
        Iterator<VirtualRouterAfterAttachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterAfterAttachNicExtensionPoint.class).iterator();
        virtualRouterApplyServicesAfterAttachNic(it, nicInv, new Completion(trigger) {
            @Override
            public void success() {
                trigger.next();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                trigger.fail(errorCode);
            }
        });
    }

    // Rolls back the extensions in sequence; rollback is best-effort and
    // cannot fail (NoErrorCompletion).
    private void virtualRouterApplyServicesAfterAttachNicRollback(Iterator<VirtualRouterAfterAttachNicExtensionPoint> it, VmNicInventory nicInv, NoErrorCompletion completion){
        if (!it.hasNext()) {
            completion.done();
            return;
        }

        VirtualRouterAfterAttachNicExtensionPoint ext = it.next();
        ext.afterAttachNicRollback(nicInv, new NoErrorCompletion(completion) {
            @Override
            public void done() {
                virtualRouterApplyServicesAfterAttachNicRollback(it, nicInv, completion);
            }
        });
    }

    @Override
    public void rollback(FlowRollback trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC);
        Iterator<VirtualRouterAfterAttachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterAfterAttachNicExtensionPoint.class).iterator();
        virtualRouterApplyServicesAfterAttachNicRollback(it, nicInv, new NoErrorCompletion() {
            @Override
            public void done() {
                trigger.rollback();
            }
        });
    }
}
/**
 * Post-attach-nic hook: tags the nic's metadata (guest vs. additional public
 * nic), then runs a flow chain that configures the nic on the agent, applies
 * network services, and updates the HA group. Serialized on the router's
 * sync thread.
 */
@Override
protected void afterAttachNic(VmNicInventory nicInventory, Completion completion) {
    thdf.chainSubmit(new ChainTask(completion) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        @Deferred
        public void run(final SyncTaskChain schain) {
            VmNicVO vo = Q.New(VmNicVO.class).eq(VmNicVO_.uuid, nicInventory.getUuid()).find();
            L3NetworkVO l3NetworkVO = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, vo.getL3NetworkUuid()).find();
            // Private L3 networks make this a guest nic (ip metadata included);
            // anything else is an additional public nic.
            if (l3NetworkVO.getCategory().equals(L3NetworkCategory.Private)) {
                vo.setMetaData(GUEST_NIC_MASK.toString());
                UsedIpVO usedIpVO = Q.New(UsedIpVO.class).eq(UsedIpVO_.uuid, nicInventory.getUsedIpUuid()).find();
                usedIpVO.setMetaData(GUEST_NIC_MASK.toString());
                dbf.updateAndRefresh(usedIpVO);
            } else {
                vo.setMetaData(ADDITIONAL_PUBLIC_NIC_MASK.toString());
            }
            vo = dbf.updateAndRefresh(vo);
            logger.debug(String.format("updated metadata of vmnic[uuid: %s]", vo.getUuid()));

            Map data = new HashMap();
            data.put(Param.VR_NIC, VmNicInventory.valueOf(vo));
            FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
            chain.setName(String.format("apply-services-after-attach-nic-%s-from-virtualrouter-%s", nicInventory.getUuid(), nicInventory.getVmInstanceUuid()));
            chain.setData(data);
            chain.insert(new virtualRouterAfterAttachNicFlow());
            chain.then(new virtualRouterApplyServicesAfterAttachNicFlow());
            chain.then(haBackend.getAttachL3NetworkFlow());
            chain.done(new FlowDoneHandler(completion) {
                @Override
                public void handle(Map data) {
                    completion.success();
                    schain.next();
                }
            }).error(new FlowErrorHandler(completion) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    completion.fail(errCode);
                    schain.next();
                }
            }).start();
        }

        @Override
        public String getName() {
            return String.format("after-attach-nic-%s-on-vm-%s", nicInventory.getUuid(), nicInventory.getVmInstanceUuid());
        }
    });
}
/**
 * Post-detach-nic hook: notifies every after-detach extension, then removes
 * the L3 network from the router's HA group. When invoked as part of a
 * rollback, nothing is done.
 */
@Override
protected void afterDetachNic(VmNicInventory nicInventory, boolean isRollback, Completion completion) {
    // A rollback of a failed attach needs no cleanup here.
    if (isRollback) {
        completion.success();
        return;
    }

    pluginRgty.getExtensionList(VirtualRouterAfterDetachNicExtensionPoint.class)
            .forEach(ext -> ext.afterDetachNic(nicInventory));

    haBackend.detachL3NetworkFromVirtualRouterHaGroup(nicInventory.getVmInstanceUuid(),
            nicInventory.getL3NetworkUuid(), isRollback, completion);
}
/**
 * Flow that tells the router agent to remove the nic (VR_REMOVE_NIC_PATH)
 * before it is detached from the VM. No rollback.
 *
 * <p>NOTE(review): class name violates UpperCamelCase; kept for compatibility
 * with existing references.
 */
private class virtualRouterbeforeDetachNic extends NoRollbackFlow {
    String __name__ = "virtualRouter-beforeDetachNic";

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInventory = (VmNicInventory) data.get(Param.VR_NIC);
        VirtualRouterCommands.RemoveNicCmd cmd = new VirtualRouterCommands.RemoveNicCmd();
        VirtualRouterCommands.NicInfo info = new VirtualRouterCommands.NicInfo();
        info.setIp(nicInventory.getIp());
        info.setDefaultRoute(false);
        info.setGateway(nicInventory.getGateway());
        info.setMac(nicInventory.getMac());
        info.setNetmask(nicInventory.getNetmask());
        cmd.setNics(Arrays.asList(info));

        VirtualRouterAsyncHttpCallMsg cmsg = new VirtualRouterAsyncHttpCallMsg();
        cmsg.setCommand(cmd);
        cmsg.setPath(VirtualRouterConstant.VR_REMOVE_NIC_PATH);
        cmsg.setVmInstanceUuid(vr.getUuid());
        cmsg.setCheckStatus(true);
        bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, vr.getUuid());
        bus.send(cmsg, new CloudBusCallBack(trigger) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    trigger.fail(reply.getError());
                    return;
                }

                VirtualRouterAsyncHttpCallReply re = reply.castReply();
                VirtualRouterCommands.RemoveNicRsp rsp = re.toResponse(VirtualRouterCommands.RemoveNicRsp.class);
                if (rsp.isSuccess()) {
                    logger.debug(String.format("successfully detach nic[%s] from virtual router vm[uuid:%s, ip:%s]",info, vr.getUuid(), vr.getManagementNic()
                            .getIp()));
                    trigger.next();
                } else {
                    ErrorCode err = operr("unable to detach nic[%s] from virtual router vm[uuid:%s ip:%s], because %s",
                            info, vr.getUuid(), vr.getManagementNic().getIp(), rsp.getError());
                    trigger.fail(err);
                }
            }
        });
    }
}
/**
 * Flow that lets every VirtualRouterBeforeDetachNicExtensionPoint release its
 * network service from the nic before detaching, one extension at a time
 * (recursively walking the iterator so each async call completes before the
 * next starts). Rollback walks the extensions the same way.
 *
 * <p>NOTE(review): class name violates UpperCamelCase; kept for compatibility
 * with existing references.
 */
private class virtualRouterReleaseServicesbeforeDetachNicFlow implements Flow {
    String __name__ = "virtualRouter-release-services-before-detach-nic";

    // Releases the remaining extensions in sequence; completes when the
    // iterator is exhausted, fails fast on the first extension error.
    private void virtualRouterReleaseServices(final Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it, VmNicInventory nicInv, Completion completion) {
        if (!it.hasNext()) {
            completion.success();
            return;
        }

        VirtualRouterBeforeDetachNicExtensionPoint ext = it.next();
        ext.beforeDetachNic(nicInv, new Completion(completion) {
            @Override
            public void success() {
                virtualRouterReleaseServices(it, nicInv, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC);
        Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterBeforeDetachNicExtensionPoint.class).iterator();
        virtualRouterReleaseServices(it, nicInv, new Completion(trigger) {
            @Override
            public void success() {
                trigger.next();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                trigger.fail(errorCode);
            }
        });
    }

    // Rolls back the extensions in sequence; rollback is best-effort and
    // cannot fail (NoErrorCompletion).
    private void virtualRouterReleaseServicesRollback(final Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it, VmNicInventory nicInv, NoErrorCompletion completion) {
        if (!it.hasNext()) {
            completion.done();
            return;
        }

        VirtualRouterBeforeDetachNicExtensionPoint ext = it.next();
        ext.beforeDetachNicRollback(nicInv, new NoErrorCompletion(completion) {
            @Override
            public void done() {
                virtualRouterReleaseServicesRollback(it, nicInv, completion);
            }
        });
    }

    @Override
    public void rollback(FlowRollback trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC);
        Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterBeforeDetachNicExtensionPoint.class).iterator();
        virtualRouterReleaseServicesRollback(it, nicInv, new NoErrorCompletion(trigger) {
            @Override
            public void done() {
                trigger.rollback();
            }
        });
    }
}
/**
 * Pre-detach-nic hook: releases network services from the nic and removes it
 * on the agent. Skipped (succeeds immediately) when the appliance VM is
 * disconnected or stopped, since the agent cannot be reached then.
 */
@Override
protected void beforeDetachNic(VmNicInventory nicInventory, Completion completion) {
    Map data = new HashMap();
    data.put(Param.VR_NIC, nicInventory);

    ApplianceVmVO appvm = Q.New(ApplianceVmVO.class)
            .eq(ApplianceVmVO_.uuid, nicInventory.getVmInstanceUuid()).find();
    if (appvm.getStatus().equals(ApplianceVmStatus.Disconnected)) {
        logger.debug(String.format("appliance vm[uuid: %s] current status is [%s], skip before detach nic",
                appvm.getUuid(), appvm.getStatus()));
        completion.success();
        return;
    }

    if (appvm.getState().equals(VmInstanceState.Stopped)) {
        // BUG FIX: the message reports the state, but the original passed
        // appvm.getStatus() as the format argument.
        logger.debug(String.format("appliance vm[uuid: %s] current state is [%s], skip before detach nic",
                appvm.getUuid(), appvm.getState()));
        completion.success();
        return;
    }

    FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
    chain.setName(String.format("release-services-before-detach-nic-%s-from-virtualrouter-%s", nicInventory.getUuid(), nicInventory.getVmInstanceUuid()));
    chain.setData(data);
    chain.insert(new virtualRouterReleaseServicesbeforeDetachNicFlow());
    chain.then(new virtualRouterbeforeDetachNic());
    chain.done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            completion.fail(errCode);
        }
    }).start();
}
}
| |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2008-2009, The KiWi Project (http://www.kiwi-project.eu)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the KiWi Project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* Contributor(s):
*
*
*/
package kiwi.model.kbase;
import java.io.Serializable;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.FetchType;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.NoResultException;
import javax.persistence.NonUniqueResultException;
import javax.persistence.OneToOne;
import javax.persistence.Query;
import javax.persistence.Transient;
import kiwi.api.query.sparql.SparqlService;
import kiwi.api.triplestore.TripleStore;
import kiwi.exception.NamespaceResolvingException;
import kiwi.exception.NonUniqueRelationException;
import kiwi.model.Constants;
import kiwi.model.content.ContentItem;
import org.hibernate.annotations.BatchSize;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode;
import org.hibernate.annotations.Index;
import org.hibernate.annotations.LazyToOne;
import org.hibernate.annotations.LazyToOneOption;
import org.jboss.seam.Component;
import org.jboss.seam.annotations.security.Restrict;
import org.jboss.seam.log.Log;
import org.jboss.seam.log.Logging;
/**
* KiWiResources correspond to RDF resources. A KiWi resource is either an anonymous
* resource or a URIResource. A KiWiResource represents an RDF resource. However, there
* is no 1:1 correspondance, as there may be several KiWiResources in different
* KnowledgeSpaces that represent the same RDF resource.
* <p>
* Each KiWiResource has a 1:1 correspondance to a content item. Each KiWiResource is
* also directly related with a Sesame 2 resource in the knowledge base backend.
* The kiwi.sesame package contains appropriate adaptors for transforming Sesame 2
* resources into KiWiResources.
* <p>
*
 * Each KiWiResource furthermore participates in exactly one knowledge space, where the RDF
 * resource may have different identifiers for the users. This knowledge space defines the
 * context of the node. For this reason, a Sesame 2 Value may correspond to several
 * KiWiNodes in different knowledge spaces.
*
* @author Sebastian Schaffert
*/
@Entity
@DiscriminatorValue("r")
@Cache(usage = CacheConcurrencyStrategy.READ_ONLY)
@BatchSize(size = 100)
@NamedQueries({
@NamedQuery(name = "tripleStore.resourceByUri",
query = "from KiWiUriResource r " +
"left join fetch r.contentItem " +
"where r.uri = :uri"),
@NamedQuery(name = "tripleStore.resourceByAnonId",
query = "from KiWiAnonResource r " +
"left join fetch r.contentItem " +
"where r.anonId = :anonId")
})
@Restrict
public abstract class KiWiResource extends KiWiNode implements Serializable {
private static final long serialVersionUID = 1L;

/**
 * The content item this resource is associated with (1:1 correspondence).
 * Eagerly fetched via JOIN and indexed by content item id.
 */
@OneToOne(fetch=FetchType.EAGER)
@Cascade({CascadeType.PERSIST})
@Index(name="contentitemid_index")
@Fetch(FetchMode.JOIN)
@LazyToOne(LazyToOneOption.FALSE)
protected ContentItem contentItem;

/**
 * Cache for literal property values, keyed by property label or
 * "label@language" (see setProperty).
 */
@Transient
private HashMap<String,KiWiLiteral> properties;

/**
 * Cache for properties that are undefined. If property is undefined in rdf, store "TRUE".
 */
@Transient
private HashMap<String,Boolean> undef_properties;

/*
 * Indicate whether a prefetch run for properties has already been executed.
 */
@Transient
private boolean prefetched = false;

/**
 * Create a new empty KiWiResource with empty property caches; called by subclasses.
 */
protected KiWiResource() {
    super();
    properties = new HashMap<String,KiWiLiteral>();
    undef_properties = new HashMap<String, Boolean>();
}
/**
 * Get the content item associated with this KiWiResource.
 * CHANGED because of persistence conflicts - contentItem couldn't
 * be persisted because there didn't exist any at this time
 * @return the associated content item, may be null
 */
public ContentItem getContentItem() {
    return contentItem;
}

/**
 * Set the content item associated with this KiWiResource.
 * CHANGED because of persistence conflicts - contentItem couldn't
 * be persisted because there didn't exist any at this time
 * @param contentItem the content item to associate with this resource
 */
public void setContentItem(ContentItem contentItem) {
    this.contentItem = contentItem;
}
/**
 * Return the triple store component that serves as the context of this node.
 * @return the Seam-managed "tripleStore" component
 */
protected static final TripleStore getContext() {
    return (TripleStore) Component.getInstance("tripleStore");
}
/**
 * Generic method to query for literal values related to this resource with the property
 * identified by "propLabel" (SeRQL/SPARQL short or long notation). Returns only literal
 * values for which no language has been assigned.
 *
 * @param propLabel label of the property; either RDF short form or long form
 * @return the literal content, or null if the property is unset
 * @throws NonUniqueRelationException if more than one literal matches
 */
public String getProperty(String propLabel) throws NonUniqueRelationException {
    return getProperty(propLabel,null);
}
/**
 * Look up the single literal value of the property identified by
 * "propLabel" (SeRQL/SPARQL short or long notation) in the given locale
 * and return its string content.
 *
 * @param propLabel label of the property; either RDF short form (e.g. "foaf:mbox") or long form (e.g. <http://xmlns.com/foaf/0.1/mbox>)
 * @param loc locale of the literal to look up
 * @return the literal content, or null when the property is unset or the
 *         namespace cannot be resolved
 */
public String getProperty(String propLabel, Locale loc) {
    try {
        final KiWiLiteral literal = getLiteral(propLabel, loc);
        return literal == null ? null : literal.getContent();
    } catch (NamespaceResolvingException e) {
        e.printStackTrace();
        return null;
    }
}
/**
 * Generic method to query for all literal values related to this resource with the
 * property identified by "propLabel" (SeRQL/SPARQL short or long notation), without
 * restricting the language.
 *
 * @param propLabel label of the property; either RDF short form (e.g. "foaf:mbox") or long form (e.g. <http://xmlns.com/foaf/0.1/mbox>)
 * @return the string contents of all matching literals
 * @throws NamespaceResolvingException if the property label's namespace cannot be resolved
 */
public Iterable<String> getProperties(String propLabel) throws NamespaceResolvingException {
    return getProperties(propLabel,null);
}
/**
 * Query for all literal values related to this resource with the property
 * identified by "propLabel" (SeRQL/SPARQL short or long notation) in the
 * given locale and return their string contents.
 *
 * @param propLabel label of the property; either RDF short form (e.g. "foaf:mbox") or long
 *                  form (e.g. <http://xmlns.com/foaf/0.1/mbox>)
 * @param loc locale of the literals to look up
 * @return the string contents of all matching literals
 * @throws NamespaceResolvingException if the property label's namespace cannot be resolved
 */
public Iterable<String> getProperties(String propLabel, Locale loc) throws NamespaceResolvingException {
    final List<KiWiLiteral> literals = listLiterals(propLabel, loc);
    final List<String> contents = new ArrayList<String>(literals.size());
    for (int i = 0; i < literals.size(); i++) {
        contents.add(literals.get(i).getContent());
    }
    return contents;
}
/**
* Generic method to set the literal value of a property of this resource to the provided
* value without setting a language.
*
* @param propLabel the SeRQL or SPARQL short or long notation for the property
* @param propValue the String value of this property
* @throws NamespaceResolvingException
*/
public void setProperty(String propLabel, String propValue) throws NamespaceResolvingException {
// Convenience overload: null locale, i.e. a language-neutral literal.
setProperty(propLabel,propValue,null);
}
/**
* Generic method to set the literal value of a property of this resource to the provided
* value in the provided language.
*
* @param propLabel the SeRQL or SPARQL short or long notation for the property
* @param propValue the String value of this property
* @param loc the Locale representing the language of this property
* @throws NamespaceResolvingException
*/
public <T> void setProperty(String propLabel, T propValue, Locale loc) throws NamespaceResolvingException {
    // Cache key: property label plus optional language tag, e.g. "rdfs:label@en".
    String key = propLabel + (loc == null ? "" : "@" + loc.getLanguage());
    if (propValue == null) {
        // Setting a property to null means removing it.
        removeProperty(propLabel, loc);
        return;
    }
    // Single cache lookup (the original queried the map twice for the same key).
    // If the cached literal already carries the same content, no update is needed.
    // NOTE(review): getContent() returns a String, so for non-String T this
    // equality can only ever be false — confirm callers only pass Strings here.
    KiWiLiteral cached = properties.get(key);
    if (cached != null && cached.getContent().equals(propValue)) {
        return;
    }
    // Replace-semantics: drop any previous value before writing the new triple.
    removeProperty(propLabel, loc);
    TripleStore ts = getContext();
    String propUri = _resolvePropLabel(propLabel);
    KiWiLiteral value = ts.createLiteral(propValue, loc, getContext().getXSDType(propValue.getClass()));
    ts.createTriple(this, propUri, value);
    // Keep both caches (value cache and negative cache) in sync with the store.
    properties.put(key, value);
    undef_properties.put(key, Boolean.FALSE);
}
/**
 * Remove the language-neutral value of the given property from this resource.
 *
 * @param propLabel the property label in SeRQL syntax to remove
 * @return true if the property existed and was removed
 * @throws NamespaceResolvingException if the label's namespace prefix cannot be resolved
 */
public boolean removeProperty(String propLabel) throws NamespaceResolvingException {
return removeProperty(propLabel,null);
}
/**
* Remove a property from the KiWiResource.
*
* @param propLabel the property label in SeRQL syntax to remove
* @param loc the locale of the property to remove
* @return true if the property existed and was removed
* @throws NamespaceResolvingException
*/
public boolean removeProperty(String propLabel, Locale loc) throws NamespaceResolvingException {
String uri = _resolvePropLabel(propLabel);
EntityManager em = (EntityManager) Component.getInstance("entityManager");
// check whether property exists by simply counting (should be much more efficient than retrieving)
long count = 0;
// Named-query name gets a "Locale" suffix when a language filter applies.
Query q1 = em.createNamedQuery("tripleStore.countLiteralTripleBySubjectProperty"+(loc != null?"Locale":""));
q1.setParameter("subject", this);
q1.setParameter("property_uri",uri);
if(loc != null) {
q1.setParameter("locale",loc);
}
try {
count = (Long) q1.getSingleResult();
} catch(NoResultException ex) {
// no row at all: treat as count == 0, i.e. nothing to remove
} catch(NonUniqueResultException ex) {
// defensive: a COUNT query should yield exactly one row; treat as 0
}
// look up triple that corresponds to property
KiWiTriple triple = null;
// when there is such a triple, retrieve it and then add it to the list of triples to be removed
if(count > 0) {
Query q2 = em.createNamedQuery("tripleStore.literalTripleBySubjectProperty"+(loc != null?"Locale":""));
q2.setHint("org.hibernate.cacheable", true);
q2.setParameter("subject", this);
q2.setParameter("property_uri",uri);
q2.setMaxResults(1);
if(loc != null) {
q2.setParameter("locale",loc);
}
try {
triple = (KiWiTriple) q2.getSingleResult();
} catch(NoResultException ex) {
// triple vanished between count and fetch: nothing to remove
} catch(NonUniqueResultException ex) {
// cannot normally happen with setMaxResults(1); kept for safety
}
}
// if triple exists, call TripleStore.remove on it
if(triple != null) {
getContext().removeTriple(triple);
// invalidate property cache
String key = propLabel + (loc == null?"":"@"+loc.getLanguage());
properties.remove(key);
// negative-cache the removal so getLiteral() skips the database next time
undef_properties.put(key, Boolean.TRUE);
return true;
} else {
return false;
}
}
/* incoming and outgoing edges (KiWiTriple) */
/**
* List all outgoing edges from this resource to other resources. Shortcut for listOutgoing(null).
*
* @return all outgoing edges from this resource
*/
public Collection<KiWiTriple> listOutgoing() {
try {
// Wildcard form: all outgoing triples regardless of property.
return listOutgoing(null);
} catch (NamespaceResolvingException e) {
// Cannot occur for a null (wildcard) label since no prefix is resolved;
// report and degrade to an empty set just in case.
e.printStackTrace();
return Collections.emptySet();
}
}
/**
 * List all outgoing edges of this resource, including triples flagged as deleted.
 *
 * @return all outgoing edges including deleted ones, or an empty set on resolution error
 */
public Collection<KiWiTriple> listOutgoingIncludeDeleted() {
try {
// -1 = no result limit; true = include triples flagged as deleted.
return listOutgoing(null, -1, true);
} catch (NamespaceResolvingException e) {
e.printStackTrace();
return Collections.emptySet();
}
}
/**
* List all outgoing edges from this resource to other resources, using the property label passed
* as argument.
*
* @param propLabel
* @return
* @throws NamespaceResolvingException
*/
public Collection<KiWiTriple> listOutgoing(String propLabel) throws NamespaceResolvingException {
// -1 = unlimited result size; deleted triples are excluded.
return listOutgoing(propLabel, -1, false);
}
/**
* List outgoing edges from this resource to other resources, using the property label passed
* as argument. If limit is bigger than 0, then a maximum of limit triples will be returned.
* Otherwise, all triples will be returned.
* <p>
* The parameter propLabel is in the form of a SeRQL or SPARQL id. It can take one of the following
* values:
* <ul>
* <li>a URI enclosed in < >, e.g. <http://www.example.com/myProp></li>
* <li>a uri prefix, followed by a colon and the property name, e.g. ex:myProp</li>
* <li>the value "null", in which case all outgoing edges are listed regardless of their label
* (wildcard)</li>
* </ul>
* The result will be an iterable that allows to iterate over KiWiTriples.
*
* @param propLabel the label of the property to be queried, or null for wildcard
* @param limit the maximum number of triples to retrieve
* @return an iterable over the KiWiTriples that are outgoing edges of this resource
* @throws NamespaceResolvingException
*/
public Collection<KiWiTriple> listOutgoing(String propLabel, int limit, boolean includeDeleted) throws NamespaceResolvingException {
    Log log = Logging.getLog(KiWiResource.class);
    if (this.getId() == null) {
        log.warn("PLEASE AVOID: calling listOutgoing() on Sesame triplestore, since resource is not yet in database");
        throw new UnsupportedOperationException("listOutgoing() no longer supports non-persistent resources");
    }
    // Resource is already a persisted entity: query via the entity manager.
    // The named query is selected by two independent suffixes ("P2" when
    // filtering on a property, "includeDeleted" when deleted triples should be
    // returned too); building the name once replaces the duplicated branches.
    EntityManager em = (EntityManager) Component.getInstance("entityManager");
    String queryName = "tripleStore.tripleByS"
            + (propLabel != null ? "P2" : "")
            + (includeDeleted ? "includeDeleted" : "");
    Query q = em.createNamedQuery(queryName);
    q.setHint("org.hibernate.cacheable", true);
    q.setParameter("subject", this);
    if (propLabel != null) {
        String uri = _resolvePropLabel(propLabel);
        log.debug("querying property with uri #0", uri);
        q.setParameter("property_uri", uri);
    }
    if (limit > 0) {
        // limit <= 0 means "no limit".
        q.setMaxResults(limit);
    }
    return (List<KiWiTriple>) q.getResultList();
}
/**
* List the objects that are related to this resource through a certain property
* @return
* @throws NamespaceResolvingException
*/
public Iterable<KiWiNode> listOutgoingNodes(String propLabel) throws NamespaceResolvingException {
    // Project each outgoing triple onto its object node.
    Collection<KiWiTriple> outgoing = listOutgoing(propLabel);
    List<KiWiNode> objects = new ArrayList<KiWiNode>(outgoing.size());
    for (KiWiTriple triple : outgoing) {
        objects.add(triple.getObject());
    }
    return objects;
}
// CHECK
/**
 * Set the single value of the given property to target (replace-semantics):
 * every existing (this, propLabel, *) triple is removed first.
 */
public <C extends KiWiNode> void setOutgoingNode(String propLabel, C target) throws NamespaceResolvingException {
TripleStore ts = getContext();
// remove all existing triples
for( KiWiTriple t : listOutgoing(propLabel)) {
ts.removeTriple(t);
}
// NOTE(review): unlike addOutgoingNode(String, ...), this resolves the label via
// createUriResourceBySPARQLId rather than _resolvePropLabel — confirm that both
// accept the same label syntax.
KiWiUriResource propResource = ts.createUriResourceBySPARQLId(propLabel);
ts.createTriple(this, propResource, target);
}
/**
 * Add a triple (this, propLabel, target) without removing existing values.
 *
 * @param propLabel SeRQL/SPARQL short or long notation of the property
 * @param target the resource to link to
 * @throws NamespaceResolvingException if the label's prefix cannot be resolved
 */
public void addOutgoingNode(String propLabel, KiWiResource target) throws NamespaceResolvingException {
TripleStore ts = getContext();
String property_uri = _resolvePropLabel(propLabel);
ts.createTriple(this, property_uri, target);
}
public void addOutgoingNode(KiWiUriResource prop, KiWiNode target) throws NamespaceResolvingException {
    // Directly assert the triple (this, prop, target) in the current triple store.
    getContext().createTriple(this, prop, target);
}
public void removeOutgoingNode(String propLabel, KiWiResource target) throws NamespaceResolvingException {
    // Resolve the SeRQL/SPARQL label to a full property URI, then retract the triple.
    String propertyUri = _resolvePropLabel(propLabel);
    getContext().removeTriple(this, propertyUri, target);
}
/**
* List all incoming edges from other resources to this resource
* @return
* @throws NamespaceResolvingException
*/
public Collection<KiWiTriple> listIncoming() {
try {
// Wildcard form: all incoming triples regardless of property.
return listIncoming(null);
} catch (NamespaceResolvingException e) {
// Cannot occur for a null (wildcard) label since no prefix is resolved;
// report and degrade to an empty set just in case.
e.printStackTrace();
return Collections.emptySet();
}
}
/**
* List incoming edges from other resources to this resource, using the property label passed
* as argument.
* @throws NamespaceResolvingException
*/
public Collection<KiWiTriple> listIncoming(String propLabel) throws NamespaceResolvingException {
// -1 = unlimited result size.
return listIncoming(propLabel, -1);
}
/**
* List incoming edges from other resources to this resource, using the property label passed
* as argument. If limit is bigger than 0, then a maximum of limit triples will be returned.
* Otherwise, all triples will be returned.
* <p>
* The parameter propLabel is in the form of a SeRQL or SPARQL id. It can take one of the following
* values:
* <ul>
* <li>a URI enclosed in < >, e.g. <http://www.example.com/myProp></li>
* <li>a uri prefix, followed by a colon and the property name, e.g. ex:myProp</li>
* <li>the value "null", in which case all outgoing edges are listed regardless of their label
* (wildcard)</li>
* </ul>
* The result will be an iterable that allows to iterate over KiWiTriples.
*
* @param propLabel the label of the property to be queried, or null for wildcard
* @param limit the maximum number of triples to retrieve
* @return an iterable over the KiWiTriples that are incoming edges of this resource
* @throws NamespaceResolvingException
*/
public Collection<KiWiTriple> listIncoming(String propLabel, int limit) throws NamespaceResolvingException {
    Log log = Logging.getLog(KiWiResource.class);
    // Guard: non-persistent resources cannot be queried via the entity manager.
    if (this.getId() == null) {
        log.warn("PLEASE AVOID: calling listIncoming() on Sesame triplestore, since resource is not yet in database");
        throw new UnsupportedOperationException("listIncoming() no longer supports non-persistent resources");
    }
    // Resource is a persisted entity: query via the entity manager.
    EntityManager em = (EntityManager) Component.getInstance("entityManager");
    Query query = em.createNamedQuery("tripleStore.tripleBy" + (propLabel == null ? "O" : "PO2"));
    query.setHint("org.hibernate.cacheable", true);
    query.setParameter("object", this);
    if (propLabel != null) {
        String uri = _resolvePropLabel(propLabel);
        log.debug("querying property with uri #0", uri);
        query.setParameter("property_uri", uri);
    }
    if (limit > 0) {
        query.setMaxResults(limit);
    }
    return (List<KiWiTriple>) query.getResultList();
}
/**
* Return a list of nodes that are the sources for edges with propLabel that have this resource
* as endpoint. This is mostly a convenience method that wraps listIncoming(propLabel).
*
* @param propLabel the label that all edges listed must have, or null for wildcard
* @return a list of resources that are sources of edges that have this resource as endpoint
* @throws NamespaceResolvingException
*/
public Iterable<KiWiResource> listIncomingNodes(String propLabel) throws NamespaceResolvingException {
    // Project each incoming triple onto its subject node.
    Collection<KiWiTriple> incoming = listIncoming(propLabel);
    List<KiWiResource> subjects = new ArrayList<KiWiResource>(incoming.size());
    for (KiWiTriple triple : incoming) {
        subjects.add(triple.getSubject());
    }
    return subjects;
}
/**
 * Create the triple (source, propLabel, this), i.e. an incoming edge.
 *
 * @param propLabel the SeRQL/SPARQL identifier of the property
 * @param source the resource that becomes the subject of the new triple
 */
public void addIncomingNode(String propLabel, KiWiResource source) {
TripleStore ts = getContext();
KiWiUriResource propResource = ts.createUriResourceBySPARQLId(propLabel);
ts.createTriple(source, propResource, this);
}
/* convenience wrappers around common RDF properties */
/**
* Return the label of this resource in the language provided as parameter
*
* If no label is available for the given language, returns the identifier.
*
* @return
* @throws NonUniqueRelationException
*/
public String getLabel() {
// null locale: label lookup without language filtering.
return getLabel(null);
}
/**
* Return the label of this resource in the language provided as parameter
* within the getContext().knowledge space of this KiWiResource.
*
* If no label is available for the given language, returns the identifier.
*
* @param loc
* @return
* @throws NonUniqueRelationException
*/
public String getLabel(Locale loc) {
    // 1) preferred: an explicit rdfs:label in the requested language
    String label = getProperty("<" + Constants.NS_RDFS + "label>", loc);
    // 2) fall back to the content item's title
    if (label == null) {
        label = getContentItem().getTitle();
    }
    // 3) last resort: derive a label from the URI fragment/last path segment,
    //    or use the anonymous id for blank nodes
    if (label == null) {
        if (this.isUriResource()) {
            String uri = ((KiWiUriResource) this).getUri();
            int hash = uri.lastIndexOf("#");
            label = hash > 0
                    ? uri.substring(hash + 1)
                    : uri.substring(uri.lastIndexOf("/") + 1);
        } else if (this.isAnonymousResource()) {
            label = ((KiWiAnonResource) this).getAnonId();
        }
    }
    return label;
}
/**
* Set the rdfs:label of this KiWiResource in the configured getContext().TripleStore
* for the given Locale.
*
*
* @param loc
* @param label
* @throws NamespaceResolvingException
*/
public void setLabel(Locale loc, String label) {
try {
setProperty("<"+Constants.NS_RDFS+"label>", label, loc);
} catch (NamespaceResolvingException e) {
// Cannot normally occur: the property is passed as a full <...> URI, so no
// namespace prefix lookup is involved; report just in case.
e.printStackTrace();
}
}
/**
* Return the identifier of this resource in its getContext().TripleStore for
* the Locale passed as parameter. The identifier is the name used e.g. in links or
* in short URIs. It thus needs to be unique within a knowledge space and
* the language.
*
* @param loc
* @return
*/
public String getIdentifier(Locale loc) {
// TODO
// NOTE(review): not implemented — always returns the empty string.
return "";
}
/**
*
* @param loc
* @param identifier
*/
public void setIdentifier(Locale loc, String identifier) {
// TODO
// NOTE(review): not implemented — intentionally a no-op for now.
}
/**
* Return the rdfs:comment for this resource for the given locale in the getContext().
* knowledge space.
* @param loc
* @throws NonUniqueRelationException
*/
public String getComment(Locale loc) throws NonUniqueRelationException {
// rdfs:comment is passed as a full <...> URI, so no prefix resolution is needed.
return getProperty("<"+Constants.NS_RDFS+"comment>",loc);
}
/**
* Set the rdfs:comment for this resource
* @param loc
* @param comment
*/
public void setComment(Locale loc, String comment) {
try {
setProperty("<"+Constants.NS_RDFS+"comment>", comment, loc);
} catch (NamespaceResolvingException e) {
// Cannot normally occur: the property is passed as a full <...> URI, so no
// namespace prefix lookup is involved; report just in case.
e.printStackTrace();
}
}
/**
* Return the list of types as KiWiResources that are associated with this resource using the
* rdf:type RDF property.
*
* @return an iterable of KiWiResource instances that represent the RDF types of this resource
*/
public Collection<KiWiResource> getTypes() {
    // rdf:type values are fetched through a dedicated named query so the result
    // can be served from the second-level cache.
    EntityManager em = (EntityManager) Component.getInstance("entityManager");
    Query typesQuery = em.createNamedQuery("tripleStore.getTypes");
    typesQuery.setHint("org.hibernate.cacheable", true);
    typesQuery.setParameter("subject", this);
    return typesQuery.getResultList();
}
/**
* Add a new type to the list of RDF types of this KiWiResource.
*
* @param type the type to add
*/
public void addType(KiWiUriResource type) {
try {
addOutgoingNode("<"+Constants.NS_RDF+"type>", type);
} catch (NamespaceResolvingException e) {
// Cannot normally occur: rdf:type is passed as a full <...> URI.
e.printStackTrace();
}
}
/**
* Remove one of the RDF types of this KiWiResource
* @param type a URI resource representing the type of this KiWiResource
*/
public void removeType(KiWiUriResource type) {
try {
removeOutgoingNode("<"+Constants.NS_RDF+"type>", type);
} catch (NamespaceResolvingException e) {
// Cannot normally occur: rdf:type is passed as a full <...> URI.
e.printStackTrace();
}
}
/**
 * Check whether this resource carries the RDF type identified by the given URI.
 */
public boolean hasType(String typeUri) {
    return hasType(getContext().createUriResource(typeUri));
}
/**
* Check whether this KiWiResource has a certain RDF type
* @param type the resource representing the type to check for
* @return true if the type is in the list of RDF types of this resource, false otherwise
*/
public boolean hasType(KiWiUriResource type) {
    EntityManager em = (EntityManager) Component.getInstance("entityManager");
    Query query = em.createNamedQuery("tripleStore.hasType");
    query.setHint("org.hibernate.cacheable", true);
    query.setParameter("type", type);
    query.setParameter("subject", this);
    // A single row is enough to prove the type assertion exists.
    query.setMaxResults(1);
    return !query.getResultList().isEmpty();
}
/**
* Return the SeRQL identifier of this resource; this is
* - <URI> for URI resources and
* - _:ID for anonymous resources
* Needs to be implemented by subclasses.
* @return
*/
public abstract String getSeRQLID();
/**
 * Return the KiWi-internal identifier of this resource.
 * NOTE(review): the exact format is defined by the subclasses — confirm there.
 */
public abstract String getKiwiIdentifier();
/**
* Generic method to query for literal values related to this resource with the property
* identified by "propLabel" (SeRQL/SPARQL short or long notation) and the given locale.
*
* @param propLabel label of the property; either RDF short form (e.g. "foaf:mbox") or long form (e.g. <http://xmlns.com/foaf/0.1/mbox>)
* @param loc
* @return
* @throws NamespaceResolvingException
*/
private KiWiLiteral getLiteral(String propLabel, Locale loc) throws NamespaceResolvingException {
Log log = Logging.getLog(KiWiResource.class);
// Lazily warm the per-resource property cache on first access.
if(!prefetched) {
prefetchProperties();
}
String key = propLabel + (loc == null?"":"@"+loc.getLanguage());
// Negative cache: a TRUE entry means we already know there is no such property.
if(undef_properties.get(key) != null && undef_properties.get(key)) {
return null;
}
KiWiLiteral result = properties.get(key);
// prefetchProperties retrieves all properties anyways, so it should be safe enough to not do this:
if(result == null) {
// perform entityManager query ...
log.debug("resource is stored in database, performing entity manager query");
EntityManager em = (EntityManager) Component.getInstance("entityManager");
//TripleStore ts = (TripleStore) Component.getInstance("tripleStore");
String uri = _resolvePropLabel(propLabel);
log.debug("#0: querying property with uri #1", this, uri);
//KiWiUriResource property = ts.createUriResource(uri);
Query q = em.createNamedQuery("tripleStore.literal2BySubjectProperty"+(loc != null?"Locale":""));
q.setHint("org.hibernate.cacheable", true);
q.setParameter("subject", this);
q.setParameter("property_uri",uri);
q.setMaxResults(1);
if(loc != null) {
q.setParameter("locale",loc);
}
try {
result = (KiWiLiteral) q.getSingleResult();
} catch(NoResultException ex) {
// valid, no result
// Remember the miss so subsequent lookups skip the database entirely.
undef_properties.put(key, Boolean.TRUE);
return null;
} catch(NonUniqueResultException ex) {
// invalid state of database
log.error("non-unique result while querying resource #0 with property #1",this.getKiwiIdentifier(),uri);
// Recover by picking an arbitrary (first) value instead of failing.
result = (KiWiLiteral) q.getResultList().get(0);
}
// Positive cache for later calls.
properties.put(key, result);
}
return result;
}
/**
* Generic method to query for literal values related to this resource with the property
* identified by "propLabel" (SeRQL/SPARQL short or long notation) and the given locale.
*
* @param propLabel label of the property; either RDF short form (e.g. "foaf:mbox") or long form (e.g. <http://xmlns.com/foaf/0.1/mbox>)
* @param loc
* @return
* @throws NamespaceResolvingException
*/
private List<KiWiLiteral> listLiterals(String propLabel, Locale loc) throws NamespaceResolvingException {
// TODO: should be implemented as an RDF set or sequence!
Log log = Logging.getLog(KiWiResource.class);
LinkedList<KiWiLiteral> result = new LinkedList<KiWiLiteral>();
if(this.getId() == null || this.getId() <= 0) {
// Resource not yet persisted: fall back to a SeRQL query on the triple store.
log.warn("PLEASE AVOID: calling listLiterals() on Sesame triplestore, since resource is not yet in database");
String myLabel = propLabel;
// allow slightly incorrect passing of URI without <> brackets ...
if(!myLabel.startsWith("<") && myLabel.contains("://")) {
myLabel = "<" + myLabel + ">";
}
String query = "SELECT label(L) FROM {"+getSeRQLID()+"} "+myLabel+" {L} WHERE isLiteral(L)"+_resolveSeRQLLang(loc);
SparqlService ss = (SparqlService) Component.getInstance("kiwi.query.sparqlService");
for(KiWiLiteral l : ss.queryLiteral(query, KiWiQueryLanguage.SERQL)) {
result.add(l);
}
} else {
// perform entityManager query ...
EntityManager em = (EntityManager) Component.getInstance("entityManager");
//TripleStore ts = (TripleStore) Component.getInstance("tripleStore");
String uri = _resolvePropLabel(propLabel);
log.debug("querying property with uri #0", uri);
//KiWiUriResource property = ts.createUriResource(uri);
// Named query gets a "Locale" suffix when a language filter applies.
Query q = em.createNamedQuery("tripleStore.literal2BySubjectProperty"+(loc != null?"Locale":""));
q.setHint("org.hibernate.cacheable", true);
q.setParameter("subject", this);
q.setParameter("property_uri",uri);
if(loc != null) {
q.setParameter("locale",loc);
}
result.addAll( (List<KiWiLiteral>) q.getResultList() );
}
return result;
}
/**
 * Resolve a SeRQL/SPARQL property label to a full URI string.
 * Accepts either a bracketed URI ("&lt;http://...&gt;") or a prefixed name
 * ("prefix:local"); anything else is returned unchanged.
 */
private static final String _resolvePropLabel(String propLabel) throws NamespaceResolvingException {
    Log log = Logging.getLog(KiWiResource.class);
    String uri = propLabel;
    // find out which kind of propLabel we got passed
    if (uri.startsWith("<") && uri.endsWith(">")) {
        // a full URI enclosed in < >: strip the brackets
        uri = uri.substring(1, uri.length() - 1);
    } else if (!uri.contains("://") && uri.contains(":")) {
        // a SeRQL/SPARQL prefixed name: look up the namespace for the prefix
        String[] components = uri.split(":");
        if (components.length == 2) {
            String ns_prefix = components[0];
            String ns_local = components[1];
            TripleStore ts = (TripleStore) Component.getInstance("tripleStore");
            KiWiNamespace ns = ts.getNamespace(ns_prefix);
            if (ns == null) {
                // Fixed message: the original concatenation was missing the space
                // before "could", producing e.g. "prefix foafcould not be resolved".
                throw new NamespaceResolvingException("The namespace with prefix " + ns_prefix +
                        " could not be resolved. Have you imported the kiwi core ontology?");
            }
            uri = ns.getUri() + ns_local;
        } else {
            // more than one ':' — cannot decide where the prefix ends; keep label as-is
            log.error("could not properly split property identifier #0, as it contained more than one ':'", uri);
        }
    }
    return uri;
}
/**
* Helper method. Return an appropriate query string for querying the language provided as parameter in SeRQL query strings.
* @param loc
* @return
*/
protected static final String _resolveSeRQLLang(Locale loc) {
    // Append a language filter clause only when a locale is requested;
    // the trailing "*" matches region-qualified tags such as "en-US".
    return loc == null ? "" : " AND lang(L) LIKE \"" + loc.getLanguage() + "*\"";
}
protected void prefetchProperties() {
Log log = Logging.getLog(KiWiResource.class);
EntityManager em = (EntityManager) Component.getInstance("entityManager");
// NOTE(review): "ts" is never used below — the lookup may only matter for its
// side effect of instantiating the Seam component; confirm before removing.
TripleStore ts = (TripleStore) Component.getInstance("tripleStore");
//em.setFlushMode(FlushModeType.COMMIT); // avoid flushing transaction for query
log.debug("prefetching resource properties for #0", this);
Query q = em.createNamedQuery("tripleStore.prefetchTripleByS");
q.setHint("org.hibernate.cacheable", true);
q.setParameter("subject", this);
List<Object[]> result = q.getResultList();
// Each tuple is (property uri resource, literal value); cache it under the same
// "<uri>@lang" key format used by getLiteral()/setProperty().
for(Object[] tuple : result) {
KiWiUriResource property = (KiWiUriResource)tuple[0];
KiWiLiteral lit = (KiWiLiteral)tuple[1];
String key = "<"+property.getUri()+">" + (lit.getLanguage() == null?"":"@"+lit.getLanguage().getLanguage());
properties.put(key, lit);
log.debug("prefetched #0 = #1", key, lit);
}
prefetched = true;
}
/**
* A comparator that allows to compare KiWiResources by label. Useful for sorting.
*
* @author Sebastian Schaffert
*
*/
public static class LabelComparator implements Comparator<KiWiResource> {

    // Eagerly initialized singleton: the original lazy "_c" initialization was
    // not thread-safe (two racing threads could each observe null and create
    // separate instances). Eager init is trivially safe and just as cheap.
    private static final LabelComparator INSTANCE = new LabelComparator();

    @Override
    public int compare(KiWiResource o1, KiWiResource o2) {
        // Fetch each label once instead of twice per comparison.
        String label1 = o1.getLabel();
        String label2 = o2.getLabel();
        if (label1 != null && label2 != null) {
            return Collator.getInstance().compare(label1, label2);
        }
        // NOTE(review): returning 0 when a label is null makes the ordering
        // non-transitive for mixed null/non-null labels — confirm callers never
        // sort collections containing label-less resources.
        return 0;
    }

    /** @return the shared comparator instance */
    public static LabelComparator getInstance() {
        return INSTANCE;
    }
}
public abstract String getNamespacePrefix();
}
| |
package io.datakernel.ot;
import io.datakernel.common.exception.StacklessException;
import io.datakernel.ot.OTUplink.FetchData;
import io.datakernel.ot.utils.OTGraphBuilder;
import io.datakernel.ot.utils.OTRepositoryStub;
import io.datakernel.ot.utils.TestOp;
import io.datakernel.ot.utils.TestOpState;
import io.datakernel.test.rules.EventloopRule;
import org.jetbrains.annotations.Nullable;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import java.util.function.Consumer;
import static io.datakernel.ot.OTCommit.ofRoot;
import static io.datakernel.ot.utils.Utils.add;
import static io.datakernel.ot.utils.Utils.createTestOp;
import static io.datakernel.promise.TestUtils.await;
import static io.datakernel.promise.TestUtils.awaitException;
import static java.util.Collections.emptyList;
import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
/**
 * Tests for {@code OTUplinkImpl}: fetch and checkout over linear, branched and
 * re-joining commit graphs built on an in-memory repository stub.
 */
public class OTUplinkImplTest {
    private static final TestOpState state = new TestOpState();

    @ClassRule
    public static final EventloopRule eventloopRule = new EventloopRule();

    private final OTRepositoryStub<Integer, TestOp> REPOSITORY = OTRepositoryStub.create();
    private OTUplink<Integer, TestOp, OTCommit<Integer, TestOp>> node;

    // Shared graph fixtures. Previously each test rebuilt identical graphs inline
    // (the two-branch and splitting graphs three times each); defining them once
    // removes the duplication and guarantees all fetches within a test really run
    // against an identical history.

    // Straight chain 0-1-2-3-4-5-6 adding 1..6.
    private static final Consumer<OTGraphBuilder<Integer, TestOp>> LINEAR_GRAPH = g -> {
        g.add(0, 1, add(1));
        g.add(1, 2, add(2));
        g.add(2, 3, add(3));
        g.add(3, 4, add(4));
        g.add(4, 5, add(5));
        g.add(5, 6, add(6));
    };

    // Two branches from the root: 0-1-2-3 and 0-4-5.
    private static final Consumer<OTGraphBuilder<Integer, TestOp>> TWO_BRANCHES_GRAPH = g -> {
        g.add(0, 1, add(1));
        g.add(1, 2, add(2));
        g.add(2, 3, add(3));
        g.add(0, 4, add(4));
        g.add(4, 5, add(5));
    };

    // Two branches that re-join in commit 6 and continue to 7.
    private static final Consumer<OTGraphBuilder<Integer, TestOp>> SPLITTING_GRAPH = g -> {
        g.add(0, 1, add(1));
        g.add(1, 2, add(2));
        g.add(2, 3, add(3));
        g.add(0, 4, add(4));
        g.add(4, 5, add(5));
        g.add(3, 6, add(9));
        g.add(5, 6, add(6));
        g.add(6, 7, add(7));
    };

    @Before
    public void setUp() {
        REPOSITORY.reset();
        node = OTUplinkImpl.create(REPOSITORY, createTestOp());
        resetRepo(null);
    }

    @Test
    public void testFetchLinearGraph() {
        REPOSITORY.setGraph(LINEAR_GRAPH);

        FetchData<Integer, TestOp> fetchData1 = await(node.fetch(0));
        assertFetchData(6, 7, 21, fetchData1);

        FetchData<Integer, TestOp> fetchData2 = await(node.fetch(3));
        assertFetchData(6, 7, 15, fetchData2);
    }

    @Test
    public void testFetch2BranchesGraph() {
        resetRepo(TWO_BRANCHES_GRAPH);
        FetchData<Integer, TestOp> fetchData1 = await(node.fetch(0));
        assertFetchData(3, 4, 6, fetchData1);

        resetRepo(TWO_BRANCHES_GRAPH);
        FetchData<Integer, TestOp> fetchData2 = await(node.fetch(1));
        assertFetchData(3, 4, 5, fetchData2);

        resetRepo(TWO_BRANCHES_GRAPH);
        FetchData<Integer, TestOp> fetchData3 = await(node.fetch(4));
        assertFetchData(5, 3, 5, fetchData3);
    }

    @Test
    public void testFetchSplittingGraph() {
        resetRepo(SPLITTING_GRAPH);
        FetchData<Integer, TestOp> fetchData1 = await(node.fetch(0));
        assertFetchData(7, 6, 22, fetchData1);

        resetRepo(SPLITTING_GRAPH);
        FetchData<Integer, TestOp> fetchData2 = await(node.fetch(1));
        assertFetchData(7, 6, 21, fetchData2);

        resetRepo(SPLITTING_GRAPH);
        FetchData<Integer, TestOp> fetchData3 = await(node.fetch(4));
        assertFetchData(7, 6, 18, fetchData3);
    }

    @Test
    public void testFetchInvalidRevision() {
        // Fetching a commit that does not exist must fail with "Graph exhausted".
        Throwable exception = awaitException(node.fetch(100));
        assertThat(exception, instanceOf(StacklessException.class));
        assertThat(exception.getMessage(), containsString("Graph exhausted"));
    }

    @Test
    public void testCheckoutEmptyGraph() {
        // Only the root with an empty snapshot exists after setUp().
        FetchData<Integer, TestOp> fetchData = await(node.checkout());
        assertFetchData(0, 1, 0, fetchData);
    }

    @Test
    public void testCheckoutLinearGraph() {
        REPOSITORY.setGraph(LINEAR_GRAPH);
        FetchData<Integer, TestOp> fetchData = await(node.checkout());
        assertFetchData(6, 7, 21, fetchData);
    }

    /*
    @Test
    public void testCheckout2BranchesGraph() {
        REPOSITORY.revisionIdSupplier = () -> 6; // id of merge commit
        REPOSITORY.setGraph(g -> {
            g.add(0, 1, add(1));
            g.add(1, 2, add(2));
            g.add(2, 3, add(3));
            g.add(0, 4, add(4));
            g.add(4, 5, add(5));
        });
        FetchData<Integer, TestOp> fetchData = await(node.checkout());
        assertFetchData(6, 5, 15, fetchData);
        // Additional snapshot in branch1
        REPOSITORY.saveSnapshot(4, singletonList(add(4)));
        FetchData<Integer, TestOp> fetchData2 = await(node.checkout());
        assertFetchData(6, 5, 15, fetchData2);
        // Additional snapshot in branch2
        REPOSITORY.saveSnapshot(1, singletonList(add(1)));
        FetchData<Integer, TestOp> fetchData3 = await(node.checkout());
        assertFetchData(6, 5, 15, fetchData3);
    }
    */

    /**
     * Assert commit id, level and the state obtained by replaying the fetched
     * diffs from a fresh {@link TestOpState}.
     */
    private static void assertFetchData(Integer expectedId, long expectedLevel, Integer expectedState, FetchData<Integer, TestOp> fetchData) {
        assertEquals(expectedId, fetchData.getCommitId());
        assertEquals(expectedLevel, fetchData.getLevel());

        state.init();
        fetchData.getDiffs().forEach(state::apply);
        assertEquals(expectedState, (Integer) state.getValue());
    }

    /**
     * Reset the repository to a single root commit (id 0) with an empty
     * snapshot, then optionally install the given graph on top of it.
     */
    private void resetRepo(@Nullable Consumer<OTGraphBuilder<Integer, TestOp>> builder) {
        // Initializing repo
        REPOSITORY.reset();
        REPOSITORY.doPushAndUpdateHeads(singleton(ofRoot(0)));
        await(REPOSITORY.saveSnapshot(0, emptyList()));
        if (builder != null) {
            REPOSITORY.setGraph(builder);
        }
    }
}
| |
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.kuujo.copycat.log;
import com.esotericsoftware.kryo.io.ByteBufferInput;
import com.esotericsoftware.kryo.io.ByteBufferOutput;
import net.kuujo.copycat.internal.log.CopycatEntry;
import net.kuujo.copycat.internal.util.Assert;
import net.kuujo.copycat.internal.util.concurrent.NamedThreadFactory;
import net.openhft.chronicle.*;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.*;
/**
* Java chronicle based log implementation.
* <p>
*
* This is a naive thread-safe log implementation. In the future, internal read/write locks should
* be used for concurrent operations.
*
* @author <a href="http://github.com/kuujo">Jordan Halterman</a>
*/
public class ChronicleLog extends BaseFileLog {
// Names the background thread used for scheduled sync work.
private static final ThreadFactory THREAD_FACTORY = new NamedThreadFactory("chronicle-syncer-%s");
private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(THREAD_FACTORY);
// Status marker written after each entry's index: entries are soft-deleted
// by flipping this byte rather than being physically removed.
private static final byte DELETED = 0;
private static final byte ACTIVE = 1;
// Per-entry bookkeeping overhead counted into "size": 8-byte index + 1-byte status.
// NOTE(review): the 4-byte length prefix written via writeInt() is NOT included —
// confirm whether "size" is meant to track payload-only or full on-disk bytes.
private static final int EXTRA_BYTES = 9;
// Scratch buffer shared by serialization; access is confined to the
// synchronized log methods.
private final ByteBuffer buffer = ByteBuffer.allocate(4096);
private final ByteBufferOutput output = new ByteBufferOutput(buffer);
private final ByteBufferInput input = new ByteBufferInput(buffer);
private File logFile;
private Chronicle chronicle;
private Excerpt excerpt;
private ExcerptAppender appender;
private ExcerptTailer tailer;
// Index bounds and byte size of the live log; volatile for cheap concurrent reads.
private volatile long firstIndex;
private volatile long lastIndex;
private volatile long size;
private long syncInterval = 0;
private ScheduledFuture<Void> syncFuture;
/**
 * Creates a log storing {@link CopycatEntry} entries under the given base file.
 */
public ChronicleLog(File baseFile) {
this(baseFile, CopycatEntry.class);
}
public ChronicleLog(File baseFile, Class<? extends Entry> entryType) {
super(baseFile, entryType);
}
// String-based convenience constructors; delegate to the File variants.
public ChronicleLog(String baseName) {
this(baseName, CopycatEntry.class);
}
public ChronicleLog(String baseName, Class<? extends Entry> entryType) {
this(new File(baseName), entryType);
}
@Override
public synchronized long appendEntry(Entry entry) {
// Serialize the entry and append it as [index:long][status:byte][length:int][payload].
Assert.isNotNull(entry, "entry");
assertIsOpen();
long index = lastIndex + 1;
appender.startExcerpt();
appender.writeLong(index);
appender.writeByte(ACTIVE);
kryo.writeClassAndObject(output, entry);
byte[] bytes = output.toBytes();
appender.writeInt(bytes.length);
appender.write(bytes);
output.clear();
appender.finish();
// NOTE(review): EXTRA_BYTES (9) covers index+status but not the 4-byte length
// prefix written above — confirm whether "size" should also count those bytes.
size += bytes.length + EXTRA_BYTES; // 9 bytes for index and status
lastIndex = index;
if (firstIndex == 0) {
// First ever entry: the log's index range starts at 1.
firstIndex = 1;
}
return index;
}
@Override
public synchronized void close() throws IOException {
assertIsOpen();
try {
chronicle.close();
} finally {
excerpt = null;
firstIndex = 0;
lastIndex = 0;
if (syncFuture != null) {
syncFuture.cancel(false);
}
}
}
@Override
public synchronized void compact(long index, Entry snapshot) throws IOException {
Assert.isNotNull(snapshot, "snapshot");
assertIsOpen();
if (index > firstIndex) {
// Create a new log file using the most recent timestamp.
File newLogFile = createLogFile();
File tempLogFile = createTempFile();
File oldLogFile = logFile;
long newSize = 0;
// Create a new chronicle for the new log file.
Chronicle chronicle = new IndexedChronicle(tempLogFile.getAbsolutePath());
ExcerptAppender appender = chronicle.createAppender();
appender.startExcerpt();
appender.writeLong(index);
appender.writeByte(ACTIVE);
kryo.writeClassAndObject(output, snapshot);
byte[] snapshotBytes = output.toBytes();
appender.writeInt(snapshotBytes.length);
appender.write(snapshotBytes);
output.clear();
appender.finish();
newSize += snapshotBytes.length + EXTRA_BYTES; // 9 bytes for index and status
// Iterate through entries greater than the given index and copy them to the new chronicle.
long matchIndex = findAbsoluteIndex(index);
tailer.index(matchIndex);
while (tailer.nextIndex()) {
long entryIndex = tailer.readLong();
byte entryStatus = tailer.readByte();
if (entryStatus == ACTIVE) {
int length = tailer.readInt();
byte[] bytes = new byte[length];
tailer.read(bytes);
appender.startExcerpt();
appender.writeLong(entryIndex);
appender.writeByte(entryStatus);
appender.writeInt(length);
appender.write(bytes);
appender.finish();
newSize += bytes.length + EXTRA_BYTES;
}
}
moveTempFile(tempLogFile, newLogFile);
// Override existing chronicle types.
this.logFile = newLogFile;
this.chronicle = new IndexedChronicle(newLogFile.getAbsolutePath());
this.excerpt = chronicle.createExcerpt();
this.appender = chronicle.createAppender();
this.tailer = chronicle.createTailer();
this.firstIndex = index;
this.size = newSize;
// Finally, delete the old log file.
deleteLogFile(oldLogFile);
}
}
@Override
public boolean containsEntry(long index) {
assertIsOpen();
long matchIndex = findAbsoluteIndex(index);
excerpt.index(matchIndex);
excerpt.skip(8);
return excerpt.readByte() == ACTIVE;
}
@Override
public synchronized void delete() {
if (chronicle != null) {
chronicle.clear();
}
}
@Override
public synchronized <T extends Entry> T firstEntry() {
return getEntry(firstIndex);
}
@Override
public long firstIndex() {
assertIsOpen();
return firstIndex;
}
@Override
@SuppressWarnings("unchecked")
public synchronized <T extends Entry> List<T> getEntries(long from, long to) {
assertIsOpen();
if (!indexInRange(from)) {
throw new LogIndexOutOfBoundsException("From index out of bounds.");
}
if (!indexInRange(to)) {
throw new LogIndexOutOfBoundsException("To index out of bounds.");
}
List<T> entries = new ArrayList<>((int) (to - from + 1));
long matchIndex = findAbsoluteIndex(from);
tailer.index(matchIndex);
do {
long index = tailer.readLong();
byte status = tailer.readByte();
if (status == ACTIVE) {
int length = tailer.readInt();
byte[] bytes = new byte[length];
tailer.read(bytes);
buffer.put(bytes);
entries.add((T) kryo.readClassAndObject(input));
buffer.clear();
matchIndex = index;
}
} while (tailer.nextIndex() && matchIndex < to);
return entries;
}
@Override
@SuppressWarnings("unchecked")
public synchronized <T extends Entry> T getEntry(long index) {
assertIsOpen();
long matchIndex = findAbsoluteIndex(index);
excerpt.index(matchIndex);
excerpt.skip(9);
int length = excerpt.readInt();
byte[] bytes = new byte[length];
excerpt.read(bytes);
buffer.put(bytes);
buffer.rewind();
input.setBuffer(buffer);
T entry = (T) kryo.readClassAndObject(input);
buffer.clear();
return entry;
}
/**
* Returns the interval at which to sync the log to disk.
*
* @return The interval at which to sync the log to disk.
*/
public long getSyncInterval() {
return syncInterval;
}
@Override
public synchronized boolean isEmpty() {
assertIsOpen();
return lastIndex == firstIndex && size == 0;
}
@Override
public boolean isOpen() {
return excerpt != null;
}
@Override
public synchronized <T extends Entry> T lastEntry() {
assertIsOpen();
return getEntry(lastIndex);
}
@Override
public long lastIndex() {
assertIsOpen();
return lastIndex;
}
@Override
@SuppressWarnings("unchecked")
public synchronized void open() throws IOException {
assertIsNotOpen();
logFile = findLogFile();
chronicle = new IndexedChronicle(logFile.getAbsolutePath());
excerpt = chronicle.createExcerpt();
appender = chronicle.createAppender();
tailer = chronicle.createTailer();
tailer.toStart();
while (tailer.nextIndex()) {
long index = tailer.readLong();
byte status = tailer.readByte();
int length = excerpt.readInt();
if (status == ACTIVE) {
if (firstIndex == 0) {
firstIndex = index;
}
lastIndex = index;
}
size += length + EXTRA_BYTES; // 9 bytes for index and status
}
if (syncInterval > 0 && syncFuture == null) {
syncFuture =
(ScheduledFuture<Void>) scheduler.scheduleAtFixedRate(this::sync, syncInterval,
syncInterval, TimeUnit.MILLISECONDS);
}
}
@Override
public synchronized void removeAfter(long index) {
assertIsOpen();
if (!indexInRange(index)) {
throw new LogIndexOutOfBoundsException("Cannot remove entry at index %s", index);
}
long matchIndex = findAbsoluteIndex(index);
if (matchIndex > -1) {
tailer.index(matchIndex);
while (tailer.nextIndex()) {
tailer.skip(8);
tailer.writeByte(DELETED);
}
}
lastIndex = index;
}
/**
* Sets the interval at which to sync the log to disk.
*
* @param interval The interval at which to sync the log to disk.
*/
public void setSyncInterval(long interval) {
this.syncInterval = interval;
}
@Override
public synchronized long size() {
assertIsOpen();
return size;
}
@Override
public synchronized void sync() {
assertIsOpen();
appender.nextSynchronous(true);
}
@Override
public String toString() {
return String.format("%s[size=%d]", getClass().getSimpleName(), size());
}
/**
* Sets the interval at which to sync the log to disk, returning the log for method chaining.
*
* @param interval The interval at which to sync the log to disk.
* @return The memory mapped file log for method chaining.
*/
public ChronicleLog withSyncInterval(long interval) {
this.syncInterval = interval;
return this;
}
/**
* Finds the absolute index of a log entry in the chronicle by log index.
*/
private long findAbsoluteIndex(long index) {
return excerpt.findMatch((excerpt) -> {
long match = excerpt.readLong();
if (match < index) {
return -1;
} else if (match > index) {
return 1;
} else {
byte status = excerpt.readByte();
if (status == DELETED) {
return -1;
}
}
return 0;
});
}
/**
* Returns a boolean indicating whether the given index is within the range of the log.
*/
private boolean indexInRange(long index) {
return index >= firstIndex && index <= lastIndex;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.utils;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
/**
 * This class exposes low-level methods for reading/writing from byte streams or buffers.
*/
public final class ByteUtils {
    /** Utility class; not instantiable. */
    private ByteUtils() {}

    /**
     * Read an unsigned integer from the current position in the buffer, incrementing the position by 4 bytes
     *
     * @param buffer The buffer to read from
     * @return The integer read, as a long to avoid signedness
     */
    public static long readUnsignedInt(ByteBuffer buffer) {
        return buffer.getInt() & 0xffffffffL;
    }

    /**
     * Read an unsigned integer from the given position without modifying the buffer's position
     *
     * @param buffer the buffer to read from
     * @param index the index from which to read the integer
     * @return The integer read, as a long to avoid signedness
     */
    public static long readUnsignedInt(ByteBuffer buffer, int index) {
        return buffer.getInt(index) & 0xffffffffL;
    }

    /**
     * Read an unsigned integer stored in little-endian format from the {@link InputStream}.
     *
     * <p>NOTE(review): end-of-stream is not detected — {@code read()} returning -1 is
     * folded into the result rather than raising {@code EOFException}; verify callers
     * guarantee 4 readable bytes.
     *
     * @param in The stream to read from
     * @return The integer read (MUST BE TREATED WITH SPECIAL CARE TO AVOID SIGNEDNESS)
     */
    public static int readUnsignedIntLE(InputStream in) throws IOException {
        return in.read()
                | (in.read() << 8)
                | (in.read() << 16)
                | (in.read() << 24);
    }

    /**
     * Read an unsigned integer stored in little-endian format from a byte array
     * at a given offset.
     *
     * @param buffer The byte array to read from
     * @param offset The position in buffer to read from
     * @return The integer read (MUST BE TREATED WITH SPECIAL CARE TO AVOID SIGNEDNESS)
     */
    public static int readUnsignedIntLE(byte[] buffer, int offset) {
        // Each byte is masked to 0xff before shifting so sign extension cannot
        // corrupt the higher lanes. (The original "<< 0 & 0xff" relied on a
        // precedence puzzle that evaluated to the same thing.)
        return (buffer[offset] & 0xff)
                | ((buffer[offset + 1] & 0xff) << 8)
                | ((buffer[offset + 2] & 0xff) << 16)
                | ((buffer[offset + 3] & 0xff) << 24);
    }

    /**
     * Write the given long value as a 4 byte unsigned integer. Overflow is ignored.
     *
     * @param buffer The buffer to write to
     * @param index The position in the buffer at which to begin writing
     * @param value The value to write
     */
    public static void writeUnsignedInt(ByteBuffer buffer, int index, long value) {
        buffer.putInt(index, (int) (value & 0xffffffffL));
    }

    /**
     * Write the given long value as a 4 byte unsigned integer. Overflow is ignored.
     *
     * @param buffer The buffer to write to
     * @param value The value to write
     */
    public static void writeUnsignedInt(ByteBuffer buffer, long value) {
        buffer.putInt((int) (value & 0xffffffffL));
    }

    /**
     * Write an unsigned integer in little-endian format to the {@link OutputStream}.
     *
     * @param out The stream to write to
     * @param value The value to write
     */
    public static void writeUnsignedIntLE(OutputStream out, int value) throws IOException {
        out.write(value);
        out.write(value >>> 8);
        out.write(value >>> 16);
        out.write(value >>> 24);
    }

    /**
     * Write an unsigned integer in little-endian format to a byte array
     * at a given offset.
     *
     * @param buffer The byte array to write to
     * @param offset The position in buffer to write to
     * @param value The value to write
     */
    public static void writeUnsignedIntLE(byte[] buffer, int offset, int value) {
        buffer[offset] = (byte) value;
        buffer[offset + 1] = (byte) (value >>> 8);
        buffer[offset + 2] = (byte) (value >>> 16);
        buffer[offset + 3] = (byte) (value >>> 24);
    }

    /**
     * Read an integer stored in variable-length format using zig-zag decoding from
     * <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>.
     *
     * @param buffer The buffer to read from
     * @return The integer read
     *
     * @throws IllegalArgumentException if variable-length value does not terminate after 5 bytes have been read
     */
    public static int readVarint(ByteBuffer buffer) {
        int value = 0;
        int i = 0;
        int b;
        while (((b = buffer.get()) & 0x80) != 0) {
            value |= (b & 0x7f) << i;
            i += 7;
            if (i > 28) // a 5th continuation byte means the varint is malformed
                throw illegalVarintException(value);
        }
        value |= b << i;
        // Zig-zag decode: even values map to n/2, odd values to -(n+1)/2.
        return (value >>> 1) ^ -(value & 1);
    }

    /**
     * Read an integer stored in variable-length format using zig-zag decoding from
     * <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>.
     *
     * @param in The input to read from
     * @return The integer read
     *
     * @throws IllegalArgumentException if variable-length value does not terminate after 5 bytes have been read
     * @throws IOException if {@link DataInput} throws {@link IOException}
     */
    public static int readVarint(DataInput in) throws IOException {
        int value = 0;
        int i = 0;
        int b;
        while (((b = in.readByte()) & 0x80) != 0) {
            value |= (b & 0x7f) << i;
            i += 7;
            if (i > 28)
                throw illegalVarintException(value);
        }
        value |= b << i;
        return (value >>> 1) ^ -(value & 1);
    }

    /**
     * Read a long stored in variable-length format using zig-zag decoding from
     * <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>.
     *
     * @param in The input to read from
     * @return The long value read
     *
     * @throws IllegalArgumentException if variable-length value does not terminate after 10 bytes have been read
     * @throws IOException if {@link DataInput} throws {@link IOException}
     */
    public static long readVarlong(DataInput in) throws IOException {
        long value = 0L;
        int i = 0;
        long b;
        while (((b = in.readByte()) & 0x80) != 0) {
            value |= (b & 0x7f) << i;
            i += 7;
            if (i > 63)
                throw illegalVarlongException(value);
        }
        value |= b << i;
        return (value >>> 1) ^ -(value & 1);
    }

    /**
     * Read a long stored in variable-length format using zig-zag decoding from
     * <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>.
     *
     * @param buffer The buffer to read from
     * @return The long value read
     *
     * @throws IllegalArgumentException if variable-length value does not terminate after 10 bytes have been read
     */
    public static long readVarlong(ByteBuffer buffer) {
        long value = 0L;
        int i = 0;
        long b;
        while (((b = buffer.get()) & 0x80) != 0) {
            value |= (b & 0x7f) << i;
            i += 7;
            if (i > 63)
                throw illegalVarlongException(value);
        }
        value |= b << i;
        return (value >>> 1) ^ -(value & 1);
    }

    /**
     * Write the given integer following the variable-length zig-zag encoding from
     * <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>
     * into the output.
     *
     * @param value The value to write
     * @param out The output to write to
     */
    public static void writeVarint(int value, DataOutput out) throws IOException {
        int v = (value << 1) ^ (value >> 31); // zig-zag encode
        while ((v & 0xffffff80) != 0L) {
            out.writeByte((v & 0x7f) | 0x80);
            v >>>= 7;
        }
        out.writeByte((byte) v);
    }

    /**
     * Write the given integer following the variable-length zig-zag encoding from
     * <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>
     * into the buffer.
     *
     * @param value The value to write
     * @param buffer The output to write to
     */
    public static void writeVarint(int value, ByteBuffer buffer) {
        int v = (value << 1) ^ (value >> 31);
        while ((v & 0xffffff80) != 0L) {
            byte b = (byte) ((v & 0x7f) | 0x80);
            buffer.put(b);
            v >>>= 7;
        }
        buffer.put((byte) v);
    }

    /**
     * Write the given long following the variable-length zig-zag encoding from
     * <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>
     * into the output.
     *
     * @param value The value to write
     * @param out The output to write to
     */
    public static void writeVarlong(long value, DataOutput out) throws IOException {
        long v = (value << 1) ^ (value >> 63);
        while ((v & 0xffffffffffffff80L) != 0L) {
            out.writeByte(((int) v & 0x7f) | 0x80);
            v >>>= 7;
        }
        out.writeByte((byte) v);
    }

    /**
     * Write the given long following the variable-length zig-zag encoding from
     * <a href="http://code.google.com/apis/protocolbuffers/docs/encoding.html"> Google Protocol Buffers</a>
     * into the buffer.
     *
     * @param value The value to write
     * @param buffer The buffer to write to
     */
    public static void writeVarlong(long value, ByteBuffer buffer) {
        long v = (value << 1) ^ (value >> 63);
        while ((v & 0xffffffffffffff80L) != 0L) {
            byte b = (byte) ((v & 0x7f) | 0x80);
            buffer.put(b);
            v >>>= 7;
        }
        buffer.put((byte) v);
    }

    /**
     * Number of bytes needed to encode an integer in variable-length format.
     *
     * @param value The signed value
     */
    public static int sizeOfVarint(int value) {
        int v = (value << 1) ^ (value >> 31);
        int bytes = 1;
        while ((v & 0xffffff80) != 0L) {
            bytes += 1;
            v >>>= 7;
        }
        return bytes;
    }

    /**
     * Number of bytes needed to encode a long in variable-length format.
     *
     * @param value The signed value
     */
    public static int sizeOfVarlong(long value) {
        long v = (value << 1) ^ (value >> 63);
        int bytes = 1;
        while ((v & 0xffffffffffffff80L) != 0L) {
            bytes += 1;
            v >>>= 7;
        }
        return bytes;
    }

    // Exception factories RETURN (rather than throw) so that call sites can use
    // "throw illegalVarintException(...)", letting the compiler see the branch
    // terminates. The originals threw internally, leaving the declared return
    // type dead code.
    private static IllegalArgumentException illegalVarintException(int value) {
        return new IllegalArgumentException("Varint is too long, the most significant bit in the 5th byte is set, " +
                "converted value: " + Integer.toHexString(value));
    }

    private static IllegalArgumentException illegalVarlongException(long value) {
        return new IllegalArgumentException("Varlong is too long, most significant bit in the 10th byte is set, " +
                "converted value: " + Long.toHexString(value));
    }
}
| |
package org.shypl.common.util;
import java.util.Collection;
import java.util.Map;
/**
 * Renders arbitrary values — scalars, arrays, collections and maps — into a uniform
 * human-readable text form: {@code null} as {@code <null>}, booleans as
 * {@code <true>}/{@code <false>}, bytes as two lowercase hex digits, sequences as
 * {@code [a, b]} and maps as {@code {k: v}}.
 *
 * <p>Every {@code String}-returning overload delegates to its {@code StringBuilder}
 * twin, which is the single source of truth for formatting.
 */
public final class StringUtils {
	public static final String STRING_NULL = "<null>";
	public static final String STRING_TRUE = "<true>";
	public static final String STRING_FALSE = "<false>";
	public static final String STRING_SEQUENCE_SEPARATOR = ", ";
	public static final String STRING_KEY_VALUE_SEPARATOR = ": ";
	public static final String STRING_LIST_EMPTY = "[]";
	public static final String STRING_MAP_EMPTY = "{}";
	public static final char STRING_LIST_OPEN = '[';
	public static final char STRING_LIST_CLOSE = ']';
	public static final char STRING_MAP_OPEN = '{';
	public static final char STRING_MAP_CLOSE = '}';
	private static final char[] DIGITS = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};

	/** Utility class; not instantiable (consistent with the project's other util classes). */
	private StringUtils() {
	}

	public static String toString(boolean v) {
		return v ? STRING_TRUE : STRING_FALSE;
	}

	/** Renders a byte as two lowercase hex digits, e.g. {@code (byte) 255 -> "ff"}. */
	public static String toString(byte v) {
		int i = v & 0xFF;
		return new String(new char[]{DIGITS[i >>> 4], DIGITS[i & 0x0F]});
	}

	public static String toString(char v) {
		return String.valueOf(v);
	}

	public static String toString(short v) {
		return String.valueOf(v);
	}

	public static String toString(int v) {
		return String.valueOf(v);
	}

	public static String toString(long v) {
		return String.valueOf(v);
	}

	public static String toString(float v) {
		return String.valueOf(v);
	}

	public static String toString(double v) {
		return String.valueOf(v);
	}

	/**
	 * Renders any value, dispatching on its runtime type (wrapper, array, collection,
	 * map) and falling back to {@link Object#toString()}.
	 */
	public static String toString(Object v) {
		StringBuilder string = new StringBuilder();
		toString(string, v);
		return string.toString();
	}

	public static String toString(boolean[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	/** Renders a byte array as a list of hex pairs, e.g. {@code [0a, ff]}. */
	public static String toString(byte[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	public static String toString(char[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	public static String toString(short[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	public static String toString(int[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	public static String toString(long[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	public static String toString(float[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	public static String toString(double[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	public static String toString(Object[] a) {
		StringBuilder string = new StringBuilder();
		toString(string, a);
		return string.toString();
	}

	public static String toString(Collection<?> c) {
		StringBuilder string = new StringBuilder();
		toString(string, c);
		return string.toString();
	}

	public static String toString(Map<?, ?> m) {
		StringBuilder string = new StringBuilder();
		toString(string, m);
		return string.toString();
	}

	public static void toString(StringBuilder builder, boolean v) {
		builder.append(v ? STRING_TRUE : STRING_FALSE);
	}

	/** Appends a byte as two lowercase hex digits. */
	public static void toString(StringBuilder builder, byte v) {
		int i = v & 0xFF;
		builder.ensureCapacity(builder.capacity() + 2);
		builder.append(DIGITS[i >>> 4]).append(DIGITS[i & 0x0F]);
	}

	public static void toString(StringBuilder builder, char v) {
		builder.append(v);
	}

	public static void toString(StringBuilder builder, short v) {
		builder.append(v);
	}

	public static void toString(StringBuilder builder, int v) {
		builder.append(v);
	}

	public static void toString(StringBuilder builder, long v) {
		builder.append(v);
	}

	public static void toString(StringBuilder builder, float v) {
		builder.append(v);
	}

	public static void toString(StringBuilder builder, double v) {
		builder.append(v);
	}

	/**
	 * Appends any value, dispatching on its runtime type. The instanceof chain mirrors
	 * the primitive/array overloads above so every value routes to the same renderer
	 * regardless of static type.
	 */
	public static void toString(StringBuilder builder, Object v) {
		if (v == null) {
			builder.append(STRING_NULL);
		}
		else if (v instanceof Boolean) {
			toString(builder, ((Boolean)v).booleanValue());
		}
		else if (v instanceof Byte) {
			toString(builder, ((Byte)v).byteValue());
		}
		else if (v instanceof Character) {
			toString(builder, ((Character)v).charValue());
		}
		else if (v instanceof Short) {
			toString(builder, ((Short)v).shortValue());
		}
		else if (v instanceof Integer) {
			toString(builder, ((Integer)v).intValue());
		}
		else if (v instanceof Long) {
			toString(builder, ((Long)v).longValue());
		}
		else if (v instanceof Float) {
			toString(builder, ((Float)v).floatValue());
		}
		else if (v instanceof Double) {
			toString(builder, ((Double)v).doubleValue());
		}
		else if (v instanceof Collection) {
			toString(builder, (Collection)v);
		}
		else if (v instanceof Map) {
			toString(builder, (Map)v);
		}
		else if (v instanceof boolean[]) {
			toString(builder, (boolean[])v);
		}
		else if (v instanceof byte[]) {
			toString(builder, (byte[])v);
		}
		else if (v instanceof char[]) {
			toString(builder, (char[])v);
		}
		else if (v instanceof short[]) {
			toString(builder, (short[])v);
		}
		else if (v instanceof int[]) {
			toString(builder, (int[])v);
		}
		else if (v instanceof long[]) {
			toString(builder, (long[])v);
		}
		else if (v instanceof float[]) {
			toString(builder, (float[])v);
		}
		else if (v.getClass().isArray()) {
			// Any remaining array is an Object[] (including nested primitive arrays
			// like int[][], whose elements are themselves objects).
			toString(builder, (Object[])v);
		}
		else if (v instanceof double[]) {
			toString(builder, (double[])v);
		}
		else {
			builder.append(v.toString());
		}
	}

	public static void toString(StringBuilder builder, boolean[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 9 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (boolean v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				toString(builder, v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	/** Appends a byte array as a list of hex pairs, e.g. {@code [0a, ff]}. */
	public static void toString(StringBuilder builder, byte[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 4 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (byte v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				int i = v & 0xFF;
				builder.append(DIGITS[i >>> 4]).append(DIGITS[i & 0x0F]);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	public static void toString(StringBuilder builder, char[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 3 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (char v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				builder.append(v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	public static void toString(StringBuilder builder, short[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 4 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (short v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				builder.append(v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	public static void toString(StringBuilder builder, int[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 4 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (int v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				builder.append(v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	public static void toString(StringBuilder builder, long[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 4 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (long v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				builder.append(v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	public static void toString(StringBuilder builder, float[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 4 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (float v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				builder.append(v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	public static void toString(StringBuilder builder, double[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 4 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (double v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				builder.append(v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	public static void toString(StringBuilder builder, Object[] a) {
		if (a == null) {
			builder.append(STRING_NULL);
		}
		else if (a.length == 0) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 4 * a.length);
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (Object v : a) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				toString(builder, v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	public static void toString(StringBuilder builder, Collection<?> c) {
		if (c == null) {
			builder.append(STRING_NULL);
		}
		else if (c.isEmpty()) {
			builder.append(STRING_LIST_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 4 * c.size());
			builder.append(STRING_LIST_OPEN);
			boolean sep = false;
			for (Object v : c) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				toString(builder, v);
			}
			builder.append(STRING_LIST_CLOSE);
		}
	}

	/** Appends a map as {@code {k1: v1, k2: v2}} in the map's iteration order. */
	public static void toString(StringBuilder builder, Map<?, ?> m) {
		if (m == null) {
			builder.append(STRING_NULL);
		}
		else if (m.isEmpty()) {
			builder.append(STRING_MAP_EMPTY);
		}
		else {
			builder.ensureCapacity(builder.capacity() + 6 * m.size());
			builder.append(STRING_MAP_OPEN);
			boolean sep = false;
			for (Map.Entry<?, ?> e : m.entrySet()) {
				if (sep) {
					builder.append(STRING_SEQUENCE_SEPARATOR);
				}
				else {
					sep = true;
				}
				toString(builder, e.getKey());
				builder.append(STRING_KEY_VALUE_SEPARATOR);
				toString(builder, e.getValue());
			}
			builder.append(STRING_MAP_CLOSE);
		}
	}
}
| |
/*
* Copyright 2007 The Apache Software Foundation.
* Copyright 2007 International Business Machines Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sandesha2.scenarios;
import java.io.File;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import junit.framework.AssertionFailedError;
import org.apache.axis2.Constants;
import org.apache.axis2.addressing.AddressingConstants;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.client.Options;
import org.apache.axis2.client.ServiceClient;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.sandesha2.Sandesha2Constants;
import org.apache.sandesha2.SandeshaTestCase;
import org.apache.sandesha2.client.SandeshaClient;
import org.apache.sandesha2.client.SandeshaClientConstants;
import org.apache.sandesha2.client.SequenceReport;
import org.apache.sandesha2.util.SandeshaUtil;
/**
 * End-to-end WS-ReliableMessaging scenarios: one-way pings and request/reply
 * echoes, with the various combinations of sync/async replies and
 * acknowledgements, offered sequences, and explicit vs. implicit termination.
 */
public class RMScenariosTest extends SandeshaTestCase {

    // NOTE(review): JUnit creates a fresh test instance per test method, so this
    // instance-level flag is always false when setUp() runs and the server is
    // (re)started for every test. If one-time startup is intended the flag would
    // need to be static — confirm against SandeshaTestCase.startServer() first.
    private boolean serverStarted = false;

    // Client-side Axis2 configuration, created in setUp().
    protected ConfigurationContext configContext = null;

    // Endpoint of the RM sample service hosted by the test server.
    protected String to = "http://127.0.0.1:" + serverPort + "/axis2/services/RMSampleService";

    // Server-side repository and axis2 configuration.
    protected String repoPath = "target" + File.separator + "repos" + File.separator + "server";
    protected String axis2_xml = "target" + File.separator + "repos" + File.separator + "server" + File.separator + "server_axis2.xml";

    // Client-side repository and axis2 configuration.
    protected String repoPathClient = "target" + File.separator + "repos" + File.separator + "client";
    protected String axis2_xmlClient = "target" + File.separator + "repos" + File.separator + "client" + File.separator + "client_axis2.xml";

    public RMScenariosTest () {
        super ("RMScenariosTest");
    }

    public RMScenariosTest (String name) {
        super(name);
    }

    /**
     * Starts the test server and creates the client configuration context used
     * by the individual scenarios.
     */
    public void setUp () throws Exception {
        super.setUp();

        if (!serverStarted) {
            startServer(repoPath, axis2_xml);
            configContext = ConfigurationContextFactory.createConfigurationContextFromFileSystem(repoPathClient,axis2_xmlClient);
        }
        serverStarted = true;
    }

    /**
     * Override the teardown processing
     */
    public void tearDown () throws Exception {
        super.tearDown();
    }

    public void testPing() throws Exception {
        // Run a ping test with sync acks
        runPing(false, false);

        // Run a ping test with async acks
        runPing(true, true);
    }

    public void testAsyncEchoWithSyncAcks() throws Exception {
        Options clientOptions = new Options();
        runEcho(clientOptions, true, false, false, true, false);
    }

    public void testAsyncEchoWithAsyncAcks() throws Exception {
        Options clientOptions = new Options();
        runEcho(clientOptions, true, true, false, true, false);
    }

    public void testAsyncEchoWithAsyncAcksAndOffer() throws Exception {
        Options clientOptions = new Options();
        clientOptions.setProperty(SandeshaClientConstants.OFFERED_SEQUENCE_ID, SandeshaUtil.getUUID());
        runEcho(clientOptions, true, true, false, true, true);
    }

    public void testSyncEchoWithOffer() throws Exception {
        // Test sync echo with an offer, and the 1.1 spec
        Options clientOptions = new Options();
        clientOptions.setProperty(SandeshaClientConstants.OFFERED_SEQUENCE_ID, SandeshaUtil.getUUID());
        clientOptions.setProperty(SandeshaClientConstants.RM_SPEC_VERSION, Sandesha2Constants.SPEC_VERSIONS.v1_1);
        runEcho(clientOptions, false, false, true, true, false);

//        // Test sync echo with an offer, and the 1.0 spec. The offer is not automatic as this
//        // is a client that hasn't been built from WSDL. If the user's operations had been
//        // modelled properly then the offer would happen automatically.
//        clientOptions = new Options();
//        clientOptions.setProperty(SandeshaClientConstants.OFFERED_SEQUENCE_ID,SandeshaUtil.getUUID());
//        clientOptions.setProperty(SandeshaClientConstants.RM_SPEC_VERSION,Sandesha2Constants.SPEC_VERSIONS.v1_0);
//        runEcho(clientOptions, false, false, true,false,false);
    }

    public void testSyncEcho() throws Exception {
        // Test sync echo with no offer, and the 1.1 spec
        Options clientOptions = new Options();
        clientOptions.setProperty(SandeshaClientConstants.RM_SPEC_VERSION, Sandesha2Constants.SPEC_VERSIONS.v1_1);
        runEcho(clientOptions, false, false, true, true, true);
    }

    /** Runs a single-message ping scenario. */
    public void runPing(boolean asyncAcks, boolean stopListener) throws Exception {
        runPing(asyncAcks, stopListener, 1);
    }

    /**
     * Sends {@code msgCount} one-way ping messages on a new sequence and polls
     * until the outbound sequence reports all messages complete and the
     * sequence terminated, or the wait time is exceeded.
     *
     * @param asyncAcks    whether acknowledgements flow back over a separate HTTP listener
     * @param stopListener whether to stop the client transport listener afterwards
     * @param msgCount     number of ping messages to send
     */
    public void runPing(boolean asyncAcks, boolean stopListener, int msgCount) throws Exception {

        Options clientOptions = new Options();

        ServiceClient serviceClient = new ServiceClient (configContext, null);
        serviceClient.setOptions(clientOptions);

        String sequenceKey = SandeshaUtil.getUUID();

        clientOptions.setAction(pingAction);
        clientOptions.setTo(new EndpointReference (to));
        clientOptions.setProperty(SandeshaClientConstants.SEQUENCE_KEY, sequenceKey);

        if (asyncAcks) {
            String acksTo = serviceClient.getMyEPR(Constants.TRANSPORT_HTTP).getAddress();
            clientOptions.setProperty(SandeshaClientConstants.AcksTo, acksTo);
            clientOptions.setTransportInProtocol(Constants.TRANSPORT_HTTP);
            clientOptions.setUseSeparateListener(true);
        }

        for (int i = 0; i < msgCount; i++) {
            String text = "ping" + (i + 1);
            if (i == (msgCount - 1)) {
                // Flag the final message so pre-1.1 sequences can complete.
                clientOptions.setProperty(SandeshaClientConstants.LAST_MESSAGE, "true");
            }
            serviceClient.fireAndForget(getPingOMBlock(text)); //start the pingX text at X=1
        }

        long limit = System.currentTimeMillis() + waitTime;
        Error lastError = null;
        while (System.currentTimeMillis() < limit) {
            Thread.sleep(tickTime); // Try the assertions each tick interval, until they pass or we time out

            try {
                SequenceReport sequenceReport = SandeshaClient.getOutgoingSequenceReport(serviceClient);
                System.out.println("Checking Outbound Sequence: " + sequenceReport.getSequenceID());
                // Long.valueOf replaces the deprecated new Long(...) constructor.
                assertTrue("Checking completed messages", sequenceReport.getCompletedMessages().contains(Long.valueOf(msgCount)));
                assertEquals("Checking sequence terminated", SequenceReport.SEQUENCE_STATUS_TERMINATED, sequenceReport.getSequenceStatus());
                assertEquals("Checking sequence direction", SequenceReport.SEQUENCE_DIRECTION_OUT, sequenceReport.getSequenceDirection());

                lastError = null;
                break;
            } catch (Error e) {
                System.out.println("Possible error:" + e);
                lastError = e;
            }
        }
        if (lastError != null) throw lastError;

        if (stopListener)
            configContext.getListenerManager().stop();
        serviceClient.cleanup();
    }

    /**
     * Sends three echo messages on a new sequence and polls until both the
     * outbound sequence and the matching (new) inbound sequence report all
     * three messages complete, with the expected cumulative echo results.
     *
     * @param clientOptions           options, possibly pre-populated with an offer and/or RM spec version
     * @param asyncReply              whether replies arrive over a separate listener
     * @param asyncAcks               whether acknowledgements arrive over a separate listener
     * @param explicitTermination     whether to terminate the sequence via SandeshaClient
     * @param checkInboundTermination whether to assert the inbound sequence terminated
     * @param stopListener            whether to stop the client transport listener afterwards
     */
    public void runEcho(Options clientOptions, boolean asyncReply, boolean asyncAcks, boolean explicitTermination, boolean checkInboundTermination, boolean stopListener) throws Exception {

        String sequenceKey = SandeshaUtil.getUUID();

        ServiceClient serviceClient = new ServiceClient (configContext, null);
        serviceClient.setOptions(clientOptions);

        clientOptions.setAction(echoAction);
        clientOptions.setTo(new EndpointReference (to));
        clientOptions.setProperty(SandeshaClientConstants.SEQUENCE_KEY, sequenceKey);
        clientOptions.setTransportInProtocol(Constants.TRANSPORT_HTTP);

        if (asyncReply || asyncAcks) {
            clientOptions.setUseSeparateListener(true);
        }

        // Acks flow back either to the client's own listener EPR (async) or to
        // the WS-Addressing anonymous URL (sync). The original code ran this
        // exact if/else twice in a row; the duplicate has been removed.
        if (asyncAcks) {
            String acksTo = serviceClient.getMyEPR(Constants.TRANSPORT_HTTP).getAddress();
            clientOptions.setProperty(SandeshaClientConstants.AcksTo, acksTo);
        } else {
            String acksTo = AddressingConstants.Final.WSA_ANONYMOUS_URL;
            clientOptions.setProperty(SandeshaClientConstants.AcksTo, acksTo);
        }

        // Establish a baseline count for inbound sequences
        List<SequenceReport> oldIncomingReports = SandeshaClient.getIncomingSequenceReports(configContext);

        TestCallback callback1 = new TestCallback ("Callback 1");
        serviceClient.sendReceiveNonBlocking (getEchoOMBlock("echo1", sequenceKey), callback1);

        TestCallback callback2 = new TestCallback ("Callback 2");
        serviceClient.sendReceiveNonBlocking (getEchoOMBlock("echo2", sequenceKey), callback2);

        // Without explicit termination and on a pre-1.1 spec, flag the last
        // message so the sequence can complete.
        if (!explicitTermination
            &&
            !Sandesha2Constants.SPEC_VERSIONS.v1_1.equals(clientOptions.getProperty(SandeshaClientConstants.RM_SPEC_VERSION))) {
            clientOptions.setProperty(SandeshaClientConstants.LAST_MESSAGE, "true");
        }

        TestCallback callback3 = new TestCallback ("Callback 3");
        serviceClient.sendReceiveNonBlocking (getEchoOMBlock("echo3", sequenceKey), callback3);

        if (explicitTermination) {
            Thread.sleep(10000);
            SandeshaClient.terminateSequence(serviceClient);
        }

        long limit = System.currentTimeMillis() + waitTime;
        Error lastError = null;
        while (System.currentTimeMillis() < limit) {
            Thread.sleep(tickTime); // Try the assertions each tick interval, until they pass or we time out

            try {
                //assertions for the out sequence.
                SequenceReport outgoingSequenceReport = SandeshaClient.getOutgoingSequenceReport(serviceClient);
                System.out.println("Checking Outbound Sequence: " + outgoingSequenceReport.getSequenceID());
                assertTrue("Outbound message #1", outgoingSequenceReport.getCompletedMessages().contains(Long.valueOf(1)));
                assertTrue("Outbound message #2", outgoingSequenceReport.getCompletedMessages().contains(Long.valueOf(2)));
                assertTrue("Outbound message #3", outgoingSequenceReport.getCompletedMessages().contains(Long.valueOf(3)));
                assertEquals("Outbound sequence status: TERMINATED", SequenceReport.SEQUENCE_STATUS_TERMINATED, outgoingSequenceReport.getSequenceStatus());
                assertEquals("Outbound sequence direction: OUT", SequenceReport.SEQUENCE_DIRECTION_OUT, outgoingSequenceReport.getSequenceDirection());

                //assertions for the inbound sequence. The one we care about is a new sequence,
                //so it will not exist in the oldSequences list.
                List<SequenceReport> incomingSequences = SandeshaClient.getIncomingSequenceReports(configContext);
                SequenceReport incomingSequenceReport = getNewReport(incomingSequences, oldIncomingReports);
                System.out.println("Checking Inbound Sequence: " + incomingSequenceReport.getSequenceID());
                String offer = (String) clientOptions.getProperty(SandeshaClientConstants.OFFERED_SEQUENCE_ID);
                if (offer != null) assertEquals("Inbound seq id", offer, incomingSequenceReport.getSequenceID());
                assertEquals ("Inbound message count", 3, incomingSequenceReport.getCompletedMessages().size());
                assertTrue("Inbound message #1", incomingSequenceReport.getCompletedMessages().contains(Long.valueOf(1)));
                assertTrue("Inbound message #2", incomingSequenceReport.getCompletedMessages().contains(Long.valueOf(2)));
                assertTrue("Inbound message #3", incomingSequenceReport.getCompletedMessages().contains(Long.valueOf(3)));
                if (checkInboundTermination)
                    assertEquals("Inbound sequence status: TERMINATED", SequenceReport.SEQUENCE_STATUS_TERMINATED, incomingSequenceReport.getSequenceStatus());
                assertEquals("Inbound sequence direction: IN", SequenceReport.SEQUENCE_DIRECTION_IN, incomingSequenceReport.getSequenceDirection());

                // The echo service returns the concatenation of everything
                // received so far on the sequence.
                assertTrue("Callback #1", callback1.isComplete());
                assertEquals("Callback #1 data", "echo1", callback1.getResult());
                assertTrue("Callback #2", callback2.isComplete());
                assertEquals("Callback #2 data", "echo1echo2", callback2.getResult());
                assertTrue("Callback #3", callback3.isComplete());
                assertEquals("Callback #3 data", "echo1echo2echo3", callback3.getResult());

                lastError = null;
                break;
            } catch (Error e) {
                System.out.println("Possible error:" + e);
                lastError = e;
            }
        }
        if (lastError != null) throw lastError;

        if (stopListener)
            configContext.getListenerManager().stop();
        serviceClient.cleanup();
    }

    // Scan through lists of old and new incoming sequences, to find the sequence that
    // was established by this test. Note that some of the old sequences may have timed out.
    private SequenceReport getNewReport(List<SequenceReport> incomingSequences, List<SequenceReport> oldIncomingReports) {
        HashSet<String> sequenceIds = new HashSet<String>();
        for (SequenceReport report : oldIncomingReports) {
            sequenceIds.add(report.getSequenceID());
        }
        for (SequenceReport report : incomingSequences) {
            if (!sequenceIds.contains(report.getSequenceID())) {
                return report;
            }
        }
        throw new AssertionFailedError("Failed to find a new reply sequence");
    }
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2015 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.selenium;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFileAttributes;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Set;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang3.SystemUtils;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
/** Defines the browsers supported by the add-on. */
/** Defines the browsers supported by the add-on. */
public enum Browser {
    CHROME("chrome", false),
    CHROME_HEADLESS("chrome-headless", true),
    FIREFOX("firefox", false),
    FIREFOX_HEADLESS("firefox-headless", true),
    /**
     * Headless browser, guaranteed to be always available.
     *
     * @see #getFailSafeBrowser()
     */
    HTML_UNIT("htmlunit", true),
    /**
     * @deprecated Does not support required capabilities ({@link
     *     org.openqa.selenium.remote.CapabilityType#ACCEPT_INSECURE_CERTS ACCEPT_INSECURE_CERTS}).
     */
    @Deprecated
    INTERNET_EXPLORER("ie", false),
    OPERA("opera", false),
    PHANTOM_JS("phantomjs", true),
    SAFARI("safari", false);

    /** Name of the directory (under the ZAP home dir) with the bundled WebDrivers. */
    private static final String WEB_DRIVERS_DIR_NAME = "webdriver";

    private static final Logger logger = Logger.getLogger(Browser.class);

    /** Lazily initialised in {@link #getZapHomeDir()}; overridable for tests. */
    private static Path zapHomeDir;

    private final String id;
    private boolean isHeadless = false;

    private Browser(String id, boolean isHeadless) {
        this.id = id;
        this.isHeadless = isHeadless;
    }

    /**
     * Gets the ID of this browser.
     *
     * <p>The ID can be used for persistence and later creation, using the method {@code
     * getBrowserWithId(String)}.
     *
     * @return the ID of the browser
     * @see #getBrowserWithId(String)
     */
    public String getId() {
        return id;
    }

    /**
     * Gets the browser that has the given {@code id}.
     *
     * <p>If no match is found returns the browser guaranteed to be always available, as returned by
     * {@code getFailSafeBrowser()}.
     *
     * @param id the ID of the browser
     * @return the browser that matches the given {@code id}, or if not found the browser returned
     *     by {@code getFailSafeBrowser()}
     * @throws IllegalArgumentException if the given {@code id} is {@code null} or empty.
     * @see #getId()
     * @see #getFailSafeBrowser()
     */
    public static Browser getBrowserWithId(String id) {
        Validate.notEmpty(id, "Parameter id must not be null or empty.");

        Browser browser = getBrowserWithIdNoFailSafe(id);
        if (browser != null) {
            return browser;
        }
        return getFailSafeBrowser();
    }

    /**
     * Gets the browser that has the given {@code id}, or {@code null} if none matches.
     *
     * @param id the ID of the browser
     * @return the matching browser, or {@code null} if not found
     * @throws IllegalArgumentException if the given {@code id} is {@code null} or empty.
     */
    public static Browser getBrowserWithIdNoFailSafe(String id) {
        Validate.notEmpty(id, "Parameter id must not be null or empty.");

        // Match against the declared constants instead of a hand-written
        // if/else chain, so newly added browsers are handled automatically.
        for (Browser browser : values()) {
            if (browser.id.equals(id)) {
                return browser;
            }
        }
        return null;
    }

    /**
     * Gets the browser that is guaranteed to be always available.
     *
     * @return the {@code Browser} that is guaranteed to be always available.
     * @see #HTML_UNIT
     */
    public static Browser getFailSafeBrowser() {
        return HTML_UNIT;
    }

    /**
     * Tells whether or not the given path is a bundled WebDriver.
     *
     * <p>No actual check is done to test whether or not the WebDriver really exists, just that it's
     * under the directory of the bundled WebDrivers.
     *
     * @param path the path to check
     * @return {@code true} if the path is a bundled WebDriver, {@code false} otherwise.
     */
    public static boolean isBundledWebDriverPath(String path) {
        if (path == null || path.isEmpty()) {
            return false;
        }

        try {
            return Paths.get(path).startsWith(getWebDriversDir());
        } catch (InvalidPathException e) {
            logger.warn("Failed to create path for " + path, e);
            return false;
        }
    }

    private static Path getWebDriversDir() {
        return getZapHomeDir().resolve(WEB_DRIVERS_DIR_NAME);
    }

    /**
     * Tells whether or not a bundled WebDriver exists for the given browser.
     *
     * @param browser the browser that will be checked
     * @return {@code true} if the bundled WebDriver exists, {@code false} otherwise.
     * @see #getBundledWebDriverPath(Browser)
     */
    public static boolean hasBundledWebDriver(Browser browser) {
        return getBundledWebDriverPath(browser) != null;
    }

    /**
     * Gets the path to the bundled WebDriver of the given browser.
     *
     * <p>Prefers the 64-bit driver on 64-bit systems, falling back to the 32-bit one.
     *
     * @param browser the target browser
     * @return the path to the bundled WebDriver, or {@code null} if none available.
     * @see #hasBundledWebDriver(Browser)
     */
    public static String getBundledWebDriverPath(Browser browser) {
        String osDirName = getOsDirName();
        if (osDirName == null) {
            return null;
        }

        String driverName = getWebDriverName(browser);
        if (driverName == null) {
            return null;
        }

        if ("windows".equals(osDirName)) {
            driverName += ".exe";
        }

        Path basePath = getWebDriversDir().resolve(osDirName);
        if (isOs64Bits()) {
            String driver = resolveWebDriver(basePath.resolve("64").resolve(driverName));
            if (driver != null) {
                return driver;
            }
        }
        return resolveWebDriver(basePath.resolve("32").resolve(driverName));
    }

    /**
     * Returns the absolute path of the given WebDriver if it exists (marking it executable when
     * needed), otherwise {@code null}. Extracted to remove the duplicated 64/32-bit handling.
     */
    private static String resolveWebDriver(Path driver) {
        if (!Files.exists(driver)) {
            return null;
        }
        try {
            setExecutable(driver);
            return driver.toAbsolutePath().toString();
        } catch (IOException e) {
            logger.warn("Failed to set the bundled WebDriver executable:", e);
        }
        return null;
    }

    /** Returns the base (OS-independent) driver file name, or {@code null} if none is bundled. */
    private static String getWebDriverName(Browser browser) {
        switch (browser) {
            case CHROME:
            case CHROME_HEADLESS:
                return "chromedriver";
            case FIREFOX:
            case FIREFOX_HEADLESS:
                return "geckodriver";
            default:
                return null;
        }
    }

    /** Returns the per-OS directory name for bundled drivers, or {@code null} if unsupported. */
    private static String getOsDirName() {
        if (SystemUtils.IS_OS_WINDOWS) {
            return "windows";
        }
        if (SystemUtils.IS_OS_MAC) {
            return "macos";
        }
        if (SystemUtils.IS_OS_UNIX) {
            return "linux";
        }
        return null;
    }

    private static boolean isOs64Bits() {
        String arch = System.getProperty("os.arch");
        return arch.contains("amd64") || arch.contains("x86_64");
    }

    /** Adds owner-execute permission on POSIX systems; a no-op elsewhere or if already set. */
    private static void setExecutable(Path file) throws IOException {
        if (!SystemUtils.IS_OS_MAC && !SystemUtils.IS_OS_UNIX) {
            return;
        }

        Set<PosixFilePermission> perms =
                Files.readAttributes(file, PosixFileAttributes.class).permissions();
        if (perms.contains(PosixFilePermission.OWNER_EXECUTE)) {
            return;
        }
        perms.add(PosixFilePermission.OWNER_EXECUTE);
        Files.setPosixFilePermissions(file, perms);
    }

    /** Returns {@code true} if the driver could be marked executable, logging on failure. */
    static boolean ensureExecutable(Path driver) {
        try {
            setExecutable(driver);
            return true;
        } catch (IOException e) {
            logger.warn("Failed to set the bundled WebDriver executable:", e);
        }
        return false;
    }

    static void setZapHomeDir(Path path) {
        zapHomeDir = path;
    }

    private static Path getZapHomeDir() {
        if (zapHomeDir == null) {
            zapHomeDir = Paths.get(Constant.getZapHome());
        }
        return zapHomeDir;
    }

    public boolean isHeadless() {
        return isHeadless;
    }

    public void setHeadless(boolean isHeadless) {
        this.isHeadless = isHeadless;
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.authz.store;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.mock.orig.Mockito;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.junit.Before;
import org.mockito.stubbing.Answer;
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
public final class DeprecationRoleDescriptorConsumerTests extends ESTestCase {
private ThreadPool threadPool;
@Before
public void init() throws Exception {
    // Mock a thread pool whose generic executor runs submitted tasks inline on
    // the calling thread, so the consumer's async work completes synchronously
    // within each test.
    threadPool = mock(ThreadPool.class);
    final ExecutorService sameThreadExecutor = mock(ExecutorService.class);
    Mockito.doAnswer((Answer) invocation -> {
        ((Runnable) invocation.getArguments()[0]).run();
        return null;
    }).when(sameThreadExecutor).execute(Mockito.isA(Runnable.class));
    when(threadPool.generic()).thenReturn(sameThreadExecutor);
    when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
}
// A role granting a privilege via an alias is reported for the backing index,
// while a role granting directly on the index is not reported at all.
public void testSimpleAliasAndIndexPair() throws Exception {
    final DeprecationLogger logger = mock(DeprecationLogger.class);
    final Metadata.Builder mdBuilder = Metadata.builder();
    addIndex(mdBuilder, "index", "alias");
    final String aliasPrivilege = randomFrom("read", "write", "delete", "index");
    final RoleDescriptor roleOverAlias = new RoleDescriptor("roleOverAlias",
            new String[] { "read" },
            new RoleDescriptor.IndicesPrivileges[] { indexPrivileges(aliasPrivilege, "alias") },
            null);
    final String indexPrivilege = randomFrom("read", "write", "delete", "index");
    final RoleDescriptor roleOverIndex = new RoleDescriptor("roleOverIndex",
            new String[] { "manage" },
            new RoleDescriptor.IndicesPrivileges[] { indexPrivileges(indexPrivilege, "index") },
            null);
    final DeprecationRoleDescriptorConsumer consumer = new DeprecationRoleDescriptorConsumer(
            mockClusterService(mdBuilder.build()), threadPool, logger);
    consumer.accept(Arrays.asList(roleOverAlias, roleOverIndex));
    verifyLogger(logger, "roleOverAlias", "alias", "index");
    verifyNoMoreInteractions(logger);
}
// When a single privilege entry names both the index and its alias, the alias
// grants nothing extra, so no deprecation is logged.
public void testRoleGrantsOnIndexAndAliasPair() throws Exception {
    final DeprecationLogger logger = mock(DeprecationLogger.class);
    final Metadata.Builder mdBuilder = Metadata.builder();
    addIndex(mdBuilder, "index", "alias");
    addIndex(mdBuilder, "index1", "alias2");
    final String privilege = randomFrom("read", "write", "delete", "index");
    final RoleDescriptor roleOverIndexAndAlias = new RoleDescriptor("roleOverIndexAndAlias",
            new String[] { "manage_watcher" },
            new RoleDescriptor.IndicesPrivileges[] { indexPrivileges(privilege, "index", "alias") },
            null);
    final DeprecationRoleDescriptorConsumer consumer = new DeprecationRoleDescriptorConsumer(
            mockClusterService(mdBuilder.build()), threadPool, logger);
    consumer.accept(Arrays.asList(roleOverIndexAndAlias));
    verifyNoMoreInteractions(logger);
}
// Two distinct privilege entries over the same alias must still produce a
// single deprecation entry for that alias.
public void testMultiplePrivilegesLoggedOnce() throws Exception {
    final DeprecationLogger logger = mock(DeprecationLogger.class);
    final Metadata.Builder mdBuilder = Metadata.builder();
    addIndex(mdBuilder, "index", "alias");
    addIndex(mdBuilder, "index2", "alias2");
    final RoleDescriptor.IndicesPrivileges[] privileges = new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges("write", "alias"),
            indexPrivileges("manage_ilm", "alias") };
    final RoleDescriptor roleOverAlias =
            new RoleDescriptor("roleOverAlias", new String[] { "manage_watcher" }, privileges, null);
    final DeprecationRoleDescriptorConsumer consumer = new DeprecationRoleDescriptorConsumer(
            mockClusterService(mdBuilder.build()), threadPool, logger);
    consumer.accept(Arrays.asList(roleOverAlias));
    verifyLogger(logger, "roleOverAlias", "alias", "index");
    verifyNoMoreInteractions(logger);
}
// Each alias that grants access to indices not otherwise covered gets its own
// deprecation entry, listing only the not-directly-granted indices.
public void testMultiplePrivilegesLoggedForEachAlias() throws Exception {
    final DeprecationLogger logger = mock(DeprecationLogger.class);
    final Metadata.Builder mdBuilder = Metadata.builder();
    addIndex(mdBuilder, "index", "alias", "alias3");
    addIndex(mdBuilder, "index2", "alias2", "alias", "alias4");
    addIndex(mdBuilder, "index3", "alias3", "alias");
    addIndex(mdBuilder, "index4", "alias4", "alias");
    addIndex(mdBuilder, "foo", "bar");
    final RoleDescriptor.IndicesPrivileges[] privileges = new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges("write", "index2", "alias"),
            indexPrivileges("read", "alias4"),
            indexPrivileges("delete_index", "alias3", "index"),
            indexPrivileges("create_index", "alias3", "index3") };
    final RoleDescriptor roleMultiplePrivileges =
            new RoleDescriptor("roleMultiplePrivileges", new String[] { "manage_watcher" }, privileges, null);
    final DeprecationRoleDescriptorConsumer consumer = new DeprecationRoleDescriptorConsumer(
            mockClusterService(mdBuilder.build()), threadPool, logger);
    consumer.accept(Arrays.asList(roleMultiplePrivileges));
    verifyLogger(logger, "roleMultiplePrivileges", "alias", "index, index3, index4");
    verifyLogger(logger, "roleMultiplePrivileges", "alias4", "index2, index4");
    verifyNoMoreInteractions(logger);
}
// Indices granted directly in one privilege entry cover the aliases granted in
// another entry of the same role, so nothing is logged.
public void testPermissionsOverlapping() throws Exception {
    final DeprecationLogger logger = mock(DeprecationLogger.class);
    final Metadata.Builder mdBuilder = Metadata.builder();
    addIndex(mdBuilder, "index1", "alias1", "bar");
    addIndex(mdBuilder, "index2", "alias2", "baz");
    addIndex(mdBuilder, "foo", "bar");
    final RoleDescriptor.IndicesPrivileges[] privileges = new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges("monitor", "index2", "alias1"),
            indexPrivileges("monitor", "index1", "alias2") };
    final RoleDescriptor roleOverAliasAndIndex =
            new RoleDescriptor("roleOverAliasAndIndex", new String[] { "read_ilm" }, privileges, null);
    final DeprecationRoleDescriptorConsumer consumer = new DeprecationRoleDescriptorConsumer(
            mockClusterService(mdBuilder.build()), threadPool, logger);
    consumer.accept(Arrays.asList(roleOverAliasAndIndex));
    verifyNoMoreInteractions(logger);
}
// Coverage is computed per role descriptor, not across descriptors: each of
// the three roles below is evaluated (and possibly reported) independently.
public void testOverlappingAcrossMultipleRoleDescriptors() throws Exception {
    final DeprecationLogger logger = mock(DeprecationLogger.class);
    final Metadata.Builder mdBuilder = Metadata.builder();
    addIndex(mdBuilder, "index1", "alias1", "bar");
    addIndex(mdBuilder, "index2", "alias2", "baz");
    addIndex(mdBuilder, "foo", "bar");
    final RoleDescriptor role1 = new RoleDescriptor("role1", new String[] { "monitor_watcher" },
            new RoleDescriptor.IndicesPrivileges[] { indexPrivileges("monitor", "index2", "alias1") },
            null);
    final RoleDescriptor role2 = new RoleDescriptor("role2", new String[] { "read_ccr" },
            new RoleDescriptor.IndicesPrivileges[] { indexPrivileges("monitor", "index1", "alias2") },
            null);
    final RoleDescriptor role3 = new RoleDescriptor("role3", new String[] { "monitor_ml" },
            new RoleDescriptor.IndicesPrivileges[] { indexPrivileges("index", "bar") },
            null);
    final DeprecationRoleDescriptorConsumer consumer = new DeprecationRoleDescriptorConsumer(
            mockClusterService(mdBuilder.build()), threadPool, logger);
    consumer.accept(Arrays.asList(role1, role2, role3));
    verifyLogger(logger, "role1", "alias1", "index1");
    verifyLogger(logger, "role2", "alias2", "index2");
    verifyLogger(logger, "role3", "bar", "foo, index1");
    verifyNoMoreInteractions(logger);
}
/**
 * The consumer caches role names per day, so accepting the same role (or a
 * different role under the same name) again on the same day must not log
 * again. The assertions are skipped when the cache key changes between
 * checks, i.e. when the test straddles a day boundary.
 */
public void testDailyRoleCaching() throws Exception {
    final DeprecationLogger deprecationLogger = mock(DeprecationLogger.class);
    final Metadata.Builder metadataBuilder = Metadata.builder();
    addIndex(metadataBuilder, "index1", "alias1", "far");
    addIndex(metadataBuilder, "index2", "alias2", "baz");
    addIndex(metadataBuilder, "foo", "bar");
    final Metadata metadata = metadataBuilder.build();
    RoleDescriptor someRole = new RoleDescriptor("someRole", new String[] { "monitor_rollup" },
        new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges("monitor", "i*", "bar")},
        null);
    final DeprecationRoleDescriptorConsumer deprecationConsumer = new DeprecationRoleDescriptorConsumer(mockClusterService(metadata),
        threadPool, deprecationLogger);
    final String cacheKeyBefore = DeprecationRoleDescriptorConsumer.buildCacheKey(someRole);
    // First accept: the "bar" alias is expected to be reported once.
    deprecationConsumer.accept(Arrays.asList(someRole));
    verifyLogger(deprecationLogger, "someRole", "bar", "foo");
    verifyNoMoreInteractions(deprecationLogger);
    // Second accept of the identical role on the same day: cache hit, no log.
    deprecationConsumer.accept(Arrays.asList(someRole));
    final String cacheKeyAfter = DeprecationRoleDescriptorConsumer.buildCacheKey(someRole);
    // we don't do this test if it crosses days
    if (false == cacheKeyBefore.equals(cacheKeyAfter)) {
        return;
    }
    verifyNoMoreInteractions(deprecationLogger);
    // A different descriptor that reuses the same role name is also treated as
    // cached for the rest of the day.
    RoleDescriptor differentRoleSameName = new RoleDescriptor("someRole", new String[] { "manage_pipeline" },
        new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges("write", "i*", "baz")},
        null);
    deprecationConsumer.accept(Arrays.asList(differentRoleSameName));
    final String cacheKeyAfterParty = DeprecationRoleDescriptorConsumer.buildCacheKey(differentRoleSameName);
    // we don't do this test if it crosses days
    if (false == cacheKeyBefore.equals(cacheKeyAfterParty)) {
        return;
    }
    verifyNoMoreInteractions(deprecationLogger);
}
/**
 * Wildcard patterns in the index privileges are expanded before alias
 * coverage is computed: a role whose index patterns already cover every
 * concrete index has nothing left to deprecate.
 */
public void testWildcards() throws Exception {
    final DeprecationLogger deprecationLogger = mock(DeprecationLogger.class);
    final Metadata.Builder metadataBuilder = Metadata.builder();
    // "alias" spans all four index* indices; alias2/alias3/alias4 span subsets;
    // "foo" carries the unrelated "bar"/"baz" aliases.
    addIndex(metadataBuilder, "index", "alias", "alias3");
    addIndex(metadataBuilder, "index2", "alias", "alias2", "alias4");
    addIndex(metadataBuilder, "index3", "alias", "alias3");
    addIndex(metadataBuilder, "index4", "alias", "alias4");
    addIndex(metadataBuilder, "foo", "bar", "baz");
    Metadata metadata = metadataBuilder.build();
    // "*" grants every index directly, so no alias adds anything — no logging.
    final RoleDescriptor roleGlobalWildcard = new RoleDescriptor("roleGlobalWildcard", new String[] { "manage_token" },
        new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges(randomFrom("write", "delete_index", "read_cross_cluster"), "*")},
        null);
    new DeprecationRoleDescriptorConsumer(mockClusterService(metadata), threadPool, deprecationLogger)
        .accept(Arrays.asList(roleGlobalWildcard));
    verifyNoMoreInteractions(deprecationLogger);
    // "i*" and "a*" together cover all indices and aliases — no logging.
    final RoleDescriptor roleGlobalWildcard2 = new RoleDescriptor("roleGlobalWildcard2", new String[] { "manage_index_templates" },
        new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges(randomFrom("write", "delete_index", "read_cross_cluster"), "i*", "a*")},
        null);
    new DeprecationRoleDescriptorConsumer(mockClusterService(metadata), threadPool, deprecationLogger)
        .accept(Arrays.asList(roleGlobalWildcard2));
    verifyNoMoreInteractions(deprecationLogger);
    // "index*" already covers every index the named aliases resolve to, so the
    // alias grants add nothing — no logging.
    final RoleDescriptor roleWildcardOnIndices = new RoleDescriptor("roleWildcardOnIndices", new String[] { "manage_watcher" },
        new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges("write", "index*", "alias", "alias3"),
            indexPrivileges("read", "foo")},
        null);
    new DeprecationRoleDescriptorConsumer(mockClusterService(metadata), threadPool, deprecationLogger)
        .accept(Arrays.asList(roleWildcardOnIndices));
    verifyNoMoreInteractions(deprecationLogger);
    // "alias*" grants via aliases while only index, index3, foo and (for read)
    // index2 are named directly, so the remaining alias-reachable indices are
    // reported per alias.
    final RoleDescriptor roleWildcardOnAliases = new RoleDescriptor("roleWildcardOnAliases", new String[] { "manage_watcher" },
        new RoleDescriptor.IndicesPrivileges[] {
            indexPrivileges("write", "alias*", "index", "index3"),
            indexPrivileges("read", "foo", "index2")},
        null);
    new DeprecationRoleDescriptorConsumer(mockClusterService(metadata), threadPool, deprecationLogger)
        .accept(Arrays.asList(roleWildcardOnAliases));
    verifyLogger(deprecationLogger, "roleWildcardOnAliases", "alias", "index2, index4");
    verifyLogger(deprecationLogger, "roleWildcardOnAliases", "alias2", "index2");
    verifyLogger(deprecationLogger, "roleWildcardOnAliases", "alias4", "index2, index4");
    verifyNoMoreInteractions(deprecationLogger);
}
public void testMultipleIndicesSameAlias() throws Exception {
    final DeprecationLogger deprecationLogger = mock(DeprecationLogger.class);
    // Three indices sharing aliases: alias1 -> {index1, index2}, alias2 -> {index2, index3}.
    final Metadata.Builder metadataBuilder = Metadata.builder();
    addIndex(metadataBuilder, "index1", "alias1");
    addIndex(metadataBuilder, "index2", "alias1", "alias2");
    addIndex(metadataBuilder, "index3", "alias2");
    final DeprecationRoleDescriptorConsumer consumer = new DeprecationRoleDescriptorConsumer(
            mockClusterService(metadataBuilder.build()), threadPool, deprecationLogger);

    // Privileges over alias1 plus index1 explicitly: only index2 is reached through the alias alone.
    final RoleDescriptor roleOverAliasAndIndex = new RoleDescriptor("roleOverAliasAndIndex", new String[] { "manage_ml" },
            new RoleDescriptor.IndicesPrivileges[] {
                    indexPrivileges("delete_index", "alias1", "index1") },
            null);
    consumer.accept(Arrays.asList(roleOverAliasAndIndex));
    verifyLogger(deprecationLogger, "roleOverAliasAndIndex", "alias1", "index2");
    verifyNoMoreInteractions(deprecationLogger);

    // Privileges over both aliases only: every index behind each alias is reported.
    final RoleDescriptor roleOverAliases = new RoleDescriptor("roleOverAliases", new String[] { "manage_security" },
            new RoleDescriptor.IndicesPrivileges[] {
                    indexPrivileges("monitor", "alias1", "alias2") },
            null);
    consumer.accept(Arrays.asList(roleOverAliases));
    verifyLogger(deprecationLogger, "roleOverAliases", "alias1", "index1, index2");
    verifyLogger(deprecationLogger, "roleOverAliases", "alias2", "index2, index3");
    verifyNoMoreInteractions(deprecationLogger);
}
private void addIndex(Metadata.Builder metadataBuilder, String index, String... aliases) {
final IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(index)
.settings(Settings.builder().put("index.version.created", VersionUtils.randomVersion(random())))
.numberOfShards(1)
.numberOfReplicas(1);
for (final String alias : aliases) {
indexMetadataBuilder.putAlias(AliasMetadata.builder(alias).build());
}
metadataBuilder.put(indexMetadataBuilder.build(), false);
}
/**
 * Builds a mock {@code ClusterService} whose state exposes only the given
 * metadata.
 */
private ClusterService mockClusterService(Metadata metadata) {
    final ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
    final ClusterService clusterService = mock(ClusterService.class);
    when(clusterService.state()).thenReturn(state);
    return clusterService;
}
/**
 * Builds an indices-privileges entry granting {@code priv} over the given
 * index/alias names. Granted fields and the query are randomized because they
 * are irrelevant to the deprecation checks under test.
 */
private RoleDescriptor.IndicesPrivileges indexPrivileges(String priv, String... indicesOrAliases) {
    final String[] grantedFields =
            randomArray(0, 2, String[]::new, () -> randomBoolean() ? null : randomAlphaOfLengthBetween(1, 4));
    final String query = randomBoolean() ? null : "{ }";
    return RoleDescriptor.IndicesPrivileges.builder()
            .indices(indicesOrAliases)
            .privileges(priv)
            .grantedFields(grantedFields)
            .query(query)
            .build();
}
private void verifyLogger(DeprecationLogger deprecationLogger, String roleName, String aliasName, String indexNames) {
verify(deprecationLogger).deprecated("Role [" + roleName + "] contains index privileges covering the [" + aliasName
+ "] alias but which do not cover some of the indices that it points to [" + indexNames + "]. Granting privileges over an"
+ " alias and hence granting privileges over all the indices that the alias points to is deprecated and will be removed"
+ " in a future version of Elasticsearch. Instead define permissions exclusively on index names or index name patterns.");
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.persistence;
import org.apache.ignite.DataStorageMetrics;
import org.apache.ignite.internal.util.typedef.internal.S;
/**
*
*/
/**
 * Immutable point-in-time snapshot of {@link DataStorageMetrics}: every metric
 * value is copied from the source object at construction time, so the snapshot
 * remains stable even while the underlying metrics keep changing.
 *
 * NOTE(review): the private fields {@code offHeadUsedSize} and
 * {@code lastCpMmarkDuration} were misspelled; they are renamed here. Since
 * {@link S#toString} renders fields reflectively, the rendered field names in
 * log output change accordingly — confirm nothing parses that string.
 */
public class DataStorageMetricsSnapshot implements DataStorageMetrics {
    /** WAL logging rate. */
    private final float walLoggingRate;

    /** WAL writing rate. */
    private final float walWritingRate;

    /** Number of WAL archive segments. */
    private final int walArchiveSegments;

    /** Average WAL fsync time. */
    private final float walFsyncTimeAvg;

    /** WAL buffer poll spins rate. */
    private final long walBuffPollSpinsNum;

    /** Duration of the last checkpoint. */
    private final long lastCpDuration;

    /** Lock wait duration of the last checkpoint. */
    private final long lastCpLockWaitDuration;

    /** Mark duration of the last checkpoint. */
    private final long lastCpMarkDuration;

    /** Pages write duration of the last checkpoint. */
    private final long lastCpPagesWriteDuration;

    /** Fsync duration of the last checkpoint. */
    private final long lastCpFsyncDuration;

    /** Total pages number of the last checkpoint. */
    private final long lastCpTotalPages;

    /** Data pages number of the last checkpoint. */
    private final long lastCpDataPages;

    /** Copied-on-write pages number of the last checkpoint. */
    private final long lastCpCowPages;

    /** Total WAL size. */
    private final long walTotalSize;

    /** Time of the last WAL rollover. */
    private final long walLastRollOverTime;

    /** Total checkpoint time. */
    private final long checkpointTotalTime;

    /** Used checkpoint buffer size. */
    private final long usedCheckpointBufferSize;

    /** Used checkpoint buffer pages. */
    private final long usedCheckpointBufferPages;

    /** Checkpoint buffer size. */
    private final long checkpointBufferSize;

    /** Number of dirty pages. */
    private final long dirtyPages;

    /** Number of pages read. */
    private final long readPages;

    /** Number of pages written. */
    private final long writtenPages;

    /** Number of pages replaced. */
    private final long replacedPages;

    /** Off-heap size. */
    private final long offHeapSize;

    /** Off-heap used size. */
    private final long offHeapUsedSize;

    /** Total allocated size. */
    private final long totalAllocatedSize;

    /**
     * Copies all metric values from the given source.
     *
     * @param metrics Metrics to snapshot.
     */
    public DataStorageMetricsSnapshot(DataStorageMetrics metrics) {
        walLoggingRate = metrics.getWalLoggingRate();
        walWritingRate = metrics.getWalWritingRate();
        walArchiveSegments = metrics.getWalArchiveSegments();
        walFsyncTimeAvg = metrics.getWalFsyncTimeAverage();
        walBuffPollSpinsNum = metrics.getWalBuffPollSpinsRate();
        lastCpDuration = metrics.getLastCheckpointDuration();
        lastCpLockWaitDuration = metrics.getLastCheckpointLockWaitDuration();
        lastCpMarkDuration = metrics.getLastCheckpointMarkDuration();
        lastCpPagesWriteDuration = metrics.getLastCheckpointPagesWriteDuration();
        lastCpFsyncDuration = metrics.getLastCheckpointFsyncDuration();
        lastCpTotalPages = metrics.getLastCheckpointTotalPagesNumber();
        lastCpDataPages = metrics.getLastCheckpointDataPagesNumber();
        lastCpCowPages = metrics.getLastCheckpointCopiedOnWritePagesNumber();
        walTotalSize = metrics.getWalTotalSize();
        walLastRollOverTime = metrics.getWalLastRollOverTime();
        checkpointTotalTime = metrics.getCheckpointTotalTime();
        usedCheckpointBufferSize = metrics.getUsedCheckpointBufferSize();
        usedCheckpointBufferPages = metrics.getUsedCheckpointBufferPages();
        checkpointBufferSize = metrics.getCheckpointBufferSize();
        dirtyPages = metrics.getDirtyPages();
        readPages = metrics.getPagesRead();
        writtenPages = metrics.getPagesWritten();
        replacedPages = metrics.getPagesReplaced();
        offHeapSize = metrics.getOffHeapSize();
        offHeapUsedSize = metrics.getOffheapUsedSize();
        totalAllocatedSize = metrics.getTotalAllocatedSize();
    }

    /** {@inheritDoc} */
    @Override public float getWalLoggingRate() {
        return walLoggingRate;
    }

    /** {@inheritDoc} */
    @Override public float getWalWritingRate() {
        return walWritingRate;
    }

    /** {@inheritDoc} */
    @Override public int getWalArchiveSegments() {
        return walArchiveSegments;
    }

    /** {@inheritDoc} */
    @Override public float getWalFsyncTimeAverage() {
        return walFsyncTimeAvg;
    }

    /** {@inheritDoc} */
    @Override public long getWalBuffPollSpinsRate() {
        return walBuffPollSpinsNum;
    }

    /** {@inheritDoc} */
    @Override public long getLastCheckpointDuration() {
        return lastCpDuration;
    }

    /** {@inheritDoc} */
    @Override public long getLastCheckpointLockWaitDuration() {
        return lastCpLockWaitDuration;
    }

    /** {@inheritDoc} */
    @Override public long getLastCheckpointMarkDuration() {
        return lastCpMarkDuration;
    }

    /** {@inheritDoc} */
    @Override public long getLastCheckpointPagesWriteDuration() {
        return lastCpPagesWriteDuration;
    }

    /** {@inheritDoc} */
    @Override public long getLastCheckpointFsyncDuration() {
        return lastCpFsyncDuration;
    }

    /** {@inheritDoc} */
    @Override public long getLastCheckpointTotalPagesNumber() {
        return lastCpTotalPages;
    }

    /** {@inheritDoc} */
    @Override public long getLastCheckpointDataPagesNumber() {
        return lastCpDataPages;
    }

    /** {@inheritDoc} */
    @Override public long getLastCheckpointCopiedOnWritePagesNumber() {
        return lastCpCowPages;
    }

    /** {@inheritDoc} */
    @Override public long getWalTotalSize() {
        return walTotalSize;
    }

    /** {@inheritDoc} */
    @Override public long getWalLastRollOverTime() {
        return walLastRollOverTime;
    }

    /** {@inheritDoc} */
    @Override public long getCheckpointTotalTime() {
        return checkpointTotalTime;
    }

    /** {@inheritDoc} */
    @Override public long getDirtyPages() {
        return dirtyPages;
    }

    /** {@inheritDoc} */
    @Override public long getPagesRead() {
        return readPages;
    }

    /** {@inheritDoc} */
    @Override public long getPagesWritten() {
        return writtenPages;
    }

    /** {@inheritDoc} */
    @Override public long getPagesReplaced() {
        return replacedPages;
    }

    /** {@inheritDoc} */
    @Override public long getOffHeapSize() {
        return offHeapSize;
    }

    /** {@inheritDoc} */
    @Override public long getOffheapUsedSize() {
        return offHeapUsedSize;
    }

    /** {@inheritDoc} */
    @Override public long getTotalAllocatedSize() {
        return totalAllocatedSize;
    }

    /** {@inheritDoc} */
    @Override public long getUsedCheckpointBufferPages() {
        return usedCheckpointBufferPages;
    }

    /** {@inheritDoc} */
    @Override public long getUsedCheckpointBufferSize() {
        return usedCheckpointBufferSize;
    }

    /** {@inheritDoc} */
    @Override public long getCheckpointBufferSize() {
        return checkpointBufferSize;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(DataStorageMetricsSnapshot.class, this);
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.kie.builder.impl;
import org.drools.compiler.builder.impl.KnowledgeBuilderConfigurationImpl;
import org.drools.compiler.commons.jci.compilers.CompilationResult;
import org.drools.compiler.commons.jci.compilers.EclipseJavaCompiler;
import org.drools.compiler.commons.jci.compilers.JavaCompiler;
import org.drools.compiler.commons.jci.compilers.JavaCompilerFactory;
import org.drools.compiler.commons.jci.problems.CompilationProblem;
import org.drools.compiler.commons.jci.readers.DiskResourceReader;
import org.drools.compiler.commons.jci.readers.ResourceReader;
import org.drools.compiler.compiler.io.memory.MemoryFileSystem;
import org.drools.compiler.kproject.ReleaseIdImpl;
import org.drools.compiler.kproject.models.KieModuleModelImpl;
import org.drools.compiler.kproject.xml.DependencyFilter;
import org.drools.compiler.kproject.xml.PomModel;
import org.drools.compiler.rule.builder.dialect.java.JavaDialectConfiguration;
import org.drools.core.builder.conf.impl.ResourceConfigurationImpl;
import org.drools.core.util.IoUtils;
import org.drools.core.util.StringUtils;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.KieModule;
import org.kie.api.builder.KieRepository;
import org.kie.api.builder.Message.Level;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.Results;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceConfiguration;
import org.kie.api.io.ResourceType;
import org.kie.internal.builder.IncrementalResults;
import org.kie.internal.builder.InternalKieBuilder;
import org.kie.internal.builder.KieBuilderSet;
import org.kie.internal.io.ResourceTypeImpl;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
 * Default {@link KieBuilder} implementation. Reads project sources either from
 * disk or from a {@link KieFileSystem}, parses the pom.xml and kmodule.xml
 * descriptors, compiles any Java sources, and assembles the result into a
 * {@link MemoryKieModule} written to an in-memory target file system.
 */
public class KieBuilderImpl
        implements
        InternalKieBuilder {

    /** Maven-style source-tree layout conventions. */
    static final String RESOURCES_ROOT = "src/main/resources/";
    static final String JAVA_ROOT = "src/main/java/";
    static final String JAVA_TEST_ROOT = "src/test/java/";

    /** RESOURCES_ROOT with '.' instead of '/', used for package-name comparisons. */
    private static final String RESOURCES_ROOT_DOT_SEPARATOR = RESOURCES_ROOT.replace( '/', '.' );

    /** Errors/warnings accumulated during the build. */
    private ResultsImpl results;

    /** Reader over the project sources (disk- or memory-backed). */
    private final ResourceReader srcMfs;

    /** In-memory target file system receiving the build output. */
    private MemoryFileSystem trgMfs;

    /** Module produced by {@link #buildAll()}; null until built. */
    private MemoryKieModule kModule;

    /** Raw pom.xml bytes (read from sources or generated). */
    private byte[] pomXml;
    private ReleaseId releaseId;

    /** Raw kmodule.xml bytes and its parsed model. */
    private byte[] kModuleModelXml;
    private KieModuleModel kModuleModel;

    /** Modules this build depends on (from the pom or set explicitly). */
    private Collection<KieModule> kieDependencies;

    private KieBuilderSetImpl kieBuilderSet;

    private ClassLoader classLoader;

    private PomModel pomModel;

    public KieBuilderImpl( File file ) {
        this.srcMfs = new DiskResourceReader( file );
    }

    public KieBuilderImpl( KieFileSystem kieFileSystem ) {
        this( kieFileSystem, null );
    }

    public KieBuilderImpl( KieFileSystem kieFileSystem,
                           ClassLoader classLoader ) {
        this.classLoader = classLoader;
        srcMfs = ( (KieFileSystemImpl) kieFileSystem ).asMemoryFileSystem();
    }

    /** Replaces any previously set dependencies with the given modules. */
    public KieBuilder setDependencies( KieModule... dependencies ) {
        this.kieDependencies = Arrays.asList( dependencies );
        return this;
    }

    /** Resolves each resource to a KieModule via the repository and uses those as dependencies. */
    public KieBuilder setDependencies( Resource... resources ) {
        KieRepositoryImpl kr = (KieRepositoryImpl) KieServices.Factory.get().getRepository();
        List<KieModule> list = new ArrayList<KieModule>();
        for ( Resource res : resources ) {
            InternalKieModule depKieMod = (InternalKieModule) kr.getKieModule( res );
            list.add( depKieMod );
        }
        this.kieDependencies = list;
        return this;
    }

    /**
     * Parses the pom and kmodule descriptors, resolves the ReleaseId and the
     * pom-declared compile dependencies. Returns the parsed PomModel, or null
     * when the provided pom.xml is invalid.
     */
    private PomModel init() {
        KieServices ks = KieServices.Factory.get();

        results = new ResultsImpl();

        // if pomXML is null it will generate a default, using default ReleaseId
        // if pomXml is invalid, it assign pomModel to null
        PomModel pomModel = getPomModel();

        // if kModuleModelXML is null it will generate a default kModule, with a default kbase name
        // if kModuleModelXML is invalid, it will set kModule to null
        buildKieModuleModel();

        if ( pomModel != null ) {
            // creates ReleaseId from build pom
            // If the pom was generated, it will be the same as the default ReleaseId
            releaseId = pomModel.getReleaseId();

            // add all the pom dependencies to this builder ... not sure this is a good idea (?)
            KieRepositoryImpl repository = (KieRepositoryImpl) ks.getRepository();
            for ( ReleaseId dep : pomModel.getDependencies( DependencyFilter.COMPILE_FILTER ) ) {
                KieModule depModule = repository.getKieModule( dep, pomModel );
                if ( depModule != null ) {
                    addKieDependency( depModule );
                }
            }
        } else {
            // if the pomModel is null it means that the provided pom.xml is invalid so use the default releaseId
            releaseId = KieServices.Factory.get().getRepository().getDefaultReleaseId();
        }

        return pomModel;
    }

    private void addKieDependency( KieModule depModule ) {
        if ( kieDependencies == null ) {
            kieDependencies = new ArrayList<KieModule>();
        }
        kieDependencies.add( depModule );
    }

    /**
     * Runs the full build: writes descriptors, copies kbase resources,
     * compiles Java sources and verifies the resulting KieModule. Results are
     * accumulated in {@link #results}; no-op if already built.
     */
    public KieBuilder buildAll() {
        PomModel pomModel = init();

        // kModuleModel will be null if a provided pom.xml or kmodule.xml is invalid
        if ( !isBuilt() && kModuleModel != null ) {
            trgMfs = new MemoryFileSystem();
            writePomAndKModule();
            addKBasesFilesToTrg();
            markSource();

            kModule = new MemoryKieModule( releaseId,
                                           kModuleModel,
                                           trgMfs );

            if ( kieDependencies != null && !kieDependencies.isEmpty() ) {
                for ( KieModule kieModule : kieDependencies ) {
                    kModule.addKieDependency( (InternalKieModule) kieModule );
                }
            }
            if ( pomModel != null ) {
                kModule.setPomModel( pomModel );
            }

            KieModuleKieProject kProject = new KieModuleKieProject( kModule, classLoader );
            for ( ReleaseId unresolvedDep : kModule.getUnresolvedDependencies() ) {
                results.addMessage( Level.ERROR, "pom.xml", "Unresolved dependency " + unresolvedDep );
            }

            compileJavaClasses( kProject.getClassLoader() );

            buildKieProject( kModule, results, kProject, trgMfs );
        }
        return this;
    }

    /** Marks the current source state, so later incremental builds can diff against it. */
    void markSource() {
        srcMfs.mark();
    }

    Collection<String> getModifiedResourcesSinceLastMark() {
        return srcMfs.getModifiedResourcesSinceLastMark();
    }

    void updateKieModuleMetaInfo() {
        new KieMetaInfoBuilder( trgMfs, kModule ).writeKieModuleMetaInfo();
    }

    /** Path (inside the module) of the compilation cache for the given kbase. */
    public static String getCompilationCachePath( ReleaseId releaseId,
                                                  String kbaseName ) {
        return ( (ReleaseIdImpl) releaseId ).getCompilationCachePathPrefix() + kbaseName.replace( '.', '/' ) + "/kbase.cache";
    }

    public static void buildKieModule( InternalKieModule kModule,
                                       ResultsImpl messages ) {
        buildKieProject( kModule, messages, new KieModuleKieProject( kModule ), null );
    }

    /**
     * Verifies the project; when error-free, writes meta info (if a target fs
     * is given) and registers the module and its dependencies in the repository.
     */
    private static void buildKieProject( InternalKieModule kModule,
                                         ResultsImpl messages,
                                         KieModuleKieProject kProject,
                                         MemoryFileSystem trgMfs ) {
        kProject.init();
        kProject.verify( messages );

        if ( messages.filterMessages( Level.ERROR ).isEmpty() ) {
            if ( trgMfs != null ) {
                new KieMetaInfoBuilder( trgMfs, kModule ).writeKieModuleMetaInfo();
            }
            KieRepository kieRepository = KieServices.Factory.get().getRepository();
            kieRepository.addKieModule( kModule );
            for ( InternalKieModule kDep : kModule.getKieDependencies().values() ) {
                kieRepository.addKieModule( kDep );
            }
        }
    }

    /** Copies, for every kbase, the resources it selects into the target fs. */
    private void addKBasesFilesToTrg() {
        for ( KieBaseModel kieBaseModel : kModuleModel.getKieBaseModels().values() ) {
            addKBaseFilesToTrg( kieBaseModel );
        }
    }

    private void addKBaseFilesToTrg( KieBaseModel kieBase ) {
        for ( String fileName : srcMfs.getFileNames() ) {
            fileName = fileName.replace( File.separatorChar, '/' );
            if ( fileName.startsWith( RESOURCES_ROOT ) && isFileInKieBase( kieBase, fileName ) ) {
                copySourceToTarget( fileName );
            }
        }
    }

    /**
     * Copies one resource file from source to target, stripping the
     * RESOURCES_ROOT prefix; removes the target file when the source bytes are
     * gone. Returns the target-relative name, or null for non-resource files.
     */
    String copySourceToTarget( String fileName ) {
        if ( !fileName.startsWith( RESOURCES_ROOT ) ) {
            return null;
        }
        byte[] bytes = srcMfs.getBytes( fileName );
        String trgFileName = fileName.substring( RESOURCES_ROOT.length() );
        if ( bytes != null ) {
            trgMfs.write( trgFileName, bytes, true );
        } else {
            trgMfs.remove( trgFileName );
        }
        return trgFileName;
    }

    /** Resolves the resource type from a sibling ".properties" configuration file, if present. */
    private ResourceType getResourceType( String fileName ) {
        if ( srcMfs.isAvailable( fileName + ".properties" ) ) {
            // configuration file available
            Properties prop = new Properties();
            try {
                prop.load( new ByteArrayInputStream( srcMfs.getBytes( fileName + ".properties" ) ) );
                return getResourceType( ResourceTypeImpl.fromProperties( prop ) );
            } catch ( IOException e ) {
                // best-effort: an unreadable .properties file is treated as no explicit configuration
            }
        }
        return null;
    }

    /** Prepares a cloned module/target fs for an incremental compilation pass. */
    void cloneKieModuleForIncrementalCompilation() {
        // force a pom re-parse if the pom.xml content changed since the last build
        if ( !Arrays.equals( pomXml, getOrGeneratePomXml( srcMfs ) ) ) {
            pomModel = null;
        }
        trgMfs = trgMfs.clone();
        init();
        kModule = kModule.cloneForIncrementalCompilation( releaseId, kModuleModel, trgMfs );
    }

    /** Copies non-KIE resource files (e.g. plain META-INF content) into the target fs. */
    private void addMetaInfBuilder() {
        for ( String fileName : srcMfs.getFileNames() ) {
            if ( fileName.startsWith( RESOURCES_ROOT ) && !isKieExtension( fileName ) ) {
                byte[] bytes = srcMfs.getBytes( fileName );
                trgMfs.write( fileName.substring( RESOURCES_ROOT.length() - 1 ),
                              bytes,
                              true );
            }
        }
    }

    private static ResourceType getResourceType( InternalKieModule kieModule,
                                                 String fileName ) {
        return getResourceType( kieModule.getResourceConfiguration( fileName ) );
    }

    private static ResourceType getResourceType( ResourceConfiguration conf ) {
        return conf instanceof ResourceConfigurationImpl ? ( (ResourceConfigurationImpl) conf ).getResourceType() : null;
    }

    /** True when the file belongs to the kbase and is a buildable KIE resource. */
    public static boolean filterFileInKBase( InternalKieModule kieModule,
                                             KieBaseModel kieBase,
                                             String fileName ) {
        return isFileInKieBase( kieBase, fileName ) &&
               ( isKieExtension( fileName ) || getResourceType( kieModule, fileName ) != null );
    }

    /** True for non-Java files whose extension maps to a known ResourceType. */
    private static boolean isKieExtension(String fileName) {
        return !fileName.endsWith(".java") && ResourceType.determineResourceType(fileName) != null;
    }

    /**
     * Decides whether a file is selected by the kbase's package patterns.
     * Supports "*", exact package names, negations ("!pkg") and trailing
     * wildcards ("pkg.*"); dot-files are always excluded.
     */
    private static boolean isFileInKieBase( KieBaseModel kieBase,
                                            String fileName ) {
        int lastSep = fileName.lastIndexOf( "/" );
        if ( lastSep + 1 < fileName.length() && fileName.charAt( lastSep + 1 ) == '.' ) {
            // skip dot files
            return false;
        }
        if ( kieBase.getPackages().isEmpty() ) {
            return true;
        } else {
            // derive the package name from the file's directory path
            String pkgNameForFile = lastSep > 0 ? fileName.substring( 0, lastSep ) : "";
            if ( pkgNameForFile.startsWith( RESOURCES_ROOT ) ) {
                pkgNameForFile = pkgNameForFile.substring( RESOURCES_ROOT.length() );
            }
            pkgNameForFile = pkgNameForFile.replace( '/', '.' );
            for ( String pkgName : kieBase.getPackages() ) {
                boolean isNegative = pkgName.startsWith( "!" );
                if ( isNegative ) {
                    pkgName = pkgName.substring( 1 );
                }
                if ( pkgName.equals( "*" ) || pkgNameForFile.equals( pkgName ) || pkgNameForFile.endsWith( "." + pkgName ) ) {
                    return !isNegative;
                }
                if ( pkgName.endsWith( ".*" ) ) {
                    String relativePkgNameForFile = pkgNameForFile.startsWith( RESOURCES_ROOT_DOT_SEPARATOR ) ?
                                                    pkgNameForFile.substring( RESOURCES_ROOT_DOT_SEPARATOR.length() ) :
                                                    pkgNameForFile;
                    String pkgNameNoWildcard = pkgName.substring( 0, pkgName.length() - 2 );
                    if ( relativePkgNameForFile.equals( pkgNameNoWildcard ) || relativePkgNameForFile.startsWith( pkgNameNoWildcard + "." ) ) {
                        return !isNegative;
                    }
                    // also try matching after stripping a leading "<kbase name>." prefix
                    if ( relativePkgNameForFile.startsWith( kieBase.getName() + "." ) ) {
                        relativePkgNameForFile = relativePkgNameForFile.substring( kieBase.getName().length() + 1 );
                        if ( relativePkgNameForFile.equals( pkgNameNoWildcard ) || relativePkgNameForFile.startsWith( pkgNameNoWildcard + "." ) ) {
                            return !isNegative;
                        }
                    }
                }
            }
            return false;
        }
    }

    /** Triggers a build if needed and returns its messages. */
    public Results getResults() {
        if ( !isBuilt() ) {
            buildAll();
        }
        return results;
    }

    public KieModule getKieModule() {
        return getKieModule( false );
    }

    public KieModule getKieModuleIgnoringErrors() {
        return getKieModule( true );
    }

    private KieModule getKieModule( boolean ignoreErrors ) {
        if ( !isBuilt() ) {
            buildAll();
        }

        if ( !ignoreErrors && ( getResults().hasMessages( Level.ERROR ) || kModule == null ) ) {
            throw new RuntimeException( "Unable to get KieModule, Errors Existed" );
        }
        return kModule;
    }

    private boolean isBuilt() {
        return kModule != null;
    }

    /**
     * Reads and parses kmodule.xml if present, otherwise creates a default
     * model; parse errors are recorded in {@link #results} and leave
     * {@link #kModuleModel} null.
     */
    private void buildKieModuleModel() {
        if ( srcMfs.isAvailable( KieModuleModelImpl.KMODULE_SRC_PATH ) ) {
            kModuleModelXml = srcMfs.getBytes( KieModuleModelImpl.KMODULE_SRC_PATH );
            try {
                kModuleModel = KieModuleModelImpl.fromXML( new ByteArrayInputStream( kModuleModelXml ) );
            } catch ( Exception e ) {
                results.addMessage( Level.ERROR,
                                    "kmodule.xml",
                                    "kmodule.xml found, but unable to read\n" + e.getMessage() );
            }
        } else {
            // There's no kmodule.xml, create a default one
            kModuleModel = KieServices.Factory.get().newKieModuleModel();
        }

        if ( setDefaultsforEmptyKieModule( kModuleModel ) ) {
            kModuleModelXml = kModuleModel.toXML().getBytes( IoUtils.UTF8_CHARSET );
        }
    }

    /**
     * Populates an empty model with a default kbase and default
     * stateful/stateless sessions. Returns true when defaults were applied.
     */
    public static boolean setDefaultsforEmptyKieModule( KieModuleModel kModuleModel ) {
        if ( kModuleModel != null && kModuleModel.getKieBaseModels().isEmpty() ) {
            // would be null if they pass a corrupted kModuleModel
            KieBaseModel kieBaseModel = kModuleModel.newKieBaseModel( "defaultKieBase" ).addPackage( "*" ).setDefault( true );
            kieBaseModel.newKieSessionModel( "defaultKieSession" ).setDefault( true );
            kieBaseModel.newKieSessionModel( "defaultStatelessKieSession" ).setType( KieSessionModel.KieSessionType.STATELESS ).setDefault( true );
            return true;
        }
        return false;
    }

    /** Lazily builds (and caches) the PomModel from the project's pom.xml. */
    public PomModel getPomModel() {
        if ( pomModel == null ) {
            pomModel = buildPomModel();
        }
        return pomModel;
    }

    /**
     * This can be used for performance reason to avoid the recomputation of the pomModel when it is already available
     */
    public void setPomModel( PomModel pomModel ) {
        this.pomModel = pomModel;
        if ( srcMfs.isAvailable( "pom.xml" ) ) {
            this.pomXml = srcMfs.getBytes( "pom.xml" );
        }
    }

    /** Parses (or generates) pom.xml; returns null and records an error when it is unreadable/invalid. */
    private PomModel buildPomModel() {
        pomXml = getOrGeneratePomXml( srcMfs );
        if ( pomXml == null ) {
            // will be null if the provided pom is invalid
            return null;
        }

        try {
            PomModel tempPomModel = PomModel.Parser.parse( "pom.xml",
                                                           new ByteArrayInputStream( pomXml ) );
            validatePomModel( tempPomModel ); // throws an exception if invalid
            return tempPomModel;
        } catch ( Exception e ) {
            results.addMessage( Level.ERROR,
                                "pom.xml",
                                "maven pom.xml found, but unable to read\n" + e.getMessage() );
        }
        return null;
    }

    /** Ensures the pom carries a complete groupId/artifactId/version triple. */
    public static void validatePomModel( PomModel pomModel ) {
        ReleaseId pomReleaseId = pomModel.getReleaseId();
        if ( StringUtils.isEmpty( pomReleaseId.getGroupId() ) || StringUtils.isEmpty( pomReleaseId.getArtifactId() ) || StringUtils.isEmpty( pomReleaseId.getVersion() ) ) {
            throw new RuntimeException( "Maven pom.properties exists but ReleaseId content is malformed" );
        }
    }

    public static byte[] getOrGeneratePomXml( ResourceReader mfs ) {
        if ( mfs.isAvailable( "pom.xml" ) ) {
            return mfs.getBytes( "pom.xml" );
        } else {
            // There is no pom.xml, and thus no ReleaseId, so generate a pom.xml from the global default.
            return generatePomXml( KieServices.Factory.get().getRepository().getDefaultReleaseId() ).getBytes( IoUtils.UTF8_CHARSET );
        }
    }

    /** Writes pom.xml, pom.properties and kmodule.xml into the target fs. */
    public void writePomAndKModule() {
        addMetaInfBuilder();

        if ( pomXml != null ) {
            ReleaseIdImpl g = (ReleaseIdImpl) releaseId;
            trgMfs.write( g.getPomXmlPath(),
                          pomXml,
                          true );
            trgMfs.write( g.getPomPropertiesPath(),
                          generatePomProperties( releaseId ).getBytes( IoUtils.UTF8_CHARSET ),
                          true );

        }

        if ( kModuleModelXml != null ) {
            trgMfs.write( KieModuleModelImpl.KMODULE_JAR_PATH,
                          kModuleModel.toXML().getBytes( IoUtils.UTF8_CHARSET ),
                          true );
        }
    }

    /** Renders a minimal Maven pom.xml for the given ReleaseId. */
    public static String generatePomXml( ReleaseId releaseId ) {
        StringBuilder sBuilder = new StringBuilder();
        sBuilder.append( "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n" );
        sBuilder.append( "         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\"> \n" );

        sBuilder.append( "    <modelVersion>4.0.0</modelVersion> \n" );

        sBuilder.append( "    <groupId>" );
        sBuilder.append( releaseId.getGroupId() );
        sBuilder.append( "</groupId> \n" );

        sBuilder.append( "    <artifactId>" );
        sBuilder.append( releaseId.getArtifactId() );
        sBuilder.append( "</artifactId> \n" );

        sBuilder.append( "    <version>" );
        sBuilder.append( releaseId.getVersion() );
        sBuilder.append( "</version> \n" );

        sBuilder.append( "    <packaging>jar</packaging> \n" );

        sBuilder.append( "    <name>Default</name> \n" );
        sBuilder.append( "</project>  \n" );

        return sBuilder.toString();
    }

    /** Renders the pom.properties content for the given ReleaseId. */
    public static String generatePomProperties( ReleaseId releaseId ) {
        StringBuilder sBuilder = new StringBuilder();
        sBuilder.append( "groupId=" );
        sBuilder.append( releaseId.getGroupId() );
        sBuilder.append( "\n" );

        sBuilder.append( "artifactId=" );
        sBuilder.append( releaseId.getArtifactId() );
        sBuilder.append( "\n" );

        sBuilder.append( "version=" );
        sBuilder.append( releaseId.getVersion() );
        sBuilder.append( "\n" );

        return sBuilder.toString();
    }

    /**
     * Copies precompiled .class files into the target fs, then compiles any
     * .java sources (main and test trees) that lack a matching .class file.
     */
    private void compileJavaClasses( ClassLoader classLoader ) {
        List<String> classFiles = new ArrayList<String>();
        for ( String fileName : srcMfs.getFileNames() ) {
            if ( fileName.endsWith( ".class" ) ) {
                trgMfs.write( fileName,
                              srcMfs.getBytes( fileName ),
                              true );
                classFiles.add( fileName.substring( 0,
                                                    fileName.length() - ".class".length() ) );
            }
        }

        List<String> javaFiles = new ArrayList<String>();
        List<String> javaTestFiles = new ArrayList<String>();
        for ( String fileName : srcMfs.getFileNames() ) {
            if ( fileName.endsWith( ".java" ) && !classFiles.contains( fileName.substring( 0,
                                                                                           fileName.length() - ".java".length() ) ) ) {
                fileName = fileName.replace( File.separatorChar, '/' );

                if ( !fileName.startsWith( JAVA_ROOT ) && !fileName.startsWith( JAVA_TEST_ROOT ) ) {
                    results.addMessage( Level.WARNING, fileName, "Found Java file out of the Java source folder: \"" + fileName + "\"" );
                } else if ( fileName.substring( JAVA_ROOT.length() ).indexOf( '/' ) < 0 ) {
                    // JAVA_ROOT and JAVA_TEST_ROOT have the same length, so this prefix strip works for both trees
                    results.addMessage( Level.ERROR, fileName, "A Java class must have a package: " + fileName.substring( JAVA_ROOT.length() ) + " is not allowed" );
                } else {
                    if ( fileName.startsWith( JAVA_ROOT ) ) {
                        javaFiles.add( fileName );
                    } else {
                        javaTestFiles.add( fileName );
                    }
                }
            }
        }

        if ( !javaFiles.isEmpty() || !javaTestFiles.isEmpty() ) {
            KnowledgeBuilderConfigurationImpl kconf = new KnowledgeBuilderConfigurationImpl( classLoader );
            JavaDialectConfiguration javaConf = (JavaDialectConfiguration) kconf.getDialectConfiguration( "java" );
            compileJavaClasses( javaConf, classLoader, javaFiles, JAVA_ROOT );
            compileJavaClasses( javaConf, classLoader, javaTestFiles, JAVA_TEST_ROOT );
        }
    }

    /** Compiles one batch of Java sources rooted at the given folder, collecting compiler messages. */
    private void compileJavaClasses( JavaDialectConfiguration javaConf,
                                     ClassLoader classLoader,
                                     List<String> javaFiles,
                                     String rootFolder ) {
        if ( !javaFiles.isEmpty() ) {
            String[] sourceFiles = javaFiles.toArray( new String[ javaFiles.size() ] );

            JavaCompiler javaCompiler = createCompiler( javaConf, rootFolder );
            CompilationResult res = javaCompiler.compile( sourceFiles,
                                                          srcMfs,
                                                          trgMfs,
                                                          classLoader );

            for ( CompilationProblem problem : res.getErrors() ) {
                results.addMessage( problem );
            }
            for ( CompilationProblem problem : res.getWarnings() ) {
                results.addMessage( problem );
            }
        }
    }

    private JavaCompiler createCompiler( JavaDialectConfiguration javaConf,
                                         String prefix ) {
        JavaCompiler javaCompiler = JavaCompilerFactory.getInstance().loadCompiler( javaConf );
        if ( javaCompiler instanceof EclipseJavaCompiler ) {
            ( (EclipseJavaCompiler) javaCompiler ).setPrefix( prefix );
        }
        return javaCompiler;
    }

    /** Finds the pom.properties entry inside a jar's META-INF/maven tree, or null. */
    public static String findPomProperties( ZipFile zipFile ) {
        Enumeration<? extends ZipEntry> zipEntries = zipFile.entries();
        while ( zipEntries.hasMoreElements() ) {
            ZipEntry zipEntry = zipEntries.nextElement();
            String fileName = zipEntry.getName();
            if ( fileName.endsWith( "pom.properties" ) && fileName.startsWith( "META-INF/maven/" ) ) {
                return fileName;
            }
        }
        return null;
    }

    /** Finds a pom.properties file below an exploded jar's META-INF/maven directory, or null. */
    public static File findPomProperties( java.io.File root ) {
        File mavenRoot = new File( root,
                                   "META-INF/maven" );
        return recurseToPomProperties( mavenRoot );
    }

    /** Depth-first search for a pom.properties file under the given directory. */
    public static File recurseToPomProperties( File file ) {
        if ( file.isDirectory() ) {
            for ( java.io.File child : file.listFiles() ) {
                if ( child.isDirectory() ) {
                    File returnedFile = recurseToPomProperties( child );
                    if ( returnedFile != null ) {
                        return returnedFile;
                    }
                } else if ( child.getName().endsWith( "pom.properties" ) ) {
                    return child;
                }
            }
        }
        return null;
    }

    @Override
    public KieBuilderSet createFileSet( String... files ) {
        if ( kieBuilderSet == null ) {
            kieBuilderSet = new KieBuilderSetImpl( this );
        }
        return kieBuilderSet.setFiles( files );
    }

    public IncrementalResults incrementalBuild() {
        return new KieBuilderSetImpl( this ).build();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.filter.firewall;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.mina.core.filterchain.IoFilter;
import org.apache.mina.core.filterchain.IoFilterAdapter;
import org.apache.mina.core.session.IoSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link IoFilter} which blocks connections from connecting
* at a rate faster than the specified interval.
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public class ConnectionThrottleFilter extends IoFilterAdapter {
    /** A logger for this class */
    private final static Logger LOGGER = LoggerFactory.getLogger(ConnectionThrottleFilter.class);

    /** The default delay to wait for a session to be accepted again */
    private static final long DEFAULT_TIME = 1000;

    /**
     * The minimal delay the sessions will have to wait before being created
     * again
     */
    private long allowedInterval;

    /** The map of created sessions, associated with the time they were created */
    private final Map<String, Long> clients;

    /** A lock used to protect the map from concurrent modifications */
    private final Lock lock = new ReentrantLock();

    /**
     * A daemon thread that periodically removes map entries older than the
     * allowed interval, so that the {@link #clients} map does not grow without
     * bound.
     * <p>
     * FIX: the previous implementation slept once, swept the map once and then
     * returned, which meant expired entries were only ever cleaned up a single
     * time after the filter was constructed — effectively a memory leak. The
     * sweep now runs in an endless loop until the thread is interrupted.
     */
    private class ExpiredSessionThread extends Thread {
        public void run() {
            for (;;) {
                try {
                    // Wait for the delay to expire before the next sweep
                    Thread.sleep(allowedInterval);
                } catch (InterruptedException e) {
                    // We have been interrupted: stop the cleanup loop.
                    return;
                }

                // Remove every entry whose recorded time is older than the
                // allowed interval.
                long currentTime = System.currentTimeMillis();

                lock.lock();

                try {
                    Iterator<Map.Entry<String, Long>> entries = clients.entrySet().iterator();

                    while (entries.hasNext()) {
                        Map.Entry<String, Long> entry = entries.next();

                        if (entry.getValue() + allowedInterval < currentTime) {
                            // Remove through the iterator: safe regardless of
                            // the underlying map implementation.
                            entries.remove();
                        }
                    }
                } finally {
                    lock.unlock();
                }
            }
        }
    }

    /**
     * Default constructor. Sets the wait time to 1 second
     */
    public ConnectionThrottleFilter() {
        this(DEFAULT_TIME);
    }

    /**
     * Constructor that takes in a specified wait time.
     *
     * @param allowedInterval
     *            The number of milliseconds a client is allowed to wait
     *            before making another successful connection
     */
    public ConnectionThrottleFilter(long allowedInterval) {
        this.allowedInterval = allowedInterval;
        clients = new ConcurrentHashMap<String, Long>();

        // Create the cleanup thread
        ExpiredSessionThread cleanupThread = new ExpiredSessionThread();

        // And make it a daemon so that it's killed when the server exits
        cleanupThread.setDaemon(true);

        // start the cleanup thread now
        cleanupThread.start();
    }

    /**
     * Sets the interval between connections from a client.
     * This value is measured in milliseconds.
     *
     * @param allowedInterval
     *            The number of milliseconds a client is allowed to wait
     *            before making another successful connection
     */
    public void setAllowedInterval(long allowedInterval) {
        lock.lock();

        try {
            this.allowedInterval = allowedInterval;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Method responsible for deciding if a connection is OK
     * to continue
     *
     * @param session
     *            The new session that will be verified
     * @return
     *            True if the session meets the criteria, otherwise false
     */
    protected boolean isConnectionOk(IoSession session) {
        SocketAddress remoteAddress = session.getRemoteAddress();

        if (remoteAddress instanceof InetSocketAddress) {
            InetSocketAddress addr = (InetSocketAddress) remoteAddress;
            long now = System.currentTimeMillis();
            String client = addr.getAddress().getHostAddress();

            lock.lock();

            try {
                Long lastConnTime = clients.put(client, now);

                if (lastConnTime != null) {
                    LOGGER.debug("This is not a new client");

                    // If the interval between now and the last connection is
                    // less than the allowed interval, reject the session.
                    if (now - lastConnTime < allowedInterval) {
                        LOGGER.warn("Session connection interval too short");
                        return false;
                    }
                }

                return true;
            } finally {
                lock.unlock();
            }
        }

        // Non-inet addresses cannot be throttled per-host; reject them as the
        // original implementation did.
        return false;
    }

    /**
     * {@inheritDoc}
     * <p>
     * Closes the session when connections from its host arrive faster than the
     * allowed interval, then forwards the event down the chain (matching the
     * original behavior).
     */
    @Override
    public void sessionCreated(NextFilter nextFilter, IoSession session) throws Exception {
        if (!isConnectionOk(session)) {
            LOGGER.warn("Connections coming in too fast; closing.");
            session.close(true);
        }

        nextFilter.sessionCreated(session);
    }
}
| |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.schema.processor;
import javax.xml.namespace.QName;
import org.apache.commons.lang.StringUtils;
import org.w3c.dom.Element;
import com.evolveum.midpoint.prism.ComplexTypeDefinition;
import com.evolveum.midpoint.prism.Objectable;
import com.evolveum.midpoint.prism.PrismContainerDefinition;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.PrismObjectDefinition;
import com.evolveum.midpoint.prism.PrismPropertyDefinition;
import com.evolveum.midpoint.prism.schema.SchemaDefinitionFactory;
import com.evolveum.midpoint.prism.schema.SchemaProcessorUtil;
import com.evolveum.midpoint.prism.schema.SchemaToDomProcessor;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowKindType;
import com.sun.xml.xsom.XSAnnotation;
import com.sun.xml.xsom.XSComplexType;
import com.sun.xml.xsom.XSParticle;
import java.util.Collection;
/**
* @author semancik
*
*/
/**
 * Schema definition factory that recognizes midPoint resource-object
 * annotations (ra:resourceObject, ra:kind, ra:intent, legacy ra:accountType,
 * ...) and produces {@link ObjectClassComplexTypeDefinition} and
 * {@link ResourceAttributeDefinition} instances instead of the generic prism
 * definitions.
 *
 * @author semancik
 */
public class MidPointSchemaDefinitionFactory extends SchemaDefinitionFactory {

    @Override
    public ComplexTypeDefinition createComplexTypeDefinition(XSComplexType complexType,
            PrismContext prismContext, XSAnnotation annotation) throws SchemaException {
        // Complex types annotated as resource objects get the specialized
        // object-class definition; everything else is handled by the superclass.
        if (isResourceObject(annotation)) {
            return createObjectClassDefinition(complexType, prismContext, annotation);
        }
        return super.createComplexTypeDefinition(complexType, prismContext, annotation);
    }

    /**
     * Builds an {@link ObjectClassComplexTypeDefinition} from the XSD complex
     * type and its midPoint annotations (native object class, kind, default
     * flag, intent and the deprecated legacy account annotations).
     *
     * @throws SchemaException if an annotation value is empty or unknown, or
     *         if legacy and new-style annotations conflict
     */
    private ComplexTypeDefinition createObjectClassDefinition(XSComplexType complexType,
            PrismContext prismContext, XSAnnotation annotation) throws SchemaException {
        QName typeName = new QName(complexType.getTargetNamespace(), complexType.getName());

        ObjectClassComplexTypeDefinition ocDef = new ObjectClassComplexTypeDefinition(typeName, prismContext);

        // nativeObjectClass
        Element nativeAttrElement = SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_NATIVE_OBJECT_CLASS);
        String nativeObjectClass = nativeAttrElement == null ? null : nativeAttrElement.getTextContent();
        ocDef.setNativeObjectClass(nativeObjectClass);

        // kind
        ShadowKindType kind = null;
        Element kindElement = SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_KIND);
        if (kindElement != null) {
            String kindString = kindElement.getTextContent();
            if (StringUtils.isEmpty(kindString)) {
                throw new SchemaException("The definition of kind is empty in the annotation of object class "+typeName);
            }
            try {
                kind = ShadowKindType.fromValue(kindString);
            } catch (IllegalArgumentException e) {
                throw new SchemaException("Definition of unknown kind '"+kindString+"' in the annotation of object class "+typeName);
            }
            ocDef.setKind(kind);
        }

        // default marker. FIX: keep a primitive flag instead of reusing the
        // nullable Boolean below — the old code auto-unboxed a possibly-null
        // Boolean ("!defaultInAKind") and threw NullPointerException whenever
        // the legacy account annotation was present without an explicit
        // ra:default marker.
        Boolean defaultMarker = SchemaProcessorUtil.getAnnotationBooleanMarker(annotation, MidPointConstants.RA_DEFAULT);
        boolean defaultInAKind = defaultMarker != null && defaultMarker;
        ocDef.setDefaultInAKind(defaultInAKind);

        // intent
        String intent = null;
        Element intentElement = SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_INTENT);
        if (intentElement != null) {
            intent = intentElement.getTextContent();
            if (StringUtils.isEmpty(intent)) {
                throw new SchemaException("The definition of intent is empty in the annotation of object class "+typeName);
            }
            ocDef.setIntent(intent);
        }

        // accountType: DEPRECATED
        if (isAccountObject(annotation)) {
            if (kind != null && kind != ShadowKindType.ACCOUNT) {
                throw new SchemaException("Conflicting definition of kind and legacy account annotation in the annotation of object class "+typeName);
            }
            ocDef.setKind(ShadowKindType.ACCOUNT);
            Element account = SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_ACCOUNT);
            // Only honor the legacy "default" attribute when the new-style
            // marker did not already set the flag.
            if (account != null && !defaultInAKind) {
                String defaultValue = account.getAttribute("default");
                // Compatibility (DEPRECATED). Note: DOM getAttribute() returns
                // "" (never null) for a missing attribute, so test emptiness.
                if (StringUtils.isNotEmpty(defaultValue)) {
                    ocDef.setDefaultInAKind(Boolean.parseBoolean(defaultValue));
                }
            }
            Element accountTypeElement = SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_ACCOUNT_TYPE);
            if (accountTypeElement != null) {
                String accountType = accountTypeElement.getTextContent();
                if (intent != null && !intent.equals(accountType)) {
                    throw new SchemaException("Conflicting definition of intent and legacy accountType annotation in the annotation of object class "+typeName);
                }
                ocDef.setIntent(accountType);
            }
        }
        return ocDef;
    }

    @Override
    public void finishComplexTypeDefinition(ComplexTypeDefinition complexTypeDefinition, XSComplexType complexType,
            PrismContext prismContext, XSAnnotation annotation) throws SchemaException {
        super.finishComplexTypeDefinition(complexTypeDefinition, complexType, prismContext, annotation);
        if (complexTypeDefinition instanceof ObjectClassComplexTypeDefinition) {
            finishObjectClassDefinition((ObjectClassComplexTypeDefinition)complexTypeDefinition, complexType, prismContext, annotation);
        }
    }

    /**
     * Resolves the attribute-reference annotations (displayName, naming,
     * description, identifiers) against the already-parsed object class and
     * wires the referenced attribute definitions into it.
     */
    private void finishObjectClassDefinition(ObjectClassComplexTypeDefinition ocDef,
            XSComplexType complexType, PrismContext prismContext, XSAnnotation annotation) throws SchemaException {

        // displayNameAttribute
        ResourceAttributeDefinition attrDefinition = getAnnotationReference(annotation, MidPointConstants.RA_DISPLAY_NAME_ATTRIBUTE, ocDef);
        if (attrDefinition != null) {
            ocDef.setDisplayNameAttribute(attrDefinition);
        }
        // namingAttribute
        attrDefinition = getAnnotationReference(annotation, MidPointConstants.RA_NAMING_ATTRIBUTE, ocDef);
        if (attrDefinition != null) {
            ocDef.setNamingAttribute(attrDefinition);
        }
        // descriptionAttribute
        attrDefinition = getAnnotationReference(annotation, MidPointConstants.RA_DESCRIPTION_ATTRIBUTE, ocDef);
        if (attrDefinition != null) {
            ocDef.setDescriptionAttribute(attrDefinition);
        }
        // identifier
        attrDefinition = getAnnotationReference(annotation, MidPointConstants.RA_IDENTIFIER, ocDef);
        if (attrDefinition != null) {
            ((Collection<ResourceAttributeDefinition>)ocDef.getIdentifiers()).add(attrDefinition);
        }
        // secondaryIdentifier
        attrDefinition = getAnnotationReference(annotation, MidPointConstants.RA_SECONDARY_IDENTIFIER, ocDef);
        if (attrDefinition != null) {
            ((Collection<ResourceAttributeDefinition>)ocDef.getSecondaryIdentifiers()).add(attrDefinition);
        }
    }

    @Override
    public void addExtraComplexTypeAnnotations(ComplexTypeDefinition definition, Element appinfo, SchemaToDomProcessor schemaToDomProcessor) {
        super.addExtraComplexTypeAnnotations(definition, appinfo, schemaToDomProcessor);
        if (definition instanceof ObjectClassComplexTypeDefinition) {
            addExtraObjectClassAnnotations((ObjectClassComplexTypeDefinition)definition, appinfo, schemaToDomProcessor);
        }
    }

    /**
     * Serializes the object-class metadata back into schema appinfo
     * annotations (the inverse of {@link #createObjectClassDefinition}).
     */
    private void addExtraObjectClassAnnotations(ObjectClassComplexTypeDefinition definition, Element appinfo, SchemaToDomProcessor processor) {
        processor.addAnnotation(MidPointConstants.RA_RESOURCE_OBJECT, appinfo);

        // displayName, identifier, secondaryIdentifier
        for (ResourceAttributeDefinition identifier : definition.getIdentifiers()) {
            processor.addRefAnnotation(MidPointConstants.RA_IDENTIFIER, identifier.getName(), appinfo);
        }
        for (ResourceAttributeDefinition identifier : definition.getSecondaryIdentifiers()) {
            processor.addRefAnnotation(MidPointConstants.RA_SECONDARY_IDENTIFIER, identifier.getName(), appinfo);
        }
        if (definition.getDisplayNameAttribute() != null) {
            processor.addRefAnnotation(MidPointConstants.RA_DISPLAY_NAME_ATTRIBUTE, definition.getDisplayNameAttribute().getName(), appinfo);
        }
        if (definition.getDescriptionAttribute() != null) {
            processor.addRefAnnotation(MidPointConstants.RA_DESCRIPTION_ATTRIBUTE, definition.getDescriptionAttribute().getName(), appinfo);
        }
        if (definition.getNamingAttribute() != null) {
            processor.addRefAnnotation(MidPointConstants.RA_NAMING_ATTRIBUTE, definition.getNamingAttribute().getName(), appinfo);
        }
        // TODO: what to do with native object class, composite
        // nativeObjectClass
        if (!StringUtils.isEmpty(definition.getNativeObjectClass())) {
            processor.addAnnotation(MidPointConstants.RA_NATIVE_OBJECT_CLASS, definition.getNativeObjectClass(), appinfo);
        }
        // kind
        if (definition.getKind() != null) {
            processor.addAnnotation(MidPointConstants.RA_KIND, definition.getKind().value(), appinfo);
        }
        if (definition.isDefaultInAKind()) {
            processor.addAnnotation(MidPointConstants.RA_DEFAULT, true, appinfo);
        }
        if (definition.getIntent() != null) {
            processor.addAnnotation(MidPointConstants.RA_INTENT, definition.getIntent(), appinfo);
        }
    }

    @Override
    public PrismContainerDefinition createExtraDefinitionFromComplexType(XSComplexType complexType,
            ComplexTypeDefinition complexTypeDefinition, PrismContext prismContext, XSAnnotation annotation) throws SchemaException {
//      if (complexTypeDefinition instanceof ObjectClassComplexTypeDefinition) {
//          return createResourceAttributeContainerDefinition(complexType, (ObjectClassComplexTypeDefinition)complexTypeDefinition,
//                  prismContext, annotation);
//      }
        return super.createExtraDefinitionFromComplexType(complexType, complexTypeDefinition, prismContext, annotation);
    }

    /** Currently unused (see the commented-out call above); kept for reference. */
    private PrismContainerDefinition createResourceAttributeContainerDefinition(XSComplexType complexType,
            ObjectClassComplexTypeDefinition complexTypeDefinition, PrismContext prismContext, XSAnnotation annotation) {
        ResourceAttributeContainerDefinition attrContDef = new ResourceAttributeContainerDefinition(null, complexTypeDefinition, prismContext);
        return attrContDef;
    }

    @Override
    public PrismPropertyDefinition createPropertyDefinition(QName elementName, QName typeName,
            ComplexTypeDefinition complexTypeDefinition, PrismContext prismContext, XSAnnotation annotation,
            XSParticle elementParticle) throws SchemaException {
        // Properties of an object class become resource attributes.
        // (instanceof already handles null, so the extra null check was dropped.)
        if (complexTypeDefinition instanceof ObjectClassComplexTypeDefinition) {
            return createResourceAttributeDefinition(elementName, typeName, prismContext, annotation);
        }
        return super.createPropertyDefinition(elementName, typeName, complexTypeDefinition, prismContext, annotation, elementParticle);
    }

    /**
     * Builds a {@link ResourceAttributeDefinition}, picking up the
     * nativeAttributeName and returnedByDefault annotations when present.
     */
    private PrismPropertyDefinition createResourceAttributeDefinition(QName elementName, QName typeName,
            PrismContext prismContext, XSAnnotation annotation) throws SchemaException {
        ResourceAttributeDefinition attrDef = new ResourceAttributeDefinition(elementName, typeName, prismContext);

        // nativeAttributeName
        Element nativeAttrElement = SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_NATIVE_ATTRIBUTE_NAME);
        String nativeAttributeName = nativeAttrElement == null ? null : nativeAttrElement.getTextContent();
        if (!StringUtils.isEmpty(nativeAttributeName)) {
            attrDef.setNativeAttributeName(nativeAttributeName);
        }
        // returnedByDefault
        attrDef.setReturnedByDefault(SchemaProcessorUtil.getAnnotationBoolean(annotation, MidPointConstants.RA_RETURNED_BY_DEFAULT_NAME));

        return attrDef;
    }

    @Override
    public void addExtraPropertyAnnotations(PrismPropertyDefinition definition, Element appinfo,
            SchemaToDomProcessor schemaToDomProcessor) {
        super.addExtraPropertyAnnotations(definition, appinfo, schemaToDomProcessor);
        if (definition instanceof ResourceAttributeDefinition) {
            ResourceAttributeDefinition rad = (ResourceAttributeDefinition)definition;
            if (rad.getNativeAttributeName() != null) {
                schemaToDomProcessor.addAnnotation(MidPointConstants.RA_NATIVE_ATTRIBUTE_NAME, rad.getNativeAttributeName(), appinfo);
            }
            if (rad.getReturnedByDefault() != null) {
                schemaToDomProcessor.addAnnotation(MidPointConstants.RA_RETURNED_BY_DEFAULT_NAME, rad.getReturnedByDefault().toString(), appinfo);
            }
        }
    }

    /** Returns true when the annotation marks the complex type as a resource object. */
    private boolean isResourceObject(XSAnnotation annotation) {
        // annotation: resourceObject
        if (SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_RESOURCE_OBJECT) != null) {
            return true;
        }
        // annotation: accountType DEPRECATED
        if (SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_ACCOUNT) != null) {
            // <accountType> implies <resourceObject> ... at least for now (compatibility)
            return true;
        }
        return false;
    }

    /** Returns true when the DEPRECATED legacy account annotation is present. */
    private boolean isAccountObject(XSAnnotation annotation) {
        if (annotation == null || annotation.getAnnotation() == null) {
            return false;
        }
        return SchemaProcessorUtil.getAnnotationElement(annotation, MidPointConstants.RA_ACCOUNT) != null;
    }

    /**
     * Resolves an annotation that references an attribute of the given object
     * class (either as element text or, for compatibility, a "ref" attribute).
     *
     * @return the referenced attribute definition, or null when the annotation
     *         is absent or carries no reference
     * @throws SchemaException when the reference points to a nonexistent item
     *         or to something that is not a resource attribute
     */
    private ResourceAttributeDefinition getAnnotationReference(XSAnnotation annotation, QName qname, ObjectClassComplexTypeDefinition objectClass) throws SchemaException {
        Element element = SchemaProcessorUtil.getAnnotationElement(annotation, qname);
        if (element != null) {
            String reference = element.getTextContent();
            if (reference == null || reference.isEmpty()) {
                // Compatibility
                reference = element.getAttribute("ref");
            }
            if (reference != null && !reference.isEmpty()) {
                QName referenceItemName = DOMUtil.resolveQName(element, reference);
                PrismPropertyDefinition definition = objectClass.findPropertyDefinition(referenceItemName);
                if (definition == null) {
                    throw new SchemaException("The annotation "+qname+" in "+objectClass+" is pointing to "+referenceItemName+" which does not exist");
                }
                if (definition instanceof ResourceAttributeDefinition) {
                    return (ResourceAttributeDefinition) definition;
                } else {
                    throw new SchemaException("The annotation "+qname+" in "+objectClass+" is pointing to "+referenceItemName+" which is not an attribute, it is "+definition);
                }
            }
        }
        return null;
    }
}
| |
package g7.bluesky.launcher3;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.animation.PropertyValuesHolder;
import android.animation.ValueAnimator;
import android.animation.ValueAnimator.AnimatorUpdateListener;
import android.appwidget.AppWidgetHostView;
import android.appwidget.AppWidgetProviderInfo;
import android.content.Context;
import android.graphics.Rect;
import android.view.Gravity;
import android.widget.FrameLayout;
import android.widget.ImageView;
/**
 * Overlay frame shown on top of an app-widget while the user resizes it.
 * Draws four drag handles, translates pixel drags into cell-span changes on
 * the {@link CellLayout}, and commits/animates the final geometry.
 */
public class AppWidgetResizeFrame extends FrameLayout {

    // The widget being resized and the layers it lives in.
    private LauncherAppWidgetHostView mWidgetView;
    private CellLayout mCellLayout;
    private DragLayer mDragLayer;

    // One drag handle per edge of the frame.
    private ImageView mLeftHandle;
    private ImageView mRightHandle;
    private ImageView mTopHandle;
    private ImageView mBottomHandle;

    // Which border the current drag gesture started on.
    private boolean mLeftBorderActive;
    private boolean mRightBorderActive;
    private boolean mTopBorderActive;
    private boolean mBottomBorderActive;

    // Default padding the host applies around the widget content.
    private int mWidgetPaddingLeft;
    private int mWidgetPaddingRight;
    private int mWidgetPaddingTop;
    private int mWidgetPaddingBottom;

    // Frame geometry captured at the start of a drag; deltas are relative to it.
    private int mBaselineWidth;
    private int mBaselineHeight;
    private int mBaselineX;
    private int mBaselineY;

    // Resize mode flags from AppWidgetProviderInfo (horizontal/vertical/both).
    private int mResizeMode;

    // Span increments already committed during this resize session.
    private int mRunningHInc;
    private int mRunningVInc;
    // Minimum spans the widget provider allows.
    private int mMinHSpan;
    private int mMinVSpan;
    // Current drag deltas in pixels, plus carry-over from previous touch-ups.
    private int mDeltaX;
    private int mDeltaY;
    private int mDeltaXAddOn;
    private int mDeltaYAddOn;

    // Visual padding of the frame background and derived touch-target width.
    private int mBackgroundPadding;
    private int mTouchTargetWidth;

    // Shifts applied to the top/bottom touch regions when the frame is
    // clipped by the DragLayer bounds (see snapToWidget()).
    private int mTopTouchRegionAdjustment = 0;
    private int mBottomTouchRegionAdjustment = 0;

    // Direction hints passed to CellLayout reordering; mLastDirectionVector
    // remembers the last non-trivial vector so the final commit matches the
    // on-screen feedback.
    int[] mDirectionVector = new int[2];
    int[] mLastDirectionVector = new int[2];
    // Scratch point for coordinate conversions.
    int[] mTmpPt = new int[2];

    final int SNAP_DURATION = 150;
    final int BACKGROUND_PADDING = 24;
    final float DIMMED_HANDLE_ALPHA = 0f;
    final float RESIZE_THRESHOLD = 0.66f;

    // Scratch rect shared by the static size-range helpers.
    // NOTE(review): static mutable state — presumably only touched on the UI
    // thread; confirm before using from elsewhere.
    private static Rect mTmpRect = new Rect();

    public static final int LEFT = 0;
    public static final int TOP = 1;
    public static final int RIGHT = 2;
    public static final int BOTTOM = 3;

    private Launcher mLauncher;

    /**
     * Builds the resize frame for the given widget: reads the provider's
     * resize mode and minimum spans, creates the four handles, hides the
     * handles for disallowed directions, and marks the widget's cells as
     * unoccupied so the resize can reuse them.
     */
    public AppWidgetResizeFrame(Context context,
            LauncherAppWidgetHostView widgetView, CellLayout cellLayout, DragLayer dragLayer) {

        super(context);
        mLauncher = (Launcher) context;
        mCellLayout = cellLayout;
        mWidgetView = widgetView;
        mResizeMode = widgetView.getAppWidgetInfo().resizeMode;
        mDragLayer = dragLayer;

        final AppWidgetProviderInfo info = widgetView.getAppWidgetInfo();
        int[] result = Launcher.getMinSpanForWidget(mLauncher, info);
        mMinHSpan = result[0];
        mMinVSpan = result[1];

        setBackgroundResource(R.drawable.widget_resize_frame_holo);
        setPadding(0, 0, 0, 0);

        LayoutParams lp;
        mLeftHandle = new ImageView(context);
        mLeftHandle.setImageResource(R.drawable.widget_resize_handle_left);
        lp = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT,
                Gravity.LEFT | Gravity.CENTER_VERTICAL);
        addView(mLeftHandle, lp);

        mRightHandle = new ImageView(context);
        mRightHandle.setImageResource(R.drawable.widget_resize_handle_right);
        lp = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT,
                Gravity.RIGHT | Gravity.CENTER_VERTICAL);
        addView(mRightHandle, lp);

        mTopHandle = new ImageView(context);
        mTopHandle.setImageResource(R.drawable.widget_resize_handle_top);
        lp = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT,
                Gravity.CENTER_HORIZONTAL | Gravity.TOP);
        addView(mTopHandle, lp);

        mBottomHandle = new ImageView(context);
        mBottomHandle.setImageResource(R.drawable.widget_resize_handle_bottom);
        lp = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT,
                Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM);
        addView(mBottomHandle, lp);

        Rect p = AppWidgetHostView.getDefaultPaddingForWidget(context,
                widgetView.getAppWidgetInfo().provider, null);
        mWidgetPaddingLeft = p.left;
        mWidgetPaddingTop = p.top;
        mWidgetPaddingRight = p.right;
        mWidgetPaddingBottom = p.bottom;

        // Hide the handles for directions the provider does not allow.
        if (mResizeMode == AppWidgetProviderInfo.RESIZE_HORIZONTAL) {
            mTopHandle.setVisibility(GONE);
            mBottomHandle.setVisibility(GONE);
        } else if (mResizeMode == AppWidgetProviderInfo.RESIZE_VERTICAL) {
            mLeftHandle.setVisibility(GONE);
            mRightHandle.setVisibility(GONE);
        }

        final float density = mLauncher.getResources().getDisplayMetrics().density;
        mBackgroundPadding = (int) Math.ceil(density * BACKGROUND_PADDING);
        mTouchTargetWidth = 2 * mBackgroundPadding;

        // When we create the resize frame, we first mark all cells as unoccupied. The appropriate
        // cells (same if not resized, or different) will be marked as occupied when the resize
        // frame is dismissed.
        mCellLayout.markCellsAsUnoccupiedForView(mWidgetView);
    }

    /**
     * Decides whether a touch at (x, y) — in frame-local coordinates — starts
     * a resize. Records which border(s) are active, snapshots the frame's
     * baseline geometry, and dims the inactive handles.
     *
     * @return true if the touch landed on at least one active border
     */
    public boolean beginResizeIfPointInRegion(int x, int y) {
        boolean horizontalActive = (mResizeMode & AppWidgetProviderInfo.RESIZE_HORIZONTAL) != 0;
        boolean verticalActive = (mResizeMode & AppWidgetProviderInfo.RESIZE_VERTICAL) != 0;

        mLeftBorderActive = (x < mTouchTargetWidth) && horizontalActive;
        mRightBorderActive = (x > getWidth() - mTouchTargetWidth) && horizontalActive;
        mTopBorderActive = (y < mTouchTargetWidth + mTopTouchRegionAdjustment) && verticalActive;
        mBottomBorderActive = (y > getHeight() - mTouchTargetWidth + mBottomTouchRegionAdjustment)
                && verticalActive;

        boolean anyBordersActive = mLeftBorderActive || mRightBorderActive
                || mTopBorderActive || mBottomBorderActive;

        mBaselineWidth = getMeasuredWidth();
        mBaselineHeight = getMeasuredHeight();
        mBaselineX = getLeft();
        mBaselineY = getTop();

        if (anyBordersActive) {
            mLeftHandle.setAlpha(mLeftBorderActive ? 1.0f : DIMMED_HANDLE_ALPHA);
            mRightHandle.setAlpha(mRightBorderActive ? 1.0f :DIMMED_HANDLE_ALPHA);
            mTopHandle.setAlpha(mTopBorderActive ? 1.0f : DIMMED_HANDLE_ALPHA);
            mBottomHandle.setAlpha(mBottomBorderActive ? 1.0f : DIMMED_HANDLE_ALPHA);
        }
        return anyBordersActive;
    }

    /**
     *  Here we bound the deltas such that the frame cannot be stretched beyond the extents
     *  of the CellLayout, and such that the frame's borders can't cross.
     */
    public void updateDeltas(int deltaX, int deltaY) {
        if (mLeftBorderActive) {
            mDeltaX = Math.max(-mBaselineX, deltaX);
            mDeltaX = Math.min(mBaselineWidth - 2 * mTouchTargetWidth, mDeltaX);
        } else if (mRightBorderActive) {
            mDeltaX = Math.min(mDragLayer.getWidth() - (mBaselineX + mBaselineWidth), deltaX);
            mDeltaX = Math.max(-mBaselineWidth + 2 * mTouchTargetWidth, mDeltaX);
        }

        if (mTopBorderActive) {
            mDeltaY = Math.max(-mBaselineY, deltaY);
            mDeltaY = Math.min(mBaselineHeight - 2 * mTouchTargetWidth, mDeltaY);
        } else if (mBottomBorderActive) {
            mDeltaY = Math.min(mDragLayer.getHeight() - (mBaselineY + mBaselineHeight), deltaY);
            mDeltaY = Math.max(-mBaselineHeight + 2 * mTouchTargetWidth, mDeltaY);
        }
    }

    /** Visual-feedback variant of the resize; never commits on its own. */
    public void visualizeResizeForDelta(int deltaX, int deltaY) {
        visualizeResizeForDelta(deltaX, deltaY, false);
    }

    /**
     *  Based on the deltas, we resize the frame, and, if needed, we resize the widget.
     */
    private void visualizeResizeForDelta(int deltaX, int deltaY, boolean onDismiss) {
        updateDeltas(deltaX, deltaY);
        DragLayer.LayoutParams lp = (DragLayer.LayoutParams) getLayoutParams();

        // Only the active border moves; the opposite edge stays anchored.
        if (mLeftBorderActive) {
            lp.x = mBaselineX + mDeltaX;
            lp.width = mBaselineWidth - mDeltaX;
        } else if (mRightBorderActive) {
            lp.width = mBaselineWidth + mDeltaX;
        }

        if (mTopBorderActive) {
            lp.y = mBaselineY + mDeltaY;
            lp.height = mBaselineHeight - mDeltaY;
        } else if (mBottomBorderActive) {
            lp.height = mBaselineHeight + mDeltaY;
        }

        resizeWidgetIfNeeded(onDismiss);
        requestLayout();
    }

    /**
     *  Based on the current deltas, we determine if and how to resize the widget.
     */
    private void resizeWidgetIfNeeded(boolean onDismiss) {
        // Pixel size of one cell step in each axis.
        int xThreshold = mCellLayout.getCellWidth() + mCellLayout.getWidthGap();
        int yThreshold = mCellLayout.getCellHeight() + mCellLayout.getHeightGap();

        int deltaX = mDeltaX + mDeltaXAddOn;
        int deltaY = mDeltaY + mDeltaYAddOn;

        // Fractional span change still pending beyond what was already applied.
        float hSpanIncF = 1.0f * deltaX / xThreshold - mRunningHInc;
        float vSpanIncF = 1.0f * deltaY / yThreshold - mRunningVInc;

        int hSpanInc = 0;
        int vSpanInc = 0;
        int cellXInc = 0;
        int cellYInc = 0;

        int countX = mCellLayout.getCountX();
        int countY = mCellLayout.getCountY();

        // Only react once the drag crosses RESIZE_THRESHOLD of a cell.
        if (Math.abs(hSpanIncF) > RESIZE_THRESHOLD) {
            hSpanInc = Math.round(hSpanIncF);
        }
        if (Math.abs(vSpanIncF) > RESIZE_THRESHOLD) {
            vSpanInc = Math.round(vSpanIncF);
        }

        if (!onDismiss && (hSpanInc == 0 && vSpanInc == 0)) return;

        CellLayout.LayoutParams lp = (CellLayout.LayoutParams) mWidgetView.getLayoutParams();

        int spanX = lp.cellHSpan;
        int spanY = lp.cellVSpan;
        int cellX = lp.useTmpCoords ? lp.tmpCellX : lp.cellX;
        int cellY = lp.useTmpCoords ? lp.tmpCellY : lp.cellY;

        int hSpanDelta = 0;
        int vSpanDelta = 0;

        // For each border, we bound the resizing based on the minimum width, and the maximum
        // expandability.
        if (mLeftBorderActive) {
            cellXInc = Math.max(-cellX, hSpanInc);
            cellXInc = Math.min(lp.cellHSpan - mMinHSpan, cellXInc);
            hSpanInc *= -1;
            hSpanInc = Math.min(cellX, hSpanInc);
            hSpanInc = Math.max(-(lp.cellHSpan - mMinHSpan), hSpanInc);
            hSpanDelta = -hSpanInc;

        } else if (mRightBorderActive) {
            hSpanInc = Math.min(countX - (cellX + spanX), hSpanInc);
            hSpanInc = Math.max(-(lp.cellHSpan - mMinHSpan), hSpanInc);
            hSpanDelta = hSpanInc;
        }

        if (mTopBorderActive) {
            cellYInc = Math.max(-cellY, vSpanInc);
            cellYInc = Math.min(lp.cellVSpan - mMinVSpan, cellYInc);
            vSpanInc *= -1;
            vSpanInc = Math.min(cellY, vSpanInc);
            vSpanInc = Math.max(-(lp.cellVSpan - mMinVSpan), vSpanInc);
            vSpanDelta = -vSpanInc;
        } else if (mBottomBorderActive) {
            vSpanInc = Math.min(countY - (cellY + spanY), vSpanInc);
            vSpanInc = Math.max(-(lp.cellVSpan - mMinVSpan), vSpanInc);
            vSpanDelta = vSpanInc;
        }

        mDirectionVector[0] = 0;
        mDirectionVector[1] = 0;

        // Update the widget's dimensions and position according to the deltas computed above
        if (mLeftBorderActive || mRightBorderActive) {
            spanX += hSpanInc;
            cellX += cellXInc;
            if (hSpanDelta != 0) {
                mDirectionVector[0] = mLeftBorderActive ? -1 : 1;
            }
        }
        if (mTopBorderActive || mBottomBorderActive) {
            spanY += vSpanInc;
            cellY += cellYInc;
            if (vSpanDelta != 0) {
                mDirectionVector[1] = mTopBorderActive ? -1 : 1;
            }
        }

        if (!onDismiss && vSpanDelta == 0 && hSpanDelta == 0) return;

        // We always want the final commit to match the feedback, so we make sure to use the
        // last used direction vector when committing the resize / reorder.
        if (onDismiss) {
            mDirectionVector[0] = mLastDirectionVector[0];
            mDirectionVector[1] = mLastDirectionVector[1];
        } else {
            mLastDirectionVector[0] = mDirectionVector[0];
            mLastDirectionVector[1] = mDirectionVector[1];
        }

        // Only apply the new spans if the CellLayout can actually make room.
        if (mCellLayout.createAreaForResize(cellX, cellY, spanX, spanY, mWidgetView,
                mDirectionVector, onDismiss)) {
            lp.tmpCellX = cellX;
            lp.tmpCellY = cellY;
            lp.cellHSpan = spanX;
            lp.cellVSpan = spanY;
            mRunningVInc += vSpanDelta;
            mRunningHInc += hSpanDelta;
            if (!onDismiss) {
                updateWidgetSizeRanges(mWidgetView, mLauncher, spanX, spanY);
            }
        }
        mWidgetView.requestLayout();
    }

    /**
     * Pushes the dp size range for the given spans into the widget host view
     * so the provider can adapt its layout.
     */
    static void updateWidgetSizeRanges(AppWidgetHostView widgetView, Launcher launcher,
            int spanX, int spanY) {

        getWidgetSizeRanges(launcher, spanX, spanY, mTmpRect);
        widgetView.updateAppWidgetSize(null, mTmpRect.left, mTmpRect.top,
                mTmpRect.right, mTmpRect.bottom);
    }

    /**
     * Computes the min/max widget size (in dp) for the given spans across
     * portrait and landscape cell metrics. The result is packed into the rect
     * as (minWidth, minHeight, maxWidth, maxHeight) = (portWidth, landHeight,
     * landWidth, portHeight).
     *
     * @param rect output rect; a new one is allocated when null
     */
    static Rect getWidgetSizeRanges(Launcher launcher, int spanX, int spanY, Rect rect) {
        if (rect == null) {
            rect = new Rect();
        }
        Rect landMetrics = Workspace.getCellLayoutMetrics(launcher, CellLayout.LANDSCAPE);
        Rect portMetrics = Workspace.getCellLayoutMetrics(launcher, CellLayout.PORTRAIT);
        final float density = launcher.getResources().getDisplayMetrics().density;

        // Compute landscape size
        int cellWidth = landMetrics.left;
        int cellHeight = landMetrics.top;
        int widthGap = landMetrics.right;
        int heightGap = landMetrics.bottom;
        int landWidth = (int) ((spanX * cellWidth + (spanX - 1) * widthGap) / density);
        int landHeight = (int) ((spanY * cellHeight + (spanY - 1) * heightGap) / density);

        // Compute portrait size
        cellWidth = portMetrics.left;
        cellHeight = portMetrics.top;
        widthGap = portMetrics.right;
        heightGap = portMetrics.bottom;
        int portWidth = (int) ((spanX * cellWidth + (spanX - 1) * widthGap) / density);
        int portHeight = (int) ((spanY * cellHeight + (spanY - 1) * heightGap) / density);
        rect.set(portWidth, landHeight, landWidth, portHeight);
        return rect;
    }

    /**
     * This is the final step of the resize. Here we save the new widget size and position
     * to LauncherModel and animate the resize frame.
     */
    public void commitResize() {
        resizeWidgetIfNeeded(true);
        requestLayout();
    }

    /**
     * Called when the finger lifts: folds the committed span increments back
     * into the pixel delta carry-over, clears the live deltas, and snaps the
     * frame to the widget after the pending layout pass.
     */
    public void onTouchUp() {
        int xThreshold = mCellLayout.getCellWidth() + mCellLayout.getWidthGap();
        int yThreshold = mCellLayout.getCellHeight() + mCellLayout.getHeightGap();

        mDeltaXAddOn = mRunningHInc * xThreshold;
        mDeltaYAddOn = mRunningVInc * yThreshold;
        mDeltaX = 0;
        mDeltaY = 0;

        post(new Runnable() {
            @Override
            public void run() {
                snapToWidget(true);
            }
        });
    }

    /**
     * Repositions the frame so it hugs the widget (accounting for background
     * and host paddings), adjusts the touch regions when the frame is clipped
     * by the DragLayer, and either jumps or animates into place.
     *
     * @param animate true to animate the move over SNAP_DURATION ms
     */
    public void snapToWidget(boolean animate) {
        final DragLayer.LayoutParams lp = (DragLayer.LayoutParams) getLayoutParams();
        int newWidth = mWidgetView.getWidth() + 2 * mBackgroundPadding - mWidgetPaddingLeft -
                mWidgetPaddingRight;
        int newHeight = mWidgetView.getHeight() + 2 * mBackgroundPadding - mWidgetPaddingTop -
                mWidgetPaddingBottom;

        mTmpPt[0] = mWidgetView.getLeft();
        mTmpPt[1] = mWidgetView.getTop();
        mDragLayer.getDescendantCoordRelativeToSelf(mCellLayout.getShortcutsAndWidgets(), mTmpPt);

        int newX = mTmpPt[0] - mBackgroundPadding + mWidgetPaddingLeft;
        int newY = mTmpPt[1] - mBackgroundPadding + mWidgetPaddingTop;

        // We need to make sure the frame's touchable regions lie fully within the bounds of the
        // DragLayer. We allow the actual handles to be clipped, but we shift the touch regions
        // down accordingly to provide a proper touch target.
        if (newY < 0) {
            // In this case we shift the touch region down to start at the top of the DragLayer
            mTopTouchRegionAdjustment = -newY;
        } else {
            mTopTouchRegionAdjustment = 0;
        }
        if (newY + newHeight > mDragLayer.getHeight()) {
            // In this case we shift the touch region up to end at the bottom of the DragLayer
            mBottomTouchRegionAdjustment = -(newY + newHeight - mDragLayer.getHeight());
        } else {
            mBottomTouchRegionAdjustment = 0;
        }

        if (!animate) {
            lp.width = newWidth;
            lp.height = newHeight;
            lp.x = newX;
            lp.y = newY;
            mLeftHandle.setAlpha(1.0f);
            mRightHandle.setAlpha(1.0f);
            mTopHandle.setAlpha(1.0f);
            mBottomHandle.setAlpha(1.0f);
            requestLayout();
        } else {
            // Animate position/size and restore full alpha only on the handles
            // that are relevant for this resize mode.
            PropertyValuesHolder width = PropertyValuesHolder.ofInt("width", lp.width, newWidth);
            PropertyValuesHolder height = PropertyValuesHolder.ofInt("height", lp.height,
                    newHeight);
            PropertyValuesHolder x = PropertyValuesHolder.ofInt("x", lp.x, newX);
            PropertyValuesHolder y = PropertyValuesHolder.ofInt("y", lp.y, newY);
            ObjectAnimator oa =
                    LauncherAnimUtils.ofPropertyValuesHolder(lp, this, width, height, x, y);
            ObjectAnimator leftOa = LauncherAnimUtils.ofFloat(mLeftHandle, "alpha", 1.0f);
            ObjectAnimator rightOa = LauncherAnimUtils.ofFloat(mRightHandle, "alpha", 1.0f);
            ObjectAnimator topOa = LauncherAnimUtils.ofFloat(mTopHandle, "alpha", 1.0f);
            ObjectAnimator bottomOa = LauncherAnimUtils.ofFloat(mBottomHandle, "alpha", 1.0f);
            oa.addUpdateListener(new AnimatorUpdateListener() {
                public void onAnimationUpdate(ValueAnimator animation) {
                    requestLayout();
                }
            });
            AnimatorSet set = LauncherAnimUtils.createAnimatorSet();
            if (mResizeMode == AppWidgetProviderInfo.RESIZE_VERTICAL) {
                set.playTogether(oa, topOa, bottomOa);
            } else if (mResizeMode == AppWidgetProviderInfo.RESIZE_HORIZONTAL) {
                set.playTogether(oa, leftOa, rightOa);
            } else {
                set.playTogether(oa, leftOa, rightOa, topOa, bottomOa);
            }

            set.setDuration(SNAP_DURATION);
            set.start();
        }
    }
}
| |
package com.capitalone.dashboard.core.client.testexecution;
import com.capitalone.dashboard.TestResultSettings;
import com.capitalone.dashboard.api.domain.TestExecution;
import com.capitalone.dashboard.api.domain.TestRun;
import com.capitalone.dashboard.api.domain.TestStep;
import com.capitalone.dashboard.core.client.JiraXRayRestClientImpl;
import com.capitalone.dashboard.core.client.JiraXRayRestClientSupplier;
import com.capitalone.dashboard.model.CollectorItem;
import com.capitalone.dashboard.model.Feature;
import com.capitalone.dashboard.model.TestResult;
import com.capitalone.dashboard.model.TestCase;
import com.capitalone.dashboard.model.TestCaseStatus;
import com.capitalone.dashboard.model.TestCaseStep;
import com.capitalone.dashboard.model.TestCapability;
import com.capitalone.dashboard.model.TestSuite;
import com.capitalone.dashboard.model.TestSuiteType;
import com.capitalone.dashboard.model.FeatureIssueLink;
import com.capitalone.dashboard.model.TestResultCollector;
import com.capitalone.dashboard.model.CollectorType;
import com.capitalone.dashboard.repository.CollectorItemRepository;
import com.capitalone.dashboard.repository.FeatureRepository;
import com.capitalone.dashboard.repository.TestResultCollectorRepository;
import com.capitalone.dashboard.repository.TestResultRepository;
import com.capitalone.dashboard.util.FeatureCollectorConstants;
import com.google.common.collect.Lists;
import org.slf4j.LoggerFactory;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.Collection;
import java.util.HashSet;
import java.util.stream.Collectors;
/**
 * Collects manual (Jira XRay) test execution results and persists them as
 * {@link TestResult} documents in MongoDB, one per "Test Execution" issue.
 */
public class TestExecutionClientImpl implements TestExecutionClient {
    private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(TestExecutionClientImpl.class);

    private final TestResultSettings testResultSettings;
    private final TestResultRepository testResultRepository;
    private final TestResultCollectorRepository testResultCollectorRepository;
    private final FeatureRepository featureRepository;
    private final CollectorItemRepository collectorItemRepository;
    private JiraXRayRestClientImpl restClient;
    private final JiraXRayRestClientSupplier restClientSupplier;
    // Test cases gathered while the current execution's tests are being counted;
    // consumed by getTestCases() when the suite is assembled.
    private List<TestCase> testCases = new ArrayList<>();

    public TestExecutionClientImpl(TestResultRepository testResultRepository, TestResultCollectorRepository testResultCollectorRepository,
                                   FeatureRepository featureRepository, CollectorItemRepository collectorItemRepository,
                                   TestResultSettings testResultSettings, JiraXRayRestClientSupplier restClientSupplier) {
        this.testResultRepository = testResultRepository;
        this.testResultCollectorRepository = testResultCollectorRepository;
        this.featureRepository = featureRepository;
        this.testResultSettings = testResultSettings;
        this.restClientSupplier = restClientSupplier;
        this.collectorItemRepository = collectorItemRepository;
    }

    public void setTestCases(List<TestCase> testCases) {
        this.testCases = testCases;
    }

    /** Keys of the per-status test counts returned by {@link #getTestCountStatusMap}. */
    private enum TEST_STATUS_COUNT_ATTRIBUTES {
        PASS_COUNT, FAIL_COUNT, SKIP_COUNT, UNKNOWN_COUNT
    }

    /** Keys of the per-status step counts returned by {@link #getStepCountStatusMap}. */
    private enum TEST_STEP_STATUS_COUNT_ATTRIBUTES {
        PASSSTEP_COUNT, FAILSTEP_COUNT, SKIPSTEP_COUNT, UNKNOWNSTEP_COUNT
    }

    /**
     * Updates the test result information in MongoDB with pagination. The page
     * size is defined in the collector properties.
     *
     * @return the number of test executions processed
     */
    public int updateTestResultInformation() {
        int count = 0;
        int pageSize = testResultSettings.getPageSize();
        boolean hasMore = true;
        List<Feature> testExecutions = featureRepository.getStoryByType("Test Execution");
        List<Feature> manualTestExecutions = this.getManualTestExecutions(testExecutions);
        for (int i = 0; hasMore; i += 1) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Obtaining story information starting at index " + i + "...");
            }
            long queryStart = System.currentTimeMillis();
            List<Feature> pagedTestExecutions = this.getTestExecutions(manualTestExecutions, i, pageSize);
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Story information query took " + (System.currentTimeMillis() - queryStart) + " ms");
            }
            // BUG FIX: this previously checked the *full* manualTestExecutions list,
            // so an empty page was still handed to updateMongoInfo(). Check the
            // current page instead.
            if (pagedTestExecutions != null && !pagedTestExecutions.isEmpty()) {
                updateMongoInfo(pagedTestExecutions);
                count += pagedTestExecutions.size();
            }
            LOGGER.info("Loop i " + i + " pageSize " + pagedTestExecutions.size());
            // will result in an extra call if number of results == pageSize
            // but I would rather do that then complicate the jira client implementation
            if (pagedTestExecutions == null || pagedTestExecutions.size() < pageSize) {
                hasMore = false;
                break;
            }
        }
        return count;
    }

    /**
     * Updates MongoDB with one page of test execution issues received from the
     * source system (Jira).
     *
     * @param currentPagedTestExecutions a page of Jira "Test Execution" issues
     */
    private void updateMongoInfo(List<Feature> currentPagedTestExecutions) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Size of paged Jira response: " + (currentPagedTestExecutions == null ? 0 : currentPagedTestExecutions.size()));
        }
        if (currentPagedTestExecutions != null) {
            List<TestResult> testResultsToSave = new ArrayList<>();
            for (Feature testExec : currentPagedTestExecutions) {
                // Reuse any existing TestResult bound to this collector item so we
                // update in place rather than duplicating documents.
                CollectorItem collectorItem = createCollectorItem(testExec);
                TestResult testResult = testResultRepository.findByCollectorItemId(collectorItem.getId());
                if (testResult == null) {
                    testResult = new TestResult();
                }
                testResult.setCollectorItemId(collectorItem.getId());
                testResult.setDescription(testExec.getsName());
                testResult.setTargetAppName(testExec.getsProjectName());
                testResult.setType(TestSuiteType.Manual);
                try {
                    TestExecution testExecution = new TestExecution(new URI(testExec.getsUrl()), testExec.getsNumber(), Long.parseLong(testExec.getsId()));
                    testResult.setUrl(testExecution.getSelf().toString());
                    restClient = (JiraXRayRestClientImpl) restClientSupplier.get();
                    Iterable<TestExecution.Test> tests = restClient.getTestExecutionClient().getTests(testExecution).claim();
                    if (tests != null) {
                        // NOTE(review): getExactSizeIfKnown() returns -1 for unsized
                        // spliterators — confirm the XRay client always reports a size.
                        int totalCount = (int) tests.spliterator().getExactSizeIfKnown();
                        Map<String, Integer> testCountByStatus = this.getTestCountStatusMap(testExec, tests);
                        int failCount = testCountByStatus.get(TEST_STATUS_COUNT_ATTRIBUTES.FAIL_COUNT.name());
                        int passCount = testCountByStatus.get(TEST_STATUS_COUNT_ATTRIBUTES.PASS_COUNT.name());
                        List<TestCapability> capabilities = new ArrayList<>();
                        TestCapability capability = new TestCapability();
                        capability.setDescription(testExec.getsName());
                        capability.setTotalTestSuiteCount(1);
                        capability.setType(TestSuiteType.Manual);
                        List<TestSuite> testSuites = new ArrayList<>();
                        TestSuite testSuite = new TestSuite();
                        testSuite.setDescription(testExec.getsName());
                        testSuite.setType(TestSuiteType.Manual);
                        testSuite.setTotalTestCaseCount(totalCount);
                        testSuite.setFailedTestCaseCount(failCount);
                        testSuite.setSuccessTestCaseCount(passCount);
                        // "Skipped" here is everything that neither passed nor failed
                        // (includes tests with an unknown status or no test run).
                        int skipCount = totalCount - (failCount + passCount);
                        testSuite.setSkippedTestCaseCount(skipCount);
                        // Roll the suite outcome up to the execution level: any failed
                        // test fails the whole suite; all-pass succeeds; else skipped.
                        if (failCount > 0) {
                            capability.setStatus(TestCaseStatus.Failure);
                            testResult.setResultStatus("Failure");
                            testSuite.setStatus(TestCaseStatus.Failure);
                            testResult.setFailureCount(1);
                            capability.setFailedTestSuiteCount(1);
                        } else if (totalCount == passCount) {
                            capability.setStatus(TestCaseStatus.Success);
                            testResult.setResultStatus("Success");
                            testSuite.setStatus(TestCaseStatus.Success);
                            testResult.setSuccessCount(1);
                            capability.setSuccessTestSuiteCount(1);
                        } else {
                            capability.setStatus(TestCaseStatus.Skipped);
                            testResult.setResultStatus("Skipped");
                            testSuite.setStatus(TestCaseStatus.Skipped);
                            testResult.setSkippedCount(1);
                            capability.setSkippedTestSuiteCount(1);
                        }
                        testSuite.setTestCases(this.getTestCases());
                        testSuites.add(testSuite);
                        capability.setTestSuites(testSuites);
                        capabilities.add(capability);
                        testResult.setTestCapabilities(capabilities);
                    }
                } catch (URISyntaxException u) {
                    LOGGER.error("URI Syntax Invalid");
                }
                testResultsToSave.add(testResult);
            }
            // Saving back to MongoDB
            testResultRepository.save(testResultsToSave);
        }
    }

    /**
     * Returns the test cases collected for the current test suite (populated as
     * a side effect of {@link #getTestCountStatusMap}).
     *
     * @return testCases
     */
    private List<TestCase> getTestCases() {
        return this.testCases;
    }

    /**
     * Maps the XRay test steps of a test run to {@link TestCaseStep} instances.
     *
     * @param testRun the test run whose steps are converted
     * @return the converted steps, in run order
     */
    private List<TestCaseStep> getTestSteps(TestRun testRun) {
        List<TestCaseStep> testSteps = new ArrayList<>();
        for (TestStep testStep : testRun.getSteps()) {
            TestCaseStep testCaseStep = new TestCaseStep();
            testCaseStep.setId(testStep.getId().toString());
            testCaseStep.setDescription(testStep.getStep().getRaw());
            // Anything that is neither PASS nor FAIL is treated as Skipped.
            if (testStep.getStatus().toString().equals("PASS")) {
                testCaseStep.setStatus(TestCaseStatus.Success);
            } else if (testStep.getStatus().toString().equals("FAIL")) {
                testCaseStep.setStatus(TestCaseStatus.Failure);
            } else {
                testCaseStep.setStatus(TestCaseStatus.Skipped);
            }
            testSteps.add(testCaseStep);
        }
        return testSteps;
    }

    /**
     * Gets the test case count map based on status {pass, fail, skip, unknown}.
     * Also collects the converted {@link TestCase}s as a side effect (see
     * {@link #setTestCases(List)}).
     *
     * @param testExec the test execution issue being processed
     * @param tests the tests belonging to that execution
     * @return counts keyed by {@link TEST_STATUS_COUNT_ATTRIBUTES} names
     */
    private Map<String, Integer> getTestCountStatusMap(Feature testExec, Iterable<TestExecution.Test> tests) {
        Map<String, Integer> map = new HashMap<>(TEST_STATUS_COUNT_ATTRIBUTES.values().length);
        int failTestCount = 0, passTestCount = 0, skipTestCount = 0, unknownTestCount = 0;
        List<TestCase> testCases = new ArrayList<>();
        for (TestExecution.Test test : tests) {
            Optional<TestRun> testRunOpt = Optional.ofNullable(restClient.getTestRunClient().getTestRun(testExec.getsNumber(), test.getKey()).claim());
            if (testRunOpt.isPresent()) {
                TestRun testRun = testRunOpt.get();
                if (testRun.getStatus().equals(TestRun.Status.FAIL)) {
                    failTestCount++;
                } else if (testRun.getStatus().equals(TestRun.Status.PASS)) {
                    passTestCount++;
                } else if (testRun.getStatus().equals(TestRun.Status.SKIP)) {
                    skipTestCount++;
                } else {
                    unknownTestCount++;
                }
                TestCase testCase = createTestCase(test, testRun, testExec);
                testCases.add(testCase);
            }
        }
        this.setTestCases(testCases);
        map.put(TEST_STATUS_COUNT_ATTRIBUTES.PASS_COUNT.name(), passTestCount);
        map.put(TEST_STATUS_COUNT_ATTRIBUTES.FAIL_COUNT.name(), failTestCount);
        map.put(TEST_STATUS_COUNT_ATTRIBUTES.SKIP_COUNT.name(), skipTestCount);
        map.put(TEST_STATUS_COUNT_ATTRIBUTES.UNKNOWN_COUNT.name(), unknownTestCount);
        return map;
    }

    /**
     * Converts an XRay test + test run into a {@link TestCase}, including the
     * per-step counts and a rolled-up status.
     */
    // This method needs a core project update, so temporarily warnings suppressed
    @SuppressWarnings("PMD")
    private TestCase createTestCase(TestExecution.Test test, TestRun testRun, Feature feature) {
        TestCase testCase = new TestCase();
        testCase.setId(testRun.getId().toString());
        testCase.setDescription(test.toString());
        Optional<Iterable<TestStep>> testStepsOpt = Optional.ofNullable(testRun.getSteps());
        if (testStepsOpt.isPresent()) {
            int totalSteps = (int) testRun.getSteps().spliterator().getExactSizeIfKnown();
            Map<String, Integer> stepCountByStatus = this.getStepCountStatusMap(testRun);
            int failSteps = stepCountByStatus.get(TEST_STEP_STATUS_COUNT_ATTRIBUTES.FAILSTEP_COUNT.name());
            int passSteps = stepCountByStatus.get(TEST_STEP_STATUS_COUNT_ATTRIBUTES.PASSSTEP_COUNT.name());
            int skipSteps = stepCountByStatus.get(TEST_STEP_STATUS_COUNT_ATTRIBUTES.SKIPSTEP_COUNT.name());
            int unknownSteps = stepCountByStatus.get(TEST_STEP_STATUS_COUNT_ATTRIBUTES.UNKNOWNSTEP_COUNT.name());
            testCase.setTotalTestStepCount(totalSteps);
            testCase.setFailedTestStepCount(failSteps);
            testCase.setSuccessTestStepCount(passSteps);
            testCase.setSkippedTestStepCount(skipSteps);
            testCase.setUnknownStatusCount(unknownSteps);
            // Priority for the case status: any failure wins, then any skip,
            // then any pass; otherwise the status is unknown.
            if (failSteps > 0) {
                testCase.setStatus(TestCaseStatus.Failure);
            } else if (skipSteps > 0) {
                testCase.setStatus(TestCaseStatus.Skipped);
            } else if (passSteps > 0) {
                testCase.setStatus(TestCaseStatus.Success);
            } else {
                testCase.setStatus(TestCaseStatus.Unknown);
            }
            Set<String> tags = getStoryIds(feature.getIssueLinks());
            // Temporarily commented for core project update
            // testCase.setTags(tags);
            testCase.setTestSteps(this.getTestSteps(testRun));
        }
        return testCase;
    }

    /** Extracts the linked story/issue keys from a set of issue links. */
    private Set<String> getStoryIds(Collection<FeatureIssueLink> issueLinks) {
        Set<String> tags = new HashSet<>();
        issueLinks.forEach(issueLink -> tags.add(issueLink.getTargetIssueKey()));
        return tags;
    }

    /**
     * Gets the test step count map based on status.
     *
     * @param testRun the test run whose steps are counted
     * @return counts keyed by {@link TEST_STEP_STATUS_COUNT_ATTRIBUTES} names
     */
    private Map<String, Integer> getStepCountStatusMap(TestRun testRun) {
        Map<String, Integer> map = new HashMap<>(TEST_STEP_STATUS_COUNT_ATTRIBUTES.values().length);
        List<TestStep> testSteps = Lists.newArrayList(testRun.getSteps());
        int passStepCount = (int) testSteps.stream().filter(testStep -> testStep.getStatus().equals(TestStep.Status.PASS)).count();
        int failStepCount = (int) testSteps.stream().filter(testStep -> testStep.getStatus().equals(TestStep.Status.FAIL)).count();
        int skipStepCount = (int) testSteps.stream().filter(testStep -> testStep.getStatus().equals(TestStep.Status.SKIP)).count();
        // BUG FIX: the previous predicate (!PASS || !FAIL || SKIP) was true for
        // every step, so every step was counted as "unknown". A step is unknown
        // exactly when it is none of PASS, FAIL or SKIP.
        int unknownStepCount = testSteps.size() - (passStepCount + failStepCount + skipStepCount);
        map.put(TEST_STEP_STATUS_COUNT_ATTRIBUTES.FAILSTEP_COUNT.name(), failStepCount);
        map.put(TEST_STEP_STATUS_COUNT_ATTRIBUTES.PASSSTEP_COUNT.name(), passStepCount);
        map.put(TEST_STEP_STATUS_COUNT_ATTRIBUTES.SKIPSTEP_COUNT.name(), skipStepCount);
        map.put(TEST_STEP_STATUS_COUNT_ATTRIBUTES.UNKNOWNSTEP_COUNT.name(), unknownStepCount);
        return map;
    }

    /**
     * Gets one page of test executions.
     *
     * @param sourceList the full list being paged over
     * @param page zero-based page index
     * @param pageSize maximum number of entries per page
     * @return the requested page, or an empty list when past the end
     * @throws IllegalArgumentException when page or pageSize is invalid
     */
    public List<Feature> getTestExecutions(List<Feature> sourceList, int page, int pageSize) {
        if (pageSize <= 0 || page < 0) {
            // Message now reports both values; the old one only mentioned pageSize
            // even when a negative page triggered the exception.
            throw new IllegalArgumentException("invalid page: " + page + " or page size: " + pageSize);
        }
        int fromIndex = page * pageSize;
        if (sourceList == null || sourceList.size() < fromIndex) {
            return Collections.emptyList();
        }
        return sourceList.subList(fromIndex, Math.min(fromIndex + pageSize, sourceList.size()));
    }

    /**
     * Filters the manual test executions: keeps every execution whose name does
     * not contain an automation keyword.
     *
     * @param testExecutions all "Test Execution" issues
     * @return executions considered manual
     */
    public List<Feature> getManualTestExecutions(List<Feature> testExecutions) {
        List<Feature> manualTestExecutions = new ArrayList<>();
        String[] automationKeywords = {"automated", "automation"};
        for (Feature testExecution : testExecutions) {
            // noneMatch reads directly as the intent; parallel() on a two-element
            // stream added overhead without benefit.
            if (Arrays.stream(automationKeywords).noneMatch(testExecution.getsName().toLowerCase()::contains)) {
                manualTestExecutions.add(testExecution);
            }
        }
        return manualTestExecutions;
    }

    /**
     * Retrieves the maximum change date for a given query.
     *
     * @return the maximum change date, or null when unavailable
     */
    public String getMaxChangeDate() {
        String data = null;
        try {
            List<Feature> response = featureRepository
                    .findTopByCollectorIdAndChangeDateGreaterThanOrderByChangeDateDesc(
                            testResultCollectorRepository.findByName(FeatureCollectorConstants.JIRA_XRAY).getId(),
                            testResultSettings.getDeltaStartDate());
            if ((response != null) && !response.isEmpty()) {
                data = response.get(0).getChangeDate();
            }
        } catch (Exception e) {
            LOGGER.error("There was a problem retrieving or parsing data from the local "
                    + "repository while retrieving a max change date\nReturning null", e);
        }
        return data;
    }

    /**
     * Finds or creates the collector item for a test execution issue.
     *
     * @param testExec the test execution issue
     * @return a collector item carrying at least the persistent id
     */
    private CollectorItem createCollectorItem(Feature testExec) {
        List<TestResultCollector> collector = testResultCollectorRepository.findByCollectorTypeAndName(CollectorType.Test, "Jira XRay");
        TestResultCollector collector1 = collector.get(0);
        CollectorItem existing = collectorItemRepository.findByCollectorIdNiceNameAndJobName(collector1.getId(), "Manual", testExec.getsName());
        CollectorItem tempCollItem = new CollectorItem();
        // Plain null check replaces the Optional.ofNullable(..).isPresent() misuse.
        if (existing != null) {
            // NOTE(review): only the id is copied from the existing item; the sole
            // caller uses getId() only, but copy more fields if that ever changes.
            tempCollItem.setId(existing.getId());
        } else {
            tempCollItem.setCollectorId(collector1.getId());
            tempCollItem.setDescription("JIRAXRay:" + testExec.getsName());
            tempCollItem.setPushed(true);
            tempCollItem.setLastUpdated(System.currentTimeMillis());
            tempCollItem.setNiceName("Manual");
            Map<String, Object> option = new HashMap<>();
            option.put("jobName", testExec.getsName());
            option.put("instanceUrl", testExec.getsUrl());
            tempCollItem.getOptions().putAll(option);
            collectorItemRepository.save(tempCollItem);
        }
        return tempCollItem;
    }
}
| |
/*
* Copyright (C) 2018 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.iot.cbor;
import java.io.*;
import java.nio.BufferUnderflowException;
import java.util.Locale;
import java.util.NoSuchElementException;
import java.util.logging.Logger;
/**
 * Default {@link CborReader} implementation: decodes CBOR data items from a
 * {@link DecoderStream}, recursing into nested containers via sub-parsers that
 * share the same underlying stream.
 */
class CborReaderImpl implements CborReader {
    private static final boolean DEBUG = true;
    private static final Logger LOGGER = Logger.getLogger(CborReader.class.getCanonicalName());

    // Sentinel count meaning "no explicit item count" (indefinite length / read
    // until break or end of stream).
    static final int UNSPECIFIED = -1;

    // CBOR "break" stop code (0xFF) that terminates indefinite-length items.
    private static final byte BREAK = (byte) 0xFF;

    private final DecoderStream mDecoderStream;

    // Number of data items still expected at this nesting level; negative means
    // unbounded (see UNSPECIFIED).
    private int mRemainingObjects;

    // Tag value consumed from the stream; applied to the next data item read,
    // then reset to UNTAGGED.
    private int mLastTag = CborTag.UNTAGGED;

    private CborReaderImpl(DecoderStream decoderStream, int objectCount) {
        mDecoderStream = decoderStream;
        mRemainingObjects = objectCount;
    }

    CborReaderImpl(InputStream inputStream, int objectCount) {
        this(DecoderStream.create(inputStream), objectCount);
    }

    CborReaderImpl(byte[] bytes, int offset, int objectCount) {
        this(new ByteArrayInputStream(bytes, offset, bytes.length - offset), objectCount);
        // The delegating call must come first, so the offset is validated after
        // construction; an offset at/past the end of the array is rejected here.
        if (offset >= bytes.length) {
            throw new IndexOutOfBoundsException();
        }
    }

    @Override
    public boolean hasRemainingDataItems() {
        try {
            if (mRemainingObjects < 0) {
                // Unbounded: more items exist until the stream ends or a break
                // code is next.
                return mDecoderStream.hasRemaining() && (mDecoderStream.peek() != BREAK);
            }
            return mRemainingObjects != 0;
        } catch (IOException x) {
            x.printStackTrace();
            // We say true here so that we will call readDataItem() and get the exception
            return true;
        }
    }

    @Override
    public long bytesParsed() {
        return mDecoderStream.bytesParsed();
    }

    /**
     * Reads and returns the next CBOR data item, recursing for containers.
     *
     * @throws CborParseException if the data is malformed, truncated, or uses
     *         values this implementation does not support
     * @throws IOException if the underlying stream fails
     * @throws NoSuchElementException if no data items remain
     */
    @Override
    public CborObject readDataItem() throws CborParseException, IOException {
        if (!hasRemainingDataItems()) {
            throw new NoSuchElementException();
        }

        // Consume any tag read on a previous pass; it is attached to this item.
        int tag = mLastTag;
        mLastTag = CborTag.UNTAGGED;

        try {
            // Initial byte: high 3 bits = major type, low 5 bits = additional info.
            byte firstByte = mDecoderStream.get();
            int majorType = ((firstByte & 0xFF) >> 5);
            byte additionalInfo = (byte) (firstByte & 0x1F);
            long additionalData;

            // Decode the "argument" (length/value) according to additional info:
            // values < 24 are immediate; 24..27 select a 1/2/4/8-byte follow-on;
            // 31 marks indefinite length.
            if (additionalInfo < CborObject.ADDITIONAL_INFO_EXTRA_1B) {
                additionalData = additionalInfo;
            } else if (additionalInfo == CborObject.ADDITIONAL_INFO_EXTRA_1B) {
                additionalData = (mDecoderStream.get() & 0xFF);
            } else if (additionalInfo == CborObject.ADDITIONAL_INFO_EXTRA_2B) {
                additionalData = mDecoderStream.getShort() & 0xFFFF;
            } else if (additionalInfo == CborObject.ADDITIONAL_INFO_EXTRA_4B) {
                additionalData = mDecoderStream.getInt() & 0xFFFFFFFFL;
            } else if (additionalInfo == CborObject.ADDITIONAL_INFO_EXTRA_8B) {
                additionalData = mDecoderStream.getLong();

                // We perform an overflow check here by checking for negative values.
                // We don't currently support the full use of 64 bit unsigned integers,
                // so any number larger than Long.MAX_VALUE will ultimately be wrapped
                // around to be negative. This check identifies such cases and errors
                // out, EXCEPT when we are a double-precision float.
                // <https://github.com/google/cbortree/issues/1>
                if (additionalData < 0 && majorType != CborMajorType.OTHER) {
                    final String explanation =
                            String.format(
                                    Locale.ENGLISH,
                                    "Additional data value was too large: 0x%X",
                                    additionalData);
                    if (majorType == CborMajorType.TAG) {
                        // If this was a tag, then we can simply ignore it.
                        LOGGER.warning(explanation + ", ignoring tag");
                        additionalData = CborTag.UNTAGGED;
                    } else {
                        LOGGER.warning(explanation + ", stopping");
                        throw new CborParseException(explanation);
                    }
                }
            } else if (additionalInfo == CborObject.ADDITIONAL_INFO_EXTRA_INDEF) {
                additionalData = UNSPECIFIED;
            } else {
                throw new CborParseException(
                        "Undefined additional info value "
                                + additionalInfo
                                + " for major type "
                                + majorType);
            }

            switch (majorType) {
                case CborMajorType.TAG:
                    // Remember the tag and recurse: the tag applies to the next
                    // data item, not to a standalone object.
                    if (CborTag.isValid(additionalData)) {
                        mLastTag = (int) additionalData;
                    } else {
                        LOGGER.warning("Ignoring invalid tag: " + additionalData);
                    }
                    return readDataItem();

                case CborMajorType.POS_INTEGER:
                    if (additionalData < 0) {
                        throw new CborParseException();
                    } else {
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        return CborInteger.create(additionalData, tag);
                    }

                case CborMajorType.NEG_INTEGER:
                    if (additionalData < 0) {
                        throw new CborParseException();
                    } else {
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        // CBOR encodes a negative integer n as (-1 - argument).
                        return CborInteger.create(-1 - additionalData, tag);
                    }

                case CborMajorType.BYTE_STRING:
                    if (additionalData < 0) {
                        // Indefinite-length byte string: concatenate the definite
                        // chunks that follow until the break code.
                        ByteArrayOutputStream aggregator = new ByteArrayOutputStream();
                        CborReaderImpl subparser =
                                new CborReaderImpl(mDecoderStream, (int) additionalData);
                        while (subparser.hasRemainingDataItems()) {
                            CborObject obj = subparser.readDataItem();
                            if (obj instanceof CborByteString
                                    && obj.getMajorType() == CborMajorType.BYTE_STRING) {
                                aggregator.write(((CborByteString) obj).byteArrayValue());
                            } else {
                                throw new CborParseException(
                                        "Unexpected major type in byte string stream");
                            }
                        }
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        // Consume the break code that ended the chunk sequence.
                        if (mDecoderStream.get() != BREAK) {
                            throw new CborParseException("Missing break");
                        }
                        return CborByteString.create(
                                aggregator.toByteArray(), 0, aggregator.size(), tag);
                    } else {
                        byte[] bytes = new byte[(int) additionalData];
                        mDecoderStream.get(bytes);
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        return CborByteString.create(bytes, 0, bytes.length, tag);
                    }

                case CborMajorType.TEXT_STRING:
                    if (additionalData < 0) {
                        // Indefinite-length text string, aggregated as raw UTF-8
                        // bytes from the definite chunks that follow.
                        ByteArrayOutputStream aggregator = new ByteArrayOutputStream();
                        CborReaderImpl subparser =
                                new CborReaderImpl(mDecoderStream, (int) additionalData);
                        while (subparser.hasRemainingDataItems()) {
                            CborObject obj = subparser.readDataItem();
                            if (obj instanceof CborTextString) {
                                aggregator.write(((CborTextString) obj).byteArrayValue());
                            } else {
                                throw new CborParseException(
                                        "Unexpected major type in text string stream");
                            }
                        }
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        if (mDecoderStream.get() != BREAK) {
                            throw new CborParseException("Missing break");
                        }
                        return CborTextString.create(
                                aggregator.toByteArray(), 0, aggregator.size(), tag);
                    } else {
                        byte[] bytes = new byte[(int) additionalData];
                        mDecoderStream.get(bytes);
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        return CborTextString.create(bytes, 0, bytes.length, tag);
                    }

                case CborMajorType.ARRAY:
                    {
                        // additionalData is the element count, or UNSPECIFIED for
                        // an indefinite-length array (then a break must follow).
                        CborArray ret = CborArray.create(tag);
                        CborReaderImpl subparser =
                                new CborReaderImpl(mDecoderStream, (int) additionalData);
                        while (subparser.hasRemainingDataItems()) {
                            ret.add(subparser.readDataItem());
                        }
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        if ((additionalData == UNSPECIFIED && mDecoderStream.get() != BREAK)) {
                            throw new CborParseException("Missing break");
                        }
                        return ret;
                    }

                case CborMajorType.MAP:
                    {
                        CborMap ret = CborMap.create(tag);
                        // A map of N pairs contains 2N data items; keep UNSPECIFIED
                        // untouched so the sub-parser reads until the break code.
                        if (additionalData != UNSPECIFIED) {
                            additionalData *= 2;
                        }
                        CborReaderImpl subparser =
                                new CborReaderImpl(mDecoderStream, (int) additionalData);
                        while (subparser.hasRemainingDataItems()) {
                            CborObject key = subparser.readDataItem();
                            CborObject value = subparser.readDataItem();
                            ret.mapValue().put(key, value);
                        }
                        if ((additionalData == UNSPECIFIED) && mDecoderStream.get() != BREAK) {
                            throw new CborParseException("Missing break");
                        }
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        return ret;
                    }

                case CborMajorType.OTHER:
                    if (additionalInfo == CborFloat.TYPE_HALF) {
                        // Half-precision float
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        return CborFloat.createHalf(
                                Half.shortBitsToFloat((short) additionalData), tag);

                    } else if (additionalInfo == CborFloat.TYPE_FLOAT) {
                        // Full-precision float
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        return CborFloat.create(Float.intBitsToFloat((int) additionalData), tag);

                    } else if (additionalInfo == CborFloat.TYPE_DOUBLE) {
                        // Double-precision float
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        return CborFloat.create(Double.longBitsToDouble(additionalData), tag);

                    } else {
                        // Simple values (false/true/null/undefined and others).
                        if (mRemainingObjects != UNSPECIFIED) mRemainingObjects--;
                        return CborSimple.create((int) additionalData, tag);
                    }

                default:
                    throw new CborParseException("Invalid major type value " + majorType);
            }
        } catch (EOFException
                | BufferUnderflowException
                | NoSuchElementException
                | IllegalArgumentException x) {
            // Normalize low-level truncation/corruption signals into the public
            // parse-failure exception, preserving the cause.
            throw new CborParseException("CBOR data is truncated or corrupt", x);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import org.apache.geode.CancelException;
import org.apache.geode.DataSerializer;
import org.apache.geode.SystemFailure;
import org.apache.geode.cache.CacheClosedException;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionInvocationTargetException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultSender;
import org.apache.geode.cache.query.QueryException;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.DistributionMessage;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.distributed.internal.MessageWithReply;
import org.apache.geode.distributed.internal.OperationExecutors;
import org.apache.geode.distributed.internal.ReplyException;
import org.apache.geode.distributed.internal.ReplyMessage;
import org.apache.geode.distributed.internal.ReplyProcessor21;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.cache.execute.FunctionContextImpl;
import org.apache.geode.internal.cache.execute.MemberFunctionResultSender;
import org.apache.geode.internal.cache.execute.MultiRegionFunctionContextImpl;
import org.apache.geode.internal.cache.execute.metrics.FunctionStats;
import org.apache.geode.internal.cache.execute.metrics.FunctionStatsManager;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.SerializationContext;
import org.apache.geode.logging.internal.log4j.api.LogService;
public class MemberFunctionStreamingMessage extends DistributionMessage
implements TransactionMessage, MessageWithReply {
private static final Logger logger = LogService.getLogger();
transient int replyMsgNum = 0;
transient boolean replyLastMsg;
private Function functionObject;
private String functionName;
Object args;
private int processorId;
private int txUniqId = TXManagerImpl.NOTX;
private InternalDistributedMember txMemberId = null;
private boolean isFnSerializationReqd;
private Set<String> regionPathSet;
private boolean isReExecute;
private static final short IS_REEXECUTE = UNRESERVED_FLAGS_START;
  // No-arg constructor — presumably required by the serialization framework
  // when reconstructing the message on the receiving member; confirm against
  // DataSerializer usage.
  public MemberFunctionStreamingMessage() {}
  /**
   * Creates a streaming function-execution message.
   *
   * @param function the function to execute on the remote member
   * @param procId id of the reply processor on the sender that results are
   *        routed back to
   * @param ar argument object handed to the function context
   * @param isFnSerializationReqd whether the function object itself must be
   *        serialized with the message
   * @param isReExecute whether this message is a re-execution attempt
   */
  public MemberFunctionStreamingMessage(Function function, int procId, Object ar,
      boolean isFnSerializationReqd, boolean isReExecute) {
    this.functionObject = function;
    this.processorId = procId;
    this.args = ar;
    this.isFnSerializationReqd = isFnSerializationReqd;
    this.isReExecute = isReExecute;
    // Capture the calling thread's transaction context so the remote side can
    // masquerade as the same transaction while executing the function.
    this.txUniqId = TXManagerImpl.getCurrentTXUniqueId();
    TXStateProxy txState = TXManagerImpl.getCurrentTXState();
    if (txState != null && txState.isMemberIdForwardingRequired()) {
      this.txMemberId = txState.getOriginatingMember();
    }
  }
  /**
   * Creates a streaming function-execution message spanning multiple regions
   * (the region paths are resolved to regions on the receiving member).
   *
   * @param function the function to execute on the remote member
   * @param procId id of the reply processor on the sender that results are
   *        routed back to
   * @param ar argument object handed to the function context
   * @param isFnSerializationReqd whether the function object itself must be
   *        serialized with the message
   * @param regions full paths of the regions the function operates on
   * @param isReExecute whether this message is a re-execution attempt
   */
  public MemberFunctionStreamingMessage(Function function, int procId, Object ar,
      boolean isFnSerializationReqd, Set<String> regions, boolean isReExecute) {
    this.functionObject = function;
    this.processorId = procId;
    this.args = ar;
    this.isFnSerializationReqd = isFnSerializationReqd;
    this.regionPathSet = regions;
    this.isReExecute = isReExecute;
    // Capture the calling thread's transaction context so the remote side can
    // masquerade as the same transaction while executing the function.
    this.txUniqId = TXManagerImpl.getCurrentTXUniqueId();
    TXStateProxy txState = TXManagerImpl.getCurrentTXState();
    if (txState != null && txState.isMemberIdForwardingRequired()) {
      this.txMemberId = txState.getOriginatingMember();
    }
  }
  /**
   * Deserializing constructor: reconstructs the message state from its wire
   * form via {@code fromData}.
   *
   * @param in the serialized message data
   * @throws IOException if reading from the input fails
   * @throws ClassNotFoundException if a serialized class cannot be resolved
   */
  public MemberFunctionStreamingMessage(DataInput in) throws IOException, ClassNotFoundException {
    fromData(in, InternalDataSerializer.createDeserializationContext(in));
  }
private TXStateProxy prepForTransaction(ClusterDistributionManager dm)
throws InterruptedException {
if (this.txUniqId == TXManagerImpl.NOTX) {
return null;
} else {
InternalCache cache = dm.getCache();
if (cache == null) {
// ignore and return, we are shutting down!
return null;
}
TXManagerImpl mgr = cache.getTXMgr();
return mgr.masqueradeAs(this);
}
}
private void cleanupTransaction(TXStateProxy tx) {
if (this.txUniqId != TXManagerImpl.NOTX) {
InternalCache cache = GemFireCacheImpl.getInstance();
if (cache == null) {
// ignore and return, we are shutting down!
return;
}
TXManagerImpl mgr = cache.getTXMgr();
mgr.unmasquerade(tx);
}
}
@Override
protected void process(final ClusterDistributionManager dm) {
  // Executes the carried function on this member, streaming results back to
  // the coordinator via MemberFunctionResultSender. Any failure path replies
  // with a ReplyException so the sender never hangs waiting for a result.
  Throwable thr = null;
  ReplyException rex = null;
  if (this.functionObject == null) {
    // The function id could not be resolved locally during fromData.
    rex = new ReplyException(
        new FunctionException(
            String.format("Function named %s is not registered to FunctionService",
                this.functionName)));
    replyWithException(dm, rex);
    return;
  }
  FunctionStats stats =
      FunctionStatsManager.getFunctionStats(this.functionObject.getId(), dm.getSystem());
  TXStateProxy tx = null;
  InternalCache cache = dm.getCache();
  long start = 0;
  boolean startedFunctionExecution = false;
  try {
    tx = prepForTransaction(dm);
    ResultSender resultSender = new MemberFunctionResultSender(dm, this, this.functionObject);
    Set<Region> regions = new HashSet<Region>();
    if (this.regionPathSet != null) {
      for (String regionPath : this.regionPathSet) {
        if (checkCacheClosing(dm) || checkDSClosing(dm)) {
          if (dm.getCache() == null) {
            thr = new CacheClosedException(
                String.format("Remote cache is closed: %s",
                    dm.getId()));
          } else {
            // BUGFIX: the exception was previously constructed and discarded,
            // leaving thr null so the finally block never sent a reply and
            // the coordinator would wait indefinitely. Assign it to thr.
            thr = dm.getCache().getCacheClosedException(
                String.format("Remote cache is closed: %s",
                    dm.getId()));
          }
          // The finally block replies with thr and cleans up the transaction.
          return;
        }
        regions.add(cache.getRegion(regionPath));
      }
    }
    FunctionContextImpl context = new MultiRegionFunctionContextImpl(cache,
        this.functionObject.getId(), this.args, resultSender, regions, isReExecute);
    start = stats.startFunctionExecution(this.functionObject.hasResult());
    startedFunctionExecution = true;
    if (logger.isDebugEnabled()) {
      logger.debug("Executing Function: {} on remote member with context: {}",
          this.functionObject.getId(), context.toString());
    }
    this.functionObject.execute(context);
    // A result-bearing function must terminate its stream with a last result.
    if (!this.replyLastMsg && this.functionObject.hasResult()) {
      throw new FunctionException(
          String.format("The function, %s, did not send last result",
              functionObject.getId()));
    }
    stats.endFunctionExecution(start, this.functionObject.hasResult());
  } catch (FunctionException functionException) {
    if (logger.isDebugEnabled()) {
      logger.debug("FunctionException occurred on remote member while executing Function: {}",
          this.functionObject.getId(), functionException);
    }
    if (startedFunctionExecution) {
      stats.endFunctionExecutionWithException(start, this.functionObject.hasResult());
    }
    rex = new ReplyException(functionException);
    replyWithException(dm, rex);
  } catch (CancelException exception) {
    // Cache/system is shutting down; report it as an invocation-target failure
    // so the caller can retry on another member.
    thr = new FunctionInvocationTargetException(exception);
    if (startedFunctionExecution) {
      stats.endFunctionExecutionWithException(start, this.functionObject.hasResult());
    }
    rex = new ReplyException(thr);
    replyWithException(dm, rex);
  } catch (Exception exception) {
    if (logger.isDebugEnabled()) {
      logger.debug("Exception occurred on remote member while executing Function: {}",
          this.functionObject.getId(), exception);
    }
    if (startedFunctionExecution) {
      stats.endFunctionExecutionWithException(start, this.functionObject.hasResult());
    }
    rex = new ReplyException(exception);
    replyWithException(dm, rex);
  } catch (VirtualMachineError err) {
    SystemFailure.initiateFailure(err);
    // If this ever returns, rethrow the error. We're poisoned
    // now, so don't let this thread continue.
    throw err;
  } catch (Throwable t) {
    // Whenever you catch Error or Throwable, you must also
    // catch VirtualMachineError (see above). However, there is
    // _still_ a possibility that you are dealing with a cascading
    // error condition, so you also need to check to see if the JVM
    // is still usable:
    SystemFailure.checkFailure();
    thr = t;
  } finally {
    cleanupTransaction(tx);
    // Reply for any path that recorded a failure without replying inline
    // (Throwable catch above, or the cache-closing early return).
    if (thr != null) {
      rex = new ReplyException(thr);
      replyWithException(dm, rex);
    }
  }
}
// Sends the given exception back to the message sender on its reply processor.
private void replyWithException(ClusterDistributionManager dm, ReplyException rex) {
  ReplyMessage.send(getSender(), this.processorId, rex, dm);
}
@Override
public int getProcessorId() {
  // Reply-processor id assigned by the sender; 0 means no reply is expected.
  return this.processorId;
}
@Override
public int getDSFID() {
  // DataSerializableFixedID used to route deserialization to this class.
  return MEMBER_FUNCTION_STREAMING_MESSAGE;
}
@Override
public void fromData(DataInput in,
    DeserializationContext context) throws IOException, ClassNotFoundException {
  // NOTE: the read order here must mirror toData exactly.
  super.fromData(in, context);
  short flags = in.readShort();
  if ((flags & HAS_PROCESSOR_ID) != 0) {
    this.processorId = in.readInt();
    // Publish the processor id for the thread handling this message.
    ReplyProcessor21.setMessageRPId(this.processorId);
  }
  if ((flags & HAS_TX_ID) != 0)
    this.txUniqId = in.readInt();
  if ((flags & HAS_TX_MEMBERID) != 0) {
    this.txMemberId = DataSerializer.readObject(in);
  }
  // The function travels either as its registered id (a String) or as the
  // serialized Function object, depending on the sender's isFnSerializationReqd.
  Object object = DataSerializer.readObject(in);
  if (object instanceof String) {
    this.isFnSerializationReqd = false;
    this.functionObject = FunctionService.getFunction((String) object);
    if (this.functionObject == null) {
      // Not registered on this member; keep the name so process() can report it.
      this.functionName = (String) object;
    }
  } else {
    this.functionObject = (Function) object;
    this.isFnSerializationReqd = true;
  }
  this.args = DataSerializer.readObject(in);
  this.regionPathSet = DataSerializer.readObject(in);
  this.isReExecute = (flags & IS_REEXECUTE) != 0;
}
@Override
public void toData(DataOutput out,
    SerializationContext context) throws IOException {
  // NOTE: the write order here must mirror fromData exactly. Optional fields
  // are announced via bit flags written first, then written in flag order.
  super.toData(out, context);
  short flags = 0;
  if (this.processorId != 0)
    flags |= HAS_PROCESSOR_ID;
  if (this.txUniqId != TXManagerImpl.NOTX)
    flags |= HAS_TX_ID;
  if (this.txMemberId != null)
    flags |= HAS_TX_MEMBERID;
  if (this.isReExecute)
    flags |= IS_REEXECUTE;
  out.writeShort(flags);
  if (this.processorId != 0)
    out.writeInt(this.processorId);
  if (this.txUniqId != TXManagerImpl.NOTX)
    out.writeInt(this.txUniqId);
  if (this.txMemberId != null)
    DataSerializer.writeObject(this.txMemberId, out);
  // Ship the whole Function only when required; otherwise just its id, which
  // the receiver resolves from its local FunctionService.
  if (this.isFnSerializationReqd) {
    DataSerializer.writeObject(this.functionObject, out);
  } else {
    DataSerializer.writeObject(functionObject.getId(), out);
  }
  DataSerializer.writeObject(this.args, out);
  DataSerializer.writeObject(this.regionPathSet, out);
}
// Streams one function result back to the coordinating member. Synchronized so
// the message number and the last-message flag stay consistent under
// concurrent result senders. NOTE: this method always returns false — both
// when the result is sent and when the last reply was already dispatched.
public synchronized boolean sendReplyForOneResult(DistributionManager dm, Object oneResult,
    boolean lastResult, boolean sendResultsInOrder)
    throws CacheException, QueryException, ForceReattemptException, InterruptedException {
  if (this.replyLastMsg) {
    // The terminal reply has already gone out; drop any further results.
    return false;
  }
  if (Thread.interrupted())
    throw new InterruptedException();
  int msgNum = this.replyMsgNum;
  this.replyLastMsg = lastResult;
  sendReply(getSender(), this.processorId, dm, oneResult, msgNum, lastResult, sendResultsInOrder);
  if (logger.isDebugEnabled()) {
    logger.debug("Sending reply message count: {} to co-ordinating node", replyMsgNum);
  }
  this.replyMsgNum++;
  return false;
}
/**
 * Dispatches a single streamed result chunk, choosing the ordered or the
 * unordered streaming reply message as requested by the caller.
 */
protected void sendReply(InternalDistributedMember member, int procId, DistributionManager dm,
    Object oneResult, int msgNum, boolean lastResult, boolean sendResultsInOrder) {
  if (!sendResultsInOrder) {
    FunctionStreamingReplyMessage.send(member, procId, null, dm, oneResult, msgNum, lastResult);
    return;
  }
  FunctionStreamingOrderedReplyMessage.send(member, procId, null, dm, oneResult, msgNum,
      lastResult);
}
@Override
public int getProcessorType() {
  // Process on the dedicated function-execution thread pool.
  return OperationExecutors.REGION_FUNCTION_EXECUTION_EXECUTOR;
}
/**
 * Reports whether the local cache is absent or currently cancelling/closing.
 */
private boolean checkCacheClosing(ClusterDistributionManager dm) {
  InternalCache cache = dm.getCache();
  if (cache == null) {
    return true;
  }
  return cache.getCancelCriterion().isCancelInProgress();
}
/**
 * Reports whether the distributed system is absent or disconnecting.
 *
 * @return true if the distributed system is closing
 */
private boolean checkDSClosing(ClusterDistributionManager dm) {
  InternalDistributedSystem ds = dm.getSystem();
  if (ds == null) {
    return true;
  }
  return ds.isDisconnecting();
}
@Override
public boolean canStartRemoteTransaction() {
  // Function execution may begin a transaction on the receiving member.
  return true;
}
@Override
public int getTXUniqId() {
  // Transaction id captured at construction; TXManagerImpl.NOTX when none.
  return this.txUniqId;
}
@Override
public InternalDistributedMember getMemberToMasqueradeAs() {
  // When no transactional member id was forwarded, the message sender is the
  // member whose transaction we masquerade as.
  return txMemberId == null ? getSender() : txMemberId;
}
@Override
public InternalDistributedMember getTXOriginatorClient() {
  // This message never originates from a client transaction.
  return null;
}
@Override
public boolean canParticipateInTransaction() {
  // This message may run as part of an existing transaction.
  return true;
}
@Override
public boolean isTransactionDistributed() {
  // Member function execution does not use distributed transactions.
  return false;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.util.*;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.RateLimiter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.Memtable;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.SSTableReader;
/**
* Pluggable compaction strategy determines how SSTables get merged.
*
* There are two main goals:
* - perform background compaction constantly as needed; this typically makes a tradeoff between
* i/o done by compaction, and merging done at read time.
* - perform a full (maximum possible) compaction if requested by the user
*/
public abstract class AbstractCompactionStrategy
{
    private static final Logger logger = LoggerFactory.getLogger(AbstractCompactionStrategy.class);
    protected static final float DEFAULT_TOMBSTONE_THRESHOLD = 0.2f;
    // minimum interval needed to perform tombstone removal compaction in seconds, default 86400 or 1 day.
    protected static final long DEFAULT_TOMBSTONE_COMPACTION_INTERVAL = 86400;
    protected static final String TOMBSTONE_THRESHOLD_OPTION = "tombstone_threshold";
    protected static final String TOMBSTONE_COMPACTION_INTERVAL_OPTION = "tombstone_compaction_interval";
    protected static final String COMPACTION_ENABLED = "enabled";
    // raw user-supplied compaction options, including ones this base class does not consume
    public final Map<String, String> options;
    protected final ColumnFamilyStore cfs;
    // estimated droppable-tombstone ratio above which a single-sstable tombstone compaction is considered
    protected float tombstoneThreshold;
    // minimum sstable age (seconds) before it becomes eligible for tombstone compaction
    protected long tombstoneCompactionInterval;
    /**
     * pause/resume/getNextBackgroundTask must synchronize. This guarantees that after pause completes,
     * no new tasks will be generated; or put another way, pause can't run until in-progress tasks are
     * done being created.
     *
     * This allows runWithCompactionsDisabled to be confident that after pausing, once in-progress
     * tasks abort, it's safe to proceed with truncate/cleanup/etc.
     *
     * See CASSANDRA-3430
     */
    protected boolean isActive = false;
    protected volatile boolean enabled = true;
    protected AbstractCompactionStrategy(ColumnFamilyStore cfs, Map<String, String> options)
    {
        assert cfs != null;
        this.cfs = cfs;
        this.options = options;
        /* checks must be repeated here, as user supplied strategies might not call validateOptions directly */
        try
        {
            validateOptions(options);
            String optionValue = options.get(TOMBSTONE_THRESHOLD_OPTION);
            tombstoneThreshold = optionValue == null ? DEFAULT_TOMBSTONE_THRESHOLD : Float.parseFloat(optionValue);
            optionValue = options.get(TOMBSTONE_COMPACTION_INTERVAL_OPTION);
            tombstoneCompactionInterval = optionValue == null ? DEFAULT_TOMBSTONE_COMPACTION_INTERVAL : Long.parseLong(optionValue);
            optionValue = options.get(COMPACTION_ENABLED);
            if (optionValue != null)
            {
                // anything other than (case-insensitive) "false" leaves compaction enabled
                if (optionValue.equalsIgnoreCase("false"))
                    this.enabled = false;
            }
        }
        catch (ConfigurationException e)
        {
            // invalid user options fall back to defaults rather than failing the strategy
            logger.warn("Error setting compaction strategy options ({}), defaults will be used", e.getMessage());
            tombstoneThreshold = DEFAULT_TOMBSTONE_THRESHOLD;
            tombstoneCompactionInterval = DEFAULT_TOMBSTONE_COMPACTION_INTERVAL;
        }
    }
    /**
     * For internal, temporary suspension of background compactions so that we can do exceptional
     * things like truncate or major compaction
     */
    public synchronized void pause()
    {
        isActive = false;
    }
    /**
     * For internal, temporary suspension of background compactions so that we can do exceptional
     * things like truncate or major compaction
     */
    public synchronized void resume()
    {
        isActive = true;
    }
    /**
     * Performs any extra initialization required
     */
    public void startup()
    {
        isActive = true;
    }
    /**
     * Releases any resources if this strategy is shutdown (when the CFS is reloaded after a schema change).
     */
    public void shutdown()
    {
        isActive = false;
    }
    /**
     * @param gcBefore throw away tombstones older than this
     *
     * @return the next background/minor compaction task to run; null if nothing to do.
     *
     * Is responsible for marking its sstables as compaction-pending.
     */
    public abstract AbstractCompactionTask getNextBackgroundTask(final int gcBefore);
    /**
     * @param gcBefore throw away tombstones older than this
     *
     * @return a compaction task that should be run to compact this columnfamilystore
     * as much as possible. Null if nothing to do.
     *
     * Is responsible for marking its sstables as compaction-pending.
     */
    public abstract AbstractCompactionTask getMaximalTask(final int gcBefore);
    /**
     * @param sstables SSTables to compact. Must be marked as compacting.
     * @param gcBefore throw away tombstones older than this
     *
     * @return a compaction task corresponding to the requested sstables.
     * Will not be null. (Will throw if user requests an invalid compaction.)
     *
     * Is responsible for marking its sstables as compaction-pending.
     */
    public abstract AbstractCompactionTask getUserDefinedTask(Collection<SSTableReader> sstables, final int gcBefore);
    /**
     * @return the number of background tasks estimated to still be needed for this columnfamilystore
     */
    public abstract int getEstimatedRemainingTasks();
    /**
     * @return size in bytes of the largest sstables for this strategy
     */
    public abstract long getMaxSSTableBytes();
    public boolean isEnabled()
    {
        // enabled is the user-facing switch; isActive is the internal pause/resume state
        return this.enabled && this.isActive;
    }
    public void enable()
    {
        this.enabled = true;
    }
    public void disable()
    {
        this.enabled = false;
    }
    /**
     * @return whether or not MeteredFlusher should be able to trigger memtable flushes for this CF.
     */
    public boolean isAffectedByMeteredFlusher()
    {
        return true;
    }
    /**
     * Handle a flushed memtable.
     *
     * @param memtable the flushed memtable
     * @param sstable the written sstable. can be null if the memtable was clean.
     */
    public void replaceFlushed(Memtable memtable, SSTableReader sstable)
    {
        cfs.getDataTracker().replaceFlushed(memtable, sstable);
        // a new sstable may have made background compaction worthwhile
        if (sstable != null)
            CompactionManager.instance.submitBackground(cfs);
    }
    /**
     * @return a subset of the suggested sstables that are relevant for read requests.
     */
    public List<SSTableReader> filterSSTablesForReads(List<SSTableReader> sstables)
    {
        return sstables;
    }
    /**
     * Filters SSTables that are to be blacklisted from the given collection
     *
     * @param originalCandidates The collection to check for blacklisted SSTables
     * @return list of the SSTables with blacklisted ones filtered out
     */
    public static Iterable<SSTableReader> filterSuspectSSTables(Iterable<SSTableReader> originalCandidates)
    {
        return Iterables.filter(originalCandidates, new Predicate<SSTableReader>()
        {
            public boolean apply(SSTableReader sstable)
            {
                return !sstable.isMarkedSuspect();
            }
        });
    }
    /**
     * Returns a list of KeyScanners given sstables and a range on which to scan.
     * The default implementation simply grab one SSTableScanner per-sstable, but overriding this method
     * allow for a more memory efficient solution if we know the sstable don't overlap (see
     * LeveledCompactionStrategy for instance).
     */
    public List<ICompactionScanner> getScanners(Collection<SSTableReader> sstables, Range<Token> range)
    {
        RateLimiter limiter = CompactionManager.instance.getRateLimiter();
        ArrayList<ICompactionScanner> scanners = new ArrayList<ICompactionScanner>();
        for (SSTableReader sstable : sstables)
            scanners.add(sstable.getScanner(range, limiter));
        return scanners;
    }
    public List<ICompactionScanner> getScanners(Collection<SSTableReader> toCompact)
    {
        return getScanners(toCompact, null);
    }
    /**
     * Check if the given sstable is worth a tombstone-removal compaction at gcBefore.
     * Returns false without further checks when less than tombstone_compaction_interval
     * has elapsed since the sstable was created.
     *
     * @param sstable SSTable to check
     * @param gcBefore time to drop tombstones
     * @return true if given sstable's tombstones are expected to be removed
     */
    protected boolean worthDroppingTombstones(SSTableReader sstable, int gcBefore)
    {
        // since we use estimations to calculate, there is a chance that compaction will not drop tombstones actually.
        // if that happens we will end up in infinite compaction loop, so we first check whether enough time has
        // elapsed since the SSTable was created.
        if (System.currentTimeMillis() < sstable.getCreationTimeFor(Component.DATA) + tombstoneCompactionInterval * 1000)
            return false;
        double droppableRatio = sstable.getEstimatedDroppableTombstoneRatio(gcBefore);
        if (droppableRatio <= tombstoneThreshold)
            return false;
        Set<SSTableReader> overlaps = cfs.getOverlappingSSTables(Collections.singleton(sstable));
        if (overlaps.isEmpty())
        {
            // there is no overlap, tombstones are safely droppable
            return true;
        }
        else if (CompactionController.getFullyExpiredSSTables(cfs, Collections.singleton(sstable), overlaps, gcBefore).size() > 0)
        {
            return true;
        }
        else
        {
            // what percentage of columns do we expect to compact outside of overlap?
            if (sstable.getIndexSummarySize() < 2)
            {
                // we have too few samples to estimate correct percentage
                return false;
            }
            // first, calculate estimated keys that do not overlap
            long keys = sstable.estimatedKeys();
            Set<Range<Token>> ranges = new HashSet<Range<Token>>(overlaps.size());
            for (SSTableReader overlap : overlaps)
                ranges.add(new Range<Token>(overlap.first.token, overlap.last.token, overlap.partitioner));
            long remainingKeys = keys - sstable.estimatedKeysForRanges(ranges);
            // next, calculate what percentage of columns we have within those keys
            long columns = sstable.getEstimatedColumnCount().mean() * remainingKeys;
            double remainingColumnsRatio = ((double) columns) / (sstable.getEstimatedColumnCount().count() * sstable.getEstimatedColumnCount().mean());
            // return if we still expect to have droppable tombstones in rest of columns
            return remainingColumnsRatio * droppableRatio > tombstoneThreshold;
        }
    }
    /**
     * Validates the base-class compaction options and returns the options this
     * class did not consume (so subclasses can validate their own).
     *
     * @throws ConfigurationException if a known option has an unparsable or negative value
     */
    public static Map<String, String> validateOptions(Map<String, String> options) throws ConfigurationException
    {
        String threshold = options.get(TOMBSTONE_THRESHOLD_OPTION);
        if (threshold != null)
        {
            try
            {
                // NOTE(review): the check permits 0 even though the message says "greater than 0";
                // kept as-is to preserve existing accepted configurations.
                float thresholdValue = Float.parseFloat(threshold);
                if (thresholdValue < 0)
                {
                    throw new ConfigurationException(String.format("%s must be greater than 0, but was %f", TOMBSTONE_THRESHOLD_OPTION, thresholdValue));
                }
            }
            catch (NumberFormatException e)
            {
                throw new ConfigurationException(String.format("%s is not a parsable int (base10) for %s", threshold, TOMBSTONE_THRESHOLD_OPTION), e);
            }
        }
        String interval = options.get(TOMBSTONE_COMPACTION_INTERVAL_OPTION);
        if (interval != null)
        {
            try
            {
                long tombstoneCompactionInterval = Long.parseLong(interval);
                if (tombstoneCompactionInterval < 0)
                {
                    throw new ConfigurationException(String.format("%s must be greater than 0, but was %d", TOMBSTONE_COMPACTION_INTERVAL_OPTION, tombstoneCompactionInterval));
                }
            }
            catch (NumberFormatException e)
            {
                throw new ConfigurationException(String.format("%s is not a parsable int (base10) for %s", interval, TOMBSTONE_COMPACTION_INTERVAL_OPTION), e);
            }
        }
        String compactionEnabled = options.get(COMPACTION_ENABLED);
        if (compactionEnabled != null)
        {
            if (!compactionEnabled.equalsIgnoreCase("true") && !compactionEnabled.equalsIgnoreCase("false"))
            {
                throw new ConfigurationException(String.format("enabled should either be 'true' or 'false', not %s", compactionEnabled));
            }
        }
        Map<String, String> uncheckedOptions = new HashMap<String, String>(options);
        uncheckedOptions.remove(TOMBSTONE_THRESHOLD_OPTION);
        uncheckedOptions.remove(TOMBSTONE_COMPACTION_INTERVAL_OPTION);
        uncheckedOptions.remove(COMPACTION_ENABLED);
        return uncheckedOptions;
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*/
package com.microsoft.azure.management.datalake.store.uploader;
import org.apache.commons.lang3.tuple.ImmutableTriple;
import org.apache.commons.lang3.tuple.Triple;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
public class StringExtensionsTests {
private static final String customDelim = ";";
private static ArrayList<Triple<String, Integer, Integer>> TestDataUTF8 = new ArrayList<>();
private static ArrayList<Triple<String, Integer, Integer>> TestDataUTF8CustomDelim = new ArrayList<>();
private static ArrayList<Triple<String, Integer, Integer>> TestDataUTF16 = new ArrayList<>();
private static ArrayList<Triple<String, Integer, Integer>> TestDataUTF16CustomDelim = new ArrayList<>();
private static ArrayList<Triple<String, Integer, Integer>> TestDataUTF32 = new ArrayList<>();
private static ArrayList<Triple<String, Integer, Integer>> TestDataUTF32CustomDelim = new ArrayList<>();
@BeforeClass
public static void setup() throws Exception {
    // Each triple is (input string, expected forward result, expected reverse result)
    // as asserted by the StringExtensions_FindNewLine_* tests below; -1 means no
    // newline/delimiter is expected to be found. Expected values scale with the
    // byte width of the encoding (UTF-8 x1, UTF-16 x2, UTF-32 x4).
    // UTF-8, default newline delimiters (\r, \n, \r\n):
    TestDataUTF8.add(new ImmutableTriple<>("", -1, -1));
    TestDataUTF8.add(new ImmutableTriple<>("a", -1, -1));
    TestDataUTF8.add(new ImmutableTriple<>("a b", -1, -1));
    TestDataUTF8.add(new ImmutableTriple<>("\r", 0, 0));
    TestDataUTF8.add(new ImmutableTriple<>("\n", 0, 0));
    TestDataUTF8.add(new ImmutableTriple<>("\r\n", 1, 1));
    TestDataUTF8.add(new ImmutableTriple<>("\n\r", 1, 1));
    TestDataUTF8.add(new ImmutableTriple<>("\r\nabcde", 1, 1));
    TestDataUTF8.add(new ImmutableTriple<>("abcde\r", 5, 5));
    TestDataUTF8.add(new ImmutableTriple<>("abcde\n", 5, 5));
    TestDataUTF8.add(new ImmutableTriple<>("abcde\r\n", 6, 6));
    TestDataUTF8.add(new ImmutableTriple<>("abcde\rabcde", 5, 5));
    TestDataUTF8.add(new ImmutableTriple<>("abcde\nabcde", 5, 5));
    TestDataUTF8.add(new ImmutableTriple<>("abcde\r\nabcde", 6, 6));
    TestDataUTF8.add(new ImmutableTriple<>("a\rb\na\r\n", 1, 6));
    TestDataUTF8.add(new ImmutableTriple<>("\rb\na\r\n", 0, 5));
    // UTF-8, custom ";" delimiter:
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("", -1, -1));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("a", -1, -1));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("a b", -1, -1));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>(";", 0, 0));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("a;", 1, 1));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("b;", 1, 1));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("a;abcde", 1, 1));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("abcde;", 5, 5));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("abcde\r;", 6, 6));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("abcde;abcde", 5, 5));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("abcde;abcde", 5, 5));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("abcde\r;abcde", 6, 6));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>("a;b\na\r;", 1, 6));
    TestDataUTF8CustomDelim.add(new ImmutableTriple<>(";b\na\r;", 0, 5));
    // UTF-16 (two bytes per char), default newline delimiters:
    TestDataUTF16.add(new ImmutableTriple("", -1, -1));
    TestDataUTF16.add(new ImmutableTriple("a", -1, -1));
    TestDataUTF16.add(new ImmutableTriple("a b", -1, -1));
    TestDataUTF16.add(new ImmutableTriple("\r", 1, 1));
    TestDataUTF16.add(new ImmutableTriple("\n", 1, 1));
    TestDataUTF16.add(new ImmutableTriple("\r\n", 3, 3));
    TestDataUTF16.add(new ImmutableTriple("\n\r", 3, 3));
    TestDataUTF16.add(new ImmutableTriple("\r\nabcde", 3, 3));
    TestDataUTF16.add(new ImmutableTriple("abcde\r", 11, 11));
    TestDataUTF16.add(new ImmutableTriple("abcde\n", 11, 11));
    TestDataUTF16.add(new ImmutableTriple("abcde\r\n", 13, 13));
    TestDataUTF16.add(new ImmutableTriple("abcde\rabcde", 11, 11));
    TestDataUTF16.add(new ImmutableTriple("abcde\nabcde", 11, 11));
    TestDataUTF16.add(new ImmutableTriple("abcde\r\nabcde", 13, 13));
    TestDataUTF16.add(new ImmutableTriple("a\rb\na\r\n", 3, 13));
    TestDataUTF16.add(new ImmutableTriple("\rb\na\r\n", 1, 11));
    // UTF-16, custom ";" delimiter:
    TestDataUTF16CustomDelim.add(new ImmutableTriple("", -1, -1));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("a", -1, -1));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("a b", -1, -1));
    TestDataUTF16CustomDelim.add(new ImmutableTriple(";", 1, 1));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("a;", 3, 3));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("b;", 3, 3));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("a;abcde", 3, 3));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("abcde;", 11, 11));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("abcde\r;", 13, 13));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("abcde;abcde", 11, 11));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("abcde;abcde", 11, 11));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("abcde\r;abcde", 13, 13));
    TestDataUTF16CustomDelim.add(new ImmutableTriple("a;b\na\r;", 3, 13));
    TestDataUTF16CustomDelim.add(new ImmutableTriple(";b\na\r;", 1, 11));
    // UTF-32 (four bytes per char), default newline delimiters:
    TestDataUTF32.add(new ImmutableTriple("", -1, -1));
    TestDataUTF32.add(new ImmutableTriple("a", -1, -1));
    TestDataUTF32.add(new ImmutableTriple("a b", -1, -1));
    TestDataUTF32.add(new ImmutableTriple("\r", 3, 3));
    TestDataUTF32.add(new ImmutableTriple("\n", 3, 3));
    TestDataUTF32.add(new ImmutableTriple("\r\n", 7, 7));
    TestDataUTF32.add(new ImmutableTriple("\n\r", 7, 7));
    TestDataUTF32.add(new ImmutableTriple("\r\nabcde", 7, 7));
    TestDataUTF32.add(new ImmutableTriple("abcde\r", 23, 23));
    TestDataUTF32.add(new ImmutableTriple("abcde\n", 23, 23));
    TestDataUTF32.add(new ImmutableTriple("abcde\r\n", 27, 27));
    TestDataUTF32.add(new ImmutableTriple("abcde\rabcde", 23, 23));
    TestDataUTF32.add(new ImmutableTriple("abcde\nabcde", 23, 23));
    TestDataUTF32.add(new ImmutableTriple("abcde\r\nabcde", 27, 27));
    TestDataUTF32.add(new ImmutableTriple("a\rb\na\r\n", 7, 27));
    TestDataUTF32.add(new ImmutableTriple("\rb\na\r\n", 3, 23));
    // UTF-32, custom ";" delimiter:
    TestDataUTF32CustomDelim.add(new ImmutableTriple("", -1, -1));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("a", -1, -1));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("a b", -1, -1));
    TestDataUTF32CustomDelim.add(new ImmutableTriple(";", 3, 3));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("a;", 7, 7));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("b;", 7, 7));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("a;abcde", 7, 7));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("abcde;", 23, 23));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("abcde\r;", 27, 27));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("abcde;abcde", 23, 23));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("abcde;abcde", 23, 23));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("abcde\r;abcde", 27, 27));
    TestDataUTF32CustomDelim.add(new ImmutableTriple("a;b\na\r;", 7, 27));
    TestDataUTF32CustomDelim.add(new ImmutableTriple(";b\na\r;", 3, 23));
}
@Test
public void StringExtensions_FindNewLine_UTF8()
{
    // Exercise forward and reverse newline search over UTF-8 buffers, both
    // with default newline delimiters and with the custom ";" delimiter.
    runUtf8Cases(TestDataUTF8, null);
    runUtf8Cases(TestDataUTF8CustomDelim, customDelim);
}

// Runs every (input, expectedForward, expectedReverse) case against both an
// exact-sized buffer and a zero-padded larger buffer.
private static void runUtf8Cases(ArrayList<Triple<String, Integer, Integer>> cases, String delim)
{
    for (Triple<String, Integer, Integer> entry : cases)
    {
        byte[] exact = entry.getLeft().getBytes(StandardCharsets.UTF_8);
        byte[] padded = new byte[exact.length + 100];
        System.arraycopy(exact, 0, padded, 0, exact.length);
        int reverseStart = Math.max(0, exact.length - 1);
        Assert.assertEquals(entry.getMiddle().intValue(),
                StringExtensions.findNewline(exact, 0, exact.length, false, StandardCharsets.UTF_8, delim));
        Assert.assertEquals(entry.getMiddle().intValue(),
                StringExtensions.findNewline(padded, 0, exact.length, false, StandardCharsets.UTF_8, delim));
        Assert.assertEquals(entry.getRight().intValue(),
                StringExtensions.findNewline(exact, reverseStart, exact.length, true, StandardCharsets.UTF_8, delim));
        Assert.assertEquals(entry.getRight().intValue(),
                StringExtensions.findNewline(padded, reverseStart, exact.length, true, StandardCharsets.UTF_8, delim));
    }
}
@Test
public void StringExtensions_FindNewLine_UTF16()
{
    // Exercise forward and reverse newline search over UTF-16LE buffers, both
    // with default newline delimiters and with the custom ";" delimiter.
    runUtf16LeCases(TestDataUTF16, null);
    runUtf16LeCases(TestDataUTF16CustomDelim, customDelim);
}

// Runs every (input, expectedForward, expectedReverse) case against both an
// exact-sized buffer and a zero-padded larger buffer, encoded as UTF-16LE.
private static void runUtf16LeCases(ArrayList<Triple<String, Integer, Integer>> cases, String delim)
{
    for (Triple<String, Integer, Integer> entry : cases)
    {
        byte[] exact = entry.getLeft().getBytes(StandardCharsets.UTF_16LE);
        byte[] padded = new byte[exact.length + 100];
        System.arraycopy(exact, 0, padded, 0, exact.length);
        int reverseStart = Math.max(0, exact.length - 1);
        Assert.assertEquals(entry.getMiddle().intValue(),
                StringExtensions.findNewline(exact, 0, exact.length, false, StandardCharsets.UTF_16LE, delim));
        Assert.assertEquals(entry.getMiddle().intValue(),
                StringExtensions.findNewline(padded, 0, exact.length, false, StandardCharsets.UTF_16LE, delim));
        Assert.assertEquals(entry.getRight().intValue(),
                StringExtensions.findNewline(exact, reverseStart, exact.length, true, StandardCharsets.UTF_16LE, delim));
        Assert.assertEquals(entry.getRight().intValue(),
                StringExtensions.findNewline(padded, reverseStart, exact.length, true, StandardCharsets.UTF_16LE, delim));
    }
}
@Test
public void StringExtensions_FindNewLine_UTF16BigEndian() {
    // Each triple is (input text, expected forward-search index, expected reverse-search index).
    // First pass: default newline delimiters (null custom delimiter).
    for (Triple<String, Integer, Integer> testCase : TestDataUTF16) {
        byte[] encoded = testCase.getLeft().getBytes(StandardCharsets.UTF_16BE);
        byte[] padded = new byte[encoded.length + 100];
        System.arraycopy(encoded, 0, padded, 0, encoded.length);
        // Forward search must yield the same index in an exact-sized and an oversized buffer,
        // since the logical length passed in is identical.
        int forwardExact = StringExtensions.findNewline(encoded, 0, encoded.length, false, StandardCharsets.UTF_16BE, null);
        Assert.assertEquals(testCase.getMiddle().intValue(), forwardExact);
        int forwardPadded = StringExtensions.findNewline(padded, 0, encoded.length, false, StandardCharsets.UTF_16BE, null);
        Assert.assertEquals(testCase.getMiddle().intValue(), forwardPadded);
        // Reverse search starts at the last byte of the logical content (clamped to 0 for empty input).
        int reverseExact = StringExtensions.findNewline(encoded, Math.max(0, encoded.length - 1), encoded.length, true, StandardCharsets.UTF_16BE, null);
        Assert.assertEquals(testCase.getRight().intValue(), reverseExact);
        int reversePadded = StringExtensions.findNewline(padded, Math.max(0, encoded.length - 1), encoded.length, true, StandardCharsets.UTF_16BE, null);
        Assert.assertEquals(testCase.getRight().intValue(), reversePadded);
    }
    // Second pass: same checks against the caller-supplied delimiter data set.
    for (Triple<String, Integer, Integer> testCase : TestDataUTF16CustomDelim) {
        byte[] encoded = testCase.getLeft().getBytes(StandardCharsets.UTF_16BE);
        byte[] padded = new byte[encoded.length + 100];
        System.arraycopy(encoded, 0, padded, 0, encoded.length);
        int forwardExact = StringExtensions.findNewline(encoded, 0, encoded.length, false, StandardCharsets.UTF_16BE, customDelim);
        Assert.assertEquals(testCase.getMiddle().intValue(), forwardExact);
        int forwardPadded = StringExtensions.findNewline(padded, 0, encoded.length, false, StandardCharsets.UTF_16BE, customDelim);
        Assert.assertEquals(testCase.getMiddle().intValue(), forwardPadded);
        int reverseExact = StringExtensions.findNewline(encoded, Math.max(0, encoded.length - 1), encoded.length, true, StandardCharsets.UTF_16BE, customDelim);
        Assert.assertEquals(testCase.getRight().intValue(), reverseExact);
        int reversePadded = StringExtensions.findNewline(padded, Math.max(0, encoded.length - 1), encoded.length, true, StandardCharsets.UTF_16BE, customDelim);
        Assert.assertEquals(testCase.getRight().intValue(), reversePadded);
    }
}
@Test
public void StringExtensions_FindNewLine_ASCII() {
    // NOTE(review): this test reuses the UTF-8 data sets but encodes with US_ASCII; that assumes
    // the UTF-8 test strings are ASCII-only — TODO confirm, otherwise getBytes(US_ASCII) would
    // substitute unmappable characters and the expected indices would not line up.
    // Each triple is (input text, expected forward-search index, expected reverse-search index).
    for (Triple<String, Integer, Integer> testCase : TestDataUTF8) {
        byte[] encoded = testCase.getLeft().getBytes(StandardCharsets.US_ASCII);
        byte[] padded = new byte[encoded.length + 100];
        System.arraycopy(encoded, 0, padded, 0, encoded.length);
        // Forward search: identical result expected in exact-sized and oversized buffers.
        int forwardExact = StringExtensions.findNewline(encoded, 0, encoded.length, false, StandardCharsets.US_ASCII, null);
        Assert.assertEquals(testCase.getMiddle().intValue(), forwardExact);
        int forwardPadded = StringExtensions.findNewline(padded, 0, encoded.length, false, StandardCharsets.US_ASCII, null);
        Assert.assertEquals(testCase.getMiddle().intValue(), forwardPadded);
        // Reverse search starts at the last byte of the logical content (clamped to 0 for empty input).
        int reverseExact = StringExtensions.findNewline(encoded, Math.max(0, encoded.length - 1), encoded.length, true, StandardCharsets.US_ASCII, null);
        Assert.assertEquals(testCase.getRight().intValue(), reverseExact);
        int reversePadded = StringExtensions.findNewline(padded, Math.max(0, encoded.length - 1), encoded.length, true, StandardCharsets.US_ASCII, null);
        Assert.assertEquals(testCase.getRight().intValue(), reversePadded);
    }
    // Same checks with the caller-supplied delimiter data set.
    for (Triple<String, Integer, Integer> testCase : TestDataUTF8CustomDelim) {
        byte[] encoded = testCase.getLeft().getBytes(StandardCharsets.US_ASCII);
        byte[] padded = new byte[encoded.length + 100];
        System.arraycopy(encoded, 0, padded, 0, encoded.length);
        int forwardExact = StringExtensions.findNewline(encoded, 0, encoded.length, false, StandardCharsets.US_ASCII, customDelim);
        Assert.assertEquals(testCase.getMiddle().intValue(), forwardExact);
        int forwardPadded = StringExtensions.findNewline(padded, 0, encoded.length, false, StandardCharsets.US_ASCII, customDelim);
        Assert.assertEquals(testCase.getMiddle().intValue(), forwardPadded);
        int reverseExact = StringExtensions.findNewline(encoded, Math.max(0, encoded.length - 1), encoded.length, true, StandardCharsets.US_ASCII, customDelim);
        Assert.assertEquals(testCase.getRight().intValue(), reverseExact);
        int reversePadded = StringExtensions.findNewline(padded, Math.max(0, encoded.length - 1), encoded.length, true, StandardCharsets.US_ASCII, customDelim);
        Assert.assertEquals(testCase.getRight().intValue(), reversePadded);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache30;
import static org.apache.geode.cache.Scope.LOCAL;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.test.dunit.Assert.assertEquals;
import static org.apache.geode.test.dunit.Assert.assertNotNull;
import static org.apache.geode.test.dunit.Assert.assertTrue;
import static org.apache.geode.test.dunit.Assert.fail;
import static org.apache.geode.test.dunit.Host.getHost;
import static org.apache.geode.test.dunit.LogWriterUtils.getLogWriter;
import static org.apache.geode.test.dunit.NetworkUtils.getServerHostName;
import java.io.IOException;
import java.util.Properties;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.CacheLoaderException;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.client.ServerOperationException;
import org.apache.geode.cache.client.SubscriptionNotEnabledException;
import org.apache.geode.cache.client.internal.PoolImpl;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.NetworkUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.junit.categories.ClientSubscriptionTest;
/**
* Tests the client register interest
*
* @since GemFire 4.2.3
*/
@Category({ClientSubscriptionTest.class})
public class ClientRegisterInterestDUnitTest extends ClientServerTestCase {

  // Port of the cache (bridge) server started inside a server VM. Written by the server VM
  // and read back across VM boundaries via getBridgeServerPort(), so the client side of each
  // test can build its pool against the right port.
  protected static int bridgeServerPort;

  @Override
  public final void postTearDownCacheTestCase() throws Exception {
    disconnectAllFromDS(); // cleans up cache server and client and lonerDS
  }

  /**
   * Tests for Bug 35381 Calling register interest if establishCallbackConnection is not set causes
   * cache server NPE.
   */
  @Test
  public void testBug35381() throws Exception {
    final Host host = Host.getHost(0);
    final String name = getUniqueName();
    final int[] ports = new int[1]; // 1 server in this test
    final int whichVM = 0;
    final VM vm = Host.getHost(0).getVM(whichVM);
    // Server side: create a LOCAL region with one entry and start a cache server on an
    // ephemeral port (startBridgeServer(0) picks the port and stores it in bridgeServerPort).
    vm.invoke(new CacheSerializableRunnable("Create cache server") {
      @Override
      public void run2() throws CacheException {
        LogWriterUtils.getLogWriter().info("[testBug35381] Create BridgeServer");
        getSystem();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.LOCAL);
        Region region = createRegion(name, factory.create());
        assertNotNull(region);
        assertNotNull(getRootRegion().getSubregion(name));
        region.put("KEY-1", "VAL-1");
        try {
          bridgeServerPort = startBridgeServer(0);
        } catch (IOException e) {
          LogWriterUtils.getLogWriter().error("startBridgeServer threw IOException", e);
          fail("startBridgeServer threw IOException ", e);
        }
        assertTrue(bridgeServerPort != 0);
        LogWriterUtils.getLogWriter().info("[testBug35381] port=" + bridgeServerPort);
        LogWriterUtils.getLogWriter().info("[testBug35381] serverMemberId=" + getMemberId());
      }
    });
    // Pull the ephemeral port back from the server VM.
    ports[whichVM] = vm.invoke(ClientRegisterInterestDUnitTest::getBridgeServerPort);
    assertTrue(ports[whichVM] != 0);
    // Client side (runs in the controller VM): loner DS, pool WITHOUT subscriptions.
    LogWriterUtils.getLogWriter().info("[testBug35381] create bridge client");
    Properties config = new Properties();
    config.setProperty(MCAST_PORT, "0");
    config.setProperty(LOCATORS, "");
    getSystem(config);
    getCache();
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    LogWriterUtils.getLogWriter().info("[testBug35381] creating connection pool");
    boolean establishCallbackConnection = false; // SOURCE OF BUG 35381
    ClientServerTestCase.configureConnectionPool(factory, NetworkUtils.getServerHostName(host),
        ports, establishCallbackConnection, -1, -1, null);
    Region region = createRegion(name, factory.create());
    assertNotNull(getRootRegion().getSubregion(name));
    // With subscriptions disabled, registerInterest must fail fast on the client rather than
    // triggering an NPE on the server (the original bug).
    try {
      region.registerInterest("KEY-1");
      fail(
          "registerInterest failed to throw SubscriptionNotEnabledException with establishCallbackConnection set to false");
    } catch (SubscriptionNotEnabledException ignored) {
    }
  }

  /** Accessor invoked across VM boundaries to read the server's ephemeral port. */
  private static int getBridgeServerPort() {
    return bridgeServerPort;
  }

  /**
   * Tests failover of register interest from client point of view. Related bugs include:
   *
   * <p>
   * Bug 35654 "failed re-registration may never be detected and thus may never re-re-register"
   *
   * <p>
   * Bug 35639 "registerInterest re-registration happens everytime a healthy server is detected"
   *
   * <p>
   * Bug 35655 "a single failed re-registration causes all other pending re-registrations to be
   * cancelled"
   */
  @Ignore("TODO")
  @Test
  public void testRegisterInterestFailover() throws Exception {
    // controller is bridge client
    final Host host = getHost(0);
    final String name = getUniqueName();
    final String regionName1 = name + "-1";
    final String regionName2 = name + "-2";
    final String regionName3 = name + "-3";
    final String key1 = "KEY-" + regionName1 + "-1";
    final String key2 = "KEY-" + regionName1 + "-2";
    final String key3 = "KEY-" + regionName1 + "-3";
    final int[] ports = new int[3]; // 3 servers in this test
    // create first cache server with region for client...
    // First server hosts all three regions; it will be the initial primary.
    final int firstServerIdx = 0;
    final VM firstServerVM = getHost(0).getVM(firstServerIdx);
    firstServerVM.invoke(new CacheSerializableRunnable("Create first cache server") {
      @Override
      public void run2() throws CacheException {
        getLogWriter()
            .info("[testRegisterInterestFailover] Create first cache server");
        getSystem();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(LOCAL);
        Region region1 = createRootRegion(regionName1, factory.create());
        Region region2 = createRootRegion(regionName2, factory.create());
        Region region3 = createRootRegion(regionName3, factory.create());
        region1.put(key1, "VAL-1");
        region2.put(key2, "VAL-1");
        region3.put(key3, "VAL-1");
        try {
          bridgeServerPort = startBridgeServer(0);
        } catch (IOException e) {
          getLogWriter().error("startBridgeServer threw IOException", e);
          fail("startBridgeServer threw IOException ", e);
        }
        assertTrue(bridgeServerPort != 0);
        getLogWriter()
            .info("[testRegisterInterestFailover] " + "firstServer port=" + bridgeServerPort);
        getLogWriter()
            .info("[testRegisterInterestFailover] " + "firstServer memberId=" + getMemberId());
      }
    });
    // create second cache server missing region for client...
    // Second server deliberately lacks regionName2 so the re-registration of interest in
    // region2 fails after failover — the scenario behind bugs 35654/35655.
    final int secondServerIdx = 1;
    final VM secondServerVM = getHost(0).getVM(secondServerIdx);
    secondServerVM.invoke(new CacheSerializableRunnable("Create second cache server") {
      @Override
      public void run2() throws CacheException {
        getLogWriter()
            .info("[testRegisterInterestFailover] Create second cache server");
        getSystem();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(LOCAL);
        Region region1 = createRootRegion(regionName1, factory.create());
        Region region3 = createRootRegion(regionName3, factory.create());
        region1.put(key1, "VAL-2");
        region3.put(key3, "VAL-2");
        try {
          bridgeServerPort = startBridgeServer(0);
        } catch (IOException e) {
          getLogWriter().error("startBridgeServer threw IOException", e);
          fail("startBridgeServer threw IOException ", e);
        }
        assertTrue(bridgeServerPort != 0);
        getLogWriter()
            .info("[testRegisterInterestFailover] " + "secondServer port=" + bridgeServerPort);
        getLogWriter()
            .info("[testRegisterInterestFailover] " + "secondServer memberId=" + getMemberId());
      }
    });
    // get the cache server ports...
    ports[firstServerIdx] =
        firstServerVM.invoke(ClientRegisterInterestDUnitTest::getBridgeServerPort);
    assertTrue(ports[firstServerIdx] != 0);
    ports[secondServerIdx] =
        secondServerVM.invoke(ClientRegisterInterestDUnitTest::getBridgeServerPort);
    assertTrue(ports[secondServerIdx] != 0);
    assertTrue(ports[firstServerIdx] != ports[secondServerIdx]);
    // stop second and third servers
    // NOTE(review): only the second server is actually stopped here (and only two servers are
    // ever created despite ports having length 3) — comment appears stale; confirm intent.
    secondServerVM.invoke(new CacheSerializableRunnable("Stop second cache server") {
      @Override
      public void run2() throws CacheException {
        stopBridgeServers(getCache());
      }
    });
    // create the bridge client
    // NOTE(review): log tag says "testBug35654" but this method is testRegisterInterestFailover —
    // looks like a stale copy/paste; runtime string left unchanged here.
    getLogWriter().info("[testBug35654] create bridge client");
    Properties config = new Properties();
    config.setProperty(MCAST_PORT, "0");
    config.setProperty(LOCATORS, "");
    getSystem(config);
    getCache();
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(LOCAL);
    getLogWriter().info("[testRegisterInterestFailover] creating connection pool");
    boolean establishCallbackConnection = true;
    final PoolImpl p = (PoolImpl) configureConnectionPool(factory,
        getServerHostName(host), ports, establishCallbackConnection, -1, -1, null);
    final Region region1 = createRootRegion(regionName1, factory.create());
    final Region region2 = createRootRegion(regionName2, factory.create());
    final Region region3 = createRootRegion(regionName3, factory.create());
    // Register interest in one key per region and verify the client-side bookkeeping.
    assertTrue(region1.getInterestList().isEmpty());
    assertTrue(region2.getInterestList().isEmpty());
    assertTrue(region3.getInterestList().isEmpty());
    region1.registerInterest(key1);
    region2.registerInterest(key2);
    region3.registerInterest(key3);
    assertTrue(region1.getInterestList().contains(key1));
    assertTrue(region2.getInterestList().contains(key2));
    assertTrue(region3.getInterestList().contains(key3));
    assertTrue(region1.getInterestListRegex().isEmpty());
    assertTrue(region2.getInterestListRegex().isEmpty());
    assertTrue(region3.getInterestListRegex().isEmpty());
    // get ConnectionProxy and wait until connected to first server
    WaitCriterion ev = new WaitCriterion() {
      @Override
      public boolean done() {
        return p.getPrimaryPort() != -1;
      }

      @Override
      public String description() {
        return "primary port remained invalid";
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);
    assertEquals(ports[firstServerIdx], p.getPrimaryPort());
    // assert initial values
    assertEquals("VAL-1", region1.get(key1));
    assertEquals("VAL-1", region2.get(key2));
    assertEquals("VAL-1", region3.get(key3));
    // do puts on server1 and make sure values come thru for all 3 registrations
    firstServerVM.invoke(new CacheSerializableRunnable("Puts from first cache server") {
      @Override
      public void run2() throws CacheException {
        Region region1 = getCache().getRegion(regionName1);
        region1.put(key1, "VAL-1-1");
        Region region2 = getCache().getRegion(regionName2);
        region2.put(key2, "VAL-1-1");
        Region region3 = getCache().getRegion(regionName3);
        region3.put(key3, "VAL-1-1");
      }
    });
    // Wait for all three subscription updates to arrive at the client.
    ev = new WaitCriterion() {
      @Override
      public boolean done() {
        return "VAL-1-1".equals(region1.get(key1)) && "VAL-1-1".equals(region2.get(key2))
            && "VAL-1-1".equals(region3.get(key3));
      }

      @Override
      public String description() {
        return null;
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);
    assertEquals("VAL-1-1", region1.get(key1));
    assertEquals("VAL-1-1", region2.get(key2));
    assertEquals("VAL-1-1", region3.get(key3));
    // force failover to server 2
    secondServerVM.invoke(new CacheSerializableRunnable("Start second cache server") {
      @Override
      public void run2() throws CacheException {
        try {
          startBridgeServer(ports[secondServerIdx]);
        } catch (IOException e) {
          getLogWriter().error("startBridgeServer threw IOException", e);
          fail("startBridgeServer threw IOException ", e);
        }
      }
    });
    firstServerVM.invoke(new CacheSerializableRunnable("Stop first cache server") {
      @Override
      public void run2() throws CacheException {
        stopBridgeServers(getCache());
      }
    });
    // wait for failover to second server
    ev = new WaitCriterion() {
      @Override
      public boolean done() {
        return ports[secondServerIdx] == p.getPrimaryPort();
      }

      @Override
      public String description() {
        return "primary port never became " + ports[secondServerIdx];
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);
    // regionName2 does not exist on the second server, so a get on region2 must fail.
    try {
      assertEquals(null, region2.get(key2));
      fail("CacheLoaderException expected");
    } catch (CacheLoaderException ignored) {
    }
    // region2 registration should be gone now
    // do puts on server2 and make sure values come thru for only 2 registrations
    secondServerVM.invoke(new CacheSerializableRunnable("Puts from second cache server") {
      @Override
      public void run2() throws CacheException {
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(LOCAL);
        createRootRegion(regionName2, factory.create());
      }
    });
    // assert that there is no actively registered interest on region2
    assertTrue(region2.getInterestList().isEmpty());
    assertTrue(region2.getInterestListRegex().isEmpty());
    // Local put only — region2's interest was dropped, so no server update should overwrite it.
    region2.put(key2, "VAL-0");
    secondServerVM.invoke(new CacheSerializableRunnable("Put from second cache server") {
      @Override
      public void run2() throws CacheException {
        Region region1 = getCache().getRegion(regionName1);
        region1.put(key1, "VAL-2-2");
        Region region2 = getCache().getRegion(regionName2);
        region2.put(key2, "VAL-2-1");
        Region region3 = getCache().getRegion(regionName3);
        region3.put(key3, "VAL-2-2");
      }
    });
    // wait for updates to come thru
    ev = new WaitCriterion() {
      @Override
      public boolean done() {
        return "VAL-2-2".equals(region1.get(key1)) && "VAL-2-2".equals(region3.get(key3));
      }

      @Override
      public String description() {
        return null;
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);
    assertEquals("VAL-2-2", region1.get(key1));
    // region2 kept its local value — the server's "VAL-2-1" must NOT have been pushed.
    assertEquals("VAL-0", region2.get(key2));
    assertEquals("VAL-2-2", region3.get(key3));
    // assert again that there is no actively registered interest on region2
    assertTrue(region2.getInterestList().isEmpty());
    // register interest again on region2 and make
    region2.registerInterest(key2);
    assertEquals("VAL-2-1", region2.get(key2));
    secondServerVM.invoke(new CacheSerializableRunnable("Put from second cache server") {
      @Override
      public void run2() throws CacheException {
        Region region1 = getCache().getRegion(regionName1);
        region1.put(key1, "VAL-2-3");
        Region region2 = getCache().getRegion(regionName2);
        region2.put(key2, "VAL-2-2");
        Region region3 = getCache().getRegion(regionName3);
        region3.put(key3, "VAL-2-3");
      }
    });
    // wait for updates to come thru
    ev = new WaitCriterion() {
      @Override
      public boolean done() {
        return "VAL-2-3".equals(region1.get(key1)) && "VAL-2-2".equals(region2.get(key2))
            && "VAL-2-3".equals(region3.get(key3));
      }

      @Override
      public String description() {
        return null;
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);
    assertEquals("VAL-2-3", region1.get(key1));
    assertEquals("VAL-2-2", region2.get(key2));
    assertEquals("VAL-2-3", region3.get(key3));
    // assert public methods report actively registered interest on region2
    assertTrue(region2.getInterestList().contains(key2));
  }

  /**
   * Registering interest in a partitioned region served by a loner (mcast-port 0, no locator)
   * system must be rejected with a ServerOperationException carrying a specific message.
   */
  @Test
  public void rejectAttemptToRegisterInterestInLonerSystem() throws Exception {
    final String name = getUniqueName();
    final String regionName1 = name + "-1";
    // create first cache server with region for client...
    final int firstServerIdx = 1;
    final VM firstServerVM = Host.getHost(0).getVM(firstServerIdx);
    firstServerVM.invoke(new CacheSerializableRunnable("Create first cache server") {
      @Override
      public void run2() throws CacheException {
        // Loner cache: no multicast, no locators.
        Cache cache = new CacheFactory().set("mcast-port", "0").create();
        try {
          CacheServer bridge = cache.addCacheServer();
          bridge.setPort(0);
          bridge.setMaxThreads(getMaxThreads());
          bridge.start();
          bridgeServerPort = bridge.getPort();
        } catch (IOException e) {
          LogWriterUtils.getLogWriter().error("startBridgeServer threw IOException", e);
          fail("startBridgeServer threw IOException ", e);
        }
        assertTrue(bridgeServerPort != 0);
        // NOTE(review): region1 is intentionally created but unused here — the server just needs
        // the partitioned region to exist for the client's registerInterestRegex to target.
        Region region1 = cache.createRegionFactory(RegionShortcut.PARTITION).create(regionName1);
      }
    });
    // get the cache server ports...
    int port = firstServerVM.invoke(ClientRegisterInterestDUnitTest::getBridgeServerPort);
    try {
      ClientCache clientCache =
          new ClientCacheFactory().addPoolServer(firstServerVM.getHost().getHostName(), port)
              .setPoolSubscriptionEnabled(true).create();
      Region region = clientCache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY)
          .create(regionName1);
      region.registerInterestRegex(".*");
      fail();
    } catch (ServerOperationException e) {
      // expected
      if (!e.getRootCause().getMessage().equals(
          "Should not register interest for a partitioned region when mcast-port is 0 and no locator is present")) {
        fail();
      }
    }
  }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.query;
import com.google.common.base.Splitter;
import com.yahoo.collections.LazySet;
import com.yahoo.component.ComponentSpecification;
import com.yahoo.prelude.query.Highlight;
import com.yahoo.prelude.query.IndexedItem;
import com.yahoo.search.Query;
import com.yahoo.search.query.profile.types.FieldDescription;
import com.yahoo.search.query.profile.types.QueryProfileFieldType;
import com.yahoo.search.query.profile.types.QueryProfileType;
import com.yahoo.search.query.ranking.MatchPhase;
import com.yahoo.search.rendering.RendererRegistry;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
 * Parameters deciding how the result of a query should be presented
 *
 * @author Arne Bergene Fossaa
 */
public class Presentation implements Cloneable {

    /** The type representing the property arguments consumed by this */
    private static final QueryProfileType argumentType;

    public static final String PRESENTATION = "presentation";
    public static final String BOLDING = "bolding";
    public static final String TIMING = "timing";
    public static final String SUMMARY = "summary";
    public static final String SUMMARY_FIELDS = "summaryFields";
    public static final String TENSORS = "tensors";

    /** The (short) name of the parameter holding the name of the return format to use */
    public static final String FORMAT = "format";

    static {
        // The string constants above are initialized before this block runs because static
        // initialization happens in textual order; keep this block after them.
        argumentType = new QueryProfileType(PRESENTATION);
        argumentType.setStrict(true);
        argumentType.setBuiltin(true);
        argumentType.addField(new FieldDescription(BOLDING, "boolean", "bolding"));
        argumentType.addField(new FieldDescription(TIMING, "boolean", "timing"));
        argumentType.addField(new FieldDescription(SUMMARY, "string", "summary"));
        argumentType.addField(new FieldDescription(SUMMARY_FIELDS, "string", "summaryFields"));
        QueryProfileType formatType = new QueryProfileType(FORMAT);
        formatType.setBuiltin(true);
        formatType.setStrict(true);
        formatType.addField(new FieldDescription("", "string", "format template"));
        formatType.addField(new FieldDescription(TENSORS, "string", "format.tensors"));
        formatType.freeze();
        argumentType.addField(new FieldDescription(FORMAT, new QueryProfileFieldType(formatType), "format"));
        argumentType.freeze();
    }

    /** Returns the frozen query profile type describing the "presentation" properties. */
    public static QueryProfileType getArgumentType() {
        return argumentType;
    }

    /** How the result should be highlighted, or null when no highlighting is configured. */
    private Highlight highlight = null;

    /** The terms to highlight in the result (only used by BoldingSearcher, may be removed later). */
    private List<IndexedItem> boldingData = null;

    /** Whether matching query terms are bolded in rendered hits. */
    private boolean bolding = true;

    /** The summary class used to present hits, or null when unset. */
    private String summary = null;

    /** The renderer used for the hits of this query. */
    private ComponentSpecification format = RendererRegistry.defaultRendererId.toSpecification();

    /** Whether optional timing data is added to the rendered result. */
    private boolean timing = false;

    /** Whether tensors are rendered in short form. */
    private boolean tensorShortForm = false;

    /** Explicitly requested summary fields, as opposed to fields implied by a summary class. */
    private Set<String> summaryFields = LazySet.newHashSet();

    private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();

    public Presentation(Query parent) { }

    /** Returns how terms in this result should be highlighted, or null if not set */
    public Highlight getHighlight() {
        return highlight;
    }

    /** Sets how terms in this result should be highlighted. Set to null to turn highlighting off */
    public void setHighlight(Highlight highlight) {
        this.highlight = highlight;
    }

    /** Returns the name of the summary class to be used to present hits from this query, or null if not set */
    public String getSummary() {
        return summary;
    }

    /** Sets the name of the summary class to be used to present hits from this query */
    public void setSummary(String summary) {
        this.summary = summary;
    }

    /** Returns whether matching query terms should be bolded in the result. Default is true. */
    public boolean getBolding() {
        return bolding;
    }

    /** Sets whether matching query terms should be bolded in the result */
    public void setBolding(boolean bolding) {
        this.bolding = bolding;
    }

    /** Get the name of the format desired for result rendering. */
    public ComponentSpecification getRenderer() {
        return format;
    }

    /** Set the desired format for result rendering. If null, use the default renderer. */
    public void setRenderer(ComponentSpecification format) {
        this.format = (format != null) ? format : RendererRegistry.defaultRendererId.toSpecification();
    }

    /** Get the name of the format desired for result rendering. */
    public String getFormat() {
        return format.getName();
    }

    /** Set the desired format for result rendering. If null, use the default renderer. */
    public void setFormat(String format) {
        setRenderer(ComponentSpecification.fromString(format));
    }

    @Override
    public Object clone() {
        try {
            Presentation clone = (Presentation) super.clone();
            // Deep-copy the mutable members so the clone is independent of this instance.
            if (boldingData != null) {
                clone.boldingData = new ArrayList<>(boldingData);
            }
            if (highlight != null) {
                clone.highlight = highlight.clone();
            }
            if (summaryFields != null) {
                Set<String> fieldsCopy = LazySet.newHashSet();
                fieldsCopy.addAll(summaryFields);
                clone.summaryFields = fieldsCopy;
            }
            return clone;
        } catch (CloneNotSupportedException e) {
            throw new RuntimeException("Someone inserted a noncloneable superclass", e);
        }
    }

    /** Returns whether to add optional timing data to the rendered result. */
    public boolean getTiming() {
        return timing;
    }

    public void setTiming(boolean timing) {
        this.timing = timing;
    }

    /**
     * Return the set of explicitly requested fields. Returns an empty set if no
     * fields are specified outside of summary classes. The returned set is
     * mutable and fields may be added or removed before passing on the query.
     *
     * @return the set of names of requested fields, never null
     */
    public Set<String> getSummaryFields() {
        return summaryFields;
    }

    /**
     * Parse the given string as a comma delimited set of field names and
     * overwrite the set of summary fields. Whitespace will be trimmed. If you
     * want to add or remove fields programmatically, use
     * {@link #getSummaryFields()} and modify the returned set.
     *
     * @param fieldList the summary fields requested, e.g. "price,author,title"
     */
    public void setSummaryFields(String fieldList) {
        summaryFields.clear();
        COMMA_SPLITTER.split(fieldList).forEach(summaryFields::add);
    }

    /**
     * Returns whether tensors should use short form in JSON and textual representations, see
     * <a href="https://docs.vespa.ai/en/reference/document-json-format.html#tensor">https://docs.vespa.ai/en/reference/document-json-format.html#tensor</a>
     * and <a href="https://docs.vespa.ai/en/reference/tensor.html#tensor-literal-form">https://docs.vespa.ai/en/reference/tensor.html#tensor-literal-form</a>.
     * Default is false.
     */
    public boolean getTensorShortForm() {
        return tensorShortForm;
    }

    /**
     * Sets whether tensors should use short form in JSON and textual representations from a string.
     *
     * @param value a string which must be either 'short' or 'long'
     * @throws IllegalArgumentException if any other value is passed
     */
    public void setTensorShortForm(String value) {
        tensorShortForm = toTensorShortForm(value);
    }

    /** Maps "short"/"long" to true/false; any other value is rejected. */
    private boolean toTensorShortForm(String value) {
        switch (value) {
            case "short":
                return true;
            case "long":
                return false;
            default:
                throw new IllegalArgumentException("Value must be 'long' or 'short', not '" + value + "'");
        }
    }

    public void setTensorShortForm(boolean tensorShortForm) {
        this.tensorShortForm = tensorShortForm;
    }

    /** Prepares this for binary serialization. For internal use - see {@link Query#prepare} */
    public void prepare() {
        if (highlight != null) {
            highlight.prepare();
        }
    }

    @Override
    public boolean equals(Object o) {
        // NOTE: only bolding and summary participate in equality, mirroring hashCode below.
        if (!(o instanceof Presentation)) {
            return false;
        }
        Presentation other = (Presentation) o;
        return QueryHelper.equals(bolding, other.bolding) && QueryHelper.equals(summary, other.summary);
    }

    @Override
    public int hashCode() {
        return QueryHelper.combineHash(bolding, summary);
    }

}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.client.ml.inference.trainedmodel.tree;
import org.elasticsearch.client.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class Tree implements TrainedModel {
public static final String NAME = "tree";
public static final ParseField FEATURE_NAMES = new ParseField("feature_names");
public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure");
public static final ParseField TARGET_TYPE = new ParseField("target_type");
public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels");
    // Lenient parser (ignoreUnknownFields = true) mapping the XContent representation of a
    // tree model onto a mutable Builder. The static block must stay textually after this
    // declaration: it registers the field handlers on the already-constructed PARSER.
    private static final ObjectParser<Builder, Void> PARSER = new ObjectParser<>(NAME, true, Builder::new);
    static {
        PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES);
        PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE);
        PARSER.declareString(Builder::setTargetType, TARGET_TYPE);
        PARSER.declareStringArray(Builder::setClassificationLabels, CLASSIFICATION_LABELS);
    }
public static Tree fromXContent(XContentParser parser) {
return PARSER.apply(parser, null).build();
}
private final List<String> featureNames;
private final List<TreeNode> nodes;
private final TargetType targetType;
private final List<String> classificationLabels;
    // Package-private: instances are expected to be created through Builder.build().
    // The lists are stored as given — no defensive copies or validation are performed here,
    // so callers must not mutate them afterwards (builder is presumed to hand over ownership).
    Tree(List<String> featureNames, List<TreeNode> nodes, TargetType targetType, List<String> classificationLabels) {
        this.featureNames = featureNames;
        this.nodes = nodes;
        this.targetType = targetType;
        this.classificationLabels = classificationLabels;
    }
@Override
public String getName() {
return NAME;
}
@Override
public List<String> getFeatureNames() {
return featureNames;
}
public List<TreeNode> getNodes() {
return nodes;
}
@Nullable
public List<String> getClassificationLabels() {
return classificationLabels;
}
public TargetType getTargetType() {
return targetType;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (featureNames != null) {
builder.field(FEATURE_NAMES.getPreferredName(), featureNames);
}
if (nodes != null) {
builder.field(TREE_STRUCTURE.getPreferredName(), nodes);
}
if (classificationLabels != null) {
builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels);
}
if (targetType != null) {
builder.field(TARGET_TYPE.getPreferredName(), targetType.toString());
}
builder.endObject();
return builder;
}
@Override
public String toString() {
return Strings.toString(this);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Tree that = (Tree) o;
return Objects.equals(featureNames, that.featureNames)
&& Objects.equals(classificationLabels, that.classificationLabels)
&& Objects.equals(targetType, that.targetType)
&& Objects.equals(nodes, that.nodes);
}
@Override
public int hashCode() {
return Objects.hash(featureNames, nodes, targetType, classificationLabels);
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private List<String> featureNames;
private ArrayList<TreeNode.Builder> nodes;
private int numNodes;
private TargetType targetType;
private List<String> classificationLabels;
public Builder() {
nodes = new ArrayList<>();
// allocate space in the root node and set to a leaf
nodes.add(null);
addLeaf(0, 0.0);
numNodes = 1;
}
public Builder setFeatureNames(List<String> featureNames) {
this.featureNames = featureNames;
return this;
}
public Builder addNode(TreeNode.Builder node) {
nodes.add(node);
return this;
}
public Builder setNodes(List<TreeNode.Builder> nodes) {
this.nodes = new ArrayList<>(nodes);
return this;
}
public Builder setNodes(TreeNode.Builder... nodes) {
return setNodes(Arrays.asList(nodes));
}
public Builder setTargetType(TargetType targetType) {
this.targetType = targetType;
return this;
}
public Builder setClassificationLabels(List<String> classificationLabels) {
this.classificationLabels = classificationLabels;
return this;
}
private void setTargetType(String targetType) {
this.targetType = TargetType.fromString(targetType);
}
/**
* Add a decision node. Space for the child nodes is allocated
* @param nodeIndex Where to place the node. This is either 0 (root) or an existing child node index
* @param featureIndex The feature index the decision is made on
* @param isDefaultLeft Default left branch if the feature is missing
* @param decisionThreshold The decision threshold
* @return The created node
*/
public TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) {
int leftChild = numNodes++;
int rightChild = numNodes++;
nodes.ensureCapacity(nodeIndex + 1);
for (int i = nodes.size(); i < nodeIndex + 1; i++) {
nodes.add(null);
}
TreeNode.Builder node = TreeNode.builder(nodeIndex)
.setDefaultLeft(isDefaultLeft)
.setLeftChild(leftChild)
.setRightChild(rightChild)
.setSplitFeature(featureIndex)
.setThreshold(decisionThreshold);
nodes.set(nodeIndex, node);
// allocate space for the child nodes
while (nodes.size() <= rightChild) {
nodes.add(null);
}
return node;
}
/**
* Sets the node at {@code nodeIndex} to a leaf node.
* @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)}
* @param value The prediction value
* @return this
*/
public Builder addLeaf(int nodeIndex, double value) {
for (int i = nodes.size(); i < nodeIndex + 1; i++) {
nodes.add(null);
}
nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(Collections.singletonList(value)));
return this;
}
public Tree build() {
return new Tree(featureNames,
nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList()),
targetType,
classificationLabels);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.lucene;
import static org.apache.geode.cache.lucene.test.LuceneTestUtilities.*;
import static org.junit.Assert.*;
import java.io.Serializable;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.geode.internal.cache.CachedDeserializable;
import org.apache.geode.internal.cache.EntrySnapshot;
import org.apache.geode.internal.cache.RegionEntry;
import org.awaitility.Awaitility;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.ExpirationAction;
import org.apache.geode.cache.ExpirationAttributes;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.lucene.internal.LuceneIndexForPartitionedRegion;
import org.apache.geode.cache.lucene.internal.LuceneIndexImpl;
import org.apache.geode.cache.lucene.internal.LuceneIndexStats;
import org.apache.geode.cache.lucene.internal.filesystem.FileSystemStats;
import org.apache.geode.cache.lucene.test.LuceneTestUtilities;
import org.apache.geode.test.junit.categories.IntegrationTest;
@Category(IntegrationTest.class)
public class LuceneIndexMaintenanceIntegrationTest extends LuceneIntegrationTest {
  // Upper bound (in ms) to wait for AEQ events to be flushed into the Lucene index.
  private static final int WAIT_FOR_FLUSH_TIME = 10000;

  @Test
  public void indexIsNotUpdatedIfTransactionHasNotCommittedYet() throws Exception {
    luceneService.createIndexFactory().setFields("title", "description").create(INDEX_NAME,
        REGION_NAME);
    Region region = createRegion(REGION_NAME, RegionShortcut.PARTITION);
    region.put("object-1", new TestObject("title 1", "hello world"));
    region.put("object-2", new TestObject("title 2", "this will not match"));
    region.put("object-3", new TestObject("title 3", "hello world"));
    region.put("object-4", new TestObject("hello world", "hello world"));
    luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS);
    LuceneQuery query = luceneService.createLuceneQueryFactory().create(INDEX_NAME, REGION_NAME,
        "description:\"hello world\"", DEFAULT_FIELD);
    PageableLuceneQueryResults<Integer, TestObject> results = query.findPages();
    assertEquals(3, results.size());
    // Begin a transaction without committing; the uncommitted put must not be visible.
    cache.getCacheTransactionManager().begin();
    try {
      region.put("object-1", new TestObject("title 1", "updated"));
      luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
          TimeUnit.MILLISECONDS);
      assertEquals(3, query.findPages().size());
    } finally {
      // Roll back so the open transaction does not leak into other tests on this thread.
      cache.getCacheTransactionManager().rollback();
    }
  }

  @Test
  public void indexIsUpdatedAfterTransactionHasCommitted() throws Exception {
    luceneService.createIndexFactory().setFields("title", "description").create(INDEX_NAME,
        REGION_NAME);
    Region region = createRegion(REGION_NAME, RegionShortcut.PARTITION);
    region.put("object-1", new TestObject("title 1", "hello world"));
    region.put("object-2", new TestObject("title 2", "this will not match"));
    region.put("object-3", new TestObject("title 3", "hello world"));
    region.put("object-4", new TestObject("hello world", "hello world"));
    luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS);
    LuceneQuery query = luceneService.createLuceneQueryFactory().create(INDEX_NAME, REGION_NAME,
        "description:\"hello world\"", DEFAULT_FIELD);
    PageableLuceneQueryResults<Integer, TestObject> results = query.findPages();
    assertEquals(3, results.size());
    // The committed update changes object-1's description so it no longer matches.
    cache.getCacheTransactionManager().begin();
    region.put("object-1", new TestObject("title 1", "updated"));
    cache.getCacheTransactionManager().commit();
    luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS);
    assertEquals(2, query.findPages().size());
  }

  @Test
  public void indexIsNotUpdatedAfterTransactionRollback() throws Exception {
    luceneService.createIndexFactory().setFields("title", "description").create(INDEX_NAME,
        REGION_NAME);
    Region region = createRegion(REGION_NAME, RegionShortcut.PARTITION);
    region.put("object-1", new TestObject("title 1", "hello world"));
    region.put("object-2", new TestObject("title 2", "this will not match"));
    region.put("object-3", new TestObject("title 3", "hello world"));
    region.put("object-4", new TestObject("hello world", "hello world"));
    luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS);
    LuceneQuery query = luceneService.createLuceneQueryFactory().create(INDEX_NAME, REGION_NAME,
        "description:\"hello world\"", DEFAULT_FIELD);
    PageableLuceneQueryResults<Integer, TestObject> results = query.findPages();
    assertEquals(3, results.size());
    // A rolled-back update must leave the index unchanged.
    cache.getCacheTransactionManager().begin();
    region.put("object-1", new TestObject("title 1", "updated"));
    cache.getCacheTransactionManager().rollback();
    luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS);
    assertEquals(3, query.findPages().size());
  }

  @Test
  public void statsAreUpdatedAfterACommit() throws Exception {
    luceneService.createIndexFactory().setFields("title", "description").create(INDEX_NAME,
        REGION_NAME);
    Region region = createRegion(REGION_NAME, RegionShortcut.PARTITION);
    region.put("object-1", new TestObject("title 1", "hello world"));
    region.put("object-2", new TestObject("title 2", "this will not match"));
    region.put("object-3", new TestObject("title 3", "hello world"));
    region.put("object-4", new TestObject("hello world", "hello world"));
    LuceneIndexForPartitionedRegion index =
        (LuceneIndexForPartitionedRegion) luceneService.getIndex(INDEX_NAME, REGION_NAME);
    luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS);
    FileSystemStats fileSystemStats = index.getFileSystemStats();
    LuceneIndexStats indexStats = index.getIndexStats();
    // Stats are updated asynchronously, so poll until they reflect the four documents.
    await(() -> assertEquals(4, indexStats.getDocuments()));
    await(() -> assertTrue(fileSystemStats.getBytes() > 0));
  }

  @Test
  public void indexShouldBeUpdatedWithRegionExpirationDestroyOperation() throws Exception {
    luceneService.createIndexFactory().setFields("title", "description").create(INDEX_NAME,
        REGION_NAME);
    // Configure PR with expiration operation set to destroy
    Region region = cache.createRegionFactory(RegionShortcut.PARTITION)
        .setEntryTimeToLive(new ExpirationAttributes(1, ExpirationAction.DESTROY))
        .create(REGION_NAME);
    populateRegion(region);
    // Wait for expiration to destroy region entries. The region should be
    // left with zero entries.
    Awaitility.await().atMost(60, TimeUnit.SECONDS).until(() -> {
      assertEquals(0, region.size());
    });
    // Wait for events to be flushed from AEQ.
    luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS);
    // Execute query to fetch all the values for "description" field.
    LuceneQuery query = luceneService.createLuceneQueryFactory().create(INDEX_NAME, REGION_NAME,
        "description:\"hello world\"", DEFAULT_FIELD);
    PageableLuceneQueryResults<Integer, TestObject> results = query.findPages();
    // The query should return 0 results.
    assertEquals(0, results.size());
  }

  @Test
  public void nullValuesShouldNotCauseAnException() throws Exception {
    luceneService.createIndexFactory().setFields("title", "description").create(INDEX_NAME,
        REGION_NAME);
    Region region = cache.createRegionFactory(RegionShortcut.PARTITION).create(REGION_NAME);
    // A null value entry must be tolerated by the index maintenance path.
    region.create(0, null);
    region.put(113, new TestObject("hello world", "hello world"));
    // Wait for events to be flushed from AEQ.
    luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS);
    // Execute query to fetch all the values for "description" field.
    LuceneQuery query = luceneService.createLuceneQueryFactory().create(INDEX_NAME, REGION_NAME,
        "description:\"hello world\"", DEFAULT_FIELD);
    PageableLuceneQueryResults<Integer, TestObject> results = query.findPages();
    assertEquals(1, results.size());
  }

  @Test
  public void entriesFlushedToIndexAfterWaitForFlushCalled() throws InterruptedException {
    luceneService.createIndexFactory().setFields("title", "description").create(INDEX_NAME,
        REGION_NAME);
    Region region = createRegion(REGION_NAME, RegionShortcut.PARTITION);
    // With the sender paused, no events can reach the index, so the first wait times out.
    LuceneTestUtilities.pauseSender(cache);
    region.put("object-1", new TestObject("title 1", "hello world"));
    region.put("object-2", new TestObject("title 2", "this will not match"));
    region.put("object-3", new TestObject("title 3", "hello world"));
    region.put("object-4", new TestObject("hello world", "hello world"));
    LuceneIndexImpl index = (LuceneIndexImpl) luceneService.getIndex(INDEX_NAME, REGION_NAME);
    assertFalse(
        luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, 500, TimeUnit.MILLISECONDS));
    LuceneTestUtilities.resumeSender(cache);
    assertTrue(luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS));
    assertEquals(4, index.getIndexStats().getCommits());
  }

  @Test
  public void entriesKeptInSerializedFormInDataRegion() throws InterruptedException {
    // Create index and region
    luceneService.createIndexFactory().setFields("title", "description").create(INDEX_NAME,
        REGION_NAME);
    Region region = createRegion(REGION_NAME, RegionShortcut.PARTITION);
    // Pause sender
    LuceneTestUtilities.pauseSender(cache);
    // Do puts
    populateRegion(region);
    // Verify values are in serialized form
    verifySerializedValues(region);
    // Resume sender and wait for flushed
    LuceneTestUtilities.resumeSender(cache);
    assertTrue(luceneService.waitUntilFlushed(INDEX_NAME, REGION_NAME, WAIT_FOR_FLUSH_TIME,
        TimeUnit.MILLISECONDS));
    // Indexing must not deserialize the stored values in place.
    verifySerializedValues(region);
  }

  // Puts the four standard fixtures; three have "hello world" in the description.
  private void populateRegion(Region region) {
    region.put("object-1", new TestObject("title 1", "hello world"));
    region.put("object-2", new TestObject("title 2", "this will not match"));
    region.put("object-3", new TestObject("title 3", "hello world"));
    region.put("object-4", new TestObject("hello world", "hello world"));
  }

  // Asserts every region entry is still held as a serialized byte[] wrapped
  // in a CachedDeserializable, i.e. it was never deserialized in place.
  private void verifySerializedValues(Region region) {
    Set entries = region.entrySet();
    assertFalse(entries.isEmpty());
    for (Iterator i = entries.iterator(); i.hasNext();) {
      EntrySnapshot entry = (EntrySnapshot) i.next();
      RegionEntry re = entry.getRegionEntry();
      Object reValue = re.getValue(null);
      assertTrue(reValue instanceof CachedDeserializable);
      Object cdValue = ((CachedDeserializable) reValue).getValue();
      assertTrue(cdValue instanceof byte[]);
    }
  }

  // Retries the assertion for up to 30s; used for asynchronously-updated stats.
  private void await(Runnable runnable) {
    Awaitility.await().atMost(30, TimeUnit.SECONDS).until(runnable);
  }

  // Simple serializable value with the two indexed fields.
  private static class TestObject implements Serializable {
    final String title;
    final String description;

    public TestObject(String title, String description) {
      this.title = title;
      this.description = description;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.scripting.jsp.jasper.compiler;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import org.apache.sling.scripting.jsp.jasper.JasperException;
import org.apache.sling.scripting.jsp.jasper.JspCompilationContext;
import org.apache.sling.scripting.jsp.jasper.Options;
import org.apache.sling.scripting.jsp.jasper.compiler.Node.CustomTag;
/**
* Main JSP compiler class. This class uses Ant for compiling.
*
* @author Anil K. Vijendran
* @author Mandar Raje
* @author Pierre Delisle
* @author Kin-man Chung
* @author Remy Maucherat
* @author Mark Roth
*/
public abstract class Compiler {

    protected org.apache.juli.logging.Log log = org.apache.juli.logging.LogFactory
            .getLog(Compiler.class);

    // ----------------------------------------------------- Instance Variables

    protected JspCompilationContext ctxt;
    protected ErrorDispatcher errDispatcher;
    protected PageInfo pageInfo;
    protected TagFileProcessor tfp;
    protected Options options;
    protected Node.Nodes pageNodes;
    // Default value of the page-level "session" setting when the JSP does not declare one.
    private final boolean defaultIsSession;

    // ------------------------------------------------------------ Constructor

    public Compiler(boolean defaultIsSession) {
        this.defaultIsSession = defaultIsSession;
    }

    /**
     * Initializes this compiler with its compilation context. Must be called
     * before {@link #compile()}; also caches the context's Options.
     */
    public void init(final JspCompilationContext ctxt) {
        this.ctxt = ctxt;
        this.options = ctxt.getOptions();
    }

    // --------------------------------------------------------- Public Methods

    /**
     * <p>
     * Retrieves the parsed nodes of the JSP page, if they are available. May
     * return null. Used in development mode for generating detailed error
     * messages. http://issues.apache.org/bugzilla/show_bug.cgi?id=37062.
     * </p>
     */
    public Node.Nodes getPageNodes() {
        return this.pageNodes;
    }

    /**
     * Compile the jsp file into equivalent servlet in .java file
     *
     * @return a smap for the current JSP page, if one is generated, null
     *         otherwise
     */
    protected String[] generateJava() throws Exception {
        String[] smapStr = null;
        long t1, t2, t3, t4;
        t1 = t2 = t3 = t4 = 0;
        if (log.isDebugEnabled()) {
            t1 = System.currentTimeMillis();
        }
        // Setup page info area
        pageInfo = new PageInfo(new BeanRepository(ctxt.getClassLoader(),
                errDispatcher), ctxt.getJspFile(), defaultIsSession);
        JspConfig jspConfig = options.getJspConfig();
        JspConfig.JspProperty jspProperty = jspConfig.findJspProperty(ctxt
                .getJspFile());
        /*
         * If the current uri is matched by a pattern specified in a
         * jsp-property-group in web.xml, initialize pageInfo with those
         * properties.
         */
        if (jspProperty.isELIgnored() != null) {
            pageInfo.setELIgnored(JspUtil.booleanValue(jspProperty
                    .isELIgnored()));
        }
        if (jspProperty.isScriptingInvalid() != null) {
            pageInfo.setScriptingInvalid(JspUtil.booleanValue(jspProperty
                    .isScriptingInvalid()));
        }
        if (jspProperty.getIncludePrelude() != null) {
            pageInfo.setIncludePrelude(jspProperty.getIncludePrelude());
        }
        if (jspProperty.getIncludeCoda() != null) {
            pageInfo.setIncludeCoda(jspProperty.getIncludeCoda());
        }
        if (jspProperty.isDeferedSyntaxAllowedAsLiteral() != null) {
            pageInfo.setDeferredSyntaxAllowedAsLiteral(JspUtil.booleanValue(jspProperty
                    .isDeferedSyntaxAllowedAsLiteral()));
        }
        if (jspProperty.isTrimDirectiveWhitespaces() != null) {
            pageInfo.setTrimDirectiveWhitespaces(JspUtil.booleanValue(jspProperty
                    .isTrimDirectiveWhitespaces()));
        }
        ctxt.checkOutputDir();
        String javaFileName = ctxt.getServletJavaFileName();
        ServletWriter writer = null;
        try {
            // Setup the ServletWriter
            String javaEncoding = ctxt.getOptions().getJavaEncoding();
            OutputStreamWriter osw = null;
            try {
                osw = new OutputStreamWriter(
                        ctxt.getOutputStream(javaFileName), javaEncoding);
            } catch (UnsupportedEncodingException ex) {
                errDispatcher.jspError("jsp.error.needAlternateJavaEncoding",
                        javaEncoding);
            } catch (IOException ioe) {
                // Wrap as FileNotFoundException, preserving the original cause.
                throw (IOException)new FileNotFoundException(ioe.getMessage()).initCause(ioe);
            }
            writer = new ServletWriter(new PrintWriter(osw));
            ctxt.setWriter(writer);
            // Reset the temporary variable counter for the generator.
            JspUtil.resetTemporaryVariableName();
            // Parse the file
            ParserController parserCtl = new ParserController(ctxt, this);
            pageNodes = parserCtl.parse(ctxt.getJspFile());
            if (ctxt.isPrototypeMode()) {
                // generate prototype .java file for the tag file
                Generator.generate(writer, this, pageNodes);
                writer.close();
                writer = null;
                return null;
            }
            // Validate and process attributes
            Validator.validate(this, pageNodes);
            if (log.isDebugEnabled()) {
                t2 = System.currentTimeMillis();
            }
            // Collect page info
            Collector.collect(this, pageNodes);
            // Compile (if necessary) and load the tag files referenced in
            // this compilation unit.
            tfp = new TagFileProcessor();
            tfp.loadTagFiles(this, pageNodes);
            if (log.isDebugEnabled()) {
                t3 = System.currentTimeMillis();
            }
            // Determine which custom tag needs to declare which scripting vars
            ScriptingVariabler.set(pageNodes, errDispatcher);
            // Optimizations by Tag Plugins
            TagPluginManager tagPluginManager = options.getTagPluginManager();
            tagPluginManager.apply(pageNodes, errDispatcher, pageInfo);
            // Optimization: concatenate contiguous template texts.
            TextOptimizer.concatenate(this, pageNodes);
            // Generate static function mapper codes.
            ELFunctionMapper.map(this, pageNodes);
            // generate servlet .java file
            Generator.generate(writer, this, pageNodes);
            // we have to use a temporary variable in order to not
            // close the writer twice if close() throws an exception
            final ServletWriter w = writer;
            writer = null;
            w.close();
            // The writer is only used during the compile, dereference
            // it in the JspCompilationContext when done to allow it
            // to be GC'd and save memory.
            ctxt.setWriter(null);
            if (log.isDebugEnabled()) {
                t4 = System.currentTimeMillis();
                log.debug("Generated " + javaFileName + " total=" + (t4 - t1)
                        + " generate=" + (t4 - t3) + " validate=" + (t2 - t1));
            }
        } catch (Exception e) {
            if (writer != null) {
                try {
                    writer.close();
                    writer = null;
                } catch (Exception e1) {
                    // do nothing
                }
            }
            // Remove the generated .java file
            ctxt.delete(javaFileName);
            throw e;
        } finally {
            // Belt-and-braces close for any path that left the writer open.
            if (writer != null) {
                try {
                    writer.close();
                } catch (Exception e2) {
                    // do nothing
                }
            }
        }
        // JSR45 Support
        if (!options.isSmapSuppressed()) {
            smapStr = SmapUtil.generateSmap(ctxt, pageNodes);
        }
        // If any proto type .java and .class files was generated,
        // the prototype .java may have been replaced by the current
        // compilation (if the tag file is self referencing), but the
        // .class file need to be removed, to make sure that javac would
        // generate .class again from the new .java file just generated.
        tfp.removeProtoTypeFiles(ctxt.getClassFileName());
        return smapStr;
    }

    /**
     * Compile the servlet from .java file to .class file
     */
    protected abstract void generateClass(String[] smap)
            throws FileNotFoundException, JasperException, Exception;

    /**
     * Compile the jsp file from the current engine context
     */
    public void compile() throws FileNotFoundException, JasperException,
            Exception {
        compile(true);
    }

    /**
     * Compile the jsp file from the current engine context. As an side- effect,
     * tag files that are referenced by this page are also compiled.
     *
     * @param compileClass
     *            If true, generate both .java and .class file If false,
     *            generate only .java file
     */
    public void compile(boolean compileClass) throws FileNotFoundException,
            JasperException, Exception {
        compile(compileClass, false);
    }

    /**
     * Compile the jsp file from the current engine context. As an side- effect,
     * tag files that are referenced by this page are also compiled.
     *
     * @param compileClass
     *            If true, generate both .java and .class file If false,
     *            generate only .java file
     * @param jspcMode
     *            true if invoked from JspC, false otherwise
     */
    public void compile(boolean compileClass, boolean jspcMode)
            throws FileNotFoundException, JasperException, Exception {
        if (errDispatcher == null) {
            this.errDispatcher = new ErrorDispatcher(jspcMode);
        }
        try {
            String[] smap = generateJava();
            if (compileClass) {
                generateClass(smap);
            }
        } finally {
            if (tfp != null) {
                tfp.removeProtoTypeFiles(null);
            }
            // Make sure these object which are only used during the
            // generation and compilation of the JSP page get
            // dereferenced so that they can be GC'd and reduce the
            // memory footprint.
            tfp = null;
            errDispatcher = null;
            pageInfo = null;
            // Only get rid of the pageNodes if in production.
            // In development mode, they are used for detailed
            // error messages.
            // http://issues.apache.org/bugzilla/show_bug.cgi?id=37062
            //if (!this.options.getDevelopment()) {
            //    pageNodes = null;
            //}
            if (ctxt.getWriter() != null) {
                ctxt.getWriter().close();
                ctxt.setWriter(null);
            }
        }
    }

    /**
     * Gets the error dispatcher.
     */
    public ErrorDispatcher getErrorDispatcher() {
        return errDispatcher;
    }

    /**
     * Gets the info about the page under compilation
     */
    public PageInfo getPageInfo() {
        return pageInfo;
    }

    public JspCompilationContext getCompilationContext() {
        return ctxt;
    }

    /**
     * Remove generated files
     */
    public void removeGeneratedFiles() {
        this.removeGeneratedClassFiles();
        try {
            String javaFileName = ctxt.getServletJavaFileName();
            if (javaFileName != null) {
                if (log.isDebugEnabled()) {
                    log.debug("Deleting " + javaFileName);
                }
                ctxt.delete(javaFileName);
            }
        } catch (Exception e) {
            // Remove as much as possible, ignore possible exceptions
        }
    }

    public void removeGeneratedClassFiles() {
        try {
            String classFileName = ctxt.getClassFileName();
            if (classFileName != null) {
                if (log.isDebugEnabled()) {
                    log.debug("Deleting " + classFileName);
                }
                ctxt.delete(classFileName);
            }
        } catch (Exception e) {
            // Remove as much as possible, ignore possible exceptions
        }
    }

    /**
     * Releases per-tag state held by parsed custom-tag nodes, if any were kept.
     */
    public void clean() {
        if ( this.pageNodes != null ) {
            try {
                pageNodes.visit(new CleanVisitor());
            } catch ( final JasperException ignore) {
                // ignore
            }
        }
    }

    protected boolean getDefaultIsSession() {
        return defaultIsSession;
    }

    // Visitor that cleans each custom tag node and recurses into its body.
    private static final class CleanVisitor extends Node.Visitor {
        public void visit(final CustomTag n) throws JasperException {
            n.clean();
            visitBody(n);
        }
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.mixedreality.remoterendering;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceClient;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.exception.HttpResponseException;
import com.azure.core.http.HttpHeaders;
import com.azure.core.http.HttpRequest;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.core.util.logging.ClientLogger;
import com.azure.core.util.polling.LongRunningOperationStatus;
import com.azure.core.util.polling.PollResponse;
import com.azure.mixedreality.remoterendering.implementation.MixedRealityRemoteRenderingImpl;
import com.azure.mixedreality.remoterendering.implementation.models.CreateConversionSettings;
import com.azure.mixedreality.remoterendering.implementation.models.SessionProperties;
import com.azure.mixedreality.remoterendering.implementation.models.ConversionSettings;
import com.azure.mixedreality.remoterendering.implementation.models.ConversionInputSettings;
import com.azure.mixedreality.remoterendering.implementation.models.ConversionOutputSettings;
import com.azure.mixedreality.remoterendering.implementation.models.UpdateSessionSettings;
import com.azure.mixedreality.remoterendering.implementation.models.CreateSessionSettings;
import com.azure.mixedreality.remoterendering.models.AssetConversion;
import com.azure.mixedreality.remoterendering.models.AssetConversionStatus;
import com.azure.mixedreality.remoterendering.models.BeginSessionOptions;
import com.azure.mixedreality.remoterendering.models.AssetConversionOptions;
import com.azure.mixedreality.remoterendering.models.RemoteRenderingServiceError;
import com.azure.mixedreality.remoterendering.models.RenderingSession;
import com.azure.mixedreality.remoterendering.models.RenderingSessionSize;
import com.azure.mixedreality.remoterendering.models.RenderingSessionStatus;
import com.azure.mixedreality.remoterendering.models.UpdateSessionOptions;
import reactor.core.publisher.Mono;
import com.azure.core.util.polling.PollerFlux;
import java.time.Duration;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Collectors;
import static com.azure.core.util.FluxUtil.withContext;
import static com.azure.core.util.FluxUtil.monoError;
/** A builder for creating a new instance of the MixedRealityRemoteRendering type. */
@ServiceClient(builder = RemoteRenderingClientBuilder.class, isAsync = true)
public final class RemoteRenderingAsyncClient {
private static final Duration CONVERSION_POLLING_INTERVAL = Duration.ofSeconds(10);
private static final Duration STANDARD_SESSION_POLLING_INTERVAL = Duration.ofSeconds(2);
private static final Duration DEFAULT_SESSION_POLLING_INTERVAL = Duration.ofSeconds(10);
private final ClientLogger logger = new ClientLogger(RemoteRenderingAsyncClient.class);
private final UUID accountId;
private final MixedRealityRemoteRenderingImpl impl;
/**
 * Package-private constructor used by {@link RemoteRenderingClientBuilder}.
 *
 * @param impl the generated service implementation to delegate to.
 * @param accountId the Mixed Reality account ID.
 */
RemoteRenderingAsyncClient(MixedRealityRemoteRenderingImpl impl, UUID accountId) {
    // Fail fast on nulls, matching the validation style used in beginSessionInternal.
    this.accountId = Objects.requireNonNull(accountId, "'accountId' cannot be null.");
    this.impl = Objects.requireNonNull(impl, "'impl' cannot be null.");
}
/**
 * Creates a new rendering session with default {@link BeginSessionOptions}.
 *
 * @param sessionId An ID uniquely identifying the rendering session for the given account. The ID is case
 *     sensitive, can contain any combination of alphanumeric characters including hyphens and underscores, and
 *     cannot contain more than 256 characters.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws HttpResponseException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the rendering session.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public PollerFlux<RenderingSession, RenderingSession> beginSession(String sessionId) {
    // Delegate to the options-taking overload with a freshly constructed default configuration.
    final BeginSessionOptions defaultOptions = new BeginSessionOptions();
    return beginSession(sessionId, defaultOptions);
}
/**
 * Creates a new rendering session configured by the supplied options.
 *
 * @param sessionId An ID uniquely identifying the rendering session for the given account. The ID is case
 *     sensitive, can contain any combination of alphanumeric characters including hyphens and underscores, and
 *     cannot contain more than 256 characters.
 * @param options Settings for the session to be created.
 * @throws IllegalArgumentException thrown if parameters fail the validation.
 * @throws HttpResponseException thrown if the request is rejected by server.
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
 * @return the rendering session.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public PollerFlux<RenderingSession, RenderingSession> beginSession(String sessionId, BeginSessionOptions options) {
    // Public entry point; the internal variant does validation and carries the (empty) context.
    return beginSessionInternal(sessionId, options, Context.NONE);
}
    /**
     * Validates arguments and builds the poller that drives session creation.
     *
     * @param sessionId session identifier; must be non-null and non-empty.
     * @param options session creation settings; must be non-null.
     * @param context the request context; must be non-null.
     * @return a poller that reports intermediate and final session state.
     * @throws NullPointerException if any argument is null.
     * @throws IllegalArgumentException if {@code sessionId} is empty.
     */
    PollerFlux<RenderingSession, RenderingSession> beginSessionInternal(String sessionId, BeginSessionOptions options, Context context) {
        Objects.requireNonNull(sessionId, "'sessionId' cannot be null.");
        Objects.requireNonNull(options, "'options' cannot be null.");
        Objects.requireNonNull(context, "'context' cannot be null.");
        if (sessionId.isEmpty()) {
            throw logger.logExceptionAsError(new IllegalArgumentException("'sessionId' cannot be an empty string."));
        }
        return new PollerFlux<>(
            // STANDARD sessions are polled faster; '==' comparison assumes the size values
            // are canonical instances — NOTE(review): confirm, otherwise use equals().
            (options.getSize() == RenderingSessionSize.STANDARD) ? STANDARD_SESSION_POLLING_INTERVAL : DEFAULT_SESSION_POLLING_INTERVAL,
            // Activation: issue the create call and translate the generated model.
            pollingContext -> impl.getRemoteRenderings().createSessionWithResponseAsync(accountId, sessionId, ModelTranslator.toGenerated(options), context).map(r -> ModelTranslator.fromGenerated(r.getValue())),
            // Poll: fetch the current session and map its status onto the LRO status model.
            pollingContext -> {
                Mono<RenderingSession> response = impl.getRemoteRenderings().getSessionWithResponseAsync(accountId, sessionId, context).map(r -> ModelTranslator.fromGenerated(r.getValue()));
                return response.map(session -> {
                    final RenderingSessionStatus sessionStatus = session.getStatus();
                    LongRunningOperationStatus lroStatus;
                    if (sessionStatus == RenderingSessionStatus.STARTING) {
                        lroStatus = LongRunningOperationStatus.IN_PROGRESS;
                    } else if (sessionStatus == RenderingSessionStatus.ERROR) {
                        lroStatus = LongRunningOperationStatus.FAILED;
                    } else if (sessionStatus == RenderingSessionStatus.READY) {
                        lroStatus = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
                    } else if (sessionStatus == RenderingSessionStatus.STOPPED) {
                        lroStatus = LongRunningOperationStatus.USER_CANCELLED;
                    } else {
                        // Unknown statuses are conservatively treated as failure.
                        lroStatus = LongRunningOperationStatus.FAILED;
                    }
                    return new PollResponse<>(lroStatus, session);
                });
            },
            // Cancellation stops the session, then surfaces the last seen session value.
            (pollingContext, pollResponse) -> impl.getRemoteRenderings().stopSessionWithResponseAsync(accountId, sessionId, context).then(Mono.just(pollingContext.getLatestResponse().getValue())),
            // Final result: the value from the most recent poll.
            pollingContext -> Mono.just(pollingContext.getLatestResponse().getValue())
        );
    }
/**
* Gets properties of a particular rendering session.
*
* @param sessionId An ID uniquely identifying the rendering session for the given account. The ID is case
* sensitive, can contain any combination of alphanumeric characters including hyphens and underscores, and
* cannot contain more than 256 characters.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the rendering session.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<RenderingSession> getSession(String sessionId) {
return getSessionWithResponse(sessionId).map(Response::getValue);
}
/**
* Gets properties of a particular rendering session.
*
* @param sessionId An ID uniquely identifying the rendering session for the given account. The ID is case
* sensitive, can contain any combination of alphanumeric characters including hyphens and underscores, and
* cannot contain more than 256 characters.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the rendering session.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<RenderingSession>> getSessionWithResponse(String sessionId) {
try {
return withContext(context -> getSessionInternal(sessionId, context));
} catch (RuntimeException exception) {
return monoError(this.logger, exception);
}
}
Mono<Response<RenderingSession>> getSessionInternal(String sessionId, Context context) {
Objects.requireNonNull(sessionId, "'sessionId' cannot be null.");
Objects.requireNonNull(context, "'context' cannot be null.");
if (sessionId.isEmpty()) {
throw logger.logExceptionAsError(new IllegalArgumentException("'sessionId' cannot be an empty string."));
}
return impl.getRemoteRenderings().getSessionWithResponseAsync(accountId, sessionId, context).map(ModelTranslator::fromGenerated);
}
/**
* Updates a particular rendering session.
*
* @param sessionId An ID uniquely identifying the rendering session for the given account. The ID is case
* sensitive, can contain any combination of alphanumeric characters including hyphens and underscores, and
* cannot contain more than 256 characters.
* @param options Options for the session to be updated.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the rendering session.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<RenderingSession> updateSession(String sessionId, UpdateSessionOptions options) {
return updateSessionWithResponse(sessionId, options).map(Response::getValue);
}
/**
* Updates a particular rendering session.
*
* @param sessionId An ID uniquely identifying the rendering session for the given account. The ID is case
* sensitive, can contain any combination of alphanumeric characters including hyphens and underscores, and
* cannot contain more than 256 characters.
* @param options Options for the session to be updated.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the rendering session.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<RenderingSession>> updateSessionWithResponse(String sessionId, UpdateSessionOptions options) {
try {
return withContext(context -> updateSessionInternal(sessionId, options, context));
} catch (RuntimeException exception) {
return monoError(this.logger, exception);
}
}
Mono<Response<RenderingSession>> updateSessionInternal(String sessionId, UpdateSessionOptions options, Context context) {
Objects.requireNonNull(sessionId, "'sessionId' cannot be null.");
Objects.requireNonNull(options, "'options' cannot be null.");
Objects.requireNonNull(context, "'context' cannot be null.");
if (sessionId.isEmpty()) {
throw logger.logExceptionAsError(new IllegalArgumentException("'sessionId' cannot be an empty string."));
}
return impl.getRemoteRenderings().updateSessionWithResponseAsync(accountId, sessionId, ModelTranslator.toGenerated(options), context).map(ModelTranslator::fromGenerated);
}
/**
* Stops a particular rendering session.
*
* @param sessionId An ID uniquely identifying the rendering session for the given account. The ID is case
* sensitive, can contain any combination of alphanumeric characters including hyphens and underscores, and
* cannot contain more than 256 characters.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return nothing on completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Void> endSession(String sessionId) {
return endSessionWithResponse(sessionId).then();
}
/**
* Stops a particular rendering session.
*
* @param sessionId An ID uniquely identifying the rendering session for the given account. The ID is case
* sensitive, can contain any combination of alphanumeric characters including hyphens and underscores, and
* cannot contain more than 256 characters.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return nothing on completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<Void>> endSessionWithResponse(String sessionId) {
try {
return withContext(context -> endSessionInternal(sessionId, context));
} catch (RuntimeException exception) {
return monoError(this.logger, exception);
}
}
    /**
     * Validates arguments and issues the stop-session REST call.
     *
     * @param sessionId session identifier; must be non-null and non-empty.
     * @param context the request context; must be non-null.
     * @return the raw stop response.
     */
    Mono<Response<Void>> endSessionInternal(String sessionId, Context context) {
        Objects.requireNonNull(sessionId, "'sessionId' cannot be null.");
        Objects.requireNonNull(context, "'context' cannot be null.");
        if (sessionId.isEmpty()) {
            throw logger.logExceptionAsError(new IllegalArgumentException("'sessionId' cannot be an empty string."));
        }
        // NOTE(review): the identity map presumably widens the generated response type to
        // Response<Void> (Mono's type parameter is invariant) — confirm before removing it.
        return impl.getRemoteRenderings().stopSessionWithResponseAsync(accountId, sessionId, context).map(r -> r);
    }
/**
* Get a list of all rendering sessions.
*
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a list of all rendering sessions.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<RenderingSession> listSessions() {
return listSessionsInternal(Context.NONE);
}
    /**
     * Builds a {@link PagedFlux} over all rendering sessions, translating each generated
     * page's items into public {@link RenderingSession} models.
     *
     * @param context the request context; must be non-null.
     * @return the paged stream of sessions.
     */
    PagedFlux<RenderingSession> listSessionsInternal(Context context) {
        Objects.requireNonNull(context, "'context' cannot be null.");
        return new PagedFlux<>(
            // First page: keep status/headers/continuation token, translate the values.
            () -> impl.getRemoteRenderings().listSessionsSinglePageAsync(accountId, context).map(p ->
                new PagedResponseBase<HttpRequest, RenderingSession>(p.getRequest(),
                    p.getStatusCode(),
                    p.getHeaders(),
                    p.getValue().stream().map(ModelTranslator::fromGenerated).collect(Collectors.toList()),
                    p.getContinuationToken(),
                    null)),
            // Subsequent pages: fetched via the continuation token, same translation.
            continuationToken -> impl.getRemoteRenderings().listSessionsNextSinglePageAsync(continuationToken, context).map(p ->
                new PagedResponseBase<HttpRequest, RenderingSession>(p.getRequest(),
                    p.getStatusCode(),
                    p.getHeaders(),
                    p.getValue().stream().map(ModelTranslator::fromGenerated).collect(Collectors.toList()),
                    p.getContinuationToken(),
                    null)));
    }
/**
* Starts a conversion using an asset stored in an Azure Blob Storage account. If the remote rendering account has
* been linked with the storage account no Shared Access Signatures (storageContainerReadListSas,
* storageContainerWriteSas) for storage access need to be provided. Documentation how to link your Azure Remote
* Rendering account with the Azure Blob Storage account can be found in the
* [documentation](https://docs.microsoft.com/azure/remote-rendering/how-tos/create-an-account#link-storage-accounts).
*
* <p>All files in the input container starting with the blobPrefix will be retrieved to perform the conversion. To
* cut down on conversion times only necessary files should be available under the blobPrefix.
*
* @param conversionId An ID uniquely identifying the conversion for the given account. The ID is case sensitive,
* can contain any combination of alphanumeric characters including hyphens and underscores, and cannot contain
* more than 256 characters.
* @param options The conversion options.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the conversion.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public PollerFlux<AssetConversion, AssetConversion> beginConversion(String conversionId, AssetConversionOptions options) {
return beginConversionInternal(conversionId, options, Context.NONE);
}
PollerFlux<AssetConversion, AssetConversion> beginConversionInternal(String conversionId, AssetConversionOptions options, Context context) {
Objects.requireNonNull(context, "'context' cannot be null.");
return new PollerFlux<>(
CONVERSION_POLLING_INTERVAL,
pollingContext -> impl.getRemoteRenderings().createConversionWithResponseAsync(accountId, conversionId, new CreateConversionSettings(ModelTranslator.toGenerated(options)), context).map(c -> ModelTranslator.fromGenerated(c.getValue())),
pollingContext -> {
Mono<AssetConversion> response = impl.getRemoteRenderings().getConversionWithResponseAsync(accountId, conversionId, context).map(c -> ModelTranslator.fromGenerated(c.getValue()));
return response.map(conversion -> {
final AssetConversionStatus convStatus = conversion.getStatus();
LongRunningOperationStatus lroStatus;
if ((convStatus == AssetConversionStatus.RUNNING) || (convStatus == AssetConversionStatus.NOT_STARTED)) {
lroStatus = LongRunningOperationStatus.IN_PROGRESS;
} else if (convStatus == AssetConversionStatus.FAILED) {
lroStatus = LongRunningOperationStatus.FAILED;
} else if (convStatus == AssetConversionStatus.SUCCEEDED) {
lroStatus = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
} else if (convStatus == AssetConversionStatus.CANCELLED) {
lroStatus = LongRunningOperationStatus.USER_CANCELLED;
} else {
lroStatus = LongRunningOperationStatus.FAILED;
}
return new PollResponse<>(lroStatus, conversion);
});
},
(pollingContext, pollResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")),
pollingContext -> Mono.just(pollingContext.getLatestResponse().getValue())
);
}
/**
* Gets the status of a previously created asset conversion.
*
* @param conversionId An ID uniquely identifying the conversion for the given account. The ID is case sensitive,
* can contain any combination of alphanumeric characters including hyphens and underscores, and cannot contain
* more than 256 characters.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the conversion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<AssetConversion> getConversion(String conversionId) {
return getConversionWithResponse(conversionId).map(Response::getValue);
}
/**
* Gets the status of a previously created asset conversion.
*
* @param conversionId An ID uniquely identifying the conversion for the given account. The ID is case sensitive,
* can contain any combination of alphanumeric characters including hyphens and underscores, and cannot contain
* more than 256 characters.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the conversion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<AssetConversion>> getConversionWithResponse(String conversionId) {
try {
return withContext(context -> getConversionInternal(conversionId, context));
} catch (RuntimeException exception) {
return monoError(this.logger, exception);
}
}
Mono<Response<AssetConversion>> getConversionInternal(String conversionId, Context context) {
Objects.requireNonNull(conversionId, "'conversionId' cannot be null.");
Objects.requireNonNull(context, "'context' cannot be null.");
if (conversionId.isEmpty()) {
throw logger.logExceptionAsError(new IllegalArgumentException("'conversionId' cannot be an empty string."));
}
return impl.getRemoteRenderings().getConversionWithResponseAsync(accountId, conversionId, context).map(ModelTranslator::fromGenerated);
}
/**
* Gets a list of all conversions.
*
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a list of all conversions.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedFlux<AssetConversion> listConversions() {
return listConversionsInternal(Context.NONE);
}
    /**
     * Builds a {@link PagedFlux} over all conversions, translating each generated page's
     * items into public {@link AssetConversion} models.
     *
     * @param context the request context; must be non-null.
     * @return the paged stream of conversions.
     */
    PagedFlux<AssetConversion> listConversionsInternal(Context context) {
        Objects.requireNonNull(context, "'context' cannot be null.");
        return new PagedFlux<>(
            // First page: keep status/headers/continuation token, translate the values.
            () -> impl.getRemoteRenderings().listConversionsSinglePageAsync(accountId, context).map(p ->
                new PagedResponseBase<HttpRequest, AssetConversion>(p.getRequest(),
                    p.getStatusCode(),
                    p.getHeaders(),
                    p.getValue().stream().map(ModelTranslator::fromGenerated).collect(Collectors.toList()),
                    p.getContinuationToken(),
                    null)),
            // Subsequent pages: fetched via the continuation token, same translation.
            continuationToken -> impl.getRemoteRenderings().listConversionsNextSinglePageAsync(continuationToken, context).map(p ->
                new PagedResponseBase<HttpRequest, AssetConversion>(p.getRequest(),
                    p.getStatusCode(),
                    p.getHeaders(),
                    p.getValue().stream().map(ModelTranslator::fromGenerated).collect(Collectors.toList()),
                    p.getContinuationToken(),
                    null)));
    }
    /**
     * Static translation helpers between the generated (implementation) models and the
     * public models exposed by this client. Null inputs translate to null outputs.
     */
    private static class ModelTranslator {
        /**
         * Wraps a generated response, lazily-translated value included, as a public response.
         * Status code, headers and request are delegated to the original response.
         */
        private static <T, Y> Response<T> fromGenerated(Response<Y> response) {
            if (response == null) {
                return null;
            }
            return new Response<T>() {
                // Translate once, at wrapper construction time.
                private final T value = fromGeneratedGeneric(response.getValue());
                @Override
                public int getStatusCode() {
                    return response.getStatusCode();
                }
                @Override
                public HttpHeaders getHeaders() {
                    return response.getHeaders();
                }
                @Override
                public HttpRequest getRequest() {
                    return response.getRequest();
                }
                @Override
                public T getValue() {
                    return this.value;
                }
            };
        }
        /**
         * Dispatches to the matching typed translator based on the runtime type of {@code value}.
         * The cast to T is unchecked but guarded by the instanceof dispatch.
         */
        @SuppressWarnings("unchecked")
        private static <T, Y> T fromGeneratedGeneric(Y value) {
            if (value == null) {
                return null;
            } else if (value instanceof com.azure.mixedreality.remoterendering.implementation.models.Conversion) {
                return (T) fromGenerated((com.azure.mixedreality.remoterendering.implementation.models.Conversion) value);
            } else if (value instanceof SessionProperties) {
                return (T) fromGenerated((SessionProperties) value);
            } else if (value instanceof com.azure.mixedreality.remoterendering.implementation.models.Error) {
                return (T) fromGenerated((com.azure.mixedreality.remoterendering.implementation.models.Error) value);
            } else if (value instanceof com.azure.mixedreality.remoterendering.implementation.models.ConversionSettings) {
                return (T) fromGenerated((com.azure.mixedreality.remoterendering.implementation.models.ConversionSettings) value);
            } else {
                // NOTE(review): unknown types silently translate to null — consider throwing;
                // confirm callers tolerate a null translated value first.
                return null;
            }
        }
        /** Translates a generated Conversion into the public AssetConversion model. */
        private static AssetConversion fromGenerated(com.azure.mixedreality.remoterendering.implementation.models.Conversion conversion) {
            if (conversion == null) {
                return null;
            }
            return new AssetConversion(
                conversion.getId(),
                fromGenerated(conversion.getSettings()),
                conversion.getOutput() != null ? conversion.getOutput().getOutputAssetUri() : null,
                fromGenerated(conversion.getError()),
                AssetConversionStatus.fromString(conversion.getStatus().toString()),
                conversion.getCreationTime());
        }
        /**
         * Translates generated SessionProperties into the public RenderingSession model.
         * Null numeric fields default to zero; minute counts become Durations.
         */
        private static RenderingSession fromGenerated(SessionProperties sessionProperties) {
            if (sessionProperties == null) {
                return null;
            }
            return new RenderingSession(
                sessionProperties.getId(),
                Optional.ofNullable(sessionProperties.getArrInspectorPort()).orElse(0),
                Optional.ofNullable(sessionProperties.getHandshakePort()).orElse(0),
                Duration.ofMinutes(Optional.ofNullable(sessionProperties.getElapsedTimeMinutes()).orElse(0)),
                sessionProperties.getHostname(),
                Duration.ofMinutes(Optional.ofNullable(sessionProperties.getMaxLeaseTimeMinutes()).orElse(0)),
                RenderingSessionSize.fromString(sessionProperties.getSize().toString()),
                RenderingSessionStatus.fromString(sessionProperties.getStatus().toString()),
                Optional.ofNullable(sessionProperties.getTeraflops()).orElse(0.0f),
                fromGenerated(sessionProperties.getError()),
                sessionProperties.getCreationTime());
        }
        /** Translates a generated Error (recursively, including inner error and details). */
        private static RemoteRenderingServiceError fromGenerated(com.azure.mixedreality.remoterendering.implementation.models.Error error) {
            if (error == null) {
                return null;
            }
            return new RemoteRenderingServiceError(
                error.getCode(),
                error.getMessage(),
                error.getTarget(),
                fromGenerated(error.getInnerError()),
                (error.getDetails() != null) ? error.getDetails().stream().map(ModelTranslator::fromGenerated).collect(Collectors.toList()) : null);
        }
        /** Translates generated ConversionSettings into the public AssetConversionOptions. */
        private static AssetConversionOptions fromGenerated(com.azure.mixedreality.remoterendering.implementation.models.ConversionSettings settings) {
            if (settings == null) {
                return null;
            }
            return new AssetConversionOptions()
                .setInputBlobPrefix(settings.getInputLocation().getBlobPrefix())
                .setInputRelativeAssetPath(settings.getInputLocation().getRelativeInputAssetPath())
                .setInputStorageContainerReadListSas(settings.getInputLocation().getStorageContainerReadListSas())
                .setInputStorageContainerUrl(settings.getInputLocation().getStorageContainerUri())
                .setOutputAssetFilename(settings.getOutputLocation().getOutputAssetFilename())
                .setOutputBlobPrefix(settings.getOutputLocation().getBlobPrefix())
                .setOutputStorageContainerUrl(settings.getOutputLocation().getStorageContainerUri())
                .setOutputStorageContainerWriteSas(settings.getOutputLocation().getStorageContainerWriteSas());
        }
        /** Translates public AssetConversionOptions into generated ConversionSettings. */
        private static ConversionSettings toGenerated(AssetConversionOptions conversionOptions) {
            if (conversionOptions == null) {
                return null;
            }
            return new ConversionSettings(
                new ConversionInputSettings(
                    conversionOptions.getInputStorageContainerUrl(),
                    conversionOptions.getInputRelativeAssetPath())
                    .setStorageContainerReadListSas(conversionOptions.getInputStorageContainerReadListSas())
                    .setBlobPrefix(conversionOptions.getInputBlobPrefix()),
                new ConversionOutputSettings(conversionOptions.getOutputStorageContainerUrl())
                    .setStorageContainerWriteSas(conversionOptions.getOutputStorageContainerWriteSas())
                    .setBlobPrefix(conversionOptions.getOutputBlobPrefix())
                    .setOutputAssetFilename(conversionOptions.getOutputAssetFilename())
            );
        }
        /** Translates public UpdateSessionOptions into generated UpdateSessionSettings (minutes). */
        private static UpdateSessionSettings toGenerated(UpdateSessionOptions options) {
            if (options == null) {
                return null;
            }
            return new UpdateSessionSettings((int) options.getMaxLeaseTime().toMinutes());
        }
        /** Translates public BeginSessionOptions into generated CreateSessionSettings. */
        private static CreateSessionSettings toGenerated(BeginSessionOptions options) {
            if (options == null) {
                return null;
            }
            return new CreateSessionSettings((int) options.getMaxLeaseTime().toMinutes(), com.azure.mixedreality.remoterendering.implementation.models.SessionSize.fromString(options.getSize().toString()));
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.ml;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import lombok.Getter;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
/**
* Table specification for running test on a table.
*/
/**
 * Table specification for running test on a table.
 *
 * <p>Collects everything needed to evaluate a model against a Hive table: the input table,
 * feature and label columns, the algorithm/model identifiers, and the output table the
 * predictions are written to. Build instances via {@link #newBuilder()}, then call
 * {@link #validate()} before generating queries.
 */
public class TableTestingSpec {
  /** The Constant LOG. */
  public static final Log LOG = LogFactory.getLog(TableTestingSpec.class);
  /** The db (null means Hive's default database). */
  private String db;
  /** The table containing input data. */
  private String inputTable;
  // TODO use partition condition
  /** The partition filter. */
  private String partitionFilter;
  /** The feature columns. */
  private List<String> featureColumns;
  /** The label column. */
  private String labelColumn;
  /** The output column. */
  private String outputColumn;
  /** The output table. */
  private String outputTable;
  /** The conf. */
  private transient HiveConf conf;
  /** The algorithm. */
  private String algorithm;
  /** The model id. */
  private String modelID;
  /** Whether the output table already exists; populated by {@link #validate()}. */
  @Getter
  private boolean outputTableExists;
  /** Unique test id; used as the value of the output table's part_testid partition. */
  public String testID;
  /** Column name to schema lookup for the input table; populated by {@link #validate()}. */
  private HashMap<String, FieldSchema> columnNameToFieldSchema;
  /**
   * The Class TableTestingSpecBuilder.
   */
  public static class TableTestingSpecBuilder {
    /** The spec being built. */
    private final TableTestingSpec spec;
    /**
     * Instantiates a new table testing spec builder.
     */
    public TableTestingSpecBuilder() {
      spec = new TableTestingSpec();
    }
    /**
     * Database.
     *
     * @param database
     *          the database
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder database(String database) {
      spec.db = database;
      return this;
    }
    /**
     * Set the input table
     *
     * @param table
     *          the table
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder inputTable(String table) {
      spec.inputTable = table;
      return this;
    }
    /**
     * Partition filter for input table
     *
     * @param partFilter
     *          the part filter
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder partitionFilter(String partFilter) {
      spec.partitionFilter = partFilter;
      return this;
    }
    /**
     * Feature columns.
     *
     * @param featureColumns
     *          the feature columns
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder featureColumns(List<String> featureColumns) {
      spec.featureColumns = featureColumns;
      return this;
    }
    /**
     * Label column.
     *
     * @param labelColumn
     *          the label column
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder labelColumn(String labelColumn) {
      spec.labelColumn = labelColumn;
      return this;
    }
    /**
     * Label column.
     *
     * @param labelColumn
     *          the label column
     * @return the table testing spec builder
     * @deprecated misspelled name kept for backward compatibility; use
     *             {@link #labelColumn(String)} instead.
     */
    @Deprecated
    public TableTestingSpecBuilder labeColumn(String labelColumn) {
      return labelColumn(labelColumn);
    }
    /**
     * Output column.
     *
     * @param outputColumn
     *          the output column
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder outputColumn(String outputColumn) {
      spec.outputColumn = outputColumn;
      return this;
    }
    /**
     * Output table.
     *
     * @param table
     *          the table
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder outputTable(String table) {
      spec.outputTable = table;
      return this;
    }
    /**
     * Hive conf.
     *
     * @param conf
     *          the conf
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder hiveConf(HiveConf conf) {
      spec.conf = conf;
      return this;
    }
    /**
     * Algorithm.
     *
     * @param algorithm
     *          the algorithm
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder algorithm(String algorithm) {
      spec.algorithm = algorithm;
      return this;
    }
    /**
     * Model id.
     *
     * @param modelID
     *          the model id
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder modelID(String modelID) {
      spec.modelID = modelID;
      return this;
    }
    /**
     * Builds the spec.
     *
     * @return the table testing spec
     */
    public TableTestingSpec build() {
      return spec;
    }
    /**
     * Set the unique test id
     *
     * @param testID the unique test id
     * @return the table testing spec builder
     */
    public TableTestingSpecBuilder testID(String testID) {
      spec.testID = testID;
      return this;
    }
  }
  /**
   * New builder.
   *
   * @return the table testing spec builder
   */
  public static TableTestingSpecBuilder newBuilder() {
    return new TableTestingSpecBuilder();
  }
  /**
   * Validate the spec against the Hive metastore: resolves the input table's columns,
   * records whether the output table exists, and checks that the configured feature/label
   * columns are present and the output column/table are set.
   *
   * @return true, if successful
   */
  public boolean validate() {
    List<FieldSchema> columns;
    try {
      Hive metastoreClient = Hive.get(conf);
      Table tbl = (db == null) ? metastoreClient.getTable(inputTable) : metastoreClient.getTable(db, inputTable);
      columns = tbl.getAllCols();
      columnNameToFieldSchema = new HashMap<String, FieldSchema>();
      for (FieldSchema fieldSchema : columns) {
        columnNameToFieldSchema.put(fieldSchema.getName(), fieldSchema);
      }
      // Check if output table exists
      Table outTbl = metastoreClient.getTable(db == null ? "default" : db, outputTable, false);
      outputTableExists = (outTbl != null);
    } catch (HiveException exc) {
      LOG.error("Error getting table info " + toString(), exc);
      return false;
    }
    // Check if labeled column and feature columns are contained in the table
    List<String> testTableColumns = new ArrayList<String>(columns.size());
    for (FieldSchema column : columns) {
      testTableColumns.add(column.getName());
    }
    if (!testTableColumns.containsAll(featureColumns)) {
      LOG.info("Invalid feature columns: " + featureColumns + ". Actual columns in table:" + testTableColumns);
      return false;
    }
    if (!testTableColumns.contains(labelColumn)) {
      LOG.info("Invalid label column: " + labelColumn + ". Actual columns in table:" + testTableColumns);
      return false;
    }
    if (StringUtils.isBlank(outputColumn)) {
      LOG.info("Output column is required");
      return false;
    }
    if (StringUtils.isBlank(outputTable)) {
      LOG.info("Output table is required");
      return false;
    }
    return true;
  }
  /**
   * Builds the HiveQL test query: inserts features, label and the predict() output into a
   * dynamic partition of the output table keyed by {@link #testID}.
   *
   * @return the query string, or null if {@link #validate()} fails
   */
  public String getTestQuery() {
    if (!validate()) {
      return null;
    }
    // We always insert a dynamic partition
    StringBuilder q = new StringBuilder("INSERT OVERWRITE TABLE " + outputTable + " PARTITION (part_testid='" + testID
      + "') SELECT ");
    String featureCols = StringUtils.join(featureColumns, ",");
    q.append(featureCols).append(",").append(labelColumn).append(", ").append("predict(").append("'").append(algorithm)
      .append("', ").append("'").append(modelID).append("', ").append(featureCols).append(") ").append(outputColumn)
      .append(" FROM ").append(inputTable);
    return q.toString();
  }
  /**
   * Builds the DDL to create the output table: feature columns and label column with types
   * copied from the input table, the output column as string, partitioned by part_testid.
   * Requires {@link #validate()} to have populated the column schema lookup.
   *
   * @return the create-table query string
   */
  public String getCreateOutputTableQuery() {
    StringBuilder createTableQuery = new StringBuilder("CREATE TABLE IF NOT EXISTS ").append(outputTable).append("(");
    // Output table contains feature columns, label column, output column
    List<String> outputTableColumns = new ArrayList<String>();
    for (String featureCol : featureColumns) {
      outputTableColumns.add(featureCol + " " + columnNameToFieldSchema.get(featureCol).getType());
    }
    outputTableColumns.add(labelColumn + " " + columnNameToFieldSchema.get(labelColumn).getType());
    outputTableColumns.add(outputColumn + " string");
    createTableQuery.append(StringUtils.join(outputTableColumns, ", "));
    // Append partition column
    createTableQuery.append(") PARTITIONED BY (part_testid string)");
    return createTableQuery.toString();
  }
}
| |
/*
* Entwined STM
*
* (c) Copyright 2013 CERN. This software is distributed under the terms of the Apache License Version 2.0, copied
* verbatim in the file "COPYING". In applying this licence, CERN does not waive the privileges and immunities granted
* to it by virtue of its status as an Intergovernmental Organization or submit itself to any jurisdiction.
*/
package cern.entwined;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;
import cern.entwined.exception.ConflictException;
import com.google.common.collect.Iterators;
import com.google.common.collect.Sets;
/**
* Implementation of a transactional map. It logs all the reads and modifications of the data, and uses it at commit
* time to detect conflicting transactions.
*
* @param <K> The map key type.
* @param <V> The map value type.
* @author Ivan Koblik
*/
public class TransactionalMap<K, V> extends SemiPersistent<TransactionalMap<K, V>> implements OpaqueMap<K, V> {
    /**
     * The unmodifiable backbone of {@link TransactionalMap}: the committed state this
     * transaction was started from.
     */
    private final Map<K, V> sourceMap;
    /**
     * Map of key value pairs of all locally added or modified values.
     */
    private final Map<K, V> pendingModifications = new HashMap<K, V>();
    /**
     * Set of keys of all the items locally removed from the map.
     */
    private final Set<K> pendingDeletions = new HashSet<K>();
    /**
     * Set of keys of all the items that were accessed or attempted to be accessed (i.e. for non-existent keys).
     * Used at commit time to detect conflicting transactions.
     */
    private final Set<K> accessed = new HashSet<K>();
    /**
     * This flag is used to mark the "entire world" as has been accessed. The reasoning is quite simple; if a user knows
     * all the entries in the map he also knows all entries that aren't there, which may not be true in the context of
     * the global map. Due to this {@link TransactionalMap} disallows commit if global state changes in any way even if
     * it is just a new entry.
     */
    private boolean globallyAccessed = false;
    /**
     * This flag is set to true the first time clear method is called.
     */
    private boolean cleared = false;
/**
* Constructs a new empty {@link TransactionalMap}.
*/
@SuppressWarnings("unchecked")
public TransactionalMap() {
this(Collections.EMPTY_MAP, false);
}
/**
* Constructs new {@link TransactionalMap} initializing it with the given collection. Passed collection is copied.
*
* @param sourceMap The {@link TransactionalMap} initial state.
*/
public TransactionalMap(Map<K, V> sourceMap) {
this(sourceMap, true);
}
/**
* Constructs new {@link TransactionalMap} initializing it with the given collection.
*
* @param sourceMap The {@link TransactionalMap} initial state.
* @param cloneSource If true passed collection is copied.
*/
private TransactionalMap(Map<K, V> sourceMap, boolean cloneSource) {
Utils.checkNull("Source map", sourceMap);
if (cloneSource) {
this.sourceMap = Collections.unmodifiableMap(new HashMap<K, V>(sourceMap));
} else {
this.sourceMap = sourceMap;
}
}
@Override
public int size() {
this.markGloballyAccessed();
Set<K> keys = Sets.union(this.sourceMap.keySet(), this.pendingModifications.keySet());
return keys.size() - this.pendingDeletions.size();
}
@Override
public boolean isEmpty() {
boolean empty = (this.sourceMap.size() == this.pendingDeletions.size()) && this.pendingModifications.isEmpty();
if (empty) {
this.markGloballyAccessed();
}
return empty;
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.OpaqueMap#clear()
*/
@Override
public void clear() {
this.markCleared();
this.pendingDeletions.addAll(sourceMap.keySet());
this.pendingModifications.clear();
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.OpaqueMap#containsKey(java.lang.Object)
*/
@Override
public boolean containsKey(K key) {
this.markAccessed(key);
return (sourceMap.containsKey(key) || this.pendingModifications.containsKey(key))
&& !pendingDeletions.contains(key);
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.OpaqueMap#get(java.lang.Object)
*/
@Override
public V get(K key) {
this.markAccessed(key);
if (this.pendingDeletions.contains(key)) {
return null;
} else if (this.pendingModifications.containsKey(key)) {
return this.pendingModifications.get(key);
}
return this.sourceMap.get(key);
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.OpaqueMap#put(java.lang.Object, java.lang.Object)
*/
@Override
public V put(K key, V value) {
this.accessed.add(key);
this.pendingDeletions.remove(key);
V oldValue = this.pendingModifications.put(key, value);
return null != oldValue ? oldValue : this.sourceMap.get(key);
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.OpaqueMap#putAll(java.util.Map)
*/
@Override
public void putAll(Map<? extends K, ? extends V> m) {
Utils.checkNull("Map", m);
this.accessed.addAll(m.keySet());
for (Entry<? extends K, ? extends V> entry : m.entrySet()) {
this.put(entry.getKey(), entry.getValue());
}
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.OpaqueMap#remove(java.lang.Object)
*/
@Override
public V remove(K key) {
V oldValue = this.get(key); // Getting old value and marking it as accessed
if (this.sourceMap.containsKey(key)) {
this.pendingDeletions.add(key);
}
this.pendingModifications.remove(key);
return oldValue;
}
//
// Views
//
@Override
public Set<K> keySet() {
return new KeySet();
}
//
// Transactional methods
//
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.Transactional#cleanCopy()
*/
@Override
public TransactionalMap<K, V> cleanCopy() {
return new TransactionalMap<K, V>(this.sourceMap, false);
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.SemiPersistent#dirtyCopy()
*/
@Override
protected TransactionalMap<K, V> dirtyCopy() {
TransactionalMap<K, V> copy = new TransactionalMap<K, V>(this.sourceMap, false);
copy.globallyAccessed = this.globallyAccessed;
copy.markAccessed(this.accessed);
copy.pendingDeletions.addAll(this.pendingDeletions);
copy.pendingModifications.putAll(this.pendingModifications);
return copy;
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.SemiPersistent#update(java.lang.Object, boolean)
*/
@Override
protected void update(TransactionalMap<K, V> changes, boolean onlyReadLogs) {
Utils.checkNull("Local changes", changes);
if (this.sourceMap != changes.sourceMap) {
throw new IllegalArgumentException("Updates are only possible for collections with the same source");
}
if (changes.globallyAccessed) {
markGloballyAccessed();
}
this.markAccessed(changes.accessed);
if (!onlyReadLogs) {
this.pendingModifications.clear();
this.pendingModifications.putAll(changes.pendingModifications);
this.pendingDeletions.clear();
this.pendingDeletions.addAll(changes.pendingDeletions);
}
}
/*
* (non-Javadoc)
*
* @see cern.oasis.server.stm.ConflictAware#commit(java.lang.Object)
*/
@Override
public TransactionalMap<K, V> commit(TransactionalMap<K, V> globalState) {
Utils.checkNull("Global state", globalState);
if (!globalState.pendingDeletions.isEmpty() || !globalState.pendingModifications.isEmpty()
|| !globalState.accessed.isEmpty() || globalState.globallyAccessed) {
throw new IllegalArgumentException("Global state map must be commited before calling this method");
}
// Checking for conflicts
if (this.globallyAccessed) {
if (!globalState.sourceMap.equals(this.sourceMap)) {
throw new ConflictException("All the items of this map have been accessed "
+ "this prohibits commit in the case of concurrent changes");
}
}
for (K key : this.accessed) {
checkConsistency(globalState.sourceMap, key);
}
// Return current global state if there are no local modifications
if (this.pendingDeletions.isEmpty() && this.pendingModifications.isEmpty()) {
return globalState;
}
// Getting a copy of the global map
HashMap<K, V> globalMapCopy = new HashMap<K, V>(globalState.sourceMap);
// Apply addition or modification
for (Entry<K, V> entry : this.pendingModifications.entrySet()) {
globalMapCopy.put(entry.getKey(), entry.getValue());
}
// Apply deletion
for (K key : this.pendingDeletions) {
globalMapCopy.remove(key);
}
// Returning a new instance of the map
return new TransactionalMap<K, V>(globalMapCopy);
}
//
// Private methods
//
/**
* Marks given key as accessed.
*
* @param key The key to mark as accessed.
*/
private void markAccessed(K key) {
if (!this.globallyAccessed) {
this.accessed.add(key);
}
}
/**
* Marks given collection of keys as accessed.
*
* @param key The keys to mark as accessed.
*/
private void markAccessed(Collection<K> keys) {
if (!this.globallyAccessed) {
this.accessed.addAll(keys);
}
}
/**
* Marks the entire space of keys as accessed unless the map has been cleared.
*/
private void markGloballyAccessed() {
if (!this.cleared) {
// Global access is allowed after the map has been cleared.
this.globallyAccessed = true;
this.accessed.clear();
}
}
/**
* Marks the map as cleared and marks all its items as accessed unless it has already been globally accessed.
*/
private void markCleared() {
if (!this.globallyAccessed) {
this.cleared = true;
this.markAccessed(sourceMap.keySet());
}
}
/**
* Simply checks if values corresponding to the key are the <b>same</b> in the global and source maps.
*
* @param globalMap The global map.
* @param key The key corresponding to the value to be checked.
*/
private void checkConsistency(Map<K, V> globalMap, K key) {
V sourceValue = this.sourceMap.get(key);
V globalValue = globalMap.get(key);
if ((sourceValue != globalValue) || // <br>
((null == sourceValue || null == globalValue)// <br>
&& (this.sourceMap.containsKey(key) ^ globalMap.containsKey(key)))) {
throw new ConflictException("Conflicting changes for [" + key + "]");
}
}
/**
* Dynamic view on the keys of the map.
*
* @author Ivan Koblik
*/
private class KeySet extends AbstractSet<K> {
@Override
public Iterator<K> iterator() {
return new KeyIterator();
}
@Override
public int size() {
return TransactionalMap.this.size();
}
@Override
public boolean isEmpty() {
return TransactionalMap.this.isEmpty();
}
@Override
@SuppressWarnings("unchecked")
public boolean contains(Object o) {
return containsKey((K) o);
}
@Override
@SuppressWarnings("unchecked")
public boolean remove(Object o) {
boolean result = TransactionalMap.this.containsKey((K) o);
TransactionalMap.this.remove((K) o);
return result;
}
@Override
public boolean removeAll(Collection<?> c) {
boolean modified = false;
for (Iterator<?> i = c.iterator(); i.hasNext();) {
modified |= this.remove(i.next());
}
return modified;
}
@Override
public boolean retainAll(Collection<?> c) {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
TransactionalMap.this.clear();
}
}
/**
* Iterator over the keys of the map.
*
* @author Ivan Koblik
*/
private class KeyIterator implements Iterator<K> {
/**
* The delegate iterator that is a concatenation of sourceMap and pendingModifications minus pendingDeletions.
*/
private final Iterator<K> keyIterator;
/**
* Constructs the iterator by initializing the delegate iterator.
*/
public KeyIterator() {
// Concatenate iterators of sourceMap and pendingModifications.
Iterator<K> unfiltered = Iterators.concat(TransactionalMap.this.sourceMap.keySet().iterator(),
TransactionalMap.this.pendingModifications.keySet().iterator());
// Remove the elements from pendingDeletions.
keyIterator = Iterators.filter(unfiltered, not(in(TransactionalMap.this.pendingDeletions)));
}
@Override
public boolean hasNext() {
boolean hasNext = keyIterator.hasNext();
if (!hasNext) {
TransactionalMap.this.markGloballyAccessed();
}
return hasNext;
}
@Override
public K next() {
try {
return keyIterator.next();
} catch (NoSuchElementException ex) {
TransactionalMap.this.markGloballyAccessed();
throw ex;
}
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticmapreduce.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Modify an instance group size.
 * </p>
 * <p>
 * NOTE: this class is code-generated ({@code @Generated}); keep edits limited to regeneration-safe changes.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/InstanceGroupModifyConfig"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InstanceGroupModifyConfig implements Serializable, Cloneable, StructuredPojo {
    /**
     * <p>
     * Unique ID of the instance group to expand or shrink.
     * </p>
     */
    private String instanceGroupId;
    /**
     * <p>
     * Target size for the instance group.
     * </p>
     */
    private Integer instanceCount;
    /**
     * <p>
     * The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return to its
     * original requested size.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<String> eC2InstanceIdsToTerminate;
    /**
     * <p>
     * Policy for customizing shrink operations.
     * </p>
     */
    private ShrinkPolicy shrinkPolicy;
    /**
     * Default constructor for InstanceGroupModifyConfig object. Callers should use the setter or fluent setter
     * (with...) methods to initialize the object after creating it.
     */
    public InstanceGroupModifyConfig() {
    }
    /**
     * Constructs a new InstanceGroupModifyConfig object. Callers should use the setter or fluent setter (with...)
     * methods to initialize any additional object members.
     *
     * @param instanceGroupId
     *        Unique ID of the instance group to expand or shrink.
     * @param instanceCount
     *        Target size for the instance group.
     */
    public InstanceGroupModifyConfig(String instanceGroupId, Integer instanceCount) {
        setInstanceGroupId(instanceGroupId);
        setInstanceCount(instanceCount);
    }
    /**
     * <p>
     * Unique ID of the instance group to expand or shrink.
     * </p>
     *
     * @param instanceGroupId
     *        Unique ID of the instance group to expand or shrink.
     */
    public void setInstanceGroupId(String instanceGroupId) {
        this.instanceGroupId = instanceGroupId;
    }
    /**
     * <p>
     * Unique ID of the instance group to expand or shrink.
     * </p>
     *
     * @return Unique ID of the instance group to expand or shrink.
     */
    public String getInstanceGroupId() {
        return this.instanceGroupId;
    }
    /**
     * <p>
     * Unique ID of the instance group to expand or shrink.
     * </p>
     *
     * @param instanceGroupId
     *        Unique ID of the instance group to expand or shrink.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public InstanceGroupModifyConfig withInstanceGroupId(String instanceGroupId) {
        setInstanceGroupId(instanceGroupId);
        return this;
    }
    /**
     * <p>
     * Target size for the instance group.
     * </p>
     *
     * @param instanceCount
     *        Target size for the instance group.
     */
    public void setInstanceCount(Integer instanceCount) {
        this.instanceCount = instanceCount;
    }
    /**
     * <p>
     * Target size for the instance group.
     * </p>
     *
     * @return Target size for the instance group.
     */
    public Integer getInstanceCount() {
        return this.instanceCount;
    }
    /**
     * <p>
     * Target size for the instance group.
     * </p>
     *
     * @param instanceCount
     *        Target size for the instance group.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public InstanceGroupModifyConfig withInstanceCount(Integer instanceCount) {
        setInstanceCount(instanceCount);
        return this;
    }
    /**
     * <p>
     * The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return to its
     * original requested size.
     * </p>
     *
     * @return The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return
     *         to its original requested size.
     */
    public java.util.List<String> getEC2InstanceIdsToTerminate() {
        // Lazily initialize so the getter never returns null (standard SDK auto-construct behavior).
        if (eC2InstanceIdsToTerminate == null) {
            eC2InstanceIdsToTerminate = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return eC2InstanceIdsToTerminate;
    }
    /**
     * <p>
     * The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return to its
     * original requested size.
     * </p>
     *
     * @param eC2InstanceIdsToTerminate
     *        The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return to
     *        its original requested size.
     */
    public void setEC2InstanceIdsToTerminate(java.util.Collection<String> eC2InstanceIdsToTerminate) {
        // Passing null clears the field; otherwise the collection is defensively copied.
        if (eC2InstanceIdsToTerminate == null) {
            this.eC2InstanceIdsToTerminate = null;
            return;
        }
        this.eC2InstanceIdsToTerminate = new com.amazonaws.internal.SdkInternalList<String>(eC2InstanceIdsToTerminate);
    }
    /**
     * <p>
     * The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return to its
     * original requested size.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setEC2InstanceIdsToTerminate(java.util.Collection)} or
     * {@link #withEC2InstanceIdsToTerminate(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param eC2InstanceIdsToTerminate
     *        The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return to
     *        its original requested size.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public InstanceGroupModifyConfig withEC2InstanceIdsToTerminate(String... eC2InstanceIdsToTerminate) {
        // First call pre-sizes the backing list to the varargs length; subsequent calls append.
        if (this.eC2InstanceIdsToTerminate == null) {
            setEC2InstanceIdsToTerminate(new com.amazonaws.internal.SdkInternalList<String>(eC2InstanceIdsToTerminate.length));
        }
        for (String ele : eC2InstanceIdsToTerminate) {
            this.eC2InstanceIdsToTerminate.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return to its
     * original requested size.
     * </p>
     *
     * @param eC2InstanceIdsToTerminate
     *        The EC2 InstanceIds to terminate. After you terminate the instances, the instance group will not return to
     *        its original requested size.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public InstanceGroupModifyConfig withEC2InstanceIdsToTerminate(java.util.Collection<String> eC2InstanceIdsToTerminate) {
        setEC2InstanceIdsToTerminate(eC2InstanceIdsToTerminate);
        return this;
    }
    /**
     * <p>
     * Policy for customizing shrink operations.
     * </p>
     *
     * @param shrinkPolicy
     *        Policy for customizing shrink operations.
     */
    public void setShrinkPolicy(ShrinkPolicy shrinkPolicy) {
        this.shrinkPolicy = shrinkPolicy;
    }
    /**
     * <p>
     * Policy for customizing shrink operations.
     * </p>
     *
     * @return Policy for customizing shrink operations.
     */
    public ShrinkPolicy getShrinkPolicy() {
        return this.shrinkPolicy;
    }
    /**
     * <p>
     * Policy for customizing shrink operations.
     * </p>
     *
     * @param shrinkPolicy
     *        Policy for customizing shrink operations.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public InstanceGroupModifyConfig withShrinkPolicy(ShrinkPolicy shrinkPolicy) {
        setShrinkPolicy(shrinkPolicy);
        return this;
    }
    /**
     * Returns a string representation of this object; useful for testing and debugging. Only non-null members are
     * included.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getInstanceGroupId() != null)
            sb.append("InstanceGroupId: ").append(getInstanceGroupId()).append(",");
        if (getInstanceCount() != null)
            sb.append("InstanceCount: ").append(getInstanceCount()).append(",");
        if (getEC2InstanceIdsToTerminate() != null)
            sb.append("EC2InstanceIdsToTerminate: ").append(getEC2InstanceIdsToTerminate()).append(",");
        if (getShrinkPolicy() != null)
            sb.append("ShrinkPolicy: ").append(getShrinkPolicy());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof InstanceGroupModifyConfig == false)
            return false;
        InstanceGroupModifyConfig other = (InstanceGroupModifyConfig) obj;
        // For each member: the XOR detects "exactly one side null"; the second check compares non-null values.
        if (other.getInstanceGroupId() == null ^ this.getInstanceGroupId() == null)
            return false;
        if (other.getInstanceGroupId() != null && other.getInstanceGroupId().equals(this.getInstanceGroupId()) == false)
            return false;
        if (other.getInstanceCount() == null ^ this.getInstanceCount() == null)
            return false;
        if (other.getInstanceCount() != null && other.getInstanceCount().equals(this.getInstanceCount()) == false)
            return false;
        if (other.getEC2InstanceIdsToTerminate() == null ^ this.getEC2InstanceIdsToTerminate() == null)
            return false;
        if (other.getEC2InstanceIdsToTerminate() != null && other.getEC2InstanceIdsToTerminate().equals(this.getEC2InstanceIdsToTerminate()) == false)
            return false;
        if (other.getShrinkPolicy() == null ^ this.getShrinkPolicy() == null)
            return false;
        if (other.getShrinkPolicy() != null && other.getShrinkPolicy().equals(this.getShrinkPolicy()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        // Standard 31-based accumulation over the same members equals() inspects (null hashes as 0).
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getInstanceGroupId() == null) ? 0 : getInstanceGroupId().hashCode());
        hashCode = prime * hashCode + ((getInstanceCount() == null) ? 0 : getInstanceCount().hashCode());
        hashCode = prime * hashCode + ((getEC2InstanceIdsToTerminate() == null) ? 0 : getEC2InstanceIdsToTerminate().hashCode());
        hashCode = prime * hashCode + ((getShrinkPolicy() == null) ? 0 : getShrinkPolicy().hashCode());
        return hashCode;
    }
    @Override
    public InstanceGroupModifyConfig clone() {
        try {
            // Shallow copy is sufficient for this POJO's immutable/replaceable members.
            return (InstanceGroupModifyConfig) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable: the class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // Wire-format serialization is delegated to the generated marshaller.
        com.amazonaws.services.elasticmapreduce.model.transform.InstanceGroupModifyConfigMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2017 Andrew Khitrin (ahitrin@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.ui.locks.manage;
import java.util.Collection;
import java.util.Map;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IContributionManager;
import org.eclipse.jface.viewers.*;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.IWorkbenchSite;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.ext.ui.locks.graph.LockGraphicalView;
import org.jkiss.dbeaver.ext.ui.locks.table.LockTable;
import org.jkiss.dbeaver.ext.ui.locks.table.LockTableDetail;
import org.jkiss.dbeaver.model.admin.locks.DBAServerLock;
import org.jkiss.dbeaver.model.admin.locks.DBAServerLockItem;
import org.jkiss.dbeaver.model.admin.locks.DBAServerLockManager;
import org.jkiss.dbeaver.ui.DBeaverIcons;
import org.jkiss.dbeaver.ui.UIIcon;
import org.jkiss.dbeaver.ui.UIUtils;
/**
 * LockManagerViewer
 * <p>
 * UI composite that presents server locks: a lock list, two detail tables (waiting / holding side of the selected
 * lock) and a graphical view of the lock graph, plus a "kill waiting session" action.
 */
public class LockManagerViewer {

    /** Option key under which the detail tables receive their role. */
    public static final String keyType = "type";
    /** Detail-table role: sessions waiting on the selected lock. */
    public static final String typeWait = "wait";
    /** Detail-table role: sessions holding the selected lock. */
    public static final String typeHold = "hold";

    private Font boldFont;
    private LockListControl lockTable;
    private LockTableDetail blockedTable;
    private LockTableDetail blockingTable;
    private Label blockedLabel;
    private Label blockingLabel;
    // Currently selected lock; may be null until the first selection.
    private DBAServerLock<?> curLock;
    private LockGraphManager<?, ?> graphManager;
    private LockGraphicalView gv;
    @SuppressWarnings("unused")
    private final DBAServerLockManager<DBAServerLock<?>, DBAServerLockItem> lockManager;

    // Kills the session at the root of the selected lock's graph, then refreshes list, graph and selection.
    private Action killAction = new Action("Kill waiting session", UIUtils.getShardImageDescriptor(ISharedImages.IMG_ELCL_STOP)) {
        @Override
        public void run() {
            if (curLock != null) {
                DBAServerLock<?> root = graphManager.getGraph(curLock).getLockRoot();
                alterSession();
                refreshLocks(root);
                setTableLockSelect(root);
            }
        }
    };

    public LockGraphManager<?, ?> getGraphManager() {
        return graphManager;
    }

    /** Releases UI resources owned by this viewer. */
    public void dispose() {
        lockTable.disposeControl();
        UIUtils.dispose(boldFont);
    }

    /**
     * Builds the viewer UI: lock list + graph view in a horizontal sash, detail tables below the list.
     *
     * @param part        the hosting workbench part
     * @param parent      parent composite
     * @param lockManager lock manager; must also implement {@link LockGraphManager} (see cast below)
     */
    protected LockManagerViewer(IWorkbenchPart part, Composite parent, final DBAServerLockManager<DBAServerLock<?>, DBAServerLockItem> lockManager) {
        // NOTE(review): assumes every supplied lock manager also implements LockGraphManager — confirm with callers.
        this.graphManager = (LockGraphManager<?, ?>) lockManager;
        boldFont = UIUtils.makeBoldFont(parent.getFont());
        Composite composite = UIUtils.createPlaceholder(parent, 1);
        SashForm sashMain = UIUtils.createPartDivider(part, composite, SWT.HORIZONTAL | SWT.SMOOTH);
        sashMain.setLayoutData(new GridData(GridData.FILL_BOTH));
        SashForm sash = UIUtils.createPartDivider(part, sashMain, SWT.VERTICAL | SWT.SMOOTH);
        sash.setLayoutData(new GridData(GridData.FILL_BOTH));
        this.lockManager = lockManager;
        lockTable = new LockListControl(sash, part.getSite(), lockManager, lockManager.getLocksType());
        lockTable.createProgressPanel(composite);
        lockTable.getItemsViewer().addSelectionChangedListener(new ISelectionChangedListener() {
            @Override
            public void selectionChanged(SelectionChangedEvent event) {
                onLockSelect(getSelectedLock());
            }
        });
        lockTable.loadData();
        SashForm infoSash = UIUtils.createPartDivider(part, sash, SWT.HORIZONTAL | SWT.SMOOTH);
        infoSash.setLayoutData(new GridData(GridData.FILL_BOTH));
        Composite cBlocked = UIUtils.createPlaceholder(infoSash, 1, 5);
        blockedLabel = new Label(cBlocked, SWT.NULL);
        blockedLabel.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        blockedLabel.setFont(boldFont);
        blockedTable = new LockTableDetail(cBlocked, SWT.SHEET, part.getSite(), lockManager);
        blockedTable.setLayoutData(new GridData(GridData.FILL_BOTH));
        Composite cBlocking = UIUtils.createPlaceholder(infoSash, 1, 5);
        blockingLabel = new Label(cBlocking, SWT.NULL);
        blockingLabel.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
        blockingLabel.setFont(boldFont);
        blockingTable = new LockTableDetail(cBlocking, SWT.SHEET, part.getSite(), lockManager);
        blockingTable.setLayoutData(new GridData(GridData.FILL_BOTH));
        gv = new LockGraphicalView(this);
        gv.createPartControl(sashMain);
        sashMain.setWeights(new int[]{3, 1});
        sash.setWeights(new int[]{4, 1});
    }

    /** Remembers the selection and redraws the lock graph for it. */
    protected void onLockSelect(DBAServerLock<?> lock) {
        curLock = lock;
        refreshGraph(curLock);
    }

    /** Programmatically selects the given lock in the list (redraw suppressed during the update). */
    public void setTableLockSelect(DBAServerLock<?> lock) {
        ColumnViewer itemsViewer = lockTable.getItemsViewer();
        itemsViewer.getControl().setRedraw(false);
        try {
            itemsViewer.setSelection(new StructuredSelection(lock), true);
        } finally {
            itemsViewer.getControl().setRedraw(true);
        }
        curLock = lock;
    }

    /** Hook for subclasses to contribute extra toolbar items; default does nothing. */
    protected void contributeToToolbar(DBAServerLockManager<DBAServerLock<?>, DBAServerLockItem> sessionManager, IContributionManager contributionManager) {
    }

    public Action getKillAction() {
        return killAction;
    }

    /** Returns the lock selected in the list, or null when the selection is empty. */
    private DBAServerLock<?> getSelectedLock() {
        ISelection selection = lockTable.getSelectionProvider().getSelection();
        if (selection instanceof IStructuredSelection && !selection.isEmpty()) {
            return (DBAServerLock<?>) ((IStructuredSelection) selection).getFirstElement();
        } else {
            return null;
        }
    }

    private void refreshGraph(DBAServerLock<?> selected) {
        gv.drawGraf(selected);
    }

    /** Reloads the lock list and redraws the graph rooted at the given lock. */
    public void refreshLocks(DBAServerLock<?> selected) {
        lockTable.loadData(false);
        gv.drawGraf(selected);
    }

    /**
     * Refreshes both detail tables for the current lock, tagging each with its role via {@link #keyType}.
     *
     * @param options extra options passed through to both detail tables
     */
    public void refreshDetail(Map<String, Object> options) {
        StringBuilder sb = new StringBuilder("Wait - ");
        sb.append(curLock.getTitle());
        blockedLabel.setText(sb.toString());
        blockedTable.getOptions().putAll(options);
        blockedTable.getOptions().put(keyType, typeWait);
        blockedTable.loadData(false);
        sb.setLength(0);
        if (curLock.getHoldBy() != null) {
            sb.append("Hold - ");
            sb.append(curLock.getHoldBy().getTitle());
            blockingLabel.setText(sb.toString());
        }
        // NOTE(review): when getHoldBy() is null the blocking label keeps its previous text — possibly stale; confirm
        // whether it should be cleared here.
        blockingTable.getOptions().putAll(options);
        blockingTable.getOptions().put(keyType, typeHold);
        blockingTable.loadData();
    }

    /** Asks for confirmation, then schedules termination of the currently selected lock's session. */
    public void alterSession() {
        if (UIUtils.confirmAction(
            "Terminate",
            // Fixed user-facing typo: was "Teminate session?".
            NLS.bind("Terminate session?", "Terminate"))) {
            lockTable.createAlterService(curLock, null).schedule();
        }
    }

    public Composite getControl() {
        return lockTable.getControl();
    }

    /**
     * Lock list table; adds the refresh and kill actions to the toolbar.
     */
    private class LockListControl extends LockTable {

        private Class<DBAServerLock<?>> locksType;

        LockListControl(SashForm sash, IWorkbenchSite site, DBAServerLockManager<DBAServerLock<?>, DBAServerLockItem> lockManager, Class<DBAServerLock<?>> locksType) {
            super(sash, SWT.SHEET, site, lockManager);
            this.locksType = locksType;
        }

        @Nullable
        @Override
        protected Class<?>[] getListBaseTypes(Collection<DBAServerLock<?>> items) {
            return new Class[] { locksType };
        }

        @Override
        protected void fillCustomActions(IContributionManager contributionManager) {
            contributeToToolbar(getLockManager(), contributionManager);
            contributionManager.add(new Action("Refresh locks", DBeaverIcons.getImageDescriptor(UIIcon.REFRESH)) {
                @Override
                public void run() {
                    refreshLocks(curLock);
                }
            });
            contributionManager.add(killAction);
        }
    }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.masterdb.security.hibernate.index;
import java.util.Map;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.masterdb.security.hibernate.ExternalIdBean;
import com.opengamma.masterdb.security.hibernate.TenorBean;
/**
* A Hibernate bean representation of {@link IborIndexBean}.
*/
@BeanDefinition
public class IborIndexBean extends IndexBean {
@PropertyDefinition
private TenorBean _tenor;
@PropertyDefinition
private ExternalIdBean _conventionId;
  /**
   * Compares this bean to another by id, description, tenor and convention id.
   * <p>
   * Uses {@code instanceof}, so subclasses compare equal on these fields alone — NOTE(review): this is the usual
   * Hibernate-proxy-friendly pattern, but it is asymmetric if a subclass tightens equals; confirm intent.
   *
   * @param other the object to compare to, may be null
   * @return true if equal on the listed properties
   */
  @Override
  public boolean equals(final Object other) {
    if (!(other instanceof IborIndexBean)) {
      return false;
    }
    IborIndexBean index = (IborIndexBean) other;
    return new EqualsBuilder()
        .append(getId(), index.getId())
        .append(getDescription(), index.getDescription())
        .append(getTenor(), index.getTenor())
        .append(getConventionId(), index.getConventionId())
        .isEquals();
  }
  /**
   * Hash code over description, tenor and convention id.
   * <p>
   * The database {@code id} used by {@link #equals(Object)} is deliberately omitted — presumably so transient and
   * persisted instances hash identically (a common Hibernate-entity pattern); the equals/hashCode contract still
   * holds since equal beans share the remaining fields. TODO confirm this omission is intentional.
   */
  @Override
  public int hashCode() {
    return new HashCodeBuilder()
        .append(getDescription())
        .append(getTenor())
        .append(getConventionId())
        .toHashCode();
  }
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code IborIndexBean}.
* @return the meta-bean, not null
*/
public static IborIndexBean.Meta meta() {
return IborIndexBean.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(IborIndexBean.Meta.INSTANCE);
}
@Override
public IborIndexBean.Meta metaBean() {
return IborIndexBean.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
* Gets the tenor.
* @return the value of the property
*/
public TenorBean getTenor() {
return _tenor;
}
/**
* Sets the tenor.
* @param tenor the new value of the property
*/
public void setTenor(TenorBean tenor) {
this._tenor = tenor;
}
/**
* Gets the the {@code tenor} property.
* @return the property, not null
*/
public final Property<TenorBean> tenor() {
return metaBean().tenor().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the conventionId.
* @return the value of the property
*/
public ExternalIdBean getConventionId() {
return _conventionId;
}
/**
* Sets the conventionId.
* @param conventionId the new value of the property
*/
public void setConventionId(ExternalIdBean conventionId) {
this._conventionId = conventionId;
}
/**
* Gets the the {@code conventionId} property.
* @return the property, not null
*/
public final Property<ExternalIdBean> conventionId() {
return metaBean().conventionId().createProperty(this);
}
//-----------------------------------------------------------------------
@Override
public IborIndexBean clone() {
return JodaBeanUtils.cloneAlways(this);
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(96);
buf.append("IborIndexBean{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
@Override
protected void toString(StringBuilder buf) {
super.toString(buf);
buf.append("tenor").append('=').append(JodaBeanUtils.toString(getTenor())).append(',').append(' ');
buf.append("conventionId").append('=').append(JodaBeanUtils.toString(getConventionId())).append(',').append(' ');
}
//-----------------------------------------------------------------------
/**
 * The meta-bean for {@code IborIndexBean}.
 * Exposes the {@code tenor} and {@code conventionId} meta-properties in
 * addition to those inherited from {@link IndexBean.Meta}.
 */
public static class Meta extends IndexBean.Meta {
/**
 * The singleton instance of the meta-bean.
 */
static final Meta INSTANCE = new Meta();
/**
 * The meta-property for the {@code tenor} property.
 */
private final MetaProperty<TenorBean> _tenor = DirectMetaProperty.ofReadWrite(
this, "tenor", IborIndexBean.class, TenorBean.class);
/**
 * The meta-property for the {@code conventionId} property.
 */
private final MetaProperty<ExternalIdBean> _conventionId = DirectMetaProperty.ofReadWrite(
this, "conventionId", IborIndexBean.class, ExternalIdBean.class);
/**
 * The meta-properties, combining the superclass map with the two
 * properties declared on this bean.
 */
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, (DirectMetaPropertyMap) super.metaPropertyMap(),
"tenor",
"conventionId");
/**
 * Restricted constructor.
 */
protected Meta() {
}
// Dispatches on the generated hash of the property name; unknown names
// fall through to the superclass.
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 110246592: // tenor
return _tenor;
case 1520979052: // conventionId
return _conventionId;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends IborIndexBean> builder() {
return new DirectBeanBuilder<IborIndexBean>(new IborIndexBean());
}
@Override
public Class<? extends IborIndexBean> beanType() {
return IborIndexBean.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
 * The meta-property for the {@code tenor} property.
 * @return the meta-property, not null
 */
public final MetaProperty<TenorBean> tenor() {
return _tenor;
}
/**
 * The meta-property for the {@code conventionId} property.
 * @return the meta-property, not null
 */
public final MetaProperty<ExternalIdBean> conventionId() {
return _conventionId;
}
//-----------------------------------------------------------------------
// Reads a property value by name from the given bean instance.
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 110246592: // tenor
return ((IborIndexBean) bean).getTenor();
case 1520979052: // conventionId
return ((IborIndexBean) bean).getConventionId();
}
return super.propertyGet(bean, propertyName, quiet);
}
// Writes a property value by name onto the given bean instance.
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
switch (propertyName.hashCode()) {
case 110246592: // tenor
((IborIndexBean) bean).setTenor((TenorBean) newValue);
return;
case 1520979052: // conventionId
((IborIndexBean) bean).setConventionId((ExternalIdBean) newValue);
return;
}
super.propertySet(bean, propertyName, newValue, quiet);
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/* Copyright (C) 2013-2014 Computer Sciences Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
/**
* Autogenerated by Thrift
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
*/
package ezbake.quarantine.client;
import java.util.*;
/**
 * Thrift-generated transfer object holding an HTML document: a numeric id,
 * the document content, and a date string.
 *
 * <p>Changes relative to the raw generated code: {@link #hashCode()} now
 * derives from the same fields as {@link #equals(HTMLDocument)} (the
 * generated version returned the constant 0, which is contract-valid but
 * degrades every hash-based collection to a linked list), and deprecated
 * {@code new Integer(...)} boxing was replaced with {@code Integer.valueOf}.
 */
public class HTMLDocument implements org.apache.thrift.TBase<HTMLDocument, HTMLDocument._Fields>, java.io.Serializable, Cloneable {
    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HTMLDocument");
    private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I32, (short)1);
    private static final org.apache.thrift.protocol.TField CONTENT_FIELD_DESC = new org.apache.thrift.protocol.TField("content", org.apache.thrift.protocol.TType.STRING, (short)2);
    private static final org.apache.thrift.protocol.TField DATE_FIELD_DESC = new org.apache.thrift.protocol.TField("date", org.apache.thrift.protocol.TType.STRING, (short)3);

    public int id;
    public String content;
    public String date;

    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
        ID((short)1, "id"),
        CONTENT((short)2, "content"),
        DATE((short)3, "date");

        private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
        static {
            for (_Fields field : EnumSet.allOf(_Fields.class)) {
                byName.put(field.getFieldName(), field);
            }
        }

        /**
         * Find the _Fields constant that matches fieldId, or null if its not found.
         */
        public static _Fields findByThriftId(int fieldId) {
            switch(fieldId) {
                case 1: // ID
                    return ID;
                case 2: // CONTENT
                    return CONTENT;
                case 3: // DATE
                    return DATE;
                default:
                    return null;
            }
        }

        /**
         * Find the _Fields constant that matches fieldId, throwing an exception
         * if it is not found.
         */
        public static _Fields findByThriftIdOrThrow(int fieldId) {
            _Fields fields = findByThriftId(fieldId);
            if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
            return fields;
        }

        /**
         * Find the _Fields constant that matches name, or null if its not found.
         */
        public static _Fields findByName(String name) {
            return byName.get(name);
        }

        private final short _thriftId;
        private final String _fieldName;

        _Fields(short thriftId, String fieldName) {
            _thriftId = thriftId;
            _fieldName = fieldName;
        }

        public short getThriftFieldId() {
            return _thriftId;
        }

        public String getFieldName() {
            return _fieldName;
        }
    }

    // isset id assignments: primitive fields need an explicit "was set" bit
    private static final int __ID_ISSET_ID = 0;
    private BitSet __isset_bit_vector = new BitSet(1);

    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
    static {
        Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
        tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
        tmpMap.put(_Fields.CONTENT, new org.apache.thrift.meta_data.FieldMetaData("content", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
        tmpMap.put(_Fields.DATE, new org.apache.thrift.meta_data.FieldMetaData("date", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
        metaDataMap = Collections.unmodifiableMap(tmpMap);
        org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(HTMLDocument.class, metaDataMap);
    }

    public HTMLDocument() {
    }

    public HTMLDocument(
        int id,
        String content,
        String date)
    {
        this();
        this.id = id;
        setIdIsSet(true);
        this.content = content;
        this.date = date;
    }

    /**
     * Performs a deep copy on <i>other</i>.
     */
    public HTMLDocument(HTMLDocument other) {
        __isset_bit_vector.clear();
        __isset_bit_vector.or(other.__isset_bit_vector);
        this.id = other.id;
        if (other.isSetContent()) {
            this.content = other.content;
        }
        if (other.isSetDate()) {
            this.date = other.date;
        }
    }

    public HTMLDocument deepCopy() {
        return new HTMLDocument(this);
    }

    @Override
    public void clear() {
        setIdIsSet(false);
        this.id = 0;
        this.content = null;
        this.date = null;
    }

    public int getId() {
        return this.id;
    }

    public HTMLDocument setId(int id) {
        this.id = id;
        setIdIsSet(true);
        return this;
    }

    public void unsetId() {
        __isset_bit_vector.clear(__ID_ISSET_ID);
    }

    /** Returns true if field id is set (has been assigned a value) and false otherwise */
    public boolean isSetId() {
        return __isset_bit_vector.get(__ID_ISSET_ID);
    }

    public void setIdIsSet(boolean value) {
        __isset_bit_vector.set(__ID_ISSET_ID, value);
    }

    public String getContent() {
        return this.content;
    }

    public HTMLDocument setContent(String content) {
        this.content = content;
        return this;
    }

    public void unsetContent() {
        this.content = null;
    }

    /** Returns true if field content is set (has been assigned a value) and false otherwise */
    public boolean isSetContent() {
        return this.content != null;
    }

    public void setContentIsSet(boolean value) {
        if (!value) {
            this.content = null;
        }
    }

    public String getDate() {
        return this.date;
    }

    public HTMLDocument setDate(String date) {
        this.date = date;
        return this;
    }

    public void unsetDate() {
        this.date = null;
    }

    /** Returns true if field date is set (has been assigned a value) and false otherwise */
    public boolean isSetDate() {
        return this.date != null;
    }

    public void setDateIsSet(boolean value) {
        if (!value) {
            this.date = null;
        }
    }

    public void setFieldValue(_Fields field, Object value) {
        switch (field) {
            case ID:
                if (value == null) {
                    unsetId();
                } else {
                    setId((Integer)value);
                }
                break;
            case CONTENT:
                if (value == null) {
                    unsetContent();
                } else {
                    setContent((String)value);
                }
                break;
            case DATE:
                if (value == null) {
                    unsetDate();
                } else {
                    setDate((String)value);
                }
                break;
        }
    }

    public Object getFieldValue(_Fields field) {
        switch (field) {
            case ID:
                // Integer.valueOf uses the cache; the deprecated new Integer(...) always allocated
                return Integer.valueOf(getId());
            case CONTENT:
                return getContent();
            case DATE:
                return getDate();
        }
        throw new IllegalStateException();
    }

    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
    public boolean isSet(_Fields field) {
        if (field == null) {
            throw new IllegalArgumentException();
        }
        switch (field) {
            case ID:
                return isSetId();
            case CONTENT:
                return isSetContent();
            case DATE:
                return isSetDate();
        }
        throw new IllegalStateException();
    }

    @Override
    public boolean equals(Object that) {
        if (that == null)
            return false;
        if (that instanceof HTMLDocument)
            return this.equals((HTMLDocument)that);
        return false;
    }

    public boolean equals(HTMLDocument that) {
        if (that == null)
            return false;
        boolean this_present_id = true;
        boolean that_present_id = true;
        if (this_present_id || that_present_id) {
            if (!(this_present_id && that_present_id))
                return false;
            if (this.id != that.id)
                return false;
        }
        boolean this_present_content = true && this.isSetContent();
        boolean that_present_content = true && that.isSetContent();
        if (this_present_content || that_present_content) {
            if (!(this_present_content && that_present_content))
                return false;
            if (!this.content.equals(that.content))
                return false;
        }
        boolean this_present_date = true && this.isSetDate();
        boolean that_present_date = true && that.isSetDate();
        if (this_present_date || that_present_date) {
            if (!(this_present_date && that_present_date))
                return false;
            if (!this.date.equals(that.date))
                return false;
        }
        return true;
    }

    /**
     * Hash derived from the same fields equals() compares (id, content, date),
     * so equal instances hash equally. The generated code returned the
     * constant 0, which made every HashMap/HashSet bucket collide.
     */
    @Override
    public int hashCode() {
        int result = 17;
        result = 31 * result + id;
        result = 31 * result + (isSetContent() ? content.hashCode() : 0);
        result = 31 * result + (isSetDate() ? date.hashCode() : 0);
        return result;
    }

    public int compareTo(HTMLDocument other) {
        if (!getClass().equals(other.getClass())) {
            return getClass().getName().compareTo(other.getClass().getName());
        }
        int lastComparison = 0;
        HTMLDocument typedOther = (HTMLDocument)other;
        lastComparison = Boolean.valueOf(isSetId()).compareTo(typedOther.isSetId());
        if (lastComparison != 0) {
            return lastComparison;
        }
        if (isSetId()) {
            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, typedOther.id);
            if (lastComparison != 0) {
                return lastComparison;
            }
        }
        lastComparison = Boolean.valueOf(isSetContent()).compareTo(typedOther.isSetContent());
        if (lastComparison != 0) {
            return lastComparison;
        }
        if (isSetContent()) {
            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.content, typedOther.content);
            if (lastComparison != 0) {
                return lastComparison;
            }
        }
        lastComparison = Boolean.valueOf(isSetDate()).compareTo(typedOther.isSetDate());
        if (lastComparison != 0) {
            return lastComparison;
        }
        if (isSetDate()) {
            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.date, typedOther.date);
            if (lastComparison != 0) {
                return lastComparison;
            }
        }
        return 0;
    }

    public _Fields fieldForId(int fieldId) {
        return _Fields.findByThriftId(fieldId);
    }

    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField field;
        iprot.readStructBegin();
        while (true)
        {
            field = iprot.readFieldBegin();
            if (field.type == org.apache.thrift.protocol.TType.STOP) {
                break;
            }
            switch (field.id) {
                case 1: // ID
                    if (field.type == org.apache.thrift.protocol.TType.I32) {
                        this.id = iprot.readI32();
                        setIdIsSet(true);
                    } else {
                        org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
                    }
                    break;
                case 2: // CONTENT
                    if (field.type == org.apache.thrift.protocol.TType.STRING) {
                        this.content = iprot.readString();
                    } else {
                        org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
                    }
                    break;
                case 3: // DATE
                    if (field.type == org.apache.thrift.protocol.TType.STRING) {
                        this.date = iprot.readString();
                    } else {
                        org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
                    }
                    break;
                default:
                    org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
            }
            iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        // check for required fields of primitive type, which can't be checked in the validate method
        validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
        validate();
        oprot.writeStructBegin(STRUCT_DESC);
        oprot.writeFieldBegin(ID_FIELD_DESC);
        oprot.writeI32(this.id);
        oprot.writeFieldEnd();
        if (this.content != null) {
            oprot.writeFieldBegin(CONTENT_FIELD_DESC);
            oprot.writeString(this.content);
            oprot.writeFieldEnd();
        }
        if (this.date != null) {
            oprot.writeFieldBegin(DATE_FIELD_DESC);
            oprot.writeString(this.date);
            oprot.writeFieldEnd();
        }
        oprot.writeFieldStop();
        oprot.writeStructEnd();
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("HTMLDocument(");
        boolean first = true;
        sb.append("id:");
        sb.append(this.id);
        first = false;
        if (!first) sb.append(", ");
        sb.append("content:");
        if (this.content == null) {
            sb.append("null");
        } else {
            sb.append(this.content);
        }
        first = false;
        if (!first) sb.append(", ");
        sb.append("date:");
        if (this.date == null) {
            sb.append("null");
        } else {
            sb.append(this.date);
        }
        first = false;
        sb.append(")");
        return sb.toString();
    }

    public void validate() throws org.apache.thrift.TException {
        // check for required fields
    }

    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
        try {
            write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
        } catch (org.apache.thrift.TException te) {
            throw new java.io.IOException(te);
        }
    }

    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
        try {
            // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
            __isset_bit_vector = new BitSet(1);
            read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
        } catch (org.apache.thrift.TException te) {
            throw new java.io.IOException(te);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.client.api.impl;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager
.ParameterizedSchedulerTestBase;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collection;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * This class is to test class {@link YarnClientImpl}.
 */
public class TestYarnClientImpl extends ParameterizedSchedulerTestBase {
public TestYarnClientImpl(SchedulerType type) throws IOException {
super(type);
}
@Before
public void setup() {
// Reset queue metrics between runs and avoid JMX registration in tests.
QueueMetrics.clearQueueMetrics();
DefaultMetricsSystem.setMiniClusterMode(true);
}
// Verifies the client starts and stops cleanly with timeline service v1.5.
@Test
public void testStartWithTimelineV15() {
Configuration conf = getConf();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 1.5f);
YarnClientImpl client = (YarnClientImpl) YarnClient.createYarnClient();
client.init(conf);
client.start();
client.stop();
}
// null => timeout disabled; 0 and positive values => timeout enforced.
@Test
public void testAsyncAPIPollTimeout() {
testAsyncAPIPollTimeoutHelper(null, false);
testAsyncAPIPollTimeoutHelper(0L, true);
testAsyncAPIPollTimeoutHelper(1L, true);
}
private void testAsyncAPIPollTimeoutHelper(Long valueForTimeout,
    boolean expectedTimeoutEnforcement) {
YarnClientImpl client = new YarnClientImpl();
try {
Configuration conf = getConf();
if (valueForTimeout != null) {
conf.setLong(
    YarnConfiguration.YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_TIMEOUT_MS,
    valueForTimeout);
}
client.init(conf);
Assert.assertEquals(
    expectedTimeoutEnforcement, client.enforceAsyncAPITimeout());
} finally {
IOUtils.closeQuietly(client);
}
}
// When best-effort is on, a timeline-token failure is tolerated; when it is
// off, the same failure must surface as an IOException.
@Test
public void testBestEffortTimelineDelegationToken()
    throws Exception {
Configuration conf = getConf();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
YarnClientImpl client = spy(new YarnClientImpl() {
@Override
TimelineClient createTimelineClient() throws IOException, YarnException {
timelineClient = mock(TimelineClient.class);
when(timelineClient.getDelegationToken(any()))
    .thenThrow(new RuntimeException("Best effort test exception"));
return timelineClient;
}
});
client.init(conf);
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_CLIENT_BEST_EFFORT,
    true);
client.serviceInit(conf);
client.getTimelineDelegationToken();
try {
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_CLIENT_BEST_EFFORT, false);
client.serviceInit(conf);
client.getTimelineDelegationToken();
Assert.fail("Get delegation token should have thrown an exception");
} catch (IOException e) {
// Success
}
}
// Submitting an application should add a timeline delegation token to the
// container launch context only when the caller did not already supply one.
@Test
public void testAutomaticTimelineDelegationTokenLoading()
    throws Exception {
Configuration conf = getConf();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf);
TimelineDelegationTokenIdentifier timelineDT =
    new TimelineDelegationTokenIdentifier();
final Token<TimelineDelegationTokenIdentifier> dToken =
    new Token<>(
        timelineDT.getBytes(), new byte[0], timelineDT.getKind(), new Text());
// create a mock client
YarnClientImpl client = spy(new YarnClientImpl() {
@Override
TimelineClient createTimelineClient() throws IOException, YarnException {
timelineClient = mock(TimelineClient.class);
when(timelineClient.getDelegationToken(any())).thenReturn(dToken);
return timelineClient;
}
@Override
protected void serviceStart() {
rmClient = mock(ApplicationClientProtocol.class);
}
@Override
protected void serviceStop() {
}
@Override
public ApplicationReport getApplicationReport(ApplicationId appId) {
ApplicationReport report = mock(ApplicationReport.class);
when(report.getYarnApplicationState())
    .thenReturn(YarnApplicationState.RUNNING);
return report;
}
@Override
public boolean isSecurityEnabled() {
return true;
}
});
client.init(conf);
client.start();
try {
// when i == 0, timeline DT already exists, no need to get one more
// when i == 1, timeline DT doesn't exist, need to get one more
for (int i = 0; i < 2; ++i) {
ApplicationSubmissionContext context =
    mock(ApplicationSubmissionContext.class);
ApplicationId applicationId = ApplicationId.newInstance(0, i + 1);
when(context.getApplicationId()).thenReturn(applicationId);
DataOutputBuffer dob = new DataOutputBuffer();
Credentials credentials = new Credentials();
if (i == 0) {
credentials.addToken(client.timelineService, dToken);
}
credentials.writeTokenStorageToStream(dob);
ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
ContainerLaunchContext clc = ContainerLaunchContext.newInstance(
    null, null, null, null, tokens, null);
when(context.getAMContainerSpec()).thenReturn(clc);
client.submitApplication(context);
if (i == 0) {
// GetTimelineDelegationToken shouldn't be called
verify(client, never()).getTimelineDelegationToken();
}
// In either way, token should be there
credentials = new Credentials();
DataInputByteBuffer dibb = new DataInputByteBuffer();
tokens = clc.getTokens();
if (tokens != null) {
dibb.reset(tokens);
credentials.readTokenStorageStream(dibb);
tokens.rewind();
}
Collection<Token<? extends TokenIdentifier>> dTokens =
    credentials.getAllTokens();
Assert.assertEquals(1, dTokens.size());
Assert.assertEquals(dToken, dTokens.iterator().next());
}
} finally {
client.stop();
}
}
// The renewer should be derived from the RM principal with _HOST expanded.
@Test
public void testParseTimelineDelegationTokenRenewer() {
// Client side
YarnClientImpl client = (YarnClientImpl) YarnClient.createYarnClient();
Configuration conf = getConf();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.set(YarnConfiguration.RM_PRINCIPAL, "rm/_HOST@EXAMPLE.COM");
conf.set(
    YarnConfiguration.RM_ADDRESS, "localhost:8188");
try {
client.init(conf);
client.start();
Assert.assertEquals("rm/localhost@EXAMPLE.COM", client.timelineDTRenewer);
} finally {
client.stop();
}
}
}
| |
/*
*
*/
package relationalMetaModel.diagram.part;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.commands.operations.OperationHistoryFactory;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.SubProgressMonitor;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.xmi.XMLResource;
import org.eclipse.emf.transaction.TransactionalEditingDomain;
import org.eclipse.emf.workspace.util.WorkspaceSynchronizer;
import org.eclipse.gef.EditPart;
import org.eclipse.gmf.runtime.common.core.command.CommandResult;
import org.eclipse.gmf.runtime.diagram.core.services.ViewService;
import org.eclipse.gmf.runtime.diagram.ui.editparts.DiagramEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IPrimaryEditPart;
import org.eclipse.gmf.runtime.diagram.ui.parts.IDiagramGraphicalViewer;
import org.eclipse.gmf.runtime.diagram.ui.parts.IDiagramWorkbenchPart;
import org.eclipse.gmf.runtime.emf.commands.core.command.AbstractTransactionalCommand;
import org.eclipse.gmf.runtime.emf.core.GMFEditingDomainFactory;
import org.eclipse.gmf.runtime.emf.core.util.EMFCoreUtil;
import org.eclipse.gmf.runtime.notation.Diagram;
import org.eclipse.gmf.runtime.notation.View;
import org.eclipse.gmf.tooling.runtime.part.DefaultDiagramEditorUtil;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.jface.wizard.Wizard;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.FileEditorInput;
/**
* @generated
*/
public class RelationalMetaModelDiagramEditorUtil {
/**
 * Builds the EMF save options used when persisting the model and diagram
 * resources (UTF-8 encoding, save only when memory-buffer comparison shows
 * a change).
 * @generated
 */
public static Map<?, ?> getSaveOptions() {
    Map<String, Object> options = new HashMap<String, Object>();
    options.put(XMLResource.OPTION_ENCODING, "UTF-8"); //$NON-NLS-1$
    options.put(Resource.OPTION_SAVE_ONLY_IF_CHANGED,
            Resource.OPTION_SAVE_ONLY_IF_CHANGED_MEMORY_BUFFER);
    return options;
}
/**
 * Opens the given diagram resource in the diagram editor of the active
 * workbench page.
 *
 * @param diagram the diagram resource; its URI must resolve to a workspace file
 * @return true if an editor was opened, false if the URI does not map to an
 *         {@code IFile} in the workspace
 * @throws PartInitException if the editor could not be initialized
 * @generated
 */
public static boolean openDiagram(Resource diagram)
throws PartInitException {
String path = diagram.getURI().toPlatformString(true);
IResource workspaceResource = ResourcesPlugin.getWorkspace().getRoot()
.findMember(new Path(path));
if (workspaceResource instanceof IFile) {
IWorkbenchPage page = PlatformUI.getWorkbench()
.getActiveWorkbenchWindow().getActivePage();
return null != page
.openEditor(
new FileEditorInput((IFile) workspaceResource),
relationalMetaModel.diagram.part.RelationalMetaModelDiagramEditor.ID);
}
return false;
}
/**
 * Marks the given workspace file as UTF-8 encoded; does nothing for null.
 * Failures are logged rather than propagated.
 * @generated
 */
public static void setCharset(IFile file) {
    if (file != null) {
        try {
            file.setCharset("UTF-8", new NullProgressMonitor()); //$NON-NLS-1$
        } catch (CoreException e) {
            relationalMetaModel.diagram.part.RelationalMetaModelDiagramEditorPlugin
                    .getInstance()
                    .logError(
                            "Unable to set charset for file " + file.getFullPath(), e); //$NON-NLS-1$
        }
    }
}
/**
 * Computes a file name that does not yet exist in the workspace under the
 * given container, by delegating to the GMF tooling utility.
 *
 * @param containerFullPath the container to create the file in
 * @param fileName the preferred base name
 * @param extension the file extension
 * @return a unique file name within the container
 * @generated
 */
public static String getUniqueFileName(IPath containerFullPath,
String fileName, String extension) {
return DefaultDiagramEditorUtil.getUniqueFileName(containerFullPath,
fileName, extension,
DefaultDiagramEditorUtil.EXISTS_IN_WORKSPACE);
}
/**
 * Runs the wizard in a dialog, persisting its dialog settings in the plugin
 * settings under {@code settingsKey} (the section is created on first use).
 *
 * @param shell the parent shell for the dialog
 * @param wizard the wizard to run
 * @param settingsKey the plugin dialog-settings section name
 * @generated
 */
public static void runWizard(Shell shell, Wizard wizard, String settingsKey) {
IDialogSettings pluginDialogSettings = relationalMetaModel.diagram.part.RelationalMetaModelDiagramEditorPlugin
.getInstance().getDialogSettings();
IDialogSettings wizardDialogSettings = pluginDialogSettings
.getSection(settingsKey);
if (wizardDialogSettings == null) {
wizardDialogSettings = pluginDialogSettings
.addNewSection(settingsKey);
}
wizard.setDialogSettings(wizardDialogSettings);
WizardDialog dialog = new WizardDialog(shell, wizard);
dialog.create();
// Widen to at least 500px while keeping a fixed 500px height.
dialog.getShell().setSize(Math.max(500, dialog.getShell().getSize().x),
500);
dialog.open();
}
/**
 * This method should be called within a workspace modify operation since it creates resources.
 * Creates an empty model resource and a diagram resource for it inside a
 * transactional command, saves both, and sets their charsets to UTF-8.
 * Errors during save or command execution are logged, not thrown.
 *
 * @param diagramURI where to create the diagram resource
 * @param modelURI where to create the semantic model resource
 * @param progressMonitor reports three units of work; not null
 * @return the created diagram resource (its contents may be empty if
 *         diagram creation failed)
 * @generated
 */
public static Resource createDiagram(URI diagramURI, URI modelURI,
IProgressMonitor progressMonitor) {
TransactionalEditingDomain editingDomain = GMFEditingDomainFactory.INSTANCE
.createEditingDomain();
progressMonitor
.beginTask(
relationalMetaModel.diagram.part.Messages.RelationalMetaModelDiagramEditorUtil_CreateDiagramProgressTask,
3);
final Resource diagramResource = editingDomain.getResourceSet()
.createResource(diagramURI);
final Resource modelResource = editingDomain.getResourceSet()
.createResource(modelURI);
final String diagramName = diagramURI.lastSegment();
// All model/diagram mutation happens inside this transactional command.
AbstractTransactionalCommand command = new AbstractTransactionalCommand(
editingDomain,
relationalMetaModel.diagram.part.Messages.RelationalMetaModelDiagramEditorUtil_CreateDiagramCommandLabel,
Collections.EMPTY_LIST) {
protected CommandResult doExecuteWithResult(
IProgressMonitor monitor, IAdaptable info)
throws ExecutionException {
relationalMetaModel.RelationalSchema model = createInitialModel();
attachModelToResource(model, modelResource);
Diagram diagram = ViewService
.createDiagram(
model,
relationalMetaModel.diagram.edit.parts.RelationalSchemaEditPart.MODEL_ID,
relationalMetaModel.diagram.part.RelationalMetaModelDiagramEditorPlugin.DIAGRAM_PREFERENCES_HINT);
if (diagram != null) {
diagramResource.getContents().add(diagram);
diagram.setName(diagramName);
diagram.setElement(model);
}
try {
modelResource
.save(relationalMetaModel.diagram.part.RelationalMetaModelDiagramEditorUtil
.getSaveOptions());
diagramResource
.save(relationalMetaModel.diagram.part.RelationalMetaModelDiagramEditorUtil
.getSaveOptions());
} catch (IOException e) {
relationalMetaModel.diagram.part.RelationalMetaModelDiagramEditorPlugin
.getInstance()
.logError(
"Unable to store model and diagram resources", e); //$NON-NLS-1$
}
return CommandResult.newOKCommandResult();
}
};
try {
OperationHistoryFactory.getOperationHistory().execute(command,
new SubProgressMonitor(progressMonitor, 1), null);
} catch (ExecutionException e) {
relationalMetaModel.diagram.part.RelationalMetaModelDiagramEditorPlugin
.getInstance().logError(
"Unable to create model and diagram", e); //$NON-NLS-1$
}
setCharset(WorkspaceSynchronizer.getFile(modelResource));
setCharset(WorkspaceSynchronizer.getFile(diagramResource));
return diagramResource;
}
/**
 * Create a new instance of domain element associated with canvas.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @return a fresh, empty {@code RelationalSchema} root element
 * @generated
 */
private static relationalMetaModel.RelationalSchema createInitialModel() {
return relationalMetaModel.RelationalMetaModelFactory.eINSTANCE
.createRelationalSchema();
}
/**
 * Store model element in the resource.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @param model the root element to attach
 * @param resource the resource that will contain the model
 * @generated
 */
private static void attachModelToResource(
relationalMetaModel.RelationalSchema model, Resource resource) {
resource.getContents().add(model);
}
/**
 * Replaces the viewer's selection with the given edit parts and reveals the
 * first primary part (or, if none, the first part in the list).
 * @generated
 */
public static void selectElementsInDiagram(
        IDiagramWorkbenchPart diagramPart, List<EditPart> editParts) {
    IDiagramGraphicalViewer viewer = diagramPart.getDiagramGraphicalViewer();
    viewer.deselectAll();
    EditPart primary = null;
    for (EditPart part : editParts) {
        viewer.appendSelection(part);
        if (primary == null && part instanceof IPrimaryEditPart) {
            primary = part;
        }
    }
    if (!editParts.isEmpty()) {
        viewer.reveal(primary == null ? (EditPart) editParts.get(0) : primary);
    }
}
/**
 * Collects into {@code editPartCollector} the edit parts showing the given
 * element in the diagram. Notation views are resolved directly through the
 * edit-part registry; semantic elements are looked up by proxy ID, keeping
 * only top-most parts. If nothing is found the lookup recurses into the
 * element's container.
 *
 * @return the number of edit parts added to the collector
 * @generated
 */
private static int findElementsInDiagramByID(DiagramEditPart diagramPart,
EObject element, List<EditPart> editPartCollector) {
IDiagramGraphicalViewer viewer = (IDiagramGraphicalViewer) diagramPart
.getViewer();
final int intialNumOfEditParts = editPartCollector.size();
if (element instanceof View) { // support notation element lookup
EditPart editPart = (EditPart) viewer.getEditPartRegistry().get(
element);
if (editPart != null) {
editPartCollector.add(editPart);
return 1;
}
}
String elementID = EMFCoreUtil.getProxyID(element);
@SuppressWarnings("unchecked")
List<EditPart> associatedParts = viewer.findEditPartsForElement(
elementID, IGraphicalEditPart.class);
// perform the possible hierarchy disjoint -> take the top-most parts only
for (EditPart nextPart : associatedParts) {
EditPart parentPart = nextPart.getParent();
while (parentPart != null && !associatedParts.contains(parentPart)) {
parentPart = parentPart.getParent();
}
if (parentPart == null) {
editPartCollector.add(nextPart);
}
}
// Fallbacks: take any associated part, or retry with the container.
if (intialNumOfEditParts == editPartCollector.size()) {
if (!associatedParts.isEmpty()) {
editPartCollector.add(associatedParts.get(0));
} else {
if (element.eContainer() != null) {
return findElementsInDiagramByID(diagramPart,
element.eContainer(), editPartCollector);
}
}
}
return editPartCollector.size() - intialNumOfEditParts;
}
/**
 * Resolves the notation {@link View} for {@code targetElement}, falling
 * back to the diagram's own view when no specific one can be found.
 * Elements without an XMI id ("structural URI") are resolved through the
 * pre-built element-to-view map; everything else goes through the
 * edit-part lookup in {@code findElementsInDiagramByID}.
 * @generated
 */
public static View findView(DiagramEditPart diagramEditPart,
        EObject targetElement, LazyElement2ViewMap lazyElement2ViewMap) {
    boolean hasStructuralURI = false;
    if (targetElement.eResource() instanceof XMLResource) {
        // a null XMI id means the element is addressed by its position
        // in the containment tree rather than by a stable id
        hasStructuralURI = ((XMLResource) targetElement.eResource())
                .getID(targetElement) == null;
    }
    View view = null;
    LinkedList<EditPart> editPartHolder = new LinkedList<EditPart>();
    if (hasStructuralURI
            && !lazyElement2ViewMap.getElement2ViewMap().isEmpty()) {
        view = lazyElement2ViewMap.getElement2ViewMap().get(targetElement);
    } else if (findElementsInDiagramByID(diagramEditPart, targetElement,
            editPartHolder) > 0) {
        EditPart editPart = editPartHolder.get(0);
        view = editPart.getModel() instanceof View ? (View) editPart
                .getModel() : null;
    }
    return (view == null) ? diagramEditPart.getDiagramView() : view;
}
/**
 * Lazily builds a map from model elements to the views that display
 * them within a given scope, computed on first access only.
 * XXX This is quite suspicious code (especially editPartTmpHolder) and likely to be removed soon
 * @generated
 */
public static class LazyElement2ViewMap {
    /**
     * Cache of element -> view; null until first requested.
     * @generated
     */
    private Map<EObject, View> element2ViewMap;
    /**
     * Root view under which the search is performed.
     * @generated
     */
    private View scope;
    /**
     * The elements whose views we are interested in.
     * @generated
     */
    private Set<? extends EObject> elementSet;
    /**
     * @generated
     */
    public LazyElement2ViewMap(View scope, Set<? extends EObject> elements) {
        this.scope = scope;
        this.elementSet = elements;
    }
    /**
     * Returns the element-to-view map, computing it on first call.
     * @generated
     */
    public final Map<EObject, View> getElement2ViewMap() {
        if (element2ViewMap == null) {
            element2ViewMap = new HashMap<EObject, View>();
            // map possible notation elements to itself as these can't be found by view.getElement()
            for (EObject element : elementSet) {
                if (element instanceof View) {
                    View view = (View) element;
                    if (view.getDiagram() == scope.getDiagram()) {
                        element2ViewMap.put(element, view); // take only those that part of our diagram
                    }
                }
            }
            buildElement2ViewMap(scope, element2ViewMap, elementSet);
        }
        return element2ViewMap;
    }
    /**
     * Recursively walks the view tree (children, source edges, target
     * edges) under {@code parentView}, recording each view whose element
     * is in {@code elements}. Returns true once every requested element
     * has been mapped, allowing the traversal to stop early.
     * @generated
     */
    private static boolean buildElement2ViewMap(View parentView,
            Map<EObject, View> element2ViewMap,
            Set<? extends EObject> elements) {
        if (elements.size() == element2ViewMap.size()) {
            return true;
        }
        if (parentView.isSetElement()
                && !element2ViewMap.containsKey(parentView.getElement())
                && elements.contains(parentView.getElement())) {
            element2ViewMap.put(parentView.getElement(), parentView);
            if (elements.size() == element2ViewMap.size()) {
                return true;
            }
        }
        boolean complete = false;
        // stop descending as soon as one subtree reports completeness
        for (Iterator<?> it = parentView.getChildren().iterator(); it
                .hasNext() && !complete;) {
            complete = buildElement2ViewMap((View) it.next(),
                    element2ViewMap, elements);
        }
        for (Iterator<?> it = parentView.getSourceEdges().iterator(); it
                .hasNext() && !complete;) {
            complete = buildElement2ViewMap((View) it.next(),
                    element2ViewMap, elements);
        }
        for (Iterator<?> it = parentView.getTargetEdges().iterator(); it
                .hasNext() && !complete;) {
            complete = buildElement2ViewMap((View) it.next(),
                    element2ViewMap, elements);
        }
        return complete;
    }
} //LazyElement2ViewMap
}
| |
package com.planet_ink.coffee_mud.Abilities.Skills;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.TrackingLibrary;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2010 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("unchecked")
/**
 * A nature-lore skill that draws an ASCII overview map of the outdoor
 * region surrounding the invoker, colored and symbolized by terrain type.
 * The map radius grows with the invoker's adjusted level.
 */
public class Skill_RegionalAwareness extends StdSkill
{
    // Ability identity / classification boilerplate.
    public String ID() { return "Skill_RegionalAwareness"; }
    public String name(){ return "Regional Awareness";}
    protected int canAffectCode(){return 0;}  // leaves no lasting affect
    protected int canTargetCode(){return 0;}  // cannot target anyone
    public int abstractQuality(){return Ability.QUALITY_INDIFFERENT;}
    private static final String[] triggerStrings = {"REGION","REGIONALAWARENESS"};
    public String[] triggerStrings(){return triggerStrings;}
    public int classificationCode(){return Ability.ACODE_SKILL|Ability.DOMAIN_NATURELORE;}
    public int overrideMana(){return 0;}  // free to use
    protected int iniTrainsRequired(){return CMProps.getIntVar(CMProps.SYSTEMI_COMMONTRAINCOST);}
    protected int iniPracticesRequired(){return CMProps.getIntVar(CMProps.SYSTEMI_COMMONPRACCOST);}

    /**
     * Returns the ANSI color-code character used when drawing the given
     * room on the map, chosen by the room's outdoor domain type.
     * Unmappable rooms render white; null rooms render as a blank.
     */
    public char roomColor(Room room)
    {
        if(room==null) return ' ';
        if(CMath.bset(room.envStats().sensesMask(),EnvStats.SENSE_ROOMUNMAPPABLE))
            return 'w';
        switch(room.domainType())
        {
        case Room.DOMAIN_OUTDOORS_CITY:return 'w';
        case Room.DOMAIN_OUTDOORS_WOODS:return 'G';
        case Room.DOMAIN_OUTDOORS_ROCKS:return 'W';
        case Room.DOMAIN_OUTDOORS_PLAINS:return 'Y';
        case Room.DOMAIN_OUTDOORS_UNDERWATER:return 'B';
        case Room.DOMAIN_OUTDOORS_AIR:return ' ';
        case Room.DOMAIN_OUTDOORS_WATERSURFACE:return 'b';
        case Room.DOMAIN_OUTDOORS_JUNGLE:return 'R';
        case Room.DOMAIN_OUTDOORS_SWAMP:return 'r';
        case Room.DOMAIN_OUTDOORS_DESERT:return 'y';
        case Room.DOMAIN_OUTDOORS_HILLS:return 'g';
        case Room.DOMAIN_OUTDOORS_MOUNTAINS:return 'p';
        case Room.DOMAIN_OUTDOORS_SPACEPORT:return 'P';
        default:
            return 'k';
        }
    }

    /**
     * Returns the glyph used to draw the given room on the map, chosen
     * by the room's outdoor domain type (e.g. 'T' for woods, '~' for
     * water). Unmappable or null rooms render as a blank.
     */
    public char roomChar(Room room)
    {
        if(room==null) return ' ';
        if(CMath.bset(room.envStats().sensesMask(),EnvStats.SENSE_ROOMUNMAPPABLE))
            return ' ';
        switch(room.domainType())
        {
        case Room.DOMAIN_OUTDOORS_CITY:return '=';
        case Room.DOMAIN_OUTDOORS_WOODS:return 'T';
        case Room.DOMAIN_OUTDOORS_ROCKS:return ':';
        case Room.DOMAIN_OUTDOORS_PLAINS:return '_';
        case Room.DOMAIN_OUTDOORS_UNDERWATER:return '~';
        case Room.DOMAIN_OUTDOORS_AIR:return ' ';
        case Room.DOMAIN_OUTDOORS_WATERSURFACE:return '~';
        case Room.DOMAIN_OUTDOORS_JUNGLE:return 'J';
        case Room.DOMAIN_OUTDOORS_SWAMP:return 'x';
        case Room.DOMAIN_OUTDOORS_DESERT:return '.';
        case Room.DOMAIN_OUTDOORS_HILLS:return 'h';
        case Room.DOMAIN_OUTDOORS_MOUNTAINS:return 'M';
        case Room.DOMAIN_OUTDOORS_SPACEPORT:return '@';
        default:
            return '#';
        }
    }

    /**
     * Invokes the skill: verifies the invoker is outdoors and can see,
     * then (on a successful proficiency check) collects rooms radiating
     * outward from the invoker's location and lays them onto a 2D
     * character grid centered on '*', finally printing the colored map
     * to the invoker's session.
     */
    public boolean invoke(MOB mob, Vector commands, Environmental givenTarget, boolean auto, int asLevel)
    {
        if((!auto)&&((mob.location().domainType()&Room.INDOORS)==Room.INDOORS))
        {
            mob.tell("This only works outdoors.");
            return false;
        }
        if((!auto)
        &&(!CMLib.flags().canBeSeenBy(mob.location(),mob)))
        {
            mob.tell("You need to be able to see your surroundings to do that.");
            return false;
        }
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        boolean success=proficiencyCheck(mob,0,auto);
        if(success)
        {
            CMMsg msg=CMClass.getMsg(mob,null,this,CMMsg.MSG_LOOK,"<S-NAME> peer(s) at the horizon with a distant expression.");
            if(mob.location().okMessage(mob,msg))
            {
                mob.location().send(mob,msg);
                // map edge length scales with level: 2 + level/10 tiles
                int diameter=2+(adjustedLevel(mob,asLevel)/10);
                char[][] map=new char[diameter][diameter];
                for(int i=0;i<diameter;i++)
                    for(int i2=0;i2<diameter;i2++)
                        map[i][i2]=' ';
                Room[][] rmap=new Room[diameter][diameter];
                Vector rooms=new Vector();
                HashSet closedPaths=new HashSet();  // rooms we refuse to expand past
                TrackingLibrary.TrackingFlags flags;
                flags = new TrackingLibrary.TrackingFlags()
                        .add(TrackingLibrary.TrackingFlag.OPENONLY)
                        .add(TrackingLibrary.TrackingFlag.NOEMPTYGRIDS)
                        .add(TrackingLibrary.TrackingFlag.NOAIR);
                // breadth-first collection of rooms around the invoker
                CMLib.tracking().getRadiantRooms(mob.location(),rooms,flags,null,diameter,null);
                // the invoker sits at the center, marked '*'
                rmap[diameter/2][diameter/2]=mob.location();
                map[diameter/2][diameter/2]='*';
                for(int i=0;i<rooms.size();i++)
                {
                    Room R=(Room)rooms.elementAt(i);
                    if((closedPaths.contains(R))
                    ||(R==mob.location()))
                        continue;
                    // find an already-placed room with an open exit leading to R,
                    // then derive R's grid position from that room and direction
                    Room parentR=null;
                    int parentDir=-1;
                    int[] xy=null;
                    for(int i2=0;(i2<diameter)&&(parentR==null);i2++)
                        for(int i3=0;(i3<diameter)&&(parentR==null);i3++)
                        {
                            Room R2=rmap[i2][i3];
                            if(R2!=null)
                                for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--)
                                    if((R2.getRoomInDir(d)==R)
                                    &&(!closedPaths.contains(R2))
                                    &&(R2.getExitInDir(d)!=null))
                                    {
                                        parentR=R2;
                                        parentDir=d;
                                        xy=Directions.adjustXYByDirections(i3,i2,d);
                                        break;
                                    }
                        }
                    if(xy!=null)
                    {
                        // off-grid or already-occupied positions close this path
                        if((parentDir<0)
                        ||(xy[0]<0)||(xy[0]>=diameter)||(xy[1]<0)||(xy[1]>=diameter)
                        ||(map[xy[1]][xy[0]]!=' '))
                            closedPaths.add(R);
                        else
                        {
                            map[xy[1]][xy[0]]=roomChar(R);
                            rmap[xy[1]][xy[0]]=R;
                            // indoor rooms are drawn but never expanded through
                            if((R.domainType()&Room.INDOORS)==Room.INDOORS)
                                closedPaths.add(R);
                        }
                    }
                }
                // render the grid, prefixing each colored tile with ^<colorcode>
                StringBuffer str=new StringBuffer("");
                char r=' ';
                char c=' ';
                for(int i2=0;i2<diameter;i2++)
                {
                    for(int i3=0;i3<diameter;i3++)
                    {
                        r=map[i2][i3];
                        c=roomColor(rmap[i2][i3]);
                        if(c!=' ')
                            str.append("^"+c+""+r);
                        else
                            str.append(r);
                    }
                    str.append("\n\r");
                }
                if(mob.session()!=null) mob.session().colorOnlyPrintln(str.toString());
            }
        }
        else
            beneficialVisualFizzle(mob,null,"<S-NAME> peer(s) around distantly, looking frustrated.");
        return success;
    }
}
| |
import javax.swing.*;
import javax.swing.event.*;
import java.awt.*;
import java.awt.event.*;
/**
 * Swing front-end for a checkers game. Renders an 8x8 grid of tile
 * icons backed by a {@link Board}, translates mouse clicks into piece
 * selection and moves, and exposes controls for an optional AI opponent
 * (difficulty and look-ahead sliders) plus a new-game button.
 *
 * @author Hyun Choi, Ted Pyne, Patrick Forelli
 */
public class CheckersGUI extends javax.swing.JFrame {
    //keeps track of a Board, a 2d array of JLabels to represent each tile, and JPanel to store the tiles
    public Board board;
    private JLabel[][] GUIboard;
    //JPanel entireGUI for the enclosure of both the board and the text
    private JPanel entireGUI;
    //outer JPanel panel for the outer board panel, boardGUI for the inner board panel
    private JPanel panel;
    private JPanel boardGUI;
    //JPanel for textual info; JLabels/JButton for information and toggling
    private JPanel text;
    GridBagConstraints c;
    private JLabel victoryStatus;
    private JLabel turnStatus;
    private JButton aiToggle;
    private JLabel aiDifficulty;
    private JButton newGame;
    private JLabel aiDepth;
    //AI implementation; null when the AI opponent is inactive
    private MoveAI ai;
    private boolean aiActive;
    private JSlider difficulty;
    private JSlider lookAhead;
    private boolean selected = false; //if a piece is selected or not
    private int[][] currentSelected; //coordinates of the selected piece and the target area
    private final int MULTIPLIER = 62; //width of one tile

    /**
     * Creates new form CheckersGUI: builds the board model, the tile
     * labels, both GUI panels, and performs the first render.
     */
    public CheckersGUI() {
        selected = false;
        board = new Board();
        GUIboard = new JLabel[8][8];
        for (int i = 0; i < 8; i++)
        {
            for (int j = 0; j < 8; j++)
            {
                GUIboard[i][j] = new JLabel();
            }
        }
        entireGUI = new JPanel(); //outer JPanel to store the boardGUI and the textual information
        entireGUI.setLayout(new BoxLayout(entireGUI, BoxLayout.X_AXIS));
        aiActive = false; //by default, AI is inactive
        text = new JPanel(); //inner JPanel to hold textual information
        text.setLayout(new GridBagLayout());
        c = new GridBagConstraints();
        initializeBoardGUI(); //initalizes board side of gui
        initializeText(); //initializes text side of gui
        currentSelected = new int[2][2];
        panel = new JPanel(); //enclose GridLayout within JPanel on the JFrame
        panel.add(boardGUI);
        renderBoard(); //render board on the GUI
    }

    /**
     * Redraws every tile from the current board state (piece/king/empty,
     * alternating tile colors), refreshes the status text, and makes the
     * frame visible.
     */
    public void renderBoard() //method to arrange images to form the board
    {
        boolean previousColorIsWhite = false; //for arrangement
        for (int i = 0; i < 8; i++)
        {
            for (int j = 0; j < 8; j++)
            {
                if (board.getPiece(i,j) != null) //Get the piece at that space in the board
                {
                    if (board.getPiece(i,j).getIsWhite())//if the piece is white
                    {
                        if (board.getPiece(i,j).getIsKing())
                            GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitewithwhiteking.png")));
                        else
                            GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitewithwhite.png")));
                    }
                    else //so that means it's a red
                    {
                        if (board.getPiece(i,j).getIsKing())
                            GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitewithredking.png")));
                        else
                            GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitewithred.png")));
                    }
                    previousColorIsWhite=true;
                }
                else //if no piece, then blank tile (white or green)
                {
                    if (previousColorIsWhite)
                        GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/greentile.png")));
                    else
                        GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitetile.png")));
                    previousColorIsWhite = !previousColorIsWhite;
                }
                boardGUI.add(GUIboard[i][j]);
            }
            previousColorIsWhite=!previousColorIsWhite;
        }
        refreshText(); //update the text fields
        //combine the two components of the GUI (re-adding an already-parented
        //component is a no-op move, so repeated renders are safe)
        entireGUI.add(panel);
        entireGUI.add(text);
        setResizable(false); //window cannot be resized
        //make it visible
        pack();
        this.setContentPane(entireGUI);
        setVisible(true);
    }

    /**
     * Resets the game: fresh Board, fresh tile labels, and — if an AI
     * opponent is active — a fresh AI bound to the new board.
     */
    public void initializeBoard()
    {
        board = new Board();
        // BUGFIX: was `new AI(board)`. The AI is created as AI3 in the toggle
        // handler and the difficulty slider unconditionally casts to AI3, so a
        // plain AI here caused a ClassCastException after "PLAY NEW GAME".
        if(ai!=null) ai = new AI3(board);
        GUIboard = new JLabel[8][8];
        for (int i = 0; i < 8; i++)
        {
            for (int j = 0; j < 8; j++)
            {
                GUIboard[i][j] = new JLabel();
            }
        }
        initializeBoardGUI();
        entireGUI.remove(panel);
        panel = new JPanel();
        panel.add(boardGUI);
        renderBoard();
    }

    /**
     * Builds the 8x8 board panel and installs the click handler that
     * implements the select-piece / choose-target interaction.
     */
    public void initializeBoardGUI()
    {
        boardGUI = new JPanel();
        boardGUI.setLayout(new GridLayout(8,8)); //tiles in a GridLayout of 8x8
        boardGUI.addMouseListener(new MouseAdapter() { //essence of the GUI's click detection
            public void mouseClicked(MouseEvent e) {
                if (!selected) //if nothing is selected
                {
                    currentSelected[0]=arrayCoord(pressed(e)); //store coordinates of the press in array
                    selected = true;
                    //if invalid selection, revert
                    if(!board.isValidSelection(currentSelected[0][1], currentSelected[0][0])){
                        currentSelected = new int[2][2];
                        selected=false;
                    }
                    else {
                        //If a valid selection has been made, highlight the piece to the user
                        int i = currentSelected[0][1];
                        int j = currentSelected[0][0];
                        if (board.getPiece(i,j).getIsWhite())//if the piece is white
                        {
                            if (board.getPiece(i,j).getIsKing())
                                GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitewithwhitekingselected.png")));
                            else
                                GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitewithwhiteselected.png")));
                        }
                        else //so that means it's a red
                        {
                            if (board.getPiece(i,j).getIsKing())
                                GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitewithredkingselected.png")));
                            else
                                GUIboard[i][j].setIcon(new javax.swing.ImageIcon(getClass().getResource("/image/whitewithredselected.png")));
                        }
                    }
                }
                else //Target tile (a piece is already selected)
                {
                    //using the coordinates, make a move and render the board on the GUI
                    currentSelected[1]=arrayCoord(pressed(e));
                    TurnProcessor turnProc = new TurnProcessor(currentSelected[0][1], currentSelected[0][0], currentSelected[1][1], currentSelected[1][0], board);
                    if(currentSelected[1][1]==currentSelected[0][1] && currentSelected[0][0] == currentSelected[1][0]){ //If the player clicked on their first selection, deselect it
                        currentSelected = new int[2][2];
                        selected=false;
                        renderBoard();
                    }
                    else if(!turnProc.isValidTurn()){ //If the selection is invalid, wait for a valid one
                        selected = true;
                    } else{ //If a valid selection, do the move
                        move(currentSelected);
                        renderBoard();
                        //revert to original state
                        currentSelected = new int[2][2];
                        selected=false;
                    }
                    makeAllAIMoves();
                }
            }
        });
    }

    /**
     * Lets the AI play until it is the human (white) player's turn again
     * or the game has been won. No-op when the AI is inactive.
     */
    private void makeAllAIMoves(){
        if(ai!=null)
            while(!board.isWhiteTurn() && board.gameIsWon()==null){
                ai.makeMove();
                renderBoard();
            }
    }

    /**
     * Builds the right-hand text panel: victory/turn status labels, the
     * AI toggle button, the new-game button, and branding labels.
     */
    private void initializeText()
    {
        c.ipady=80;
        final JLabel VICTORY = new JLabel ("VICTORY"); //victory text
        c.gridx=0;
        c.gridy=0;
        text.add(VICTORY, c);
        victoryStatus = new JLabel(); //victory status
        c.gridx=1;
        c.gridy=0;
        text.add(victoryStatus, c);
        final JLabel TURN = new JLabel ("TURN");
        c.gridx=0;
        c.gridy=1;
        text.add(TURN, c);
        turnStatus = new JLabel();
        c.gridx=1;
        c.gridy=1;
        text.add(turnStatus, c);
        final JLabel AI = new JLabel ("AI STATUS");
        c.gridx=0;
        c.gridy=2;
        text.add(AI, c);
        aiToggle = new JButton("AI INACTIVE");
        //size the button to its widest caption so toggling doesn't resize it
        FontMetrics fm = aiToggle.getFontMetrics(aiToggle.getFont());
        int w = fm.stringWidth("AI INACTIVE ");
        int h = fm.getHeight();
        Dimension size = new Dimension (w,h);
        aiToggle.setMinimumSize(size);
        aiToggle.setPreferredSize(size);
        c.gridx=1;
        c.gridy=2;
        c.ipady=40;
        c.ipadx=40;
        text.add(aiToggle, c);
        aiToggle.addActionListener(new ActionListener() { //button for toggling AI activation status
            public void actionPerformed(ActionEvent e)
            {
                aiActive = !aiActive;
                if (aiActive)
                {
                    ai = new AI3(board);
                    aiToggle.setText("AI ACTIVE ");
                    aiMenuToggle();
                    makeAllAIMoves();
                }
                else
                {
                    aiToggle.setText("AI INACTIVE");
                    ai = null;
                    aiMenuToggle();
                }
            }
        });
        newGame = new JButton ("PLAY NEW GAME");
        c.gridx=0;
        c.gridy=5;
        c.gridwidth=2;
        c.fill = GridBagConstraints.HORIZONTAL;
        newGame.addActionListener(new ActionListener() { //button to reset game
            public void actionPerformed(ActionEvent e)
            {
                initializeBoard();
            }
        });
        text.add(newGame,c);
        final JLabel name = new JLabel ("PCCheckers");
        name.setFont(new Font("Courier New", Font.ITALIC, 16));
        c.gridx=0;
        c.gridy=6;
        c.gridwidth=2;
        c.fill = GridBagConstraints.HORIZONTAL;
        c.anchor = GridBagConstraints.CENTER;
        c.ipady=0;
        text.add(name,c);
        final JLabel copyright = new JLabel ("\u00a9" + "PC Software Solutions");
        copyright.setFont(new Font("Courier New", Font.ITALIC, 16));
        c.gridx=0;
        c.gridy=7;
        c.gridwidth=2;
        c.fill = GridBagConstraints.HORIZONTAL;
        c.anchor = GridBagConstraints.CENTER;
        text.add(copyright,c);
    }

    /**
     * Shows or hides the AI difficulty and look-ahead sliders depending
     * on whether the AI opponent is active.
     */
    private void aiMenuToggle()
    {
        if (aiActive)
        {
            aiDifficulty = new JLabel ("AI DIFFICULTY");
            c.gridx=0;
            c.gridy=3;
            text.add(aiDifficulty, c);
            difficulty = new JSlider(JSlider.HORIZONTAL, 1, 8, 4); //slider for AI aggression level
            difficulty.setMajorTickSpacing(1);
            difficulty.setPaintTicks(true);//ticks
            difficulty.setPaintLabels(true);//numbers at ticks
            difficulty.addChangeListener(new ChangeListener(){
                public void stateChanged(ChangeEvent e){
                    JSlider source = (JSlider) e.getSource();
                    if (!source.getValueIsAdjusting()) {
                        int newValue = source.getValue()-1; //AI difficulty is 0-based
                        ((AI3)ai).setDifficulty(newValue);
                        System.out.println(newValue);
                    }
                }
            });
            c.gridx=1;
            c.gridy=3;
            text.add(difficulty, c);
            aiDepth = new JLabel ("AI DEPTH");
            c.gridx=0;
            c.gridy=4;
            text.add(aiDepth, c);
            lookAhead = new JSlider(JSlider.HORIZONTAL, 1, 5, 4); //slider for AI look-ahead depth
            lookAhead.setMajorTickSpacing(1);
            lookAhead.setPaintTicks(true);//ticks
            lookAhead.setPaintLabels(true);//numbers at ticks
            lookAhead.setSnapToTicks(true);
            lookAhead.addChangeListener(new ChangeListener(){
                public void stateChanged(ChangeEvent e){
                    JSlider source = (JSlider) e.getSource();
                    if (!source.getValueIsAdjusting()) {
                        int newValue = source.getValue();
                        AI3.setRecur(newValue);
                        System.out.println(newValue);
                    }
                }
            });
            c.gridx=1;
            c.gridy=4;
            text.add(lookAhead, c);
        }
        else
        {
            text.remove(aiDifficulty);
            text.remove(difficulty);
            text.remove(aiDepth);
            text.remove(lookAhead);
        }
    }

    /**
     * Refreshes the victory and turn labels from the board state.
     */
    private void refreshText()
    {
        if (board.gameIsWon()!=null) //set victor if there is one
        {
            if (board.gameIsWon().getIsWhite())
            {
                victoryStatus.setText("WHITE");
            }
            else
            {
                victoryStatus.setText("RED");
            }
        }
        else
        {
            victoryStatus.setText("???");
        }
        if (board.isWhiteTurn()) //display turn
            turnStatus.setText("WHITE");
        else
            turnStatus.setText("RED");
    }

    /**
     * Returns the pixel coordinates [x, y] of a mouse click.
     */
    private int[] pressed(MouseEvent e) //returns pixel coordinates where clicked
    {
        int[] coordinates = new int[2]; //[x,y]
        coordinates[0] = e.getX();
        coordinates[1] = e.getY();
        return coordinates;
    }

    /**
     * Converts pixel coordinates into checkerboard coordinates in
     * [0,0]..[7,7] by dividing by the tile width (mutates and returns
     * the given array).
     */
    private int[] arrayCoord(int[] pixelCoord) //returns coordinates within the checkerboard, limited to [0,0] to [7,7]
    {
        for (int i=0; i<2; i++)
            pixelCoord[i] /= MULTIPLIER; //Divide the pixel by the width of each piece
        return pixelCoord;
    }

    /**
     * Applies the selected (from, to) move to the board model.
     * Parameter renamed from `currentSelected` so it no longer shadows
     * the field of the same name.
     */
    private void move(int[][] selection) //moves the pieces in the Board variable
    {
        board.makeMove(selection[0][1],selection[0][0],selection[1][1],selection[1][0]);
    }

    public static void main (String[] args) //Run the game!
    {
        CheckersGUI gui = new CheckersGUI();
        gui.renderBoard();
    }
}
| |
package com.crawljax.oracle;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import com.crawljax.oraclecomparator.Comparator;
import com.crawljax.oraclecomparator.comparators.AttributeComparator;
import com.crawljax.oraclecomparator.comparators.DateComparator;
import com.crawljax.oraclecomparator.comparators.EditDistanceComparator;
import com.crawljax.oraclecomparator.comparators.PlainStructureComparator;
import com.crawljax.oraclecomparator.comparators.RegexComparator;
import com.crawljax.oraclecomparator.comparators.ScriptComparator;
import com.crawljax.oraclecomparator.comparators.SimpleComparator;
import com.crawljax.oraclecomparator.comparators.StyleComparator;
import com.crawljax.oraclecomparator.comparators.XPathExpressionComparator;
/**
 * Unit tests for the Crawljax oracle comparators: each test feeds an
 * "original" and a "new" DOM string to a comparator and asserts whether
 * the comparator considers them equivalent.
 */
public class OracleTest {

    // Asserts that the comparator treats the two DOM strings as equivalent.
    private void compareTwoDomsWithComparatorEqual(String original, String newDom,
            Comparator comparator) {
        comparator.setOriginalDom(original);
        comparator.setNewDom(newDom);
        assertTrue(comparator.isEquivalent());
    }

    // Asserts that the comparator treats the two DOM strings as different.
    private void compareTwoDomsWithComparatorNotEqual(String original, String newDom,
            Comparator comparator) {
        comparator.setOriginalDom(original);
        comparator.setNewDom(newDom);
        assertFalse(comparator.isEquivalent());
    }

    /**
     * DateComparator should ignore differing dates/times in many formats:
     * weekday-prefixed dates, numeric dates with -, ., / separators,
     * long/abbreviated month names, two-digit years, and clock times.
     */
    @Test
    public void testDateOracle() {
        Comparator oracle = new DateComparator();
        /* dates with days */
        compareTwoDomsWithComparatorEqual("<HTML>Monday 15 march 1998</HTML>",
                "<HTML>Tuesday 13 december 2005</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>Monday 1 feb '98</HTML>",
                "<HTML>Wednesday 15 march '00</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>Friday 10 february</HTML>",
                "<HTML>Wednesday 3 march</HTML>", oracle);
        /* dates only numeric */
        compareTwoDomsWithComparatorEqual("<HTML>28-12-1983</HTML>", "<HTML>15-3-1986</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>28.1.1976</HTML>", "<HTML>3.15.1986</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>1/1/2001</HTML>", "<HTML>30/12/1988</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>28-12-1983</HTML>", "<HTML>19-2-1986</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>28.1.1976</HTML>", "<HTML>3.15.1986</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>1/1/2001</HTML>", "<HTML>30/12/1988</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>28-12-'83</HTML>", "<HTML>19-1-'86</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>28.1.'76</HTML>", "<HTML>3.15.'86</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>1/1/'01</HTML>", "<HTML>30/12/'88</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>2003-16-03</HTML>", "<HTML>1986-3-3</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>1993.12.12</HTML>", "<HTML>1997.13.09</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>2013/1/3</HTML>", "<HTML>1986/3/3</HTML>",
                oracle);
        /* dates with long months */
        compareTwoDomsWithComparatorEqual("<HTML>19 november 1986</HTML>",
                "<HTML>18 june 1973</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>1th march 1986</HTML>",
                "<HTML>28th december 2005</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>15th november</HTML>", "<HTML>3th july</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>2003 March 15</HTML>",
                "<HTML>1978 july 5</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>2003Apr15</HTML>", "<HTML>1978jul5</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>March 2003</HTML>", "<HTML>October 1996</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>April '02</HTML>", "<HTML>August '99</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>April 19 2007</HTML>",
                "<HTML>January 1 1994</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>April 19, 2007</HTML>",
                "<HTML>January 1, 1994</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>April 4 '07</HTML>",
                "<HTML>January 1 '87</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>April 19, '66</HTML>",
                "<HTML>January 1, '88</HTML>", oracle);
        /* time */
        compareTwoDomsWithComparatorEqual("<HTML>4:47:00 am</HTML>", "<HTML>3:59:2PM</HTML>",
                oracle);
        compareTwoDomsWithComparatorEqual("<HTML>2:13pm</HTML>", "<HTML>3:59am</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML>14:17:29</HTML>", "<HTML>7:34:26</HTML>", oracle);
    }

    /**
     * StyleComparator should ignore purely presentational markup and
     * style attributes, but still flag style changes that affect
     * visibility (e.g. display:inline vs display:none).
     */
    @Test
    public void testStyleOracle() {
        Comparator oracle = new StyleComparator();
        /* IGNORE_TAGS */
        compareTwoDomsWithComparatorEqual("<HTML><B>foo</B></HTML>", "<HTML>foo</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML><PRE>foo</PRE></HTML>",
                "<HTML><STRONG>foo</STRONG></HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML><FONT color=\"red\">foo</FONT> bar</HTML>",
                "<HTML>foo bar</HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML><FONT color=\"red\">foo</FONT> bar</HTML>",
                "<HTML><FONT color=\"green\">foo</FONT> bar</HTML>", oracle);
        /* IGNORE_ATTRIBUTES */
        compareTwoDomsWithComparatorEqual("<HTML><SPAN width=\"100px\">foo</SPAN></HTML>",
                "<HTML><SPAN>foo</SPAN></HTML>", oracle);
        compareTwoDomsWithComparatorEqual("<HTML><SPAN>foo</SPAN></HTML>",
                "<HTML><SPAN valign=\"top\">foo</SPAN></HTML>", oracle);
        /* STYLE ATTRIBUTES */
        compareTwoDomsWithComparatorEqual(
                "<HTML><SPAN style=\"color: green;\">foo</SPAN></HTML>",
                "<HTML><SPAN style=\"color:red;\">foo</SPAN></HTML>", oracle);
        compareTwoDomsWithComparatorEqual(
                "<HTML><SPAN style=\"color: yellow\">foo</SPAN></HTML>",
                "<HTML><SPAN>foo</SPAN></HTML>", oracle);
        compareTwoDomsWithComparatorEqual(
                "<HTML><SPAN style=\"display:inline;color:red;\">foo</SPAN></HTML>",
                "<HTML><SPAN style=\"display:inline; color:green;\">foo</SPAN></HTML>", oracle);
        compareTwoDomsWithComparatorNotEqual(
                "<HTML><SPAN style=\"display:inline;color:red;\">foo</SPAN></HTML>",
                "<HTML><SPAN style=\"display:none; color:green;\">foo</SPAN></HTML>", oracle);
    }

    /** SimpleComparator should ignore whitespace-only differences. */
    @Test
    public void testSimpleOracle() {
        Comparator oracle = new SimpleComparator();
        compareTwoDomsWithComparatorEqual("<HTML>\n\n<SPAN>\n foo\n</SPAN></HTML>",
                "<HTML>\n<SPAN>\n foo \n\n</SPAN>\n</HTML>", oracle);
    }

    /** RegexComparator should ignore content matching the given pattern (here: IPv4 addresses). */
    @Test
    public void testRegexOracle() {
        Comparator oracle =
                new RegexComparator("[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}");
        compareTwoDomsWithComparatorEqual("<HTML>192.168.1.1</HTML>", "<HTML>10.0.0.138</HTML>",
                oracle);
    }

    /** AttributeComparator should ignore differences in the named attribute. */
    @Test
    public void testAttributeOracle() {
        String control = "<HTML><A href=\"foo.html\" myattr=\"true\">foo</A><HTML>";
        String test = "<HTML><A href=\"foo.html\" myattr=\"false\">foo</A><HTML>";
        compareTwoDomsWithComparatorEqual(control, test, new AttributeComparator("myattr"));
    }

    /** PlainStructureComparator should compare element structure only, ignoring all attributes. */
    @Test
    public void testPlainStructureOracle() {
        String control =
                "<HTML><A href=\"foo.html\" jquery12421421=\"bla\" myattr=\"true\">foo</A><HTML>";
        String test = "<HTML><A></A><HTML>";
        compareTwoDomsWithComparatorEqual(control, test, new PlainStructureComparator(control,
                test));
    }

    /** ScriptComparator should strip script tags before comparing. */
    @Test
    public void testScriptComparator() {
        String control =
                "<HTML><head><script>JavaScript();</script><title>Test</title></head><body><script>JavaScript23();</script>test</body><HTML>";
        String test = "<HTML><head><title>Test</title></head><body>test</body><HTML>";
        compareTwoDomsWithComparatorEqual(control, test, new ScriptComparator(control, test));
    }

    /**
     * EditDistanceComparator: a threshold of 0 accepts anything, 1 demands
     * identity, and intermediate thresholds (0.5) tolerate small edits.
     */
    @Test
    public void testEditDistanceComparator() {
        String control = "<HTML><head><title>Test</title></head><body>test</body><HTML>";
        String test = "<HTML><head><title>Test</title></head><body>test</body><HTML>";
        assertTrue(control.equals(test));
        compareTwoDomsWithComparatorEqual(control, test, new EditDistanceComparator(0));
        compareTwoDomsWithComparatorEqual(control, test, new EditDistanceComparator(1));
        test = "TheIsAlotOfRubish";
        compareTwoDomsWithComparatorNotEqual(control, test, new EditDistanceComparator(1));
        compareTwoDomsWithComparatorEqual(control, test, new EditDistanceComparator(0));
        // We miss the title
        test = "<HTML><head></head><body>test</body><HTML>";
        Comparator oracle = new EditDistanceComparator(0.5);
        compareTwoDomsWithComparatorEqual(control, test, oracle);
        compareTwoDomsWithComparatorNotEqual(control, test, new EditDistanceComparator(1));
        compareTwoDomsWithComparatorEqual(control, test, new EditDistanceComparator(0));
    }

    /**
     * XPathExpressionComparator should ignore subtrees matched by its
     * registered XPath expressions, symmetrically in both directions.
     */
    @Test
    public void testXPathExpressionComparator() {
        String control = "<HTML><head><title>Test</title></head><body>test</body><HTML>";
        String test = "<HTML><head><title>Test</title></head><body>test</body><HTML>";
        assertTrue(control.equals(test));
        XPathExpressionComparator oracle = new XPathExpressionComparator();
        compareTwoDomsWithComparatorEqual(control, test, oracle);
        compareTwoDomsWithComparatorEqual(control, test, new XPathExpressionComparator(control,
                test));
        test =
                "<HTML><head><title>Test</title></head><body>test<div id='ignoreme'>"
                        + "ignoreme</div></body><HTML>";
        compareTwoDomsWithComparatorNotEqual(control, test, oracle);
        compareTwoDomsWithComparatorNotEqual(control, test, new XPathExpressionComparator(
                control, test));
        oracle.addExpression("//*[@id='ignoreme']");
        compareTwoDomsWithComparatorEqual(control, test, oracle);
        compareTwoDomsWithComparatorEqual(test, control, oracle);
        control =
                "<HTML><head><title>Test</title></head><body>test<div id='ignoreme'>"
                        + "ignoreme123</div></body><HTML>";
        compareTwoDomsWithComparatorEqual(control, test, oracle);
        compareTwoDomsWithComparatorEqual(test, control, oracle);
    }
}
| |
package com.asha.md360player4android;
import android.animation.PropertyValuesHolder;
import android.animation.ValueAnimator;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.util.SimpleArrayMap;
import android.util.SparseArray;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.TextView;
import android.widget.Toast;
import com.asha.vrlib.MDDirectorCamUpdate;
import com.asha.vrlib.MDVRLibrary;
import com.asha.vrlib.model.MDHitEvent;
import com.asha.vrlib.model.MDHotspotBuilder;
import com.asha.vrlib.model.MDPosition;
import com.asha.vrlib.model.MDRay;
import com.asha.vrlib.model.MDViewBuilder;
import com.asha.vrlib.model.position.MDMutablePosition;
import com.asha.vrlib.plugins.MDAbsPlugin;
import com.asha.vrlib.plugins.MDWidgetPlugin;
import com.asha.vrlib.plugins.hotspot.IMDHotspot;
import com.asha.vrlib.plugins.hotspot.MDAbsHotspot;
import com.asha.vrlib.plugins.hotspot.MDAbsView;
import com.asha.vrlib.plugins.hotspot.MDSimpleHotspot;
import com.asha.vrlib.plugins.hotspot.MDView;
import com.asha.vrlib.texture.MD360BitmapTexture;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Target;
import java.io.FileNotFoundException;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import static android.animation.PropertyValuesHolder.ofFloat;
import static com.squareup.picasso.MemoryPolicy.NO_CACHE;
import static com.squareup.picasso.MemoryPolicy.NO_STORE;
/**
* using MD360Renderer
*
* Created by hzqiujiadi on 16/1/22.
* hzqiujiadi ashqalcn@gmail.com
*/
public abstract class MD360PlayerActivity extends Activity {

    private static final String TAG = "MD360PlayerActivity";

    // Human-readable labels for each spinner option, keyed by the MDVRLibrary mode constants.
    private static final SparseArray<String> sDisplayMode = new SparseArray<>();
    private static final SparseArray<String> sInteractiveMode = new SparseArray<>();
    private static final SparseArray<String> sProjectionMode = new SparseArray<>();
    private static final SparseArray<String> sAntiDistortion = new SparseArray<>();
    private static final SparseArray<String> sPitchFilter = new SparseArray<>();
    private static final SparseArray<String> sFlingEnabled = new SparseArray<>();

    static {
        sDisplayMode.put(MDVRLibrary.DISPLAY_MODE_NORMAL, "NORMAL");
        sDisplayMode.put(MDVRLibrary.DISPLAY_MODE_GLASS, "GLASS");
        sInteractiveMode.put(MDVRLibrary.INTERACTIVE_MODE_MOTION, "MOTION");
        sInteractiveMode.put(MDVRLibrary.INTERACTIVE_MODE_TOUCH, "TOUCH");
        sInteractiveMode.put(MDVRLibrary.INTERACTIVE_MODE_MOTION_WITH_TOUCH, "M & T");
        // NOTE: "CARDBORAD" is the (misspelled) constant name in the library itself.
        sInteractiveMode.put(MDVRLibrary.INTERACTIVE_MODE_CARDBORAD_MOTION, "CARDBOARD M");
        sInteractiveMode.put(MDVRLibrary.INTERACTIVE_MODE_CARDBORAD_MOTION_WITH_TOUCH, "CARDBOARD M&T");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_SPHERE, "SPHERE");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_DOME180, "DOME 180");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_DOME230, "DOME 230");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_DOME180_UPPER, "DOME 180 UPPER");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_DOME230_UPPER, "DOME 230 UPPER");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_STEREO_SPHERE_HORIZONTAL, "STEREO H SPHERE");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_STEREO_SPHERE_VERTICAL, "STEREO V SPHERE");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_PLANE_FIT, "PLANE FIT");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_PLANE_CROP, "PLANE CROP");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_PLANE_FULL, "PLANE FULL");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_MULTI_FISH_EYE_HORIZONTAL, "MULTI FISH EYE HORIZONTAL");
        sProjectionMode.put(MDVRLibrary.PROJECTION_MODE_MULTI_FISH_EYE_VERTICAL, "MULTI FISH EYE VERTICAL");
        sProjectionMode.put(CustomProjectionFactory.CUSTOM_PROJECTION_FISH_EYE_RADIUS_VERTICAL, "CUSTOM MULTI FISH EYE");
        // 1/0 used as boolean keys for the on/off spinners below.
        sAntiDistortion.put(1, "ANTI-ENABLE");
        sAntiDistortion.put(0, "ANTI-DISABLE");
        sPitchFilter.put(1, "FILTER PITCH");
        sPitchFilter.put(0, "FILTER NOP");
        sFlingEnabled.put(1, "FLING ENABLED");
        sFlingEnabled.put(0, "FLING DISABLED");
    }

    // Convenience launchers: start the concrete player activity for a given media Uri.
    public static void startVideo(Context context, Uri uri) {
        start(context, uri, VideoPlayerActivity.class);
    }

    public static void startBitmap(Context context, Uri uri) {
        start(context, uri, BitmapPlayerActivity.class);
    }

    public static void startCubemap(Context context, Uri uri) {
        start(context, uri, CubemapPlayerActivity.class);
    }

    // Common launch path: the Uri travels via Intent data and is read back in getUri().
    private static void start(Context context, Uri uri, Class<? extends Activity> clz) {
        Intent i = new Intent(context, clz);
        i.setData(uri);
        context.startActivity(i);
    }

    private MDVRLibrary mVRLibrary;

    // load resource from android drawable and remote url.
    private MDVRLibrary.IImageLoadProvider mImageLoadProvider = new ImageLoadProvider();

    // load resource from android drawable only.
    private MDVRLibrary.IImageLoadProvider mAndroidProvider = new AndroidProvider(this);

    // Plugins added through the demo buttons; tracked so they can be removed one by one.
    private List<MDAbsPlugin> plugins = new LinkedList<>();

    // Fixed position for the logo hotspot (below the camera, yawed to the left).
    private MDPosition logoPosition = MDMutablePosition.newInstance().setY(-8.0f).setYaw(-90.0f);

    // Pool of preset positions a randomly added hotspot can land on.
    private MDPosition[] positions = new MDPosition[]{
            MDPosition.newInstance().setZ(-8.0f).setYaw(-45.0f),
            MDPosition.newInstance().setZ(-18.0f).setYaw(15.0f).setAngleX(15),
            MDPosition.newInstance().setZ(-10.0f).setYaw(-10.0f).setAngleX(-15),
            MDPosition.newInstance().setZ(-10.0f).setYaw(30.0f).setAngleX(30),
            MDPosition.newInstance().setZ(-10.0f).setYaw(-30.0f).setAngleX(-30),
            MDPosition.newInstance().setZ(-5.0f).setYaw(30.0f).setAngleX(60),
            MDPosition.newInstance().setZ(-3.0f).setYaw(15.0f).setAngleX(-45),
            MDPosition.newInstance().setZ(-3.0f).setYaw(15.0f).setAngleX(-45).setAngleY(45),
            MDPosition.newInstance().setZ(-3.0f).setYaw(0.0f).setAngleX(90),
    };

    /**
     * Builds the full demo UI: creates the VR library (via the subclass hook
     * {@link #createVRLibrary()}), then wires every spinner and button to the
     * corresponding library feature.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // no title
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        // full screen
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        // set content view
        setContentView(R.layout.activity_md_using_surface_view);
        // init VR Library
        mVRLibrary = createVRLibrary();

        final Activity activity = this;

        // One crosshair per eye; the second becomes visible only in GLASS (stereo) mode.
        final List<View> hotspotPoints = new LinkedList<>();
        hotspotPoints.add(findViewById(R.id.hotspot_point1));
        hotspotPoints.add(findViewById(R.id.hotspot_point2));

        // Display mode spinner: NORMAL shows one crosshair, GLASS shows two.
        SpinnerHelper.with(this)
                .setData(sDisplayMode)
                .setDefault(mVRLibrary.getDisplayMode())
                .setClickHandler(new SpinnerHelper.ClickHandler() {
                    @Override
                    public void onSpinnerClicked(int index, int key, String value) {
                        mVRLibrary.switchDisplayMode(MD360PlayerActivity.this, key);
                        int i = 0;
                        int size = key == MDVRLibrary.DISPLAY_MODE_GLASS ? 2 : 1;
                        for (View point : hotspotPoints) {
                            point.setVisibility(i < size ? View.VISIBLE : View.GONE);
                            i++;
                        }
                    }
                })
                .init(R.id.spinner_display);

        // Interactive mode spinner (motion / touch / combinations).
        SpinnerHelper.with(this)
                .setData(sInteractiveMode)
                .setDefault(mVRLibrary.getInteractiveMode())
                .setClickHandler(new SpinnerHelper.ClickHandler() {
                    @Override
                    public void onSpinnerClicked(int index, int key, String value) {
                        mVRLibrary.switchInteractiveMode(MD360PlayerActivity.this, key);
                    }
                })
                .init(R.id.spinner_interactive);

        // Projection mode spinner (sphere / dome / plane / fish-eye ...).
        SpinnerHelper.with(this)
                .setData(sProjectionMode)
                .setDefault(mVRLibrary.getProjectionMode())
                .setClickHandler(new SpinnerHelper.ClickHandler() {
                    @Override
                    public void onSpinnerClicked(int index, int key, String value) {
                        mVRLibrary.switchProjectionMode(MD360PlayerActivity.this, key);
                    }
                })
                .init(R.id.spinner_projection);

        // Anti-distortion on/off spinner (key 0 = disabled).
        SpinnerHelper.with(this)
                .setData(sAntiDistortion)
                .setDefault(mVRLibrary.isAntiDistortionEnabled() ? 1 : 0)
                .setClickHandler(new SpinnerHelper.ClickHandler() {
                    @Override
                    public void onSpinnerClicked(int index, int key, String value) {
                        mVRLibrary.setAntiDistortionEnabled(key != 0);
                    }
                })
                .init(R.id.spinner_distortion);

        // Adds a checkable star hotspot at a random preset position.
        findViewById(R.id.button_add_plugin).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                final int index = (int) (Math.random() * 100) % positions.length;
                MDPosition position = positions[index];
                MDHotspotBuilder builder = MDHotspotBuilder.create(mImageLoadProvider)
                        .size(4f, 4f)
                        // status keys 0/1 = normal/focused, 10/11 = checked normal/focused.
                        .provider(0, activity, android.R.drawable.star_off)
                        .provider(1, activity, android.R.drawable.star_on)
                        .provider(10, activity, android.R.drawable.checkbox_off_background)
                        .provider(11, activity, android.R.drawable.checkbox_on_background)
                        .listenClick(new MDVRLibrary.ITouchPickListener() {
                            @Override
                            public void onHotspotHit(IMDHotspot hitHotspot, MDRay ray) {
                                // Toggle checked state when the widget itself is tapped.
                                if (hitHotspot instanceof MDWidgetPlugin) {
                                    MDWidgetPlugin widgetPlugin = (MDWidgetPlugin) hitHotspot;
                                    widgetPlugin.setChecked(!widgetPlugin.getChecked());
                                }
                            }
                        })
                        .title("star" + index)
                        .position(position)
                        .status(0, 1)
                        .checkedStatus(10, 11);
                MDWidgetPlugin plugin = new MDWidgetPlugin(builder);
                plugins.add(plugin);
                getVRLibrary().addPlugin(plugin);
                Toast.makeText(MD360PlayerActivity.this, "add plugin position:" + position, Toast.LENGTH_SHORT).show();
            }
        });

        // Adds a simple (non-checkable) logo hotspot at the fixed logoPosition.
        findViewById(R.id.button_add_plugin_logo).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                MDHotspotBuilder builder = MDHotspotBuilder.create(mImageLoadProvider)
                        .size(4f, 4f)
                        .provider(activity, R.drawable.moredoo_logo)
                        .title("logo")
                        .position(logoPosition)
                        .listenClick(new MDVRLibrary.ITouchPickListener() {
                            @Override
                            public void onHotspotHit(IMDHotspot hitHotspot, MDRay ray) {
                                Toast.makeText(MD360PlayerActivity.this, "click logo", Toast.LENGTH_SHORT).show();
                            }
                        });
                MDAbsHotspot hotspot = new MDSimpleHotspot(builder);
                plugins.add(hotspot);
                getVRLibrary().addPlugin(hotspot);
                Toast.makeText(MD360PlayerActivity.this, "add plugin logo" , Toast.LENGTH_SHORT).show();
            }
        });

        // Removes the most recently added plugin (LIFO).
        findViewById(R.id.button_remove_plugin).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (plugins.size() > 0) {
                    MDAbsPlugin plugin = plugins.remove(plugins.size() - 1);
                    getVRLibrary().removePlugin(plugin);
                }
            }
        });

        // Removes every plugin added so far.
        findViewById(R.id.button_remove_plugins).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                plugins.clear();
                getVRLibrary().removePlugins();
            }
        });

        // Adds a logo hotspot directly in front of the current camera orientation.
        findViewById(R.id.button_add_hotspot_front).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                MDHotspotBuilder builder = MDHotspotBuilder.create(mImageLoadProvider)
                        .size(4f, 4f)
                        .provider(activity, R.drawable.moredoo_logo)
                        .title("front logo")
                        .tag("tag-front")
                        .position(MDPosition.newInstance().setZ(-12.0f).setY(-1.0f));
                MDAbsHotspot hotspot = new MDSimpleHotspot(builder);
                hotspot.rotateToCamera();
                plugins.add(hotspot);
                getVRLibrary().addPlugin(hotspot);
            }
        });

        // Re-orients the "tag-front" hotspot towards the camera on demand.
        findViewById(R.id.button_rotate_to_camera_plugin).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                IMDHotspot hotspot = getVRLibrary().findHotspotByTag("tag-front");
                if (hotspot != null) {
                    hotspot.rotateToCamera();
                }
            }
        });

        // Renders an ordinary Android TextView inside the VR scene as an MDView plugin.
        findViewById(R.id.button_add_md_view).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                TextView textView = new TextView(activity);
                textView.setBackgroundColor(0x55FFCC11);
                textView.setText("Hello world.");
                MDViewBuilder builder = MDViewBuilder.create()
                        .provider(textView, 400/*view width*/, 100/*view height*/)
                        .size(4, 1)
                        .position(MDPosition.newInstance().setZ(-12.0f))
                        .title("md view")
                        .tag("tag-md-text-view")
                        ;
                MDAbsView mdView = new MDView(builder);
                plugins.add(mdView);
                getVRLibrary().addPlugin(mdView);
            }
        });

        // Mutates the attached TextView of an existing MDView; invalidate() re-uploads the texture.
        findViewById(R.id.button_update_md_view).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                MDAbsView mdView = getVRLibrary().findViewByTag("tag-md-text-view");
                if (mdView != null) {
                    TextView textView = mdView.castAttachedView(TextView.class);
                    textView.setText("Cheer up!");
                    textView.setBackgroundColor(0x8800FF00);
                    mdView.invalidate();
                }
            }
        });

        // Adds a HoverView-backed MDView facing the camera.
        findViewById(R.id.button_md_view_hover).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                View view = new HoverView(activity);
                view.setBackgroundColor(0x55FFCC11);
                MDViewBuilder builder = MDViewBuilder.create()
                        .provider(view, 300/*view width*/, 200/*view height*/)
                        .size(3, 2)
                        .position(MDPosition.newInstance().setZ(-8.0f))
                        .title("md view")
                        .tag("tag-md-text-view")
                        ;
                MDAbsView mdView = new MDView(builder);
                mdView.rotateToCamera();
                plugins.add(mdView);
                getVRLibrary().addPlugin(mdView);
            }
        });

        final TextView hotspotText = (TextView) findViewById(R.id.hotspot_text);
        final TextView directorBriefText = (TextView) findViewById(R.id.director_brief_text);

        // Gaze (eye-pick) listener: shows which hotspot is being looked at and for how long;
        // resets the pick after the gaze dwells on the same hotspot for more than 5 seconds.
        getVRLibrary().setEyePickChangedListener(new MDVRLibrary.IEyePickListener2() {
            @Override
            public void onHotspotHit(MDHitEvent hitEvent) {
                IMDHotspot hotspot = hitEvent.getHotspot();
                long hitTimestamp = hitEvent.getTimestamp();
                String text = hotspot == null ? "nop" : String.format(Locale.CHINESE, "%s %fs", hotspot.getTitle(), (System.currentTimeMillis() - hitTimestamp) / 1000.0f );
                hotspotText.setText(text);
                String brief = getVRLibrary().getDirectorBrief().toString();
                directorBriefText.setText(brief);
                if (System.currentTimeMillis() - hitTimestamp > 5000) {
                    getVRLibrary().resetEyePick();
                }
            }
        });

        // Animates the camera into a "little planet" view (pitched 90 degrees, pulled back).
        findViewById(R.id.button_camera_little_planet).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                MDDirectorCamUpdate cameraUpdate = getVRLibrary().updateCamera();
                PropertyValuesHolder near = ofFloat("near", cameraUpdate.getNearScale(), -0.5f);
                PropertyValuesHolder eyeZ = PropertyValuesHolder.ofFloat("eyeZ", cameraUpdate.getEyeZ(), 18f);
                PropertyValuesHolder pitch = PropertyValuesHolder.ofFloat("pitch", cameraUpdate.getPitch(), 90f);
                PropertyValuesHolder yaw = PropertyValuesHolder.ofFloat("yaw", cameraUpdate.getYaw(), 90f);
                PropertyValuesHolder roll = PropertyValuesHolder.ofFloat("roll", cameraUpdate.getRoll(), 0f);
                startCameraAnimation(cameraUpdate, near, eyeZ, pitch, yaw, roll);
            }
        });

        // Animates the camera back to the default orientation.
        findViewById(R.id.button_camera_to_normal).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                MDDirectorCamUpdate cameraUpdate = getVRLibrary().updateCamera();
                PropertyValuesHolder near = ofFloat("near", cameraUpdate.getNearScale(), 0f);
                PropertyValuesHolder eyeZ = PropertyValuesHolder.ofFloat("eyeZ", cameraUpdate.getEyeZ(), 0f);
                PropertyValuesHolder pitch = PropertyValuesHolder.ofFloat("pitch", cameraUpdate.getPitch(), 0f);
                PropertyValuesHolder yaw = PropertyValuesHolder.ofFloat("yaw", cameraUpdate.getYaw(), 0f);
                PropertyValuesHolder roll = PropertyValuesHolder.ofFloat("roll", cameraUpdate.getRoll(), 0f);
                startCameraAnimation(cameraUpdate, near, eyeZ, pitch, yaw, roll);
            }
        });

        // Pitch filter spinner: when enabled, clamps the camera pitch to [-70, 70] degrees.
        SpinnerHelper.with(this)
                .setData(sPitchFilter)
                .setDefault(0)
                .setClickHandler(new SpinnerHelper.ClickHandler() {
                    @Override
                    public void onSpinnerClicked(int index, int key, String value) {
                        MDVRLibrary.IDirectorFilter filter = key == 0 ? null : new MDVRLibrary.DirectorFilterAdatper() {
                            @Override
                            public float onFilterPitch(float input) {
                                if (input > 70) {
                                    return 70;
                                }
                                if (input < -70) {
                                    return -70;
                                }
                                return input;
                            }
                        };
                        getVRLibrary().setDirectorFilter(filter);
                    }
                })
                .init(R.id.spinner_pitch_filter);

        // Fling (inertia scroll) on/off spinner.
        SpinnerHelper.with(this)
                .setData(sFlingEnabled)
                .setDefault(getVRLibrary().isFlingEnabled() ? 1 : 0)
                .setClickHandler(new SpinnerHelper.ClickHandler() {
                    @Override
                    public void onSpinnerClicked(int index, int key, String value) {
                        getVRLibrary().setFlingEnabled(key == 1);
                    }
                })
                .init(R.id.spinner_fling_enable);
    }

    // Currently running camera animation; cancelled before a new one starts.
    private ValueAnimator animator;

    /**
     * Runs a 2-second animation over the given camera properties ("near", "eyeZ",
     * "pitch", "yaw", "roll"), pushing each intermediate value into cameraUpdate.
     * Any in-flight animation is cancelled first so the two don't fight.
     */
    private void startCameraAnimation(final MDDirectorCamUpdate cameraUpdate, PropertyValuesHolder... values) {
        if (animator != null) {
            animator.cancel();
        }
        animator = ValueAnimator.ofPropertyValuesHolder(values).setDuration(2000);
        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                float near = (float) animation.getAnimatedValue("near");
                float eyeZ = (float) animation.getAnimatedValue("eyeZ");
                float pitch = (float) animation.getAnimatedValue("pitch");
                float yaw = (float) animation.getAnimatedValue("yaw");
                float roll = (float) animation.getAnimatedValue("roll");
                cameraUpdate.setEyeZ(eyeZ).setNearScale(near).setPitch(pitch).setYaw(yaw).setRoll(roll);
            }
        });
        animator.start();
    }

    /** Subclass hook: build and configure the MDVRLibrary instance for this player. */
    abstract protected MDVRLibrary createVRLibrary();

    public MDVRLibrary getVRLibrary() {
        return mVRLibrary;
    }

    // Lifecycle forwarding: MDVRLibrary must track resume/pause/destroy/rotation.
    @Override
    protected void onResume() {
        super.onResume();
        mVRLibrary.onResume(this);
    }

    @Override
    protected void onPause() {
        super.onPause();
        mVRLibrary.onPause(this);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        mVRLibrary.onDestroy();
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        mVRLibrary.onOrientationChanged(this);
    }

    /** Returns the media Uri passed via the launch Intent, or null when absent. */
    protected Uri getUri() {
        Intent i = getIntent();
        if (i == null || i.getData() == null) {
            return null;
        }
        return i.getData();
    }

    // Show/hide the loading indicator.
    public void cancelBusy() {
        findViewById(R.id.progress).setVisibility(View.GONE);
    }

    public void busy() {
        findViewById(R.id.progress).setVisibility(View.VISIBLE);
    }

    // android impl
    /**
     * Bitmap provider backed by the platform ContentResolver: decodes the Uri's
     * stream directly. Only works for local/content Uris, not remote URLs.
     */
    private class AndroidProvider implements MDVRLibrary.IImageLoadProvider {

        Activity activity;

        public AndroidProvider(Activity activity) {
            this.activity = activity;
        }

        @Override
        public void onProvideBitmap(Uri uri, MD360BitmapTexture.Callback callback) {
            try {
                Bitmap bitmap = BitmapFactory.decodeStream(activity.getContentResolver().openInputStream(uri));
                callback.texture(bitmap);
            } catch (FileNotFoundException e) {
                // Best-effort demo code: a missing file simply leaves the texture unset.
                e.printStackTrace();
            }
        }
    }

    // picasso impl
    /**
     * Bitmap provider backed by Picasso; handles both local resources and remote URLs.
     * Picasso holds Targets weakly, so each Target is kept in targetMap until its
     * callback fires — otherwise it could be garbage-collected before loading finishes.
     */
    private class ImageLoadProvider implements MDVRLibrary.IImageLoadProvider {

        private SimpleArrayMap<Uri, Target> targetMap = new SimpleArrayMap<>();

        @Override
        public void onProvideBitmap(final Uri uri, final MD360BitmapTexture.Callback callback) {
            final Target target = new Target() {
                @Override
                public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
                    // texture
                    callback.texture(bitmap);
                    targetMap.remove(uri);
                }

                @Override
                public void onBitmapFailed(Drawable errorDrawable) {
                    targetMap.remove(uri);
                }

                @Override
                public void onPrepareLoad(Drawable placeHolderDrawable) {
                }
            };
            targetMap.put(uri, target);
            // Resize to the max GL texture size, skip Picasso's caches (textures are one-shot).
            Picasso.with(getApplicationContext()).load(uri).resize(callback.getMaxTextureSize(), callback.getMaxTextureSize()).onlyScaleDown().centerInside().memoryPolicy(NO_CACHE, NO_STORE).into(target);
        }
    }
}
| |
/*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.jooq;
import javax.sql.DataSource;
import org.jooq.DSLContext;
import org.jooq.ExecuteListener;
import org.jooq.ExecuteListenerProvider;
import org.jooq.Record;
import org.jooq.RecordListener;
import org.jooq.RecordListenerProvider;
import org.jooq.RecordMapper;
import org.jooq.RecordMapperProvider;
import org.jooq.RecordType;
import org.jooq.SQLDialect;
import org.jooq.TransactionalRunnable;
import org.jooq.VisitListener;
import org.jooq.VisitListenerProvider;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.springframework.boot.autoconfigure.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.test.EnvironmentTestUtils;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
/**
* Tests for {@link JooqAutoConfiguration}.
*
* @author Andreas Ahlenstorf
* @author Phillip Webb
* @author Andy Wilkinson
*/
public class JooqAutoConfigurationTests {

    private static final String[] NO_BEANS = {};

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    private AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();

    @Before
    public void init() {
        // Name the datasource so each test run targets the same in-memory database.
        EnvironmentTestUtils.addEnvironment(this.context,
                "spring.datasource.name:jooqtest");
        EnvironmentTestUtils.addEnvironment(this.context, "spring.jooq.sql-dialect:H2");
    }

    @After
    public void close() {
        if (this.context != null) {
            this.context.close();
        }
    }

    /** Without a DataSource bean, the auto-configuration must not create a DSLContext. */
    @Test
    public void noDataSource() throws Exception {
        registerAndRefresh(JooqAutoConfiguration.class,
                PropertyPlaceholderAutoConfiguration.class);
        assertThat(this.context.getBeanNamesForType(DSLContext.class).length)
                .isEqualTo(0);
    }

    /**
     * With no PlatformTransactionManager, each statement auto-commits: the failing
     * batch still leaves its first insert behind, so the count ends at 2.
     */
    @Test
    public void jooqWithoutTx() throws Exception {
        registerAndRefresh(JooqDataSourceConfiguration.class, JooqAutoConfiguration.class,
                PropertyPlaceholderAutoConfiguration.class);
        assertThat(getBeanNames(PlatformTransactionManager.class)).isEqualTo(NO_BEANS);
        assertThat(getBeanNames(SpringTransactionProvider.class)).isEqualTo(NO_BEANS);
        DSLContext dsl = this.context.getBean(DSLContext.class);
        dsl.execute("create table jooqtest (name varchar(255) primary key);");
        dsl.transaction(
                new AssertFetch(dsl, "select count(*) as total from jooqtest;", "0"));
        dsl.transaction(
                new ExecuteSql(dsl, "insert into jooqtest (name) values ('foo');"));
        dsl.transaction(
                new AssertFetch(dsl, "select count(*) as total from jooqtest;", "1"));
        try {
            // 'foo' already exists, so the second insert violates the primary key.
            dsl.transaction(
                    new ExecuteSql(dsl, "insert into jooqtest (name) values ('bar');",
                            "insert into jooqtest (name) values ('foo');"));
            // Fixed grammar ("An" -> "A") to match the sibling test's message.
            fail("A DataIntegrityViolationException should have been thrown.");
        }
        catch (DataIntegrityViolationException ex) {
            // Ignore
        }
        // 'bar' survives because there is no transaction to roll it back.
        dsl.transaction(
                new AssertFetch(dsl, "select count(*) as total from jooqtest;", "2"));
    }

    /**
     * With a PlatformTransactionManager present, the failing batch is rolled back
     * as a unit: 'bar' must disappear and the count stays at 1.
     */
    @Test
    public void jooqWithTx() throws Exception {
        registerAndRefresh(JooqDataSourceConfiguration.class,
                PropertyPlaceholderAutoConfiguration.class, TxManagerConfiguration.class,
                JooqAutoConfiguration.class);
        this.context.getBean(PlatformTransactionManager.class);
        DSLContext dsl = this.context.getBean(DSLContext.class);
        assertThat(dsl.configuration().dialect()).isEqualTo(SQLDialect.H2);
        dsl.execute("create table jooqtest_tx (name varchar(255) primary key);");
        dsl.transaction(
                new AssertFetch(dsl, "select count(*) as total from jooqtest_tx;", "0"));
        dsl.transaction(
                new ExecuteSql(dsl, "insert into jooqtest_tx (name) values ('foo');"));
        dsl.transaction(
                new AssertFetch(dsl, "select count(*) as total from jooqtest_tx;", "1"));
        try {
            // Fixed table name: the original inserted into 'jooqtest' (a table owned by
            // jooqWithoutTx), which made this test order-dependent and meant the rollback
            // of THIS test's table was never actually exercised.
            dsl.transaction(
                    new ExecuteSql(dsl, "insert into jooqtest_tx (name) values ('bar');",
                            "insert into jooqtest_tx (name) values ('foo');"));
            fail("A DataIntegrityViolationException should have been thrown.");
        }
        catch (DataIntegrityViolationException ex) {
            // Ignore
        }
        dsl.transaction(
                new AssertFetch(dsl, "select count(*) as total from jooqtest_tx;", "1"));
    }

    /** User-defined provider beans must be picked up by the auto-configured DSLContext. */
    @Test
    public void customProvidersArePickedUp() {
        registerAndRefresh(JooqDataSourceConfiguration.class,
                PropertyPlaceholderAutoConfiguration.class, TxManagerConfiguration.class,
                TestRecordMapperProvider.class, TestRecordListenerProvider.class,
                TestExecuteListenerProvider.class, TestVisitListenerProvider.class,
                JooqAutoConfiguration.class);
        DSLContext dsl = this.context.getBean(DSLContext.class);
        assertThat(dsl.configuration().recordMapperProvider().getClass())
                .isEqualTo(TestRecordMapperProvider.class);
        assertThat(dsl.configuration().recordListenerProviders().length).isEqualTo(1);
        // 2 = the auto-configured exception-translating listener + the custom one.
        assertThat(dsl.configuration().executeListenerProviders().length).isEqualTo(2);
        assertThat(dsl.configuration().visitListenerProviders().length).isEqualTo(1);
    }

    /** Mixed-case dialect values must bind via relaxed binding. */
    @Test
    public void relaxedBindingOfSqlDialect() {
        EnvironmentTestUtils.addEnvironment(this.context,
                "spring.jooq.sql-dialect:PoSTGrES");
        registerAndRefresh(JooqDataSourceConfiguration.class,
                JooqAutoConfiguration.class);
        assertThat(this.context.getBean(org.jooq.Configuration.class).dialect())
                .isEqualTo(SQLDialect.POSTGRES);
    }

    private void registerAndRefresh(Class<?>... annotatedClasses) {
        this.context.register(annotatedClasses);
        this.context.refresh();
    }

    private String[] getBeanNames(Class<?> type) {
        return this.context.getBeanNamesForType(type);
    }

    /** Runs a query inside a jOOQ transaction and asserts its single scalar result. */
    private static class AssertFetch implements TransactionalRunnable {

        private final DSLContext dsl;

        private final String sql;

        private final String expected;

        AssertFetch(DSLContext dsl, String sql, String expected) {
            this.dsl = dsl;
            this.sql = sql;
            this.expected = expected;
        }

        @Override
        public void run(org.jooq.Configuration configuration) throws Exception {
            assertThat(this.dsl.fetch(this.sql).getValue(0, 0).toString())
                    .isEqualTo(this.expected);
        }
    }

    /** Executes a sequence of SQL statements inside a single jOOQ transaction. */
    private static class ExecuteSql implements TransactionalRunnable {

        private final DSLContext dsl;

        private final String[] sql;

        ExecuteSql(DSLContext dsl, String... sql) {
            this.dsl = dsl;
            this.sql = sql;
        }

        @Override
        public void run(org.jooq.Configuration configuration) throws Exception {
            for (String statement : this.sql) {
                this.dsl.execute(statement);
            }
        }
    }

    @Configuration
    protected static class JooqDataSourceConfiguration {

        @Bean
        public DataSource jooqDataSource() {
            return DataSourceBuilder.create().url("jdbc:hsqldb:mem:jooqtest")
                    .username("sa").build();
        }
    }

    @Configuration
    protected static class TxManagerConfiguration {

        @Bean
        public PlatformTransactionManager transactionManager(DataSource dataSource) {
            return new DataSourceTransactionManager(dataSource);
        }
    }

    protected static class TestRecordMapperProvider implements RecordMapperProvider {

        @Override
        public <R extends Record, E> RecordMapper<R, E> provide(RecordType<R> recordType,
                Class<? extends E> aClass) {
            return null;
        }
    }

    protected static class TestRecordListenerProvider implements RecordListenerProvider {

        @Override
        public RecordListener provide() {
            return null;
        }
    }

    protected static class TestExecuteListenerProvider
            implements ExecuteListenerProvider {

        @Override
        public ExecuteListener provide() {
            return null;
        }
    }

    protected static class TestVisitListenerProvider implements VisitListenerProvider {

        @Override
        public VisitListener provide() {
            return null;
        }
    }
}
| |
package org.quizGen.shasha.templates;
import android.app.Activity;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.support.v7.app.AlertDialog;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.BaseAdapter;
import android.widget.EditText;
import android.widget.TextView;
import org.quizGen.shasha.R;
import org.quizGen.shasha.dictationtemplate.fragment.SplashFragment;
import org.quizGen.shasha.model.Template;
import org.quizGen.shasha.model.TemplateInterface;
import org.quizGen.shasha.utilities.FileDialog;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
/**
 * Dictation template: implements {@link TemplateInterface} for creating
 * dictation quizzes (a titled passage the learner transcribes).
 */
public class DictationTemplate implements TemplateInterface {
transient private DictationAdapter adapter;
private ArrayList<DictationModel> dictData;
private int templateId;
public DictationTemplate() {
dictData = new ArrayList<>();
}
private boolean validated(Context context, EditText title, EditText passage) {
if (title == null || passage == null) {
return false;
}
String titleText = title.getText().toString().trim();
String passageText = passage.getText().toString().trim();
if ("".equals(titleText)) {
title.setError(context.getString(R.string.dictation_template_title_hint));
return false;
} else if ("".equals(passageText)) {
passage.setError(context.getString(R.string.dictation_template_passage_hint));
return false;
}
return true;
}
@Override
public BaseAdapter newTemplateEditorAdapter(Context context) {
adapter = new DictationAdapter(context, dictData);
setEmptyView((Activity) context);
return adapter;
}
@Override
public BaseAdapter newMetaEditorAdapter(Context context) {
return null;
}
@Override
public BaseAdapter currentTemplateEditorAdapter() {
return adapter;
}
@Override
public BaseAdapter currentMetaEditorAdapter() {
return null;
}
@Override
public BaseAdapter loadProjectMetaEditor(Context context, Document doc) {
return null;
}
@Override
public BaseAdapter loadProjectTemplateEditor(Context context, ArrayList<Element> data) {
dictData = new ArrayList<>();
for (Element item : data) {
String dictTitle = item.getElementsByTagName(DictationModel.TITLE_TAG).item(0).getTextContent();
String dictPassage = item.getElementsByTagName(DictationModel.PASSAGE_TAG).item(0).getTextContent();
dictData.add(new DictationModel(dictTitle, dictPassage));
}
adapter = new DictationAdapter(context, dictData);
setEmptyView((Activity) context);
return adapter;
}
@Override
public String getTitle() {
String TEMPLATE_NAME = "Dictation Template";
return TEMPLATE_NAME;
}
@Override
public void addItem(final Activity activity) {
LayoutInflater inflater = activity.getLayoutInflater();
final View dialogView = inflater.inflate(R.layout.dict_dialog_add_edit_data, null);
final AlertDialog dialog = new AlertDialog.Builder(activity)
.setTitle(R.string.info_add_new_title)
.setView(dialogView,
activity.getResources().getDimensionPixelSize(R.dimen.spacing_left),
activity.getResources().getDimensionPixelSize(R.dimen.spacing_top),
activity.getResources().getDimensionPixelSize(R.dimen.spacing_right),
activity.getResources().getDimensionPixelSize(R.dimen.spacing_bottom))
.setPositiveButton(R.string.info_template_add, null)
.setNegativeButton(R.string.info_template_cancel, null)
.create();
dialog.show();
final EditText title = (EditText) dialogView.findViewById(R.id.dict_title);
final EditText passage = (EditText) dialogView.findViewById(R.id.dict_passage);
dialogView.findViewById(R.id.upload).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
FileDialog fileDialog = new FileDialog(activity);
fileDialog.setFileEndsWith();
fileDialog.addFileListener(new FileDialog.FileSelectListener() {
public void fileSelected(File file) {
((TextView) dialogView.findViewById(R.id.file_name)).setText(file.toString());
((TextView) dialogView.findViewById(R.id.dict_passage)).setText(readFile(file));
}
});
fileDialog.showDialog();
}
});
dialog.getButton(DialogInterface.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (validated(activity, title, passage)) {
String titleText = title.getText().toString().trim();
String passageText = passage.getText().toString().trim();
DictationModel temp = new DictationModel(titleText, passageText);
dictData.add(temp);
adapter.notifyDataSetChanged();
setEmptyView(activity);
dialog.dismiss();
}
}
});
}
@Override
public void addMetaData(Activity activity) {
}
@Override
public void editItem(final Activity activity, final int position) {
LayoutInflater inflater = activity.getLayoutInflater();
final View dialogView = inflater.inflate(R.layout.dict_dialog_add_edit_data, null);
final AlertDialog dialog = new AlertDialog.Builder(activity)
.setTitle(R.string.info_edit_title)
.setView(dialogView,
activity.getResources().getDimensionPixelSize(R.dimen.spacing_left),
activity.getResources().getDimensionPixelSize(R.dimen.spacing_top),
activity.getResources().getDimensionPixelSize(R.dimen.spacing_right),
activity.getResources().getDimensionPixelSize(R.dimen.spacing_bottom))
.setPositiveButton(R.string.info_template_ok, null)
.setNegativeButton(R.string.info_template_cancel, null)
.create();
dialog.show();
final DictationModel data = dictData.get(position);
final EditText title = (EditText) dialogView.findViewById(R.id.dict_title);
final EditText passage = (EditText) dialogView.findViewById(R.id.dict_passage);
title.setText(data.getTitle().trim());
passage.setText(data.getPassage().trim());
dialogView.findViewById(R.id.upload).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
FileDialog fileDialog = new FileDialog(activity);
fileDialog.setFileEndsWith();
fileDialog.addFileListener(new FileDialog.FileSelectListener() {
public void fileSelected(File file) {
((TextView) dialogView.findViewById(R.id.file_name)).setText(file.toString());
((TextView) dialogView.findViewById(R.id.dict_passage)).setText(readFile(file));
}
});
fileDialog.showDialog();
}
});
dialog.getButton(DialogInterface.BUTTON_POSITIVE).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (validated(activity, title, passage)) {
String titleText = title.getText().toString().trim();
String passageText = passage.getText().toString().trim();
data.setTitle(titleText);
data.setPassage(passageText);
adapter.notifyDataSetChanged();
dialog.dismiss();
}
}
});
}
@Override
public Object deleteItem(Activity activity, int position) {
DictationModel dictationModel = dictData.get(position);
dictData.remove(position);
setEmptyView(activity);
adapter.notifyDataSetChanged();
return dictationModel;
}
@Override
public void restoreItem(Activity activity, int position, Object object) {
if (object instanceof DictationModel)
{
DictationModel dictationModel = (DictationModel)object;
if (dictationModel!=null)
{
dictData.add(position,dictationModel);
adapter.notifyDataSetChanged();
}
}
}
@Override
public ArrayList<Element> getItems(Document doc) {
ArrayList<Element> itemElements = new ArrayList<>();
for (DictationModel data : dictData) {
itemElements.add(data.getXml(doc));
}
return itemElements;
}
/**
 * Returns the fragment used to preview (simulate) the generated app.
 * Delegates to the SplashFragment factory with the given file path.
 */
@Override
public android.support.v4.app.Fragment getSimulatorFragment(String filePathWithName) {
    return SplashFragment.newInstance(filePathWithName);
}
/**
 * Stores the template index; used later to look up assets via Template.values().
 */
@Override
public void setTemplateId(int templateId) {
    this.templateId = templateId;
}
/**
 * Resolves the asset file name for the currently selected template.
 *
 * @param context used to resolve the string resource
 * @return the localized asset name of the template at {@code templateId}
 */
@Override
public String getAssetsFileName(Context context) {
    Template selected = Template.values()[templateId];
    return context.getString(selected.getAssetsName());
}
/**
 * Returns the relative directory that holds this template's assets.
 */
@Override
public String getAssetsFilePath() {
    return "assets/";
}
/**
 * Returns the file name of the prebuilt APK shipped with this template.
 */
@Override
public String getApkFilePath() {
    return "DictationApp.apk";
}
/**
 * No-op: this template does not launch activities for results.
 */
@Override
public void onActivityResult(Context context, int requestCode, int resultCode, Intent intent) {
    // This is intentionally empty
}
/**
 * Shows the "empty" placeholder view when the dictation list has no entries,
 * and hides it otherwise.
 *
 * @param activity host activity containing the R.id.empty view
 */
private void setEmptyView(Activity activity) {
    int visibility = dictData.isEmpty() ? View.VISIBLE : View.GONE;
    activity.findViewById(R.id.empty).setVisibility(visibility);
}
/**
 * Reads the whole text file into a String, one line at a time.
 * Each line is terminated with "\n" (so the result carries a trailing
 * newline when the file is non-empty — unchanged from the previous version).
 * Returns "" if the file cannot be read.
 *
 * NOTE(review): decodes with the platform default charset — confirm the
 * uploaded passages are expected in that encoding (UTF-8 on Android).
 *
 * @param file the file selected in the upload dialog
 * @return file contents, or the empty string on I/O failure
 */
private String readFile(File file) {
    // try-with-resources closes the reader even when readLine() throws;
    // the previous version leaked the stream on IOException because
    // close() was only reached on the success path.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file)))) {
        StringBuilder sb = new StringBuilder();
        String line;
        while ((line = reader.readLine()) != null) {
            sb.append(line).append("\n");
        }
        return sb.toString();
    } catch (IOException e) {
        e.printStackTrace();
        return "";
    }
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.securityhub.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result object for the Security Hub {@code GetMembers} operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/securityhub-2018-10-26/GetMembers" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetMembersResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Details about the Security Hub member accounts. */
    private java.util.List<Member> members;

    /** Account ID / email pairs of the AWS accounts that couldn't be processed. */
    private java.util.List<Result> unprocessedAccounts;

    /**
     * Returns the list of details about the Security Hub member accounts.
     *
     * @return A list of details about the Security Hub member accounts.
     */
    public java.util.List<Member> getMembers() {
        return members;
    }

    /**
     * Replaces the list of details about the Security Hub member accounts.
     * A defensive copy of the supplied collection is stored.
     *
     * @param members
     *        A list of details about the Security Hub member accounts.
     */
    public void setMembers(java.util.Collection<Member> members) {
        this.members = (members == null) ? null : new java.util.ArrayList<Member>(members);
    }

    /**
     * Appends the given values to the members list.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setMembers(java.util.Collection)} or {@link #withMembers(java.util.Collection)} if you want to override
     * the existing values.
     * </p>
     *
     * @param members
     *        A list of details about the Security Hub member accounts.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetMembersResult withMembers(Member... members) {
        if (this.members == null) {
            this.members = new java.util.ArrayList<Member>(members.length);
        }
        java.util.Collections.addAll(this.members, members);
        return this;
    }

    /**
     * Replaces the members list (defensive copy) and returns {@code this} for chaining.
     *
     * @param members
     *        A list of details about the Security Hub member accounts.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetMembersResult withMembers(java.util.Collection<Member> members) {
        setMembers(members);
        return this;
    }

    /**
     * Returns the list of account ID and email address pairs of the AWS accounts that couldn't be processed.
     *
     * @return A list of account ID and email address pairs of the AWS accounts that couldn't be processed.
     */
    public java.util.List<Result> getUnprocessedAccounts() {
        return unprocessedAccounts;
    }

    /**
     * Replaces the list of unprocessed accounts. A defensive copy of the supplied collection is stored.
     *
     * @param unprocessedAccounts
     *        A list of account ID and email address pairs of the AWS accounts that couldn't be processed.
     */
    public void setUnprocessedAccounts(java.util.Collection<Result> unprocessedAccounts) {
        this.unprocessedAccounts = (unprocessedAccounts == null) ? null : new java.util.ArrayList<Result>(unprocessedAccounts);
    }

    /**
     * Appends the given values to the unprocessed-accounts list.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setUnprocessedAccounts(java.util.Collection)} or {@link #withUnprocessedAccounts(java.util.Collection)}
     * if you want to override the existing values.
     * </p>
     *
     * @param unprocessedAccounts
     *        A list of account ID and email address pairs of the AWS accounts that couldn't be processed.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetMembersResult withUnprocessedAccounts(Result... unprocessedAccounts) {
        if (this.unprocessedAccounts == null) {
            this.unprocessedAccounts = new java.util.ArrayList<Result>(unprocessedAccounts.length);
        }
        java.util.Collections.addAll(this.unprocessedAccounts, unprocessedAccounts);
        return this;
    }

    /**
     * Replaces the unprocessed-accounts list (defensive copy) and returns {@code this} for chaining.
     *
     * @param unprocessedAccounts
     *        A list of account ID and email address pairs of the AWS accounts that couldn't be processed.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetMembersResult withUnprocessedAccounts(java.util.Collection<Result> unprocessedAccounts) {
        setUnprocessedAccounts(unprocessedAccounts);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format is kept identical to the generated SDK version.
        StringBuilder sb = new StringBuilder("{");
        if (getMembers() != null) {
            sb.append("Members: ").append(getMembers()).append(",");
        }
        if (getUnprocessedAccounts() != null) {
            sb.append("UnprocessedAccounts: ").append(getUnprocessedAccounts());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the null check as well.
        if (!(obj instanceof GetMembersResult)) {
            return false;
        }
        GetMembersResult other = (GetMembersResult) obj;
        return java.util.Objects.equals(getMembers(), other.getMembers())
                && java.util.Objects.equals(getUnprocessedAccounts(), other.getUnprocessedAccounts());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0) as the
        // hand-rolled generated version, so hash values are unchanged.
        return java.util.Objects.hash(getMembers(), getUnprocessedAccounts());
    }

    @Override
    public GetMembersResult clone() {
        try {
            return (GetMembersResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
package hex.glm;
import hex.CreateFrame;
import hex.FrameSplitter;
import hex.ModelMetricsBinomialGLM.ModelMetricsMultinomialGLM;
import hex.SplitFrame;
import hex.deeplearning.DeepLearningModel;
import hex.glm.GLMModel.GLMParameters;
import hex.glm.GLMModel.GLMParameters.Family;
import hex.glm.GLMModel.GLMParameters.Solver;
import org.junit.*;
import org.junit.rules.ExpectedException;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Frame;
import water.fvec.Vec;
import java.util.Random;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Created by tomasnykodym on 10/28/15.
*/
public class GLMBasicTestMultinomial extends TestUtil {
  // Shared frames built once in setup(): full covtype sample plus an 80/20 split.
  static Frame _covtype;
  static Frame _train;
  static Frame _test;
  // Tolerance used when comparing POJO/MOJO scoring against in-H2O predictions.
  double _tol = 1e-10;

  @Rule
  public ExpectedException expectedException = ExpectedException.none();

  /**
   * Parses the 20k-row covtype dataset, converts the last column (the response)
   * to categorical, and splits it 80/20 into _train/_test via FrameSplitter.
   */
  @BeforeClass
  public static void setup() {
    stall_till_cloudsize(1);
    _covtype = parse_test_file("smalldata/covtype/covtype.20k.data");
    // replace() returns the old response vec; remove() frees it.
    _covtype.replace(_covtype.numCols()-1,_covtype.lastVec().toCategoricalVec()).remove();
    Key[] keys = new Key[]{Key.make("train"),Key.make("test")};
    H2O.submitTask(new FrameSplitter(_covtype, new double[]{.8},keys,null)).join();
    _train = DKV.getGet(keys[0]);
    _test = DKV.getGet(keys[1]);
  }

  /** Frees the shared frames after all tests in the class have run. */
  @AfterClass
  public static void cleanUp() {
    if(_covtype != null) _covtype.delete();
    if(_train != null) _train.delete();
    if(_test != null) _test.delete();
  }

  /**
   * Trains a multinomial GLM on a randomly generated frame and checks that the
   * generated Java (POJO/MOJO) scoring matches in-H2O scoring within _tol.
   */
  @Test
  public void testMultinomialPredMojoPojo() {
    try {
      Scope.enter();
      CreateFrame cf = new CreateFrame();
      Random generator = new Random();
      // Randomized frame dimensions so each run exercises a different shape.
      int numRows = generator.nextInt(10000)+15000+200;
      int numCols = generator.nextInt(17)+3;
      int response_factors = generator.nextInt(7)+3;
      cf.rows= numRows;
      cf.cols = numCols;
      cf.factors=10;
      cf.has_response=true;
      cf.response_factors = response_factors;
      cf.positive_response=true;
      cf.missing_fraction = 0;
      cf.seed = System.currentTimeMillis();
      // Log the parameters so a failing run can be reproduced from the seed.
      System.out.println("Createframe parameters: rows: "+numRows+" cols:"+numCols+" response number:"
              +response_factors+" seed: "+cf.seed);

      Frame trainMultinomial = Scope.track(cf.execImpl().get());
      SplitFrame sf = new SplitFrame(trainMultinomial, new double[]{0.8,0.2}, new Key[] {Key.make("train.hex"), Key.make("test.hex")});
      sf.exec().get();
      Key[] ksplits = sf._destination_frames;
      Frame tr = DKV.get(ksplits[0]).get();
      Frame te = DKV.get(ksplits[1]).get();
      Scope.track(tr);
      Scope.track(te);

      GLMModel.GLMParameters paramsO = new GLMModel.GLMParameters(GLMModel.GLMParameters.Family.multinomial,
              Family.multinomial.defaultLink, new double[]{0}, new double[]{0}, 0, 0);
      paramsO._train = tr._key;
      paramsO._lambda_search = false;
      paramsO._response_column = "response";
      paramsO._lambda = new double[]{0};
      paramsO._alpha = new double[]{0.001};  // l1pen
      paramsO._objective_epsilon = 1e-6;
      paramsO._beta_epsilon = 1e-4;
      paramsO._standardize = false;

      GLMModel model = new GLM(paramsO).trainModel().get();
      Scope.track_generic(model);

      Frame pred = model.score(te);
      Scope.track(pred);
      // Core assertion: POJO/MOJO scoring agrees with in-H2O predictions.
      Assert.assertTrue(model.testJavaScoring(te, pred, _tol));
    } finally {
      Scope.exit();
    }
  }

  /**
   * Trains intercept-free multinomial GLMs (L-BFGS) with a constant weight
   * column and asserts that the intercept coefficient is exactly zero for
   * every class, and that training metrics can be recomputed from scoring.
   */
  @Test
  public void testCovtypeNoIntercept(){
    GLMParameters params = new GLMParameters(Family.multinomial);
    GLMModel model = null;
    Frame preds = null;
    // All-ones weight column: should not change the fit, exercises the weights path.
    Vec weights = _covtype.anyVec().makeCon(1);
    Key k = Key.<Frame>make("cov_with_weights");
    Frame f = new Frame(k,_covtype.names(),_covtype.vecs());
    f.add("weights",weights);
    DKV.put(f);
    try {
      params._response_column = "C55";
      params._train = k;
      params._valid = _covtype._key;
      params._objective_epsilon = 1e-6;
      params._beta_epsilon = 1e-4;
      params._weights_column = "weights";
      params._missing_values_handling = DeepLearningModel.DeepLearningParameters.MissingValuesHandling.Skip;
      params._intercept = false;
      double[] alpha = new double[]{0,.5,.1};
      Solver s = Solver.L_BFGS;
      System.out.println("solver = " + s);
      params._solver = s;
      params._max_iterations = 5000;
      for (int i = 0; i < alpha.length; ++i) {
        params._alpha = new double[]{alpha[i]};
//        params._lambda[0] = lambda[i];
        model = new GLM(params).trainModel().get();
        System.out.println(model.coefficients());
//        Assert.assertEquals(0,model.coefficients().get("Intercept"),0);
        double [][] bs = model._output.getNormBetaMultinomial();
        // With _intercept == false the last (intercept) coefficient of every
        // class must be exactly 0.
        for(double [] b:bs)
          Assert.assertEquals(0,b[b.length-1],0);
        System.out.println(model._output._model_summary);
        System.out.println(model._output._training_metrics);
        System.out.println(model._output._validation_metrics);
        preds = model.score(_covtype);
        ModelMetricsMultinomialGLM mmTrain = (ModelMetricsMultinomialGLM) hex.ModelMetricsMultinomial.getFromDKV(model, _covtype);
        assertTrue(model._output._training_metrics.equals(mmTrain));
        model.delete();
        model = null;
        preds.delete();
        preds = null;
      }
    } finally{
      // Cleanup must run even on assertion failure; null checks cover the
      // case where the loop body failed mid-iteration.
      weights.remove();
      DKV.remove(k);
      if(model != null)model.delete();
      if(preds != null)preds.delete();
    }
  }

  /**
   * Trains a basic regularized multinomial GLM with each solver and checks the
   * residual deviance against a precomputed reference (within 10%), plus
   * train/valid metric consistency.
   */
  @Test
  public void testCovtypeBasic(){
    GLMParameters params = new GLMParameters(Family.multinomial);
    GLMModel model = null;
    Frame preds = null;
    Vec weights = _covtype.anyVec().makeCon(1);
    Key k = Key.<Frame>make("cov_with_weights");
    Frame f = new Frame(k,_covtype.names(),_covtype.vecs());
    f.add("weights",weights);
    DKV.put(f);
    try {
      params._response_column = "C55";
      params._train = k;
      params._valid = _covtype._key;
      params._lambda = new double[]{4.881e-05};
      params._alpha = new double[]{1};
      params._objective_epsilon = 1e-6;
      params._beta_epsilon = 1e-4;
      params._weights_column = "weights";
      params._missing_values_handling = DeepLearningModel.DeepLearningParameters.MissingValuesHandling.Skip;
      // Reference deviance computed offline; runs must come within 10% of it.
      double[] alpha = new double[]{1};
      double[] expected_deviance = new double[]{25499.76};
      double[] lambda = new double[]{2.544750e-05};
      for (Solver s : new Solver[]{Solver.IRLSM, Solver.COORDINATE_DESCENT, Solver.L_BFGS}) {
        System.out.println("solver = " + s);
        params._solver = s;
        // L-BFGS needs many cheap iterations; the others converge in few.
        params._max_iterations = params._solver == Solver.L_BFGS?300:10;
        for (int i = 0; i < alpha.length; ++i) {
          params._alpha[0] = alpha[i];
          params._lambda[0] = lambda[i];
          model = new GLM(params).trainModel().get();
          System.out.println(model._output._model_summary);
          System.out.println(model._output._training_metrics);
          System.out.println(model._output._validation_metrics);
          // Train and valid are the same data here, so metrics must match.
          assertTrue(model._output._training_metrics.equals(model._output._validation_metrics));
          assertTrue(((ModelMetricsMultinomialGLM) model._output._training_metrics)._resDev <= expected_deviance[i] * 1.1);
          preds = model.score(_covtype);
          ModelMetricsMultinomialGLM mmTrain = (ModelMetricsMultinomialGLM) hex.ModelMetricsMultinomial.getFromDKV(model, _covtype);
          assertTrue(model._output._training_metrics.equals(mmTrain));
          model.delete();
          model = null;
          preds.delete();
          preds = null;
        }
      }
    } finally{
      weights.remove();
      DKV.remove(k);
      if(model != null)model.delete();
      if(preds != null)preds.delete();
    }
  }

  /**
   * Verifies that _max_active_predictors caps the model rank (rank must be at
   * most max_active_predictors + nclasses) under coordinate descent.
   */
  @Test
  public void testCovtypeMinActivePredictors(){
    GLMParameters params = new GLMParameters(Family.multinomial);
    GLMModel model = null;
    Frame preds = null;
    try {
      params._response_column = "C55";
      params._train = _covtype._key;
      params._valid = _covtype._key;
      params._lambda = new double[]{4.881e-05};
      params._alpha = new double[]{1};
      params._objective_epsilon = 1e-6;
      params._beta_epsilon = 1e-4;
      params._max_active_predictors = 50;
      params._max_iterations = 10;
      double[] alpha = new double[]{.99};
      double expected_deviance = 33000;
      double[] lambda = new double[]{2.544750e-05};
      Solver s = Solver.COORDINATE_DESCENT;
      System.out.println("solver = " + s);
      params._solver = s;
      model = new GLM(params).trainModel().get();
      System.out.println(model._output._model_summary);
      System.out.println(model._output._training_metrics);
      System.out.println(model._output._validation_metrics);
      System.out.println("rank = " + model._output.rank() + ", max active preds = " + (params._max_active_predictors + model._output.nclasses()));
      assertTrue(model._output.rank() <= params._max_active_predictors + model._output.nclasses());
      assertTrue(model._output._training_metrics.equals(model._output._validation_metrics));
      assertTrue(((ModelMetricsMultinomialGLM) model._output._training_metrics)._resDev <= expected_deviance * 1.1);
      preds = model.score(_covtype);
      ModelMetricsMultinomialGLM mmTrain = (ModelMetricsMultinomialGLM) hex.ModelMetricsMultinomial.getFromDKV(model, _covtype);
      assertTrue(model._output._training_metrics.equals(mmTrain));
      model.delete();
      model = null;
      preds.delete();
      preds = null;
    } finally{
      if(model != null)model.delete();
      if(preds != null)preds.delete();
    }
  }

  /**
   * Lambda-search run (3 lambdas, AUTO solver) with a deviance upper bound
   * and metric-consistency checks.
   */
  @Test
  public void testCovtypeLS(){
    GLMParameters params = new GLMParameters(Family.multinomial);
    GLMModel model = null;
    Frame preds = null;
    try {
      double expected_deviance = 33000;
      params._nlambdas = 3;
      params._response_column = "C55";
      params._train = _covtype._key;
      params._valid = _covtype._key;
      params._alpha = new double[]{.99};
      params._objective_epsilon = 1e-6;
      params._beta_epsilon = 1e-4;
      params._max_active_predictors = 50;
      params._max_iterations = 500;
      params._solver = Solver.AUTO;
      params._lambda_search = true;
      model = new GLM(params).trainModel().get();
      System.out.println(model._output._training_metrics);
      System.out.println(model._output._validation_metrics);
      assertTrue(model._output._training_metrics.equals(model._output._validation_metrics));
      preds = model.score(_covtype);
      ModelMetricsMultinomialGLM mmTrain = (ModelMetricsMultinomialGLM) hex.ModelMetricsMultinomial.getFromDKV(model, _covtype);
      assertTrue(model._output._training_metrics.equals(mmTrain));
      assertTrue(((ModelMetricsMultinomialGLM) model._output._training_metrics)._resDev <= expected_deviance);
      System.out.println(model._output._model_summary);
      model.delete();
      model = null;
      preds.delete();
      preds = null;
    } finally{
      if(model != null)model.delete();
      if(preds != null)preds.delete();
    }
  }

  /**
   * Injects NAs into the response column and checks that Skip handling drops
   * exactly those rows (null DOF = rows - 3 NAs - 1), on both the full copy
   * and a 5-column subset sharing the same vecs.
   */
  @Test
  public void testCovtypeNAs(){
    GLMParameters params = new GLMParameters(Family.multinomial);
    GLMModel model = null;
    Frame preds = null;
    Frame covtype_subset = null, covtype_copy = null;
    try {
      double expected_deviance = 26000;
      covtype_copy = _covtype.deepCopy("covtype_copy");
      DKV.put(covtype_copy);
      // Poke three NAs into the response column (index 54).
      Vec.Writer w = covtype_copy.vec(54).open();
      w.setNA(10);
      w.setNA(20);
      w.setNA(30);
      w.close();
      covtype_subset = new Frame(Key.<Frame>make("covtype_subset"),new String[]{"C51","C52","C53","C54","C55"},covtype_copy.vecs(new int[]{50,51,52,53,54}));
      DKV.put(covtype_subset);
//      params._nlambdas = 3;
      params._response_column = "C55";
      params._train = covtype_copy._key;
      params._valid = covtype_copy._key;
      params._alpha = new double[]{.99};
      params._objective_epsilon = 1e-6;
      params._beta_epsilon = 1e-4;
      params._max_active_predictors = 50;
      params._max_iterations = 500;
      params._solver = Solver.L_BFGS;
      params._missing_values_handling = DeepLearningModel.DeepLearningParameters.MissingValuesHandling.Skip;
//      params._lambda_search = true;
      model = new GLM(params).trainModel().get();
      // 3 rows skipped for NAs, minus 1 for the intercept.
      assertEquals(covtype_copy.numRows()-3-1,model._nullDOF);
      System.out.println(model._output._training_metrics);
      System.out.println(model._output._validation_metrics);
      assertTrue(model._output._training_metrics.equals(model._output._validation_metrics));
      preds = model.score(covtype_copy);
      ModelMetricsMultinomialGLM mmTrain = (ModelMetricsMultinomialGLM) hex.ModelMetricsMultinomial.getFromDKV(model, covtype_copy);
      assertTrue(model._output._training_metrics.equals(mmTrain));
      assertTrue(((ModelMetricsMultinomialGLM) model._output._training_metrics)._resDev <= expected_deviance);
      System.out.println(model._output._model_summary);
      model.delete();
      model = null;
      preds.delete();
      preds = null;
      // now run the same on the subset
      params._train = covtype_subset._key;
      model = new GLM(params).trainModel().get();
      assertEquals(covtype_copy.numRows()-3-1,model._nullDOF);
      System.out.println(model._output._training_metrics);
      System.out.println(model._output._validation_metrics);
      assertTrue(model._output._training_metrics.equals(model._output._validation_metrics));
      preds = model.score(_covtype);
      System.out.println(model._output._model_summary);
      assertTrue(((ModelMetricsMultinomialGLM) model._output._training_metrics)._resDev <= 66000);
      model.delete();
      model = null;
      preds.delete();
      preds = null;
    } finally{
      if(covtype_subset != null) covtype_subset.delete();
      if(covtype_copy != null)covtype_copy.delete();
      if(model != null)model.delete();
      if(preds != null)preds.delete();
    }
  }

  /**
   * The naive coordinate-descent solver is unsupported for multinomial GLM and
   * must fail fast with an H2OIllegalArgumentException.
   */
  @Test
  public void testNaiveCoordinateDescent() {
    expectedException.expect(H2OIllegalArgumentException.class);
    expectedException.expectMessage("Naive coordinate descent is not supported.");

    GLMParameters params = new GLMParameters(Family.multinomial);
    params._solver = Solver.COORDINATE_DESCENT_NAIVE;

    // Should throw exception with information about unsupported message
    new GLM(params).trainModel().get();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.netty.buffer;
import io.netty.util.internal.PlatformDependent;
import java.nio.ByteOrder;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.drill.exec.util.AssertionUtil;
/**
 * ByteBuf wrapper that exposes a little-endian view over a direct buffer and
 * performs primitive reads/writes through {@link PlatformDependent} using the
 * buffer's raw memory address.
 *
 * NOTE(review): the primitive getters deliberately skip bounds checks (the
 * checkIndex calls are commented out), so callers are responsible for read
 * safety; the setters and write* methods still go through checkIndex /
 * ensureWritable.
 */
public final class UnsafeDirectLittleEndian extends WrappedByteBuf {
  // True when the platform's native byte order is little-endian.
  private static final boolean NATIVE_ORDER = ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN;
  private final AbstractByteBuf wrapped;
  // Base address of the wrapped buffer's direct memory.
  private final long memoryAddress;

  // Allocation accounting shared with the allocator; only assigned by the
  // tracking constructor below.
  private AtomicLong bufferCount;
  private AtomicLong bufferSize;
  // Capacity recorded at construction; -1 means release tracking is disabled.
  private long initCap = -1;

  UnsafeDirectLittleEndian(LargeBuffer buf) {
    this(buf, true);
  }

  UnsafeDirectLittleEndian(PooledUnsafeDirectByteBuf buf, AtomicLong bufferCount, AtomicLong bufferSize) {
    this(buf, true);
    this.bufferCount = bufferCount;
    this.bufferSize = bufferSize;

    // initCap is used if we're tracking memory release. If we're in non-debug mode, we'll skip this.
    this.initCap = AssertionUtil.ASSERT_ENABLED ? capacity() : -1;
  }

  private UnsafeDirectLittleEndian(AbstractByteBuf buf, boolean fake) {
    super(buf);
    // Passes only when the platform is little-endian AND the wrapped buffer
    // reports big-endian (the ByteBuf default order); this class then presents
    // the little-endian view itself.
    if (!NATIVE_ORDER || buf.order() != ByteOrder.BIG_ENDIAN) {
      throw new IllegalStateException("Drill only runs on LittleEndian systems.");
    }
    wrapped = buf;
    this.memoryAddress = buf.memoryAddress();
  }

  // Converts a buffer-relative index to an absolute memory address.
  private long addr(int index) {
    return memoryAddress + index;
  }

  @Override
  public long getLong(int index) {
    // Unchecked 8-byte read at the absolute address (bounds check disabled).
//    wrapped.checkIndex(index, 8);
    long v = PlatformDependent.getLong(addr(index));
    return v;
  }

  @Override
  public float getFloat(int index) {
    return Float.intBitsToFloat(getInt(index));
  }

  @Override
  public ByteBuf slice() {
    return slice(this.readerIndex(), readableBytes());
  }

  @Override
  public ByteBuf slice(int index, int length) {
    return new SlicedByteBuf(this, index, length);
  }

  @Override
  public ByteOrder order() {
    // Always reports little-endian, regardless of the wrapped buffer's order.
    return ByteOrder.LITTLE_ENDIAN;
  }

  @Override
  public ByteBuf order(ByteOrder endianness) {
    // Re-ordering is a no-op; this buffer is permanently little-endian.
    return this;
  }

  @Override
  public double getDouble(int index) {
    return Double.longBitsToDouble(getLong(index));
  }

  @Override
  public char getChar(int index) {
    // A char is read as a 2-byte short and reinterpreted.
    return (char) getShort(index);
  }

  @Override
  public long getUnsignedInt(int index) {
    return getInt(index) & 0xFFFFFFFFL;
  }

  @Override
  public int getInt(int index) {
    // Unchecked 4-byte read (bounds check disabled).
//    wrapped.checkIndex(index, 4);
    int v = PlatformDependent.getInt(addr(index));
    return v;
  }

  @Override
  public int getUnsignedShort(int index) {
    return getShort(index) & 0xFFFF;
  }

  @Override
  public short getShort(int index) {
    // Unchecked 2-byte read (bounds check disabled).
//    wrapped.checkIndex(index, 2);
    short v = PlatformDependent.getShort(addr(index));
    return v;
  }

  @Override
  public ByteBuf setShort(int index, int value) {
    // Writes are bounds-checked, unlike the getters above.
    wrapped.checkIndex(index, 2);
    _setShort(index, value);
    return this;
  }

  @Override
  public ByteBuf setInt(int index, int value) {
    wrapped.checkIndex(index, 4);
    _setInt(index, value);
    return this;
  }

  @Override
  public ByteBuf setLong(int index, long value) {
    wrapped.checkIndex(index, 8);
    _setLong(index, value);
    return this;
  }

  @Override
  public ByteBuf setChar(int index, int value) {
    setShort(index, value);
    return this;
  }

  @Override
  public ByteBuf setFloat(int index, float value) {
    setInt(index, Float.floatToRawIntBits(value));
    return this;
  }

  @Override
  public ByteBuf setDouble(int index, double value) {
    setLong(index, Double.doubleToRawLongBits(value));
    return this;
  }

  @Override
  public ByteBuf writeShort(int value) {
    // Grow if needed, write at the current writer index, then advance it.
    wrapped.ensureWritable(2);
    _setShort(wrapped.writerIndex, value);
    wrapped.writerIndex += 2;
    return this;
  }

  @Override
  public ByteBuf writeInt(int value) {
    wrapped.ensureWritable(4);
    _setInt(wrapped.writerIndex, value);
    wrapped.writerIndex += 4;
    return this;
  }

  @Override
  public ByteBuf writeLong(long value) {
    wrapped.ensureWritable(8);
    _setLong(wrapped.writerIndex, value);
    wrapped.writerIndex += 8;
    return this;
  }

  @Override
  public ByteBuf writeChar(int value) {
    writeShort(value);
    return this;
  }

  @Override
  public ByteBuf writeFloat(float value) {
    writeInt(Float.floatToRawIntBits(value));
    return this;
  }

  @Override
  public ByteBuf writeDouble(double value) {
    writeLong(Double.doubleToRawLongBits(value));
    return this;
  }

  // Raw unchecked stores; callers perform checkIndex/ensureWritable first.
  private void _setShort(int index, int value) {
    PlatformDependent.putShort(addr(index), (short) value);
  }

  private void _setInt(int index, int value) {
    PlatformDependent.putInt(addr(index), value);
  }

  private void _setLong(int index, long value) {
    PlatformDependent.putLong(addr(index), value);
  }

  @Override
  public byte getByte(int index) {
    return PlatformDependent.getByte(addr(index));
  }

  @Override
  public ByteBuf setByte(int index, int value) {
    PlatformDependent.putByte(addr(index), (byte) value);
    return this;
  }

  @Override
  public boolean release() {
    return release(1);
  }

  @Override
  public boolean release(int decrement) {
    boolean released = super.release(decrement);
    // When tracking is enabled (initCap != -1, i.e. the tracking constructor
    // ran with assertions on), update the allocator's accounting on final release.
    if (released && initCap != -1) {
      bufferCount.decrementAndGet();
      bufferSize.addAndGet(-initCap);
    }
    return released;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.description;
import org.apache.axiom.om.OMAbstractFactory;
import org.apache.axiom.soap.SOAPBody;
import org.apache.axiom.soap.SOAPEnvelope;
import org.apache.axiom.soap.SOAPFactory;
import org.apache.axiom.util.UIDGenerator;
import org.apache.axis2.AxisFault;
import org.apache.axis2.Constants;
import org.apache.axis2.addressing.AddressingConstants;
import org.apache.axis2.addressing.EndpointReference;
import org.apache.axis2.client.OperationClient;
import org.apache.axis2.client.Options;
import org.apache.axis2.client.async.AxisCallback;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.MessageContext;
import org.apache.axis2.context.OperationContext;
import org.apache.axis2.context.ServiceContext;
import org.apache.axis2.engine.AxisEngine;
import org.apache.axis2.i18n.Messages;
import org.apache.axis2.transport.TransportUtils;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.axis2.util.CallbackReceiver;
import org.apache.axis2.util.Utils;
import org.apache.axis2.wsdl.WSDLConstants;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.util.HashMap;
import java.util.Map;
import javax.xml.namespace.QName;
public class OutInAxisOperation extends TwoChannelAxisOperation {
    private static final Log log = LogFactory.getLog(OutInAxisOperation.class);

    /**
     * Creates the operation with a unique temporary name and the Out-In MEP.
     */
    public OutInAxisOperation() {
        super();
        // Use a generated placeholder name until the real operation name is set.
        this.setName(new QName(this.getClass().getName() + "_" + UIDGenerator.generateUID()));
        setMessageExchangePattern(WSDL2Constants.MEP_URI_OUT_IN);
    }

    /**
     * Creates the operation under the given name, using the Out-In MEP.
     */
    public OutInAxisOperation(QName name) {
        super(name);
        setMessageExchangePattern(WSDL2Constants.MEP_URI_OUT_IN);
    }

    /**
     * Records a message context in the operation context: the first context
     * becomes the "Out" message; the second becomes the "In" message and
     * completes the exchange.
     *
     * @throws AxisFault if both message slots are already filled
     */
    public void addMessageContext(MessageContext msgContext,
                                  OperationContext opContext) throws AxisFault {
        HashMap<String, MessageContext> mep = opContext.getMessageContexts();
        // The map is already typed, so no casts are required.
        MessageContext inMsgContext = mep.get(MESSAGE_LABEL_IN_VALUE);
        MessageContext outMsgContext = mep.get(MESSAGE_LABEL_OUT_VALUE);

        if (inMsgContext != null && outMsgContext != null) {
            throw new AxisFault(Messages.getMessage("mepcompleted"));
        }

        if (outMsgContext == null) {
            mep.put(MESSAGE_LABEL_OUT_VALUE, msgContext);
        } else {
            // Second message: store the reply and finish the exchange.
            mep.put(MESSAGE_LABEL_IN_VALUE, msgContext);
            opContext.setComplete(true);
            opContext.cleanup();
        }
    }

    /**
     * Returns a MEP client for an Out-In operation. This client can be used to
     * interact with a server which is offering an In-Out operation. To use the
     * client, you must call addMessageContext() with a message context and then
     * call execute() to execute the client.
     *
     * @param sc      The service context for this client to live within. Cannot be
     *                null.
     * @param options Options to use as defaults for this client. If any options are
     *                set specifically on the client then those override options
     *                here.
     */
    public OperationClient createClient(ServiceContext sc, Options options) {
        return new OutInAxisOperationClient(this, sc, options);
    }
}
/**
 * MEP client implementation for the Out-In message exchange pattern.
 */
class OutInAxisOperationClient extends OperationClient {

    private static Log log = LogFactory.getLog(OutInAxisOperationClient.class);

    // System property ("allowEmptyBodyForHttp2xx") that allows an empty-body
    // response for HTTP status codes 200, 201 and 202.
    private static boolean isEmptyBodyForHttpEnabled = Boolean.getBoolean("allowEmptyBodyForHttp2xx");

    OutInAxisOperationClient(OutInAxisOperation axisOp, ServiceContext sc,
                             Options options) {
        super(axisOp, sc, options);
    }
    /**
     * Adds a message context to the operation context, so that the MEP
     * bookkeeping is handled correctly. If the OperationContext is null, a new
     * one will be created; the OperationContext becomes null when someone
     * calls reset().
     *
     * @param msgContext the MessageContext to add
     * @throws AxisFault if the context cannot be registered with the operation
     */
    public void addMessageContext(MessageContext msgContext) throws AxisFault {
        msgContext.setServiceContext(sc);
        // Ensure the outgoing message carries a message ID before registration.
        if (msgContext.getMessageID() == null) {
            setMessageID(msgContext);
        }
        axisOp.registerOperationContext(msgContext, oc);
    }
    /**
     * Returns the message context for a given message label.
     *
     * @param messageLabel :
     *                     label of the message; must be either "Out" or "In" and
     *                     nothing else
     * @return Returns MessageContext.
     * @throws AxisFault if the lookup fails
     */
    public MessageContext getMessageContext(String messageLabel)
            throws AxisFault {
        // Simple delegation: the operation context owns the per-label contexts.
        return oc.getMessageContext(messageLabel);
    }
/**
* Executes the MEP. What this does depends on the specific MEP client. The
* basic idea is to have the MEP client execute and do something with the
* messages that have been added to it so far. For example, if its an Out-In
* MEP, then if the Out message has been set, then executing the client asks
* it to send the message and get the In message, possibly using a different
* thread.
*
* @param block Indicates whether execution should block or return ASAP. What
* block means is of course a function of the specific MEP
* client. IGNORED BY THIS MEP CLIENT.
* @throws AxisFault if something goes wrong during the execution of the MEP.
*/
public void executeImpl(boolean block) throws AxisFault {
if (log.isDebugEnabled()) {
log.debug("Entry: OutInAxisOperationClient::execute, " + block);
}
if (completed) {
throw new AxisFault(Messages.getMessage("mepiscomplted"));
}
ConfigurationContext cc = sc.getConfigurationContext();
// copy interesting info from options to message context.
MessageContext mc = oc.getMessageContext(WSDLConstants.MESSAGE_LABEL_OUT_VALUE);
if (mc == null) {
throw new AxisFault(Messages.getMessage("outmsgctxnull"));
}
prepareMessageContext(cc, mc);
if (options.getTransportIn() == null && mc.getTransportIn() == null) {
mc.setTransportIn(ClientUtils.inferInTransport(cc
.getAxisConfiguration(), options, mc));
} else if (mc.getTransportIn() == null) {
mc.setTransportIn(options.getTransportIn());
}
/**
* If a module has set the USE_ASYNC_OPERATIONS option then we override the behaviour
* for sync calls, and effectively USE_CUSTOM_LISTENER too. However we leave real
* async calls alone.
*/
boolean useAsync = false;
if (!mc.getOptions().isUseSeparateListener()) {
Boolean useAsyncOption =
(Boolean) mc.getProperty(Constants.Configuration.USE_ASYNC_OPERATIONS);
if (log.isDebugEnabled()) log.debug("OutInAxisOperationClient: useAsyncOption " + useAsyncOption);
if (useAsyncOption != null) {
useAsync = useAsyncOption.booleanValue();
}
}
EndpointReference replyTo = mc.getReplyTo();
if (replyTo != null) {
if (replyTo.hasNoneAddress()) {
throw new AxisFault( replyTo.getAddress() + "" +
" can not be used with OutInAxisOperationClient , user either "
+ "fireAndForget or sendRobust)");
}
else if (replyTo.isWSAddressingAnonymous() &&
replyTo.getAllReferenceParameters() != null) {
mc.setProperty(AddressingConstants.INCLUDE_OPTIONAL_HEADERS, Boolean.TRUE);
}
String customReplyTo = (String)options.getProperty(Options.CUSTOM_REPLYTO_ADDRESS);
if ( ! (Options.CUSTOM_REPLYTO_ADDRESS_TRUE.equals(customReplyTo))) {
if (!replyTo.hasAnonymousAddress()){
useAsync = true;
}
}
}
if (useAsync || mc.getOptions().isUseSeparateListener()) {
sendAsync(useAsync, mc);
} else {
if (block) {
// Send the SOAP Message and receive a response
send(mc);
completed = true;
} else {
sc.getConfigurationContext().getThreadPool().execute(
new NonBlockingInvocationWorker(mc, axisCallback));
}
}
}
private void sendAsync(boolean useAsync, MessageContext mc)
throws AxisFault {
if (log.isDebugEnabled()) {
log.debug("useAsync=" + useAsync + ", seperateListener=" +
mc.getOptions().isUseSeparateListener());
}
/**
* We are following the async path. If the user hasn't set a callback object then we must
* block until the whole MEP is complete, as they have no other way to get their reply message.
*/
// THREADSAFE issue: Multiple threads could be trying to initialize the callback receiver
// so it is synchronized. It is not done within the else clause to avoid the
// double-checked lock antipattern.
CallbackReceiver callbackReceiver;
synchronized (axisOp) {
if (axisOp.getMessageReceiver() != null &&
axisOp.getMessageReceiver() instanceof CallbackReceiver) {
callbackReceiver = (CallbackReceiver) axisOp.getMessageReceiver();
} else {
if (log.isDebugEnabled()) {
log.debug("Creating new callback receiver");
}
callbackReceiver = new CallbackReceiver();
axisOp.setMessageReceiver(callbackReceiver);
if (log.isDebugEnabled()) log.debug("OutInAxisOperation: callbackReceiver " + callbackReceiver + " : " + axisOp);
}
}
SyncCallBack internalCallback = null;
if (axisCallback != null) {
callbackReceiver.addCallback(mc.getMessageID(), axisCallback);
if (log.isDebugEnabled()) log.debug("OutInAxisOperationClient: Creating axis callback");
} else {
if (log.isDebugEnabled()) {
log.debug("Creating internal callback");
}
internalCallback = new SyncCallBack();
callbackReceiver.addCallback(mc.getMessageID(), internalCallback);
if (log.isDebugEnabled()) log.debug("OutInAxisOperationClient: Creating internal callback");
}
/**
* If USE_CUSTOM_LISTENER is set to 'true' the replyTo value will not be replaced and Axis2 will not
* start its internal listner. Some other enntity (e.g. a module) should take care of obtaining the
* response message.
*/
Boolean useCustomListener =
(Boolean) options.getProperty(Constants.Configuration.USE_CUSTOM_LISTENER);
if (useAsync) {
useCustomListener = Boolean.TRUE;
}
if (useCustomListener == null || !useCustomListener.booleanValue()) {
EndpointReference replyTo = mc.getReplyTo();
if (replyTo == null || replyTo.hasAnonymousAddress()){
EndpointReference replyToFromTransport =
mc.getConfigurationContext().getListenerManager().
getEPRforService(sc.getAxisService().getName(),
axisOp.getName().getLocalPart(), mc
.getTransportIn().getName());
if (replyTo == null) {
mc.setReplyTo(replyToFromTransport);
} else {
replyTo.setAddress(replyToFromTransport.getAddress());
}
}
}
//if we don't do this , this guy will wait till it gets HTTP 202 in the HTTP case
mc.setProperty(MessageContext.CLIENT_API_NON_BLOCKING, Boolean.TRUE);
mc.getConfigurationContext().registerOperationContext(mc.getMessageID(), oc);
AxisEngine.send(mc);
if (internalCallback != null) {
internalCallback.waitForCompletion(options.getTimeOutInMilliSeconds());
// process the result of the invocation
if (internalCallback.envelope == null) {
if (internalCallback.error == null) {
log.error("Callback had neither error nor response");
}
if (options.isExceptionToBeThrownOnSOAPFault()) {
throw AxisFault.makeFault(internalCallback.error);
}
}
}
}
/**
* When synchronous send() gets back a response MessageContext, this is the workhorse
* method which processes it.
*
* @param responseMessageContext the active response MessageContext
* @throws AxisFault if something went wrong
*/
protected void handleResponse(MessageContext responseMessageContext) throws AxisFault{
// Options object reused above so soapAction needs to be removed so
// that soapAction+wsa:Action on response don't conflict
responseMessageContext.setSoapAction(null);
if (responseMessageContext.getEnvelope() == null) {
// If request is REST we assume the responseMessageContext is REST, so
// set the variable
/*
* old code here was using the outbound message context to set the inbound SOAP namespace,
* as such and passing it to TransportUtils.createSOAPMessage
*
* msgctx.getEnvelope().getNamespace().getNamespaceURI()
*
* However, the SOAP1.2 spec, appendix A indicates that if a SOAP1.2 message is sent to a SOAP1.1
* endpoint, we will get a SOAP1.1 (fault) message response. We need another way to set
* the inbound SOAP version. Best way to do this is to trust the content type and let
* createSOAPMessage take care of figuring out what the SOAP namespace is.
*/
SOAPEnvelope resenvelope = null;
if (checkContentLength(responseMessageContext) && canResponseHaveBody(responseMessageContext)) {
resenvelope = TransportUtils.createSOAPMessage(responseMessageContext);
} else {
/*
* Despite the Content-Length equals 0 we need envelope object in context
* to protect from NullPointerException in Axis2Sender.sendBack
*/
SOAPFactory soapFactory = OMAbstractFactory.getSOAP11Factory();
resenvelope = soapFactory.getDefaultEnvelope();
}
if (resenvelope != null) {
responseMessageContext.setEnvelope(resenvelope);
} else {
throw new AxisFault(Messages
.getMessage("blockingInvocationExpectsResponse"));
}
}
SOAPEnvelope resenvelope = responseMessageContext.getEnvelope();
if (resenvelope != null) {
AxisEngine.receive(responseMessageContext);
if (responseMessageContext.getReplyTo() != null) {
sc.setTargetEPR(responseMessageContext.getReplyTo());
}
// rampart handlers change the envelope and set the decrypted envelope
// so need to check the new one else resenvelope.hasFault() become false.
resenvelope = responseMessageContext.getEnvelope();
if (resenvelope.hasFault()||responseMessageContext.isProcessingFault()) {
if (options.isExceptionToBeThrownOnSOAPFault()) {
// does the SOAPFault has a detail element for Excpetion
throw Utils.getInboundFaultFromMessageContext(responseMessageContext);
}
}
}
}
protected boolean checkContentLength(MessageContext responseMessageContext) {
Map<String, String> transportHeaders = (Map<String, String>) responseMessageContext
.getProperty(MessageContext.TRANSPORT_HEADERS);
if (transportHeaders == null) {
// transportHeaders = null , we can't check this further and
// allow to try with message building.
return true;
}
String contentLengthStr = (String) transportHeaders.get(HTTPConstants.HEADER_CONTENT_LENGTH);
if (contentLengthStr == null) {
// contentLengthStr = null we can't check this further and allow
// to try with message building.
return true;
}
int contentLength = -1;
contentLength = Integer.parseInt(contentLengthStr);
if (contentLength > 0) {
//We have valid Content-Length no issue with message building.
return true;
}
return false;
}
/**
* Synchronously send the request and receive a response. This relies on the transport
* correctly connecting the response InputStream!
*
* @param msgContext the request MessageContext to send.
* @return Returns MessageContext.
* @throws AxisFault Sends the message using a two way transport and waits for a response
*/
protected MessageContext send(MessageContext msgContext) throws AxisFault {
// create the responseMessageContext
MessageContext responseMessageContext =
msgContext.getConfigurationContext().createMessageContext();
responseMessageContext.setServerSide(false);
responseMessageContext.setOperationContext(msgContext.getOperationContext());
responseMessageContext.setOptions(new Options(options));
responseMessageContext.setMessageID(msgContext.getMessageID());
addMessageContext(responseMessageContext);
responseMessageContext.setServiceContext(msgContext.getServiceContext());
responseMessageContext.setAxisMessage(
axisOp.getMessage(WSDLConstants.MESSAGE_LABEL_IN_VALUE));
//sending the message
AxisEngine.send(msgContext);
responseMessageContext.setDoingREST(msgContext.isDoingREST());
// Copy RESPONSE properties which the transport set onto the request message context when it processed
// the incoming response recieved in reply to an outgoing request.
responseMessageContext.setProperty(MessageContext.TRANSPORT_HEADERS,
msgContext.getProperty(MessageContext.TRANSPORT_HEADERS));
responseMessageContext.setProperty(HTTPConstants.MC_HTTP_STATUS_CODE,
msgContext.getProperty(HTTPConstants.MC_HTTP_STATUS_CODE));
responseMessageContext.setProperty(MessageContext.TRANSPORT_IN, msgContext
.getProperty(MessageContext.TRANSPORT_IN));
responseMessageContext.setTransportIn(msgContext.getTransportIn());
responseMessageContext.setTransportOut(msgContext.getTransportOut());
handleResponse(responseMessageContext);
return responseMessageContext;
}
/**
* This class is the workhorse for a non-blocking invocation that uses a two
* way transport.
*/
private class NonBlockingInvocationWorker implements Runnable {
private MessageContext msgctx;
private AxisCallback axisCallback;
public NonBlockingInvocationWorker(MessageContext msgctx ,
AxisCallback axisCallback) {
this.msgctx = msgctx;
this.axisCallback =axisCallback;
}
public void run() {
try {
// send the request and wait for response
MessageContext response = send(msgctx);
// call the callback
if (response != null) {
SOAPEnvelope resenvelope = response.getEnvelope();
if (resenvelope.hasFault()) {
SOAPBody body = resenvelope.getBody();
// If a fault was found, create an AxisFault with a MessageContext so that
// other programming models can deserialize the fault to an alternative form.
AxisFault fault = new AxisFault(body.getFault(), response);
if (axisCallback != null) {
if (options.isExceptionToBeThrownOnSOAPFault()) {
axisCallback.onError(fault);
} else {
axisCallback.onFault(response);
}
}
} else {
if (axisCallback != null) {
axisCallback.onMessage(response);
}
}
}
} catch (Exception e) {
if (axisCallback != null) {
axisCallback.onError(e);
}
} finally {
if (axisCallback != null) {
axisCallback.onComplete();
}
}
}
}
/**
* This class acts as a callback that allows users to wait on the result.
*/
private class SyncCallBack implements AxisCallback {
boolean complete;
boolean receivedFault;
public boolean waitForCompletion(long timeout) throws AxisFault {
synchronized (this) {
try {
if (complete) return !receivedFault;
wait(timeout);
if (!complete) {
// We timed out!
throw new AxisFault( Messages.getMessage("responseTimeOut"));
}
} catch (InterruptedException e) {
// Something interrupted our wait!
error = e;
}
}
if (error != null) throw AxisFault.makeFault(error);
return !receivedFault;
}
/**
* This is called when we receive a message.
*
* @param msgContext the (response) MessageContext
*/
public void onMessage(MessageContext msgContext) {
// Transport input stream gets closed after calling setComplete
// method. Have to build the whole envelope including the
// attachments at this stage. Data might get lost if the input
// stream gets closed before building the whole envelope.
// TODO: Shouldn't need to do this - need to hook up stream closure to Axiom completion
this.envelope = msgContext.getEnvelope();
this.envelope.buildWithAttachments();
}
/**
* This gets called when a fault message is received.
*
* @param msgContext the MessageContext containing the fault.
*/
public void onFault(MessageContext msgContext) {
error = Utils.getInboundFaultFromMessageContext(msgContext);
}
/**
* This is called at the end of the MEP no matter what happens, quite like a
* finally block.
*/
public synchronized void onComplete() {
complete = true;
notify();
}
private SOAPEnvelope envelope;
private Exception error;
public void onError(Exception e) {
if (log.isDebugEnabled()) {
log.debug("Entry: OutInAxisOperationClient$SyncCallBack::onError, " + e);
}
error = e;
if (log.isDebugEnabled()) {
log.debug("Exit: OutInAxisOperationClient$SyncCallBack::onError");
}
}
}
/**
* Check whether response can have a message body according to the HTTP spec.
* @param responseMessageContext the active response MessageContext
* @return true if there can be a message body in the response, false if not.
*/
private boolean canResponseHaveBody(MessageContext responseMessageContext) {
if (responseMessageContext.getProperty("transport.http.statusCode") != null) {
int statusCode =
Integer.parseInt(responseMessageContext.getProperty("transport.http.statusCode").toString());
if (isEmptyBodyForHttpEnabled && (statusCode == HttpStatus.SC_OK || statusCode == HttpStatus.SC_CREATED || statusCode == HttpStatus.SC_ACCEPTED)) {
InputStream inputStream = (InputStream) responseMessageContext.getProperty(MessageContext.TRANSPORT_IN);
PushbackInputStream pushbackInputStream = new PushbackInputStream(inputStream);
int data = 0;
try {
data = pushbackInputStream.read();
pushbackInputStream.unread(data);
} catch (IOException e) {
return false;
}
responseMessageContext.setProperty(MessageContext.TRANSPORT_IN, pushbackInputStream);
return data != -1;
}
return statusCode >= HttpStatus.SC_OK
&& statusCode != HttpStatus.SC_NO_CONTENT
&& statusCode != HttpStatus.SC_NOT_MODIFIED
&& statusCode != HttpStatus.SC_RESET_CONTENT
&& statusCode / 100 != 1;
} else {
return true;
}
}
}
| |
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.test.web.servlet.request;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.oauth2.client.OAuth2AuthorizedClient;
import org.springframework.security.oauth2.client.annotation.RegisteredOAuth2AuthorizedClient;
import org.springframework.security.oauth2.client.registration.ClientRegistrationRepository;
import org.springframework.security.oauth2.client.registration.TestClientRegistrations;
import org.springframework.security.oauth2.client.web.OAuth2AuthorizedClientRepository;
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
import org.springframework.security.oauth2.core.user.OAuth2User;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import static org.mockito.Mockito.mock;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.oauth2Login;
import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
 * Tests for {@link SecurityMockMvcRequestPostProcessors#oauth2Login()}
 *
 * @author Josh Cummings
 * @since 5.3
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration
@WebAppConfiguration
public class SecurityMockMvcRequestPostProcessorsOAuth2LoginTests {
	@Autowired
	WebApplicationContext context;
	MockMvc mvc;
	@Before
	public void setup() {
		// Build a MockMvc with the Spring Security filter chain applied so the
		// oauth2Login() post-processor interacts with real authorization rules.
		// @formatter:off
		this.mvc = MockMvcBuilders
			.webAppContextSetup(this.context)
			.apply(springSecurity())
			.build();
		// @formatter:on
	}
	@Test
	public void oauth2LoginWhenUsingDefaultsThenProducesDefaultAuthentication() throws Exception {
		// Default principal is named "user" and only carries SCOPE_read, so
		// the SCOPE_admin-protected endpoint must be forbidden.
		this.mvc.perform(get("/name").with(oauth2Login())).andExpect(content().string("user"));
		this.mvc.perform(get("/admin/id-token/name").with(oauth2Login())).andExpect(status().isForbidden());
	}
	@Test
	public void oauth2LoginWhenUsingDefaultsThenProducesDefaultAuthorizedClient() throws Exception {
		// The default authorized client registration uses the id "test-client".
		this.mvc.perform(get("/client-id").with(oauth2Login())).andExpect(content().string("test-client"));
	}
	@Test
	public void oauth2LoginWhenAuthoritiesSpecifiedThenGrantsAccess() throws Exception {
		// Overriding authorities lets the mock login pass the SCOPE_admin rule.
		this.mvc.perform(
				get("/admin/scopes").with(oauth2Login().authorities(new SimpleGrantedAuthority("SCOPE_admin"))))
				.andExpect(content().string("[\"SCOPE_admin\"]"));
	}
	@Test
	public void oauth2LoginWhenAttributeSpecifiedThenUserHasAttribute() throws Exception {
		// Custom attributes added via attributes(...) are visible on the principal.
		this.mvc.perform(
				get("/attributes/iss").with(oauth2Login().attributes((a) -> a.put("iss", "https://idp.example.org"))))
				.andExpect(content().string("https://idp.example.org"));
	}
	@Test
	public void oauth2LoginWhenNameSpecifiedThenUserHasName() throws Exception {
		// A fully custom OAuth2User drives the principal name, the resolved
		// attribute, and the authorized client's principal name alike.
		OAuth2User oauth2User = new DefaultOAuth2User(AuthorityUtils.commaSeparatedStringToAuthorityList("SCOPE_read"),
				Collections.singletonMap("custom-attribute", "test-subject"), "custom-attribute");
		this.mvc.perform(get("/attributes/custom-attribute").with(oauth2Login().oauth2User(oauth2User)))
				.andExpect(content().string("test-subject"));
		this.mvc.perform(get("/name").with(oauth2Login().oauth2User(oauth2User)))
				.andExpect(content().string("test-subject"));
		this.mvc.perform(get("/client-name").with(oauth2Login().oauth2User(oauth2User)))
				.andExpect(content().string("test-subject"));
	}
	@Test
	public void oauth2LoginWhenClientRegistrationSpecifiedThenUses() throws Exception {
		// Supplying a ClientRegistration replaces the default "test-client" id.
		this.mvc.perform(get("/client-id")
				.with(oauth2Login().clientRegistration(TestClientRegistrations.clientRegistration().build())))
				.andExpect(content().string("client-id"));
	}
	@Test
	public void oauth2LoginWhenOAuth2UserSpecifiedThenLastCalledTakesPrecedence() throws Exception {
		// Whichever of attributes(...)/oauth2User(...) is called last wins.
		OAuth2User oauth2User = new DefaultOAuth2User(AuthorityUtils.createAuthorityList("SCOPE_read"),
				Collections.singletonMap("username", "user"), "username");
		this.mvc.perform(get("/attributes/sub")
				.with(oauth2Login().attributes((a) -> a.put("sub", "bar")).oauth2User(oauth2User)))
				.andExpect(status().isOk()).andExpect(content().string("no-attribute"));
		this.mvc.perform(get("/attributes/sub")
				.with(oauth2Login().oauth2User(oauth2User).attributes((a) -> a.put("sub", "bar"))))
				.andExpect(content().string("bar"));
	}
	// Minimal web security configuration: /admin/** requires SCOPE_admin,
	// everything else SCOPE_read; repositories are mocked since the tests
	// never hit a real OAuth2 provider.
	@EnableWebSecurity
	@EnableWebMvc
	static class OAuth2LoginConfig extends WebSecurityConfigurerAdapter {
		@Override
		protected void configure(HttpSecurity http) throws Exception {
			// @formatter:off
			http
				.authorizeRequests((authorize) -> authorize
					.mvcMatchers("/admin/**").hasAuthority("SCOPE_admin")
					.anyRequest().hasAuthority("SCOPE_read")
				).oauth2Login();
			// @formatter:on
		}
		@Bean
		ClientRegistrationRepository clientRegistrationRepository() {
			return mock(ClientRegistrationRepository.class);
		}
		@Bean
		OAuth2AuthorizedClientRepository oAuth2AuthorizedClientRepository() {
			return mock(OAuth2AuthorizedClientRepository.class);
		}
		// Endpoints that echo back pieces of the authenticated principal /
		// authorized client so tests can assert on them as plain strings.
		@RestController
		static class PrincipalController {
			@GetMapping("/name")
			String name(@AuthenticationPrincipal OAuth2User oauth2User) {
				return oauth2User.getName();
			}
			@GetMapping("/client-id")
			String authorizedClient(@RegisteredOAuth2AuthorizedClient OAuth2AuthorizedClient authorizedClient) {
				return authorizedClient.getClientRegistration().getClientId();
			}
			@GetMapping("/client-name")
			String clientName(@RegisteredOAuth2AuthorizedClient OAuth2AuthorizedClient authorizedClient) {
				return authorizedClient.getPrincipalName();
			}
			@GetMapping("/attributes/{attribute}")
			String attributes(@AuthenticationPrincipal OAuth2User oauth2User,
					@PathVariable("attribute") String attribute) {
				// Fall back to a sentinel so tests can assert on missing attributes.
				return Optional.ofNullable((String) oauth2User.getAttribute(attribute)).orElse("no-attribute");
			}
			@GetMapping("/admin/scopes")
			List<String> scopes(
					@AuthenticationPrincipal(expression = "authorities") Collection<GrantedAuthority> authorities) {
				return authorities.stream().map(GrantedAuthority::getAuthority).collect(Collectors.toList());
			}
		}
	}
}
| |
package com.emistoolbox.client.ui.pdf.layout;
import java.util.ArrayList;
import java.util.List;
import com.emistoolbox.client.EmisEditor;
import com.emistoolbox.client.admin.ui.EmisUtils;
import com.emistoolbox.client.admin.ui.ListBoxWithUserObjects;
import com.emistoolbox.common.renderer.ChartConfig.ChartType;
import com.emistoolbox.common.renderer.pdfreport.PdfChartContentConfig;
import com.emistoolbox.common.renderer.pdfreport.PdfContentConfig;
import com.emistoolbox.common.renderer.pdfreport.PdfContentConfigVisitor;
import com.emistoolbox.common.renderer.pdfreport.PdfGisContentConfig;
import com.emistoolbox.common.renderer.pdfreport.PdfPriorityListContentConfig;
import com.emistoolbox.common.renderer.pdfreport.TableStyleConfig;
import com.emistoolbox.common.renderer.pdfreport.PdfTextContentConfig;
import com.emistoolbox.common.renderer.pdfreport.PdfVariableContentConfig;
import com.emistoolbox.common.results.MetaResult;
import com.emistoolbox.common.results.MetaResultDimension;
import com.emistoolbox.common.results.TableMetaResult;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.PushButton;
import com.google.gwt.user.client.ui.VerticalPanel;
/**
 * Panel listing report content (charts, tables, maps, ...) that has been
 * created in the Analysis tab but not yet placed on a report page. Offers
 * "Add to Page" (handler supplied by the caller) and "Del" buttons, and
 * implements {@link EmisEditor} over the list of pending content configs.
 */
public class LayoutContentListEditor extends VerticalPanel implements EmisEditor<List<PdfContentConfig>>
{
    // Pending content entries, grouped by type; the user object is the config itself.
    private ListBoxWithUserObjects<PdfContentConfig> uiConfigs = new ListBoxWithUserObjects<PdfContentConfig>();
    private PushButton btnAddToPage = new PushButton("Add to Page");
    private PushButton btnDel = new PushButton("Del");

    public LayoutContentListEditor()
    {
        setWidth("100%");

        add(new HTML("<div class='section'>New Content</div>"));
        add(uiConfigs);
        uiConfigs.setVisibleItemCount(5);
        uiConfigs.setWidth("100%");

        // Buttons.
        EmisUtils.init(btnAddToPage, 100);
        EmisUtils.init(btnDel, 60);

        HorizontalPanel hp = new HorizontalPanel();
        hp.setSpacing(3);
        hp.add(btnDel);
        hp.add(btnAddToPage);

        setHorizontalAlignment(HasHorizontalAlignment.ALIGN_RIGHT);
        add(hp);

        add(new HTML("<p>This list shows content not, yet, placed on the page.</p><p>You can add new content by going to the 'Analysis' tab and display a chart, a table or a priority list. Then click <b>[Add to Report]</b>."));

        // "Del" is handled internally; "Add to Page" is wired by the owner
        // via addAddToPageHandler() so it can place the removed content.
        btnDel.addClickHandler(new ClickHandler() {
            public void onClick(ClickEvent event) {
                removeContent();
            }
        });
    }

    /** Registers the owner's handler for the "Add to Page" button. */
    public void addAddToPageHandler(ClickHandler handler)
    { btnAddToPage.addClickHandler(handler); }

    /**
     * Removes the currently selected entry from the list and returns its
     * config, keeping a sensible selection afterwards.
     *
     * @return the removed content config, or null when nothing is selected
     */
    public PdfContentConfig removeContent()
    {
        int index = uiConfigs.getSelectedIndex();
        if (index == -1)
            return null;

        PdfContentConfig content = uiConfigs.getUserObject(index);
        uiConfigs.removeItem(index);
        uiConfigs.removeEmptyGroups();
        // Clamp the selection when the last entry was removed.
        if (index >= uiConfigs.getItemCount())
            index--;

        if (index != -1)
            uiConfigs.setSelectedIndex(index);

        return content;
    }

    /** Enables or disables the "Add to Page" button. */
    public void enableAddButton(boolean enabled)
    { btnAddToPage.setEnabled(enabled); }

    /**
     * Repopulates the list from the given configs; null entries are skipped.
     * Entries are grouped and titled via the visitor helpers below.
     */
    public void set(List<PdfContentConfig> configs)
    {
        uiConfigs.clear();
        if (configs == null)
            return;

        for (PdfContentConfig config : configs)
        {
            if (config == null)
                continue;

            uiConfigs.add(getGroup(config), getTitle(config), config);
        }
    }

    // Nothing to flush: the list box is the model.
    public void commit()
    {}

    /** @return the configs currently held by the list, in display order */
    public List<PdfContentConfig> get()
    {
        List<PdfContentConfig> result = new ArrayList<PdfContentConfig>();
        for (int i = 0; i < uiConfigs.getItemCount(); i++)
            result.add(uiConfigs.getUserObject(i));

        return result;
    }

    /** @return the list-group heading for the given config */
    private String getGroup(PdfContentConfig config)
    { return config.accept(new GroupNameVisitor()); }

    /** @return the short display label for the given config */
    private String getTitle(PdfContentConfig config)
    { return config.accept(new SummaryVisitor()); }
}
/**
 * Visitor yielding the group heading under which a piece of report content
 * is filed in the content list (e.g. all charts under "Charts").
 */
class GroupNameVisitor implements PdfContentConfigVisitor<String>
{
    @Override
    public String visit(PdfTextContentConfig config)
    {
        return "Text";
    }

    @Override
    public String visit(PdfVariableContentConfig config)
    {
        return "Variables";
    }

    @Override
    public String visit(PdfChartContentConfig config)
    {
        return "Charts";
    }

    @Override
    public String visit(PdfGisContentConfig config)
    {
        return "Maps";
    }

    @Override
    public String visit(PdfPriorityListContentConfig config)
    {
        return "Prio Lists";
    }

    @Override
    public String visit(TableStyleConfig config)
    {
        return "Tables";
    }
}
/**
 * Visitor producing a short, human-readable label for a content config,
 * used as the entry text in the content list.
 */
class SummaryVisitor implements PdfContentConfigVisitor<String>
{
    @Override
    public String visit(PdfTextContentConfig config)
    {
        return trimTo(config.getText(), "(empty)", 20);
    }

    @Override
    public String visit(PdfVariableContentConfig config)
    {
        StringBuilder joined = new StringBuilder();
        List<String> vars = config.getVariables();
        if (vars != null)
        {
            for (String var : vars)
            {
                if (joined.length() > 0)
                    joined.append(",");
                joined.append(var);
            }
        }

        return trimTo(joined.toString(), "(none)", 20);
    }

    /**
     * Shortens text to at most maxLength characters followed by "..";
     * null or empty input yields the supplied default label.
     */
    private String trimTo(String text, String defaultText, int maxLength)
    {
        if (text == null || text.isEmpty())
            return defaultText;

        return text.length() < maxLength ? text : text.substring(0, maxLength) + "..";
    }

    /** Lower-cased enum name of the chart type with the given ordinal. */
    private String getChartTypeName(int chartType)
    {
        return ChartType.values()[chartType].name().toLowerCase();
    }

    @Override
    public String visit(PdfChartContentConfig config)
    {
        StringBuilder label = new StringBuilder();
        label.append(getChartTypeName(config.getChartType()) + " for ");
        label.append(config.getSeniorEntity().getName());

        // First dimension is introduced with "shown by", the rest comma-separated.
        String delim = " shown by ";
        for (int i = 0; i < config.getMetaResult().getDimensionCount(); i++)
        {
            label.append(delim);
            label.append(config.getMetaResult().getDimension(i).getName());
            delim = ", ";
        }

        return label.toString();
    }

    @Override
    public String visit(PdfGisContentConfig config)
    {
        // NOTE(review): the indicator is fetched but discarded and an empty
        // label is returned — presumably an unfinished summary; confirm.
        config.getMetaResult().getIndicator();
        return "";
    }

    // NOTE(review): the three append* helpers below are not referenced within
    // this class — presumably kept for future summary formats; confirm before removing.
    private boolean appendEntity(StringBuffer result, MetaResult metaResult)
    {
        if (metaResult == null || metaResult.getContext() == null || metaResult.getContext().getEntityType() == null)
            return false;

        result.append(metaResult.getContext().getEntityType().getName());
        return true;
    }

    private boolean appendIndicator(StringBuffer result, MetaResult metaResult)
    {
        if (metaResult == null || metaResult.getIndicator() == null)
            return false;

        result.append(metaResult.getIndicator().getName());
        return true;
    }

    private boolean appendDimensions(StringBuffer result, TableMetaResult metaResult)
    {
        if (metaResult == null || metaResult.getDimensionCount() == 0)
            return false;

        for (int i = 0; i < metaResult.getDimensionCount(); i++)
        {
            if (i > 0)
                result.append(", ");
            result.append(metaResult.getDimension(i).getName());
        }

        return true;
    }

    @Override
    public String visit(PdfPriorityListContentConfig config)
    {
        return "Priority Lists";
    }

    @Override
    public String visit(TableStyleConfig config)
    {
        return "Data Table";
    }
}
| |
/*
* Copyright 2004-2013 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.mysql.dbflute.cbean.cq.bs;
import java.util.*;
import org.dbflute.cbean.*;
import org.dbflute.cbean.chelper.*;
import org.dbflute.cbean.ckey.*;
import org.dbflute.cbean.coption.*;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.ordering.*;
import org.dbflute.cbean.scoping.*;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.dbmeta.DBMetaProvider;
import org.docksidestage.mysql.dbflute.allcommon.*;
import org.docksidestage.mysql.dbflute.cbean.*;
import org.docksidestage.mysql.dbflute.cbean.cq.*;
/**
* The abstract condition-query of white_variant_relation_local_pk_referrer.
* @author DBFlute(AutoGenerator)
*/
public abstract class AbstractBsWhiteVariantRelationLocalPkReferrerCQ extends AbstractConditionQuery {
// ===================================================================================
// Constructor
// ===========
    /**
     * Constructor delegating straight to {@link AbstractConditionQuery}.
     * @param referrerQuery The condition-query of the referrer (outer) query. (presumably nullable for a root query — TODO confirm)
     * @param sqlClause The SQL clause this query appends conditions to.
     * @param aliasName The alias name used for this table in generated SQL.
     * @param nestLevel The nest level of this query within the whole statement.
     */
    public AbstractBsWhiteVariantRelationLocalPkReferrerCQ(ConditionQuery referrerQuery, SqlClause sqlClause, String aliasName, int nestLevel) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
    }
// ===================================================================================
// DB Meta
// =======
    /** {@inheritDoc} Supplies the generated DB-meta provider for this schema. */
    @Override
    protected DBMetaProvider xgetDBMetaProvider() {
        return DBMetaInstanceHandler.getProvider();
    }
    /** @return The DB name of the table this condition-query targets. (NotNull) */
    public String asTableDbName() {
        return "white_variant_relation_local_pk_referrer";
    }
// ===================================================================================
// Query
// =====
    /**
     * Equal(=). And NullIgnored, OnlyOnceRegistered. <br>
     * REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
     * @param reversefkSuppressedId The value of reversefkSuppressedId as equal. (basically NotNull: error as default, or no condition as option)
     */
    public void setReversefkSuppressedId_Equal(Long reversefkSuppressedId) {
        // indirection through doSet...() — presumably an extension point for generated sub-classes; confirm
        doSetReversefkSuppressedId_Equal(reversefkSuppressedId);
    }
protected void doSetReversefkSuppressedId_Equal(Long reversefkSuppressedId) {
regReversefkSuppressedId(CK_EQ, reversefkSuppressedId);
}
/**
* GreaterThan(>). And NullIgnored, OnlyOnceRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
* @param reversefkSuppressedId The value of reversefkSuppressedId as greaterThan. (basically NotNull: error as default, or no condition as option)
*/
public void setReversefkSuppressedId_GreaterThan(Long reversefkSuppressedId) {
regReversefkSuppressedId(CK_GT, reversefkSuppressedId);
}
/**
* LessThan(<). And NullIgnored, OnlyOnceRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
* @param reversefkSuppressedId The value of reversefkSuppressedId as lessThan. (basically NotNull: error as default, or no condition as option)
*/
public void setReversefkSuppressedId_LessThan(Long reversefkSuppressedId) {
regReversefkSuppressedId(CK_LT, reversefkSuppressedId);
}
/**
* GreaterEqual(>=). And NullIgnored, OnlyOnceRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
* @param reversefkSuppressedId The value of reversefkSuppressedId as greaterEqual. (basically NotNull: error as default, or no condition as option)
*/
public void setReversefkSuppressedId_GreaterEqual(Long reversefkSuppressedId) {
regReversefkSuppressedId(CK_GE, reversefkSuppressedId);
}
/**
* LessEqual(<=). And NullIgnored, OnlyOnceRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
* @param reversefkSuppressedId The value of reversefkSuppressedId as lessEqual. (basically NotNull: error as default, or no condition as option)
*/
public void setReversefkSuppressedId_LessEqual(Long reversefkSuppressedId) {
regReversefkSuppressedId(CK_LE, reversefkSuppressedId);
}
/**
* RangeOf with various options. (versatile) <br>
* {(default) minNumber <= column <= maxNumber} <br>
* And NullIgnored, OnlyOnceRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
* @param minNumber The min number of reversefkSuppressedId. (basically NotNull: if op.allowOneSide(), null allowed)
* @param maxNumber The max number of reversefkSuppressedId. (basically NotNull: if op.allowOneSide(), null allowed)
* @param opLambda The callback for option of range-of. (NotNull)
*/
public void setReversefkSuppressedId_RangeOf(Long minNumber, Long maxNumber, ConditionOptionCall<RangeOfOption> opLambda) {
setReversefkSuppressedId_RangeOf(minNumber, maxNumber, xcROOP(opLambda));
}
/**
* RangeOf with various options. (versatile) <br>
* {(default) minNumber <= column <= maxNumber} <br>
* And NullIgnored, OnlyOnceRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
* @param minNumber The min number of reversefkSuppressedId. (basically NotNull: if op.allowOneSide(), null allowed)
* @param maxNumber The max number of reversefkSuppressedId. (basically NotNull: if op.allowOneSide(), null allowed)
* @param rangeOfOption The option of range-of. (NotNull)
*/
public void setReversefkSuppressedId_RangeOf(Long minNumber, Long maxNumber, RangeOfOption rangeOfOption) {
regROO(minNumber, maxNumber, xgetCValueReversefkSuppressedId(), "REVERSEFK_SUPPRESSED_ID", rangeOfOption);
}
/**
* InScope {in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
* @param reversefkSuppressedIdList The collection of reversefkSuppressedId as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
*/
public void setReversefkSuppressedId_InScope(Collection<Long> reversefkSuppressedIdList) {
doSetReversefkSuppressedId_InScope(reversefkSuppressedIdList);
}
protected void doSetReversefkSuppressedId_InScope(Collection<Long> reversefkSuppressedIdList) {
regINS(CK_INS, cTL(reversefkSuppressedIdList), xgetCValueReversefkSuppressedId(), "REVERSEFK_SUPPRESSED_ID");
}
/**
* NotInScope {not in (1, 2)}. And NullIgnored, NullElementIgnored, SeveralRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
* @param reversefkSuppressedIdList The collection of reversefkSuppressedId as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
*/
public void setReversefkSuppressedId_NotInScope(Collection<Long> reversefkSuppressedIdList) {
doSetReversefkSuppressedId_NotInScope(reversefkSuppressedIdList);
}
protected void doSetReversefkSuppressedId_NotInScope(Collection<Long> reversefkSuppressedIdList) {
regINS(CK_NINS, cTL(reversefkSuppressedIdList), xgetCValueReversefkSuppressedId(), "REVERSEFK_SUPPRESSED_ID");
}
/**
* IsNull {is null}. And OnlyOnceRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
*/
public void setReversefkSuppressedId_IsNull() { regReversefkSuppressedId(CK_ISN, DOBJ); }
/**
* IsNotNull {is not null}. And OnlyOnceRegistered. <br>
* REVERSEFK_SUPPRESSED_ID: {PK, NotNull, BIGINT(19), FK to WHITE_VARIANT_RELATION_MASTER_FOO}
*/
public void setReversefkSuppressedId_IsNotNull() { regReversefkSuppressedId(CK_ISNN, DOBJ); }
protected void regReversefkSuppressedId(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueReversefkSuppressedId(), "REVERSEFK_SUPPRESSED_ID"); }
protected abstract ConditionValue xgetCValueReversefkSuppressedId();
/**
* Equal(=). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
* MASTER_TYPE_CODE: {NotNull, CHAR(3)}
* @param masterTypeCode The value of masterTypeCode as equal. (basically NotNull, NotEmpty: error as default, or no condition as option)
*/
public void setMasterTypeCode_Equal(String masterTypeCode) {
doSetMasterTypeCode_Equal(fRES(masterTypeCode));
}
protected void doSetMasterTypeCode_Equal(String masterTypeCode) {
regMasterTypeCode(CK_EQ, masterTypeCode);
}
/**
* NotEqual(<>). And NullOrEmptyIgnored, OnlyOnceRegistered. <br>
* MASTER_TYPE_CODE: {NotNull, CHAR(3)}
* @param masterTypeCode The value of masterTypeCode as notEqual. (basically NotNull, NotEmpty: error as default, or no condition as option)
*/
public void setMasterTypeCode_NotEqual(String masterTypeCode) {
doSetMasterTypeCode_NotEqual(fRES(masterTypeCode));
}
protected void doSetMasterTypeCode_NotEqual(String masterTypeCode) {
regMasterTypeCode(CK_NES, masterTypeCode);
}
/**
* InScope {in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
* MASTER_TYPE_CODE: {NotNull, CHAR(3)}
* @param masterTypeCodeList The collection of masterTypeCode as inScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
*/
public void setMasterTypeCode_InScope(Collection<String> masterTypeCodeList) {
doSetMasterTypeCode_InScope(masterTypeCodeList);
}
protected void doSetMasterTypeCode_InScope(Collection<String> masterTypeCodeList) {
regINS(CK_INS, cTL(masterTypeCodeList), xgetCValueMasterTypeCode(), "MASTER_TYPE_CODE");
}
/**
* NotInScope {not in ('a', 'b')}. And NullOrEmptyIgnored, NullOrEmptyElementIgnored, SeveralRegistered. <br>
* MASTER_TYPE_CODE: {NotNull, CHAR(3)}
* @param masterTypeCodeList The collection of masterTypeCode as notInScope. (basically NotNull, NotEmpty: error as default, or no condition as option)
*/
public void setMasterTypeCode_NotInScope(Collection<String> masterTypeCodeList) {
doSetMasterTypeCode_NotInScope(masterTypeCodeList);
}
protected void doSetMasterTypeCode_NotInScope(Collection<String> masterTypeCodeList) {
regINS(CK_NINS, cTL(masterTypeCodeList), xgetCValueMasterTypeCode(), "MASTER_TYPE_CODE");
}
/**
* LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
* MASTER_TYPE_CODE: {NotNull, CHAR(3)} <br>
* <pre>e.g. setMasterTypeCode_LikeSearch("xxx", op <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> op.<span style="color: #CC4747">likeContain()</span>);</pre>
* @param masterTypeCode The value of masterTypeCode as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
* @param opLambda The callback for option of like-search. (NotNull)
*/
public void setMasterTypeCode_LikeSearch(String masterTypeCode, ConditionOptionCall<LikeSearchOption> opLambda) {
setMasterTypeCode_LikeSearch(masterTypeCode, xcLSOP(opLambda));
}
/**
* LikeSearch with various options. (versatile) {like '%xxx%' escape ...}. And NullOrEmptyIgnored, SeveralRegistered. <br>
* MASTER_TYPE_CODE: {NotNull, CHAR(3)} <br>
* <pre>e.g. setMasterTypeCode_LikeSearch("xxx", new <span style="color: #CC4747">LikeSearchOption</span>().likeContain());</pre>
* @param masterTypeCode The value of masterTypeCode as likeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
* @param likeSearchOption The option of like-search. (NotNull)
*/
public void setMasterTypeCode_LikeSearch(String masterTypeCode, LikeSearchOption likeSearchOption) {
regLSQ(CK_LS, fRES(masterTypeCode), xgetCValueMasterTypeCode(), "MASTER_TYPE_CODE", likeSearchOption);
}
/**
* NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
* And NullOrEmptyIgnored, SeveralRegistered. <br>
* MASTER_TYPE_CODE: {NotNull, CHAR(3)}
* @param masterTypeCode The value of masterTypeCode as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
* @param opLambda The callback for option of like-search. (NotNull)
*/
public void setMasterTypeCode_NotLikeSearch(String masterTypeCode, ConditionOptionCall<LikeSearchOption> opLambda) {
setMasterTypeCode_NotLikeSearch(masterTypeCode, xcLSOP(opLambda));
}
/**
* NotLikeSearch with various options. (versatile) {not like 'xxx%' escape ...} <br>
* And NullOrEmptyIgnored, SeveralRegistered. <br>
* MASTER_TYPE_CODE: {NotNull, CHAR(3)}
* @param masterTypeCode The value of masterTypeCode as notLikeSearch. (basically NotNull, NotEmpty: error as default, or no condition as option)
* @param likeSearchOption The option of not-like-search. (NotNull)
*/
public void setMasterTypeCode_NotLikeSearch(String masterTypeCode, LikeSearchOption likeSearchOption) {
regLSQ(CK_NLS, fRES(masterTypeCode), xgetCValueMasterTypeCode(), "MASTER_TYPE_CODE", likeSearchOption);
}
protected void regMasterTypeCode(ConditionKey ky, Object vl) { regQ(ky, vl, xgetCValueMasterTypeCode(), "MASTER_TYPE_CODE"); }
protected abstract ConditionValue xgetCValueMasterTypeCode();
// ===================================================================================
// ScalarCondition
// ===============
/**
* Prepare ScalarCondition as equal. <br>
* {where FOO = (select max(BAR) from ...)}
* <pre>
* cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
* <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
* });
* </pre>
* @return The object to set up a function. (NotNull)
*/
public HpSLCFunction<WhiteVariantRelationLocalPkReferrerCB> scalar_Equal() {
return xcreateSLCFunction(CK_EQ, WhiteVariantRelationLocalPkReferrerCB.class);
}
/**
* Prepare ScalarCondition as equal. <br>
* {where FOO <> (select max(BAR) from ...)}
* <pre>
* cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
* <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
* });
* </pre>
* @return The object to set up a function. (NotNull)
*/
public HpSLCFunction<WhiteVariantRelationLocalPkReferrerCB> scalar_NotEqual() {
return xcreateSLCFunction(CK_NES, WhiteVariantRelationLocalPkReferrerCB.class);
}
/**
* Prepare ScalarCondition as greaterThan. <br>
* {where FOO > (select max(BAR) from ...)}
* <pre>
* cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
* <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
* });
* </pre>
* @return The object to set up a function. (NotNull)
*/
public HpSLCFunction<WhiteVariantRelationLocalPkReferrerCB> scalar_GreaterThan() {
return xcreateSLCFunction(CK_GT, WhiteVariantRelationLocalPkReferrerCB.class);
}
/**
* Prepare ScalarCondition as lessThan. <br>
* {where FOO < (select max(BAR) from ...)}
* <pre>
* cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
* <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
* });
* </pre>
* @return The object to set up a function. (NotNull)
*/
public HpSLCFunction<WhiteVariantRelationLocalPkReferrerCB> scalar_LessThan() {
return xcreateSLCFunction(CK_LT, WhiteVariantRelationLocalPkReferrerCB.class);
}
/**
* Prepare ScalarCondition as greaterEqual. <br>
* {where FOO >= (select max(BAR) from ...)}
* <pre>
* cb.query().scalar_Equal().<span style="color: #CC4747">avg</span>(<span style="color: #553000">purchaseCB</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">purchaseCB</span>.specify().<span style="color: #CC4747">columnPurchasePrice</span>(); <span style="color: #3F7E5E">// *Point!</span>
* <span style="color: #553000">purchaseCB</span>.query().setPaymentCompleteFlg_Equal_True();
* });
* </pre>
* @return The object to set up a function. (NotNull)
*/
public HpSLCFunction<WhiteVariantRelationLocalPkReferrerCB> scalar_GreaterEqual() {
return xcreateSLCFunction(CK_GE, WhiteVariantRelationLocalPkReferrerCB.class);
}
/**
* Prepare ScalarCondition as lessEqual. <br>
* {where FOO <= (select max(BAR) from ...)}
* <pre>
* cb.query().<span style="color: #CC4747">scalar_LessEqual()</span>.max(new SubQuery<WhiteVariantRelationLocalPkReferrerCB>() {
* public void query(WhiteVariantRelationLocalPkReferrerCB subCB) {
* subCB.specify().setFoo... <span style="color: #3F7E5E">// derived column for function</span>
* subCB.query().setBar...
* }
* });
* </pre>
* @return The object to set up a function. (NotNull)
*/
public HpSLCFunction<WhiteVariantRelationLocalPkReferrerCB> scalar_LessEqual() {
return xcreateSLCFunction(CK_LE, WhiteVariantRelationLocalPkReferrerCB.class);
}
@SuppressWarnings("unchecked")
protected <CB extends ConditionBean> void xscalarCondition(String fn, SubQuery<CB> sq, String rd, HpSLCCustomized<CB> cs, ScalarConditionOption op) {
assertObjectNotNull("subQuery", sq);
WhiteVariantRelationLocalPkReferrerCB cb = xcreateScalarConditionCB(); sq.query((CB)cb);
String pp = keepScalarCondition(cb.query()); // for saving query-value
cs.setPartitionByCBean((CB)xcreateScalarConditionPartitionByCB()); // for using partition-by
registerScalarCondition(fn, cb.query(), pp, rd, cs, op);
}
public abstract String keepScalarCondition(WhiteVariantRelationLocalPkReferrerCQ sq);
protected WhiteVariantRelationLocalPkReferrerCB xcreateScalarConditionCB() {
WhiteVariantRelationLocalPkReferrerCB cb = newMyCB(); cb.xsetupForScalarCondition(this); return cb;
}
protected WhiteVariantRelationLocalPkReferrerCB xcreateScalarConditionPartitionByCB() {
WhiteVariantRelationLocalPkReferrerCB cb = newMyCB(); cb.xsetupForScalarConditionPartitionBy(this); return cb;
}
// ===================================================================================
// MyselfDerived
// =============
public void xsmyselfDerive(String fn, SubQuery<WhiteVariantRelationLocalPkReferrerCB> sq, String al, DerivedReferrerOption op) {
assertObjectNotNull("subQuery", sq);
WhiteVariantRelationLocalPkReferrerCB cb = new WhiteVariantRelationLocalPkReferrerCB(); cb.xsetupForDerivedReferrer(this);
lockCall(() -> sq.query(cb)); String pp = keepSpecifyMyselfDerived(cb.query()); String pk = "REVERSEFK_SUPPRESSED_ID";
registerSpecifyMyselfDerived(fn, cb.query(), pk, pk, pp, "myselfDerived", al, op);
}
public abstract String keepSpecifyMyselfDerived(WhiteVariantRelationLocalPkReferrerCQ sq);
/**
* Prepare for (Query)MyselfDerived (correlated sub-query).
* @return The object to set up a function for myself table. (NotNull)
*/
public HpQDRFunction<WhiteVariantRelationLocalPkReferrerCB> myselfDerived() {
return xcreateQDRFunctionMyselfDerived(WhiteVariantRelationLocalPkReferrerCB.class);
}
@SuppressWarnings("unchecked")
protected <CB extends ConditionBean> void xqderiveMyselfDerived(String fn, SubQuery<CB> sq, String rd, Object vl, DerivedReferrerOption op) {
assertObjectNotNull("subQuery", sq);
WhiteVariantRelationLocalPkReferrerCB cb = new WhiteVariantRelationLocalPkReferrerCB(); cb.xsetupForDerivedReferrer(this); sq.query((CB)cb);
String pk = "REVERSEFK_SUPPRESSED_ID";
String sqpp = keepQueryMyselfDerived(cb.query()); // for saving query-value.
String prpp = keepQueryMyselfDerivedParameter(vl);
registerQueryMyselfDerived(fn, cb.query(), pk, pk, sqpp, "myselfDerived", rd, vl, prpp, op);
}
public abstract String keepQueryMyselfDerived(WhiteVariantRelationLocalPkReferrerCQ sq);
public abstract String keepQueryMyselfDerivedParameter(Object vl);
// ===================================================================================
// MyselfExists
// ============
/**
* Prepare for MyselfExists (correlated sub-query).
* @param subCBLambda The implementation of sub-query. (NotNull)
*/
public void myselfExists(SubQuery<WhiteVariantRelationLocalPkReferrerCB> subCBLambda) {
assertObjectNotNull("subCBLambda", subCBLambda);
WhiteVariantRelationLocalPkReferrerCB cb = new WhiteVariantRelationLocalPkReferrerCB(); cb.xsetupForMyselfExists(this);
lockCall(() -> subCBLambda.query(cb)); String pp = keepMyselfExists(cb.query());
registerMyselfExists(cb.query(), pp);
}
public abstract String keepMyselfExists(WhiteVariantRelationLocalPkReferrerCQ sq);
// ===================================================================================
// Full Text Search
// ================
/**
* Match for full-text search. <br>
* Bind variable is unused because the condition value should be literal in MySQL.
* @param textColumn The text column. (NotNull, StringColumn, TargetTableColumn)
* @param conditionValue The condition value embedded without binding (by MySQL restriction) but escaped. (NullAllowed: if null or empty, no condition)
* @param modifier The modifier of full-text search. (NullAllowed: If the value is null, No modifier specified)
*/
public void match(org.dbflute.dbmeta.info.ColumnInfo textColumn
, String conditionValue
, org.dbflute.dbway.WayOfMySQL.FullTextSearchModifier modifier) {
assertObjectNotNull("textColumn", textColumn);
match(newArrayList(textColumn), conditionValue, modifier);
}
/**
* Match for full-text search. <br>
* Bind variable is unused because the condition value should be literal in MySQL.
* @param textColumnList The list of text column. (NotNull, NotEmpty, StringColumn, TargetTableColumn)
* @param conditionValue The condition value embedded without binding (by MySQL restriction) but escaped. (NullAllowed: if null or empty, no condition)
* @param modifier The modifier of full-text search. (NullAllowed: If the value is null, no modifier specified)
*/
public void match(List<org.dbflute.dbmeta.info.ColumnInfo> textColumnList
, String conditionValue
, org.dbflute.dbway.WayOfMySQL.FullTextSearchModifier modifier) {
xdoMatchForMySQL(textColumnList, conditionValue, modifier);
}
// ===================================================================================
// Manual Order
// ============
/**
* Order along manual ordering information.
* <pre>
* cb.query().addOrderBy_Birthdate_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">op</span>.<span style="color: #CC4747">when_GreaterEqual</span>(priorityDate); <span style="color: #3F7E5E">// e.g. 2000/01/01</span>
* });
* <span style="color: #3F7E5E">// order by </span>
* <span style="color: #3F7E5E">// case</span>
* <span style="color: #3F7E5E">// when BIRTHDATE >= '2000/01/01' then 0</span>
* <span style="color: #3F7E5E">// else 1</span>
* <span style="color: #3F7E5E">// end asc, ...</span>
*
* cb.query().addOrderBy_MemberStatusCode_Asc().<span style="color: #CC4747">withManualOrder</span>(<span style="color: #553000">op</span> <span style="color: #90226C; font-weight: bold"><span style="font-size: 120%">-</span>></span> {
* <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Withdrawal);
* <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Formalized);
* <span style="color: #553000">op</span>.<span style="color: #CC4747">when_Equal</span>(CDef.MemberStatus.Provisional);
* });
* <span style="color: #3F7E5E">// order by </span>
* <span style="color: #3F7E5E">// case</span>
* <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'WDL' then 0</span>
* <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'FML' then 1</span>
* <span style="color: #3F7E5E">// when MEMBER_STATUS_CODE = 'PRV' then 2</span>
* <span style="color: #3F7E5E">// else 3</span>
* <span style="color: #3F7E5E">// end asc, ...</span>
* </pre>
* <p>This function with Union is unsupported!</p>
* <p>The order values are bound (treated as bind parameter).</p>
* @param opLambda The callback for option of manual-order containing order values. (NotNull)
*/
public void withManualOrder(ManualOrderOptionCall opLambda) { // is user public!
xdoWithManualOrder(cMOO(opLambda));
}
// ===================================================================================
// Small Adjustment
// ================
// ===================================================================================
// Very Internal
// =============
protected WhiteVariantRelationLocalPkReferrerCB newMyCB() {
return new WhiteVariantRelationLocalPkReferrerCB();
}
// very internal (for suppressing warn about 'Not Use Import')
protected String xabUDT() { return Date.class.getName(); }
protected String xabCQ() { return WhiteVariantRelationLocalPkReferrerCQ.class.getName(); }
protected String xabLSO() { return LikeSearchOption.class.getName(); }
protected String xabSLCS() { return HpSLCSetupper.class.getName(); }
protected String xabSCP() { return SubQuery.class.getName(); }
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2011, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import hudson.DescriptorExtensionList;
import hudson.PluginWrapper;
import hudson.RelativePath;
import hudson.XmlFile;
import hudson.BulkChange;
import hudson.ExtensionList;
import hudson.Util;
import hudson.model.listeners.SaveableListener;
import hudson.util.FormApply;
import hudson.util.FormValidation.CheckMethod;
import hudson.util.ReflectionUtils;
import hudson.util.ReflectionUtils.Parameter;
import hudson.views.ListViewColumn;
import jenkins.model.Jenkins;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.kohsuke.stapler.*;
import org.kohsuke.stapler.jelly.JellyCompatibleFacet;
import org.kohsuke.stapler.lang.Klass;
import org.springframework.util.StringUtils;
import org.jvnet.tiger_types.Types;
import org.apache.commons.io.IOUtils;
import static hudson.util.QuotedStringTokenizer.*;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import javax.servlet.ServletException;
import javax.servlet.RequestDispatcher;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import java.util.Locale;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.beans.Introspector;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
/**
* Metadata about a configurable instance.
*
* <p>
* {@link Descriptor} is an object that has metadata about a {@link Describable}
 * object, and also serves as a factory (in a way, this relationship is similar
 * to the {@link Object}/{@link Class} relationship).
*
* A {@link Descriptor}/{@link Describable}
* combination is used throughout in Hudson to implement a
* configuration/extensibility mechanism.
*
* <p>
* Take the list view support as an example, which is implemented
* in {@link ListView} class. Whenever a new view is created, a new
* {@link ListView} instance is created with the configuration
* information. This instance gets serialized to XML, and this instance
* will be called to render the view page. This is the job
* of {@link Describable} — each instance represents a specific
* configuration of a view (what projects are in it, regular expression, etc.)
*
* <p>
* For Hudson to create such configured {@link ListView} instance, Hudson
* needs another object that captures the metadata of {@link ListView},
* and that is what a {@link Descriptor} is for. {@link ListView} class
* has a singleton descriptor, and this descriptor helps render
* the configuration form, remember system-wide configuration, and works as a factory.
*
* <p>
* {@link Descriptor} also usually have its associated views.
*
*
* <h2>Persistence</h2>
* <p>
* {@link Descriptor} can persist data just by storing them in fields.
* However, it is the responsibility of the derived type to properly
* invoke {@link #save()} and {@link #load()}.
*
* <h2>Reflection Enhancement</h2>
* {@link Descriptor} defines addition to the standard Java reflection
* and provides reflective information about its corresponding {@link Describable}.
* These are primarily used by tag libraries to
* keep the Jelly scripts concise.
*
* @author Kohsuke Kawaguchi
* @see Describable
*/
public abstract class Descriptor<T extends Describable<T>> implements Saveable {
/**
 * The class being described by this descriptor.
 */
public transient final Class<? extends T> clazz;
// NOTE(review): appears to cache per-field form-validation CheckMethods keyed by
// field name — confirm at the population sites (outside this view).
private transient final Map<String,CheckMethod> checkMethods = new ConcurrentHashMap<String,CheckMethod>();
/**
 * Lazily computed list of properties on {@link #clazz} and on the descriptor itself.
 */
private transient volatile Map<String, PropertyType> propertyTypes,globalPropertyTypes;
/**
 * Represents a readable property on {@link Describable}.
 */
public static final class PropertyType {
    public final Class clazz;
    public final Type type;
    private volatile Class itemType; // lazily computed by getItemType()
    public final String displayName;

    PropertyType(Class clazz, Type type, String displayName) {
        this.clazz = clazz;
        this.type = type;
        this.displayName = displayName;
    }

    PropertyType(Field field) {
        this(field.getType(), field.getGenericType(), field.toString());
    }

    PropertyType(Method getter) {
        this(getter.getReturnType(), getter.getGenericReturnType(), getter.toString());
    }

    public Enum[] getEnumConstants() {
        return (Enum[]) clazz.getEnumConstants();
    }

    /**
     * If the property is a collection/array type, what is an item type?
     * Returns null for non-collection, non-array properties.
     */
    public Class getItemType() {
        Class computed = itemType;
        if (computed == null) {
            // benign race: recomputation yields the same value
            computed = computeItemType();
            itemType = computed;
        }
        return computed;
    }

    private Class computeItemType() {
        if (clazz.isArray())
            return clazz.getComponentType();
        if (Collection.class.isAssignableFrom(clazz)) {
            Type collectionType = Types.getBaseClass(type, Collection.class);
            return (collectionType instanceof ParameterizedType)
                    ? Types.erasure(Types.getTypeArgument(collectionType, 0))
                    : Object.class; // raw Collection — element type unknown
        }
        return null;
    }

    /**
     * Returns {@link Descriptor} whose 'clazz' is the same as {@link #getItemType() the item type}.
     */
    public Descriptor getItemTypeDescriptor() {
        return Jenkins.getInstance().getDescriptor(getItemType());
    }

    /** Like {@link #getItemTypeDescriptor()} but fails loudly when a descriptor cannot be located. */
    public Descriptor getItemTypeDescriptorOrDie() {
        Class item = getItemType();
        if (item == null) {
            throw new AssertionError(clazz + " is not an array/collection type in " + displayName + ". See https://wiki.jenkins-ci.org/display/JENKINS/My+class+is+missing+descriptor");
        }
        Descriptor descriptor = Jenkins.getInstance().getDescriptor(item);
        if (descriptor == null)
            throw new AssertionError(item + " is missing its descriptor in " + displayName + ". See https://wiki.jenkins-ci.org/display/JENKINS/My+class+is+missing+descriptor");
        return descriptor;
    }

    /**
     * Returns all the descriptors that produce types assignable to the property type.
     */
    public List<? extends Descriptor> getApplicableDescriptors() {
        return Jenkins.getInstance().getDescriptorList(clazz);
    }

    /**
     * Returns all the descriptors that produce types assignable to the item type for a collection property.
     */
    public List<? extends Descriptor> getApplicableItemDescriptors() {
        return Jenkins.getInstance().getDescriptorList(getItemType());
    }
}
/**
* Help file redirect, keyed by the field name to the path.
*
* @see #getHelpFile(String)
*/
private transient final Map<String,HelpRedirect> helpRedirect = new HashMap<String,HelpRedirect>();
private static class HelpRedirect {
    private final Class<? extends Describable> describable;
    private final String targetField;

    private HelpRedirect(Class<? extends Describable> describable, String targetField) {
        this.describable = describable;
        this.targetField = targetField;
    }

    /**
     * Resolution is deferred until lookup time to avoid ordering issues
     * among descriptor registrations.
     */
    private String resolve() {
        return Jenkins.getInstance().getDescriptor(describable).getHelpFile(targetField);
    }
}
/**
 * @param clazz
 *      Pass in {@link #self()} to have the descriptor describe itself
 *      (this hack is needed since derived types can't call "getClass()" to refer to itself).
 */
protected Descriptor(Class<? extends T> clazz) {
    // the self() sentinel means "describe my own concrete type"
    this.clazz = (clazz == self()) ? (Class) getClass() : clazz;
    // doing this turns out to be very error prone,
    // as field initializers in derived types will override values.
    // load();
}
/**
* Infers the type of the corresponding {@link Describable} from the outer class.
* This version works when you follow the common convention, where a descriptor
* is written as the static nested class of the describable class.
*
* @throws AssertionError if the convention is not followed (no enclosing class,
*         mismatched type parameter, or missing getDescriptor method).
* @since 1.278
*/
protected Descriptor() {
this.clazz = (Class<T>)getClass().getEnclosingClass();
if(clazz==null)
throw new AssertionError(getClass()+" doesn't have an outer class. Use the constructor that takes the Class object explicitly.");
// detect a type error: the enclosing class must be assignable to T of Descriptor<T>
Type bt = Types.getBaseClass(getClass(), Descriptor.class);
if (bt instanceof ParameterizedType) {
ParameterizedType pt = (ParameterizedType) bt;
// this 't' is the closest approximation of T of Descriptor<T>.
Class t = Types.erasure(pt.getActualTypeArguments()[0]);
if(!t.isAssignableFrom(clazz))
throw new AssertionError("Outer class "+clazz+" of "+getClass()+" is not assignable to "+t+". Perhaps wrong outer class?");
}
// detect a type error. this Descriptor is supposed to be returned from getDescriptor(), so make sure its type match up.
// this prevents a bug like http://www.nabble.com/Creating-a-new-parameter-Type-%3A-Masked-Parameter-td24786554.html
try {
Method getd = clazz.getMethod("getDescriptor");
if(!getd.getReturnType().isAssignableFrom(getClass())) {
throw new AssertionError(getClass()+" must be assignable to "+getd.getReturnType());
}
} catch (NoSuchMethodException e) {
throw new AssertionError(getClass()+" is missing getDescriptor method.");
}
}
/**
* Human readable name of this kind of configurable object.
* Shown in the UI wherever this type is offered to the user.
*/
public abstract String getDisplayName();
/**
* Uniquely identifies this {@link Descriptor} among all the other {@link Descriptor}s.
*
* <p>
* Historically {@link #clazz} is assumed to be unique, so this method uses that as the default,
* but if you are adding {@link Descriptor}s programmatically for the same type, you can change
* this to disambiguate them.
*
* <p>
* To look up {@link Descriptor} from ID, use {@link Jenkins#getDescriptor(String)}.
*
* @return
* Stick to valid Java identifier character, plus '.', which had to be allowed for historical reasons.
*
* @since 1.391
*/
public String getId() {
// Default: the fully-qualified class name of the describable type.
return clazz.getName();
}
/**
 * Unlike {@link #clazz}, returns the type parameter 'T', which determines
 * the {@link DescriptorExtensionList} that this descriptor belongs to.
 *
 * <p>
 * Subtypes that cannot supply the type parameter through generics may
 * override this method to provide it explicitly.
 */
public Class<T> getT() {
    Type base = Types.getBaseClass(getClass(), Descriptor.class);
    if (base instanceof ParameterizedType) {
        return Types.erasure(Types.getTypeArgument(base, 0));
    }
    throw new IllegalStateException(getClass()+" doesn't extend Descriptor with a type parameter.");
}
/**
* Gets the URL that this Descriptor is bound to, relative to the nearest {@link DescriptorByNameOwner}.
* Since {@link Jenkins} is a {@link DescriptorByNameOwner}, there's always one such ancestor to any request.
*/
public String getDescriptorUrl() {
return "descriptorByName/"+getId();
}
/**
* Gets the URL that this Descriptor is bound to, relative to the context path.
* Combines {@link #getCurrentDescriptorByNameUrl()} with {@link #getDescriptorUrl()}.
* @since 1.406
*/
public final String getDescriptorFullUrl() {
return getCurrentDescriptorByNameUrl()+'/'+getDescriptorUrl();
}
/**
 * Computes the URL of the nearest {@link DescriptorByNameOwner} for the
 * current Stapler request, used as the base for descriptor-bound URLs.
 *
 * @throws IllegalStateException if the current request has no
 *         {@link DescriptorByNameOwner} ancestor (previously this surfaced
 *         as an opaque NullPointerException).
 * @since 1.402
 */
public static String getCurrentDescriptorByNameUrl() {
    StaplerRequest req = Stapler.getCurrentRequest();
    // this override allows RenderOnDemandClosure to preserve the proper value
    Object url = req.getAttribute("currentDescriptorByNameUrl");
    if (url!=null) return url.toString();
    Ancestor a = req.findAncestor(DescriptorByNameOwner.class);
    if (a == null) {
        // findAncestor returns null when no such ancestor exists; fail with a
        // descriptive message instead of an NPE on a.getUrl().
        throw new IllegalStateException("No DescriptorByNameOwner ancestor in the current request: " + req.getRequestURI());
    }
    return a.getUrl();
}
/**
* @deprecated since 1.528
* Use {@link #getCheckMethod(String)}
*/
public String getCheckUrl(String fieldName) {
return getCheckMethod(fieldName).toCheckUrl();
}
/**
* If the field "xyz" of a {@link Describable} has the corresponding "doCheckXyz" method,
* return the model of the check method.
* <p>
* This method is used to hook up the form validation method to the corresponding HTML input element.
* Results are cached per field name in {@code checkMethods}.
* NOTE(review): the check-then-put on checkMethods is not atomic; presumably the
* map is concurrent or duplicate CheckMethod creation is harmless — confirm.
*/
public CheckMethod getCheckMethod(String fieldName) {
CheckMethod method = checkMethods.get(fieldName);
if(method==null) {
method = new CheckMethod(this,fieldName);
checkMethods.put(fieldName,method);
}
return method;
}
/**
 * Computes the list of other form fields that the given field depends on, via the doFillXyzItems method,
 * and sets that as the 'fillDependsOn' attribute. Also computes the URL of the doFillXyzItems and
 * sets that as the 'fillUrl' attribute.
 *
 * @throws IllegalStateException if no public doFillXyzItems method exists.
 */
public void calcFillSettings(String field, Map<String,Object> attributes) {
    String capitalized = StringUtils.capitalize(field);
    String fillMethodName = "doFill" + capitalized + "Items";
    Method fillMethod = ReflectionUtils.getPublicMethodNamed(getClass(), fillMethodName);
    if (fillMethod == null) {
        throw new IllegalStateException(String.format("%s doesn't have the %s method for filling a drop-down list", getClass(), fillMethodName));
    }
    // build query parameter line by figuring out what should be submitted
    List<String> dependencies = buildFillDependencies(fillMethod, new ArrayList<String>());
    if (!dependencies.isEmpty()) {
        attributes.put("fillDependsOn", Util.join(dependencies, " "));
    }
    attributes.put("fillUrl", String.format("%s/%s/fill%sItems", getCurrentDescriptorByNameUrl(), getDescriptorUrl(), capitalized));
}
/**
* Collects the form-field names a doFillXyzItems method depends on, by inspecting
* its @QueryParameter-annotated parameters. For parameters bound through a static
* fromStapler factory method, recurses into that method's parameters.
* NOTE(review): no cycle guard — mutually recursive fromStapler types would not
* terminate; presumably such cycles do not occur in practice.
*
* @param depends accumulator list; also the return value.
*/
private List<String> buildFillDependencies(Method method, List<String> depends) {
for (Parameter p : ReflectionUtils.getParameters(method)) {
QueryParameter qp = p.annotation(QueryParameter.class);
if (qp!=null) {
// prefer the explicit @QueryParameter value, fall back to the reflected name
String name = qp.value();
if (name.length()==0) name = p.name();
if (name==null || name.length()==0)
continue; // unknown parameter name. we'll report the error when the form is submitted.
// @RelativePath prefixes the name, scoping it to a nested form section
RelativePath rp = p.annotation(RelativePath.class);
if (rp!=null)
name = rp.value()+'/'+name;
depends.add(name);
continue;
}
Method m = ReflectionUtils.getPublicMethodNamed(p.type(), "fromStapler");
if (m!=null)
buildFillDependencies(m,depends);
}
return depends;
}
/**
 * Computes the auto-completion setting: when a public doAutoCompleteXyz
 * method exists for the field, publishes its URL under "autoCompleteUrl";
 * otherwise leaves the attributes untouched.
 */
public void calcAutoCompleteSettings(String field, Map<String,Object> attributes) {
    String capitalized = StringUtils.capitalize(field);
    Method handler = ReflectionUtils.getPublicMethodNamed(getClass(), "doAutoComplete" + capitalized);
    if (handler != null) {
        attributes.put("autoCompleteUrl", String.format("%s/%s/autoComplete%s", getCurrentDescriptorByNameUrl(), getDescriptorUrl(), capitalized));
    }
}
/**
* Used by Jelly to abstract away the handling of global.jelly vs config.jelly databinding difference.
* When the bound instance is this descriptor itself (global config), looks up the
* property on the descriptor class; otherwise on {@link #clazz}.
*/
public @CheckForNull PropertyType getPropertyType(@Nonnull Object instance, @Nonnull String field) {
// in global.jelly, instance==descriptor
return instance==this ? getGlobalPropertyType(field) : getPropertyType(field);
}
/**
 * Akin to {@link #getPropertyType(Object,String)} but never returns null.
 * @throws AssertionError in case the field cannot be found
 * @since 1.492
 */
public @Nonnull PropertyType getPropertyTypeOrDie(@Nonnull Object instance, @Nonnull String field) {
    PropertyType pt = getPropertyType(instance, field);
    if (pt == null) {
        // Report the class the lookup was actually performed against.
        String owner = (instance == this) ? getClass().getName() : clazz.getName();
        throw new AssertionError(owner + " has no property " + field);
    }
    return pt;
}
/**
* Obtains the property type of the given field of {@link #clazz}.
* The property-type map is built lazily on first access.
* NOTE(review): lazy init is unsynchronized; concurrent first calls may build
* the map twice — presumably benign since the result is identical. Confirm.
*/
public PropertyType getPropertyType(String field) {
if(propertyTypes==null)
propertyTypes = buildPropertyTypes(clazz);
return propertyTypes.get(field);
}
/**
* Obtains the property type of the given field of this descriptor.
* Same lazy-init pattern as {@link #getPropertyType(String)}, but keyed off
* the descriptor's own class (used for global configuration binding).
*/
public PropertyType getGlobalPropertyType(String field) {
if(globalPropertyTypes==null)
globalPropertyTypes = buildPropertyTypes(getClass());
return globalPropertyTypes.get(field);
}
/**
* Given the class, list up its {@link PropertyType}s from its public fields/getters.
* Getter-derived entries overwrite same-named field-derived entries because the
* method pass runs second.
* NOTE(review): the startsWith("get") heuristic also matches getClass() (yielding
* a "class" entry) and methods named exactly "get" (empty key) — presumably
* harmless here since callers look up known field names; confirm before changing.
*/
private Map<String, PropertyType> buildPropertyTypes(Class<?> clazz) {
Map<String, PropertyType> r = new HashMap<String, PropertyType>();
for (Field f : clazz.getFields())
r.put(f.getName(),new PropertyType(f));
for (Method m : clazz.getMethods())
if(m.getName().startsWith("get"))
r.put(Introspector.decapitalize(m.getName().substring(3)),new PropertyType(m));
return r;
}
/**
* Gets the class name nicely escaped to be usable as a key in the structured form submission.
* Dots are replaced with dashes since '.' is significant in the form JSON.
*/
public final String getJsonSafeClassName() {
return getId().replace('.','-');
}
/**
* @deprecated
* Implement {@link #newInstance(StaplerRequest, JSONObject)} method instead.
* Deprecated as of 1.145.
* Kept so {@link #newInstance(StaplerRequest, JSONObject)} can detect and honor
* legacy overrides of this signature.
*/
public T newInstance(StaplerRequest req) throws FormException {
throw new UnsupportedOperationException(getClass()+" should implement newInstance(StaplerRequest,JSONObject)");
}
/**
* Creates a configured instance from the submitted form.
*
* <p>
* Hudson only invokes this method when the user wants an instance of <tt>T</tt>.
* So there's no need to check that in the implementation.
*
* <p>
* Starting 1.206, the default implementation of this method does the following:
* <pre>
* req.bindJSON(clazz,formData);
* </pre>
* <p>
* ... which performs the databinding on the constructor of {@link #clazz}.
*
* <p>
* For some types of {@link Describable}, such as {@link ListViewColumn}, this method
* can be invoked with null request object for historical reason. Such design is considered
* broken, but due to the compatibility reasons we cannot fix it. Because of this, the
* default implementation gracefully handles null request, but the contract of the method
* still is "request is always non-null." Extension points that need to define the "default instance"
* semantics should define a descriptor subtype and add the no-arg newInstance method.
*
* @param req
* Always non-null (see note above.) This object includes represents the entire submission.
* @param formData
* The JSON object that captures the configuration data for this {@link Descriptor}.
* See http://wiki.jenkins-ci.org/display/JENKINS/Structured+Form+Submission
* Always non-null.
*
* @throws FormException
* Signals a problem in the submitted form.
* @since 1.145
*/
public T newInstance(StaplerRequest req, JSONObject formData) throws FormException {
try {
// Detect whether a subclass still overrides the deprecated single-arg
// newInstance(StaplerRequest): if the declaring class is concrete, it did.
Method m = getClass().getMethod("newInstance", StaplerRequest.class);
if(!Modifier.isAbstract(m.getDeclaringClass().getModifiers())) {
// this class overrides newInstance(StaplerRequest).
// maintain the backward compatible behavior
return verifyNewInstance(newInstance(req));
} else {
if (req==null) {
// yes, req is supposed to be always non-null, but see the note above
return verifyNewInstance(clazz.newInstance());
}
// new behavior as of 1.206
return verifyNewInstance(req.bindJSON(clazz,formData));
}
} catch (NoSuchMethodException e) {
throw new AssertionError(e); // impossible
} catch (InstantiationException e) {
throw new Error("Failed to instantiate "+clazz+" from "+formData,e);
} catch (IllegalAccessException e) {
throw new Error("Failed to instantiate "+clazz+" from "+formData,e);
} catch (RuntimeException e) {
// wrap to add which class/formData was being bound when the failure occurred
throw new RuntimeException("Failed to instantiate "+clazz+" from "+formData,e);
}
}
/**
* Look out for a typical error a plugin developer makes.
* See http://hudson.361315.n4.nabble.com/Help-Hint-needed-Post-build-action-doesn-t-stay-activated-td2308833.html
*
* @return the instance unchanged; a mismatch only logs a warning.
*/
private T verifyNewInstance(T t) {
if (t!=null && t.getDescriptor()!=this) {
// TODO: should this be a fatal error?
LOGGER.warning("Father of "+ t+" and its getDescriptor() points to two different instances. Probably malplaced @Extension. See http://hudson.361315.n4.nabble.com/Help-Hint-needed-Post-build-action-doesn-t-stay-activated-td2308833.html");
}
return t;
}
/**
* Returns the {@link Klass} object used for the purpose of loading resources from this descriptor.
*
* This hook enables other JVM languages to provide more integrated lookup.
* The default wraps {@link #clazz} as a plain Java class.
*/
public Klass<?> getKlass() {
return Klass.java(clazz);
}
/**
* Returns the resource path to the help screen HTML, if any.
*
* <p>
* Starting 1.282, this method uses "convention over configuration" &#8212; you should
* just put the "help.html" (and its localized versions, if any) in the same directory
* you put your Jelly view files, and this method will automatically does the right thing.
*
* <p>
* This value is relative to the context root of Hudson, so normally
* the values are something like <tt>"/plugin/emma/help.html"</tt> to
* refer to static resource files in a plugin, or <tt>"/publisher/EmmaPublisher/abc"</tt>
* to refer to Jelly script <tt>abc.jelly</tt> or a method <tt>EmmaPublisher.doAbc()</tt>.
*
* @return
* null to indicate that there's no help.
*/
public String getHelpFile() {
// null field name = class-level help
return getHelpFile(null);
}
/**
* Returns the path to the help screen HTML for the given field.
*
* <p>
* The help files are assumed to be at "help/FIELDNAME.html" with possible
* locale variations.
*/
public String getHelpFile(final String fieldName) {
return getHelpFile(getKlass(),fieldName);
}
/**
* Locates the help page for the given field, searching the class and its
* ancestors for either a templated "help[-FIELD]" view or a static
* "help[-FIELD].html" resource. Explicit redirects registered via
* {@link #addHelpFileRedirect} take precedence.
*
* @param clazz the class (note: shadows the {@code clazz} field) whose ancestry is searched.
* @param fieldName null for the class-level help page.
* @return the context-relative URL of the help page, or null if none exists.
*/
public String getHelpFile(Klass<?> clazz, String fieldName) {
HelpRedirect r = helpRedirect.get(fieldName);
if (r!=null) return r.resolve();
for (Klass<?> c : clazz.getAncestors()) {
String page = "/descriptor/" + getId() + "/help";
String suffix;
if(fieldName==null) {
suffix="";
} else {
page += '/'+fieldName;
suffix='-'+fieldName;
}
try {
// templated help (help-FIELD.jelly etc.) wins over static HTML
if(Stapler.getCurrentRequest().getView(c,"help"+suffix)!=null)
return page;
} catch (IOException e) {
throw new Error(e);
}
if(getStaticHelpUrl(c, suffix) !=null) return page;
}
return null;
}
/**
* Tells Jenkins that the help file for the field 'fieldName' is defined in the help file for
* the 'fieldNameToRedirectTo' in the 'owner' class.
* @since 1.425
*/
protected void addHelpFileRedirect(String fieldName, Class<? extends Describable> owner, String fieldNameToRedirectTo) {
helpRedirect.put(fieldName, new HelpRedirect(owner,fieldNameToRedirectTo));
}
/**
* Checks if the given object is created from this {@link Descriptor}.
* Equivalent to {@code clazz.isInstance(instance)}; null returns false.
*/
public final boolean isInstance( T instance ) {
return clazz.isInstance(instance);
}
/**
* Checks if the type represented by this descriptor is a subtype of the given type.
*/
public final boolean isSubTypeOf(Class type) {
return type.isAssignableFrom(clazz);
}
/**
* @deprecated
* As of 1.239, use {@link #configure(StaplerRequest, JSONObject)}.
* The default implementation is a no-op that reports success.
*/
public boolean configure( StaplerRequest req ) throws FormException {
return true;
}
/**
* Invoked when the global configuration page is submitted.
*
* Can be overriden to store descriptor-specific information.
*
* @param json
* The JSON object that captures the configuration data for this {@link Descriptor}.
* See http://wiki.jenkins-ci.org/display/JENKINS/Structured+Form+Submission
* @return false
* to keep the client in the same config page.
*/
public boolean configure( StaplerRequest req, JSONObject json ) throws FormException {
// compatibility: funnel through the deprecated single-arg overload
return configure(req);
}
// Per-instance configuration view; falls back to "config.jelly" when no
// facet-specific variant (config.groovy etc.) is found in the class hierarchy.
public String getConfigPage() {
return getViewPage(clazz, getPossibleViewNames("config"), "config.jelly");
}
// Global (system) configuration view; null when none exists.
public String getGlobalConfigPage() {
return getViewPage(clazz, getPossibleViewNames("global"), null);
}
// Convenience overload for a single candidate page name.
private String getViewPage(Class<?> clazz, String pageName, String defaultValue) {
return getViewPage(clazz,Collections.singleton(pageName),defaultValue);
}
/**
 * Walks up the class hierarchy (stopping before Object) looking for the first
 * classpath resource matching one of the candidate page names, e.g.
 * "hudson/tasks/Shell/config.jelly".
 *
 * @param defaultValue returned when no resource is found; may be null.
 * @return a '/'-prefixed resource path, or {@code defaultValue}.
 */
private String getViewPage(Class<?> clazz, Collection<String> pageNames, String defaultValue) {
    while (clazz != Object.class && clazz != null) {
        // Hoisted out of the inner loop; also guards against the bootstrap
        // classloader, which getClassLoader() reports as null (was an NPE).
        ClassLoader cl = clazz.getClassLoader();
        if (cl != null) {
            for (String pageName : pageNames) {
                String name = clazz.getName().replace('.', '/').replace('$', '/') + "/" + pageName;
                if (cl.getResource(name) != null) {
                    return '/' + name;
                }
            }
        }
        clazz = clazz.getSuperclass();
    }
    return defaultValue;
}
/**
* Resolves a view page, returning the page name itself when not found
* (rather than null) so missing-page errors are self-describing.
*/
protected final String getViewPage(Class<?> clazz, String pageName) {
// We didn't find the configuration page.
// Either this is non-fatal, in which case it doesn't matter what string we return so long as
// it doesn't exist.
// Or this error is fatal, in which case we want the developer to see what page he's missing.
// so we put the page name.
return getViewPage(clazz,pageName,pageName);
}
/**
 * Expands a base view name into one candidate per Jelly-compatible scripting
 * facet installed in the web app (e.g. "config" -> "config.jelly", "config.groovy").
 */
protected List<String> getPossibleViewNames(String baseName) {
    List<String> result = new ArrayList<String>();
    for (Facet facet : WebApp.get(Jenkins.getInstance().servletContext).facets) {
        if (!(facet instanceof JellyCompatibleFacet)) {
            continue;
        }
        for (String extension : ((JellyCompatibleFacet) facet).getScriptExtensions()) {
            result.add(baseName + extension);
        }
    }
    return result;
}
/**
* Saves the configuration info to the disk.
* No-op while a {@link BulkChange} is in progress for this object; listeners
* are notified on success. I/O failures are logged, not propagated.
*/
public synchronized void save() {
if(BulkChange.contains(this)) return;
try {
getConfigFile().write(this);
SaveableListener.fireOnChange(this, getConfigFile());
} catch (IOException e) {
LOGGER.log(Level.WARNING, "Failed to save "+getConfigFile(),e);
}
}
/**
* Loads the data from the disk into this object.
*
* <p>
* The constructor of the derived class must call this method.
* (If we do that in the base class, the derived class won't
* get a chance to set default values.)
* A missing file is not an error; unmarshalling failures are logged, not thrown.
*/
public synchronized void load() {
XmlFile file = getConfigFile();
if(!file.exists())
return;
try {
file.unmarshal(this);
} catch (IOException e) {
LOGGER.log(Level.WARNING, "Failed to load "+file, e);
}
}
/**
* The XML file this descriptor persists to: JENKINS_HOME/ID.xml.
*/
protected XmlFile getConfigFile() {
return new XmlFile(new File(Jenkins.getInstance().getRootDir(),getId()+".xml"));
}
/**
* Returns the plugin in which this descriptor is defined.
*
* @return
* null to indicate that this descriptor came from the core.
*/
protected PluginWrapper getPlugin() {
return Jenkins.getInstance().getPluginManager().whichPlugin(clazz);
}
/**
* Serves <tt>help.html</tt> from the resource of {@link #clazz}.
* Searches the class hierarchy for a templated help view first, then a static,
* locale-aware HTML resource; 404s when neither exists. For plugin-provided
* descriptors, advertises the plugin via X-Plugin-* response headers.
*/
public void doHelp(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
String path = req.getRestOfPath();
// reject directory-traversal attempts in the user-controlled rest-of-path
if(path.contains("..")) throw new ServletException("Illegal path: "+path);
// "/field" becomes the "-field" suffix used in help resource names
path = path.replace('/','-');
PluginWrapper pw = getPlugin();
if (pw!=null) {
rsp.setHeader("X-Plugin-Short-Name",pw.getShortName());
rsp.setHeader("X-Plugin-Long-Name",pw.getLongName());
rsp.setHeader("X-Plugin-From", Messages.Descriptor_From(
pw.getLongName().replace("Hudson","Jenkins").replace("hudson","jenkins"), pw.getUrl()));
}
for (Klass<?> c= getKlass(); c!=null; c=c.getSuperClass()) {
RequestDispatcher rd = Stapler.getCurrentRequest().getView(c, "help"+path);
if(rd!=null) {// template based help page
rd.forward(req,rsp);
return;
}
URL url = getStaticHelpUrl(c, path);
if(url!=null) {
// TODO: generalize macro expansion and perhaps even support JEXL
rsp.setContentType("text/html;charset=UTF-8");
InputStream in = url.openStream();
try {
String literal = IOUtils.toString(in,"UTF-8");
// expand ${rootURL} so static help pages can link into Jenkins
rsp.getWriter().println(Util.replaceMacro(literal, Collections.singletonMap("rootURL",req.getContextPath())));
} finally {
IOUtils.closeQuietly(in);
}
return;
}
}
rsp.sendError(SC_NOT_FOUND);
}
/**
 * Locates a static help resource for the current request's locale, trying
 * progressively less specific names: language_country_variant, language_country,
 * language, and finally the unlocalized default.
 *
 * @return the first matching resource URL, or null when none exists.
 */
private URL getStaticHelpUrl(Klass<?> c, String suffix) {
    Locale locale = Stapler.getCurrentRequest().getLocale();
    String base = "help" + suffix;
    String[] candidates = {
        base + '_' + locale.getLanguage() + '_' + locale.getCountry() + '_' + locale.getVariant() + ".html",
        base + '_' + locale.getLanguage() + '_' + locale.getCountry() + ".html",
        base + '_' + locale.getLanguage() + ".html",
        base + ".html",
    };
    for (String candidate : candidates) {
        URL url = c.getResource(candidate);
        if (url != null) {
            return url;
        }
    }
    return null;
}
//
// static methods
//
// to work around warning when creating a generic array type
public static <T> T[] toArray( T... values ) {
return values;
}
// varargs-to-mutable-list convenience; returns a fresh ArrayList copy
public static <T> List<T> toList( T... values ) {
return new ArrayList<T>(Arrays.asList(values));
}
// Indexes describables by their descriptor, preserving iteration order.
// Note: two describables sharing a descriptor collapse to the last one.
public static <T extends Describable<T>>
Map<Descriptor<T>,T> toMap(Iterable<T> describables) {
Map<Descriptor<T>,T> m = new LinkedHashMap<Descriptor<T>,T>();
for (T d : describables) {
m.put(d.getDescriptor(),d);
}
return m;
}
/**
* Used to build {@link Describable} instance list from &lt;f:hetero-list&gt; tag.
*
* @param req
* Request that represents the form submission.
* @param formData
* Structured form data that represents the contains data for the list of describables.
* @param key
* The JSON property name for 'formData' that represents the data for the list of describables.
* @param descriptors
* List of descriptors to create instances from.
* @return
* Can be empty but never null.
*/
public static <T extends Describable<T>>
List<T> newInstancesFromHeteroList(StaplerRequest req, JSONObject formData, String key,
Collection<? extends Descriptor<T>> descriptors) throws FormException {
return newInstancesFromHeteroList(req,formData.get(key),descriptors);
}
/**
* Overload taking the raw JSON fragment (single object or array). Entries whose
* "$class"/"kind" matches no known descriptor are silently skipped.
*/
public static <T extends Describable<T>>
List<T> newInstancesFromHeteroList(StaplerRequest req, Object formData,
Collection<? extends Descriptor<T>> descriptors) throws FormException {
List<T> items = new ArrayList<T>();
if (formData!=null) {
// fromObject normalizes a single JSONObject into a one-element array
for (Object o : JSONArray.fromObject(formData)) {
JSONObject jo = (JSONObject)o;
String kind = jo.optString("$class", null);
if (kind == null) {
// Legacy: Remove once plugins have been staged onto $class
kind = jo.getString("kind");
}
Descriptor<T> d = find(descriptors, kind);
if (d != null) {
items.add(d.newInstance(req, jo));
}
}
}
return items;
}
/**
 * Finds a descriptor from a collection by its class name, falling back to
 * {@link Descriptor#getId()} matching for the ongoing id migration.
 * A class-name match anywhere in the list always wins over an id match.
 */
public static @CheckForNull <T extends Descriptor> T find(Collection<? extends T> list, String className) {
    T idMatch = null;
    for (T d : list) {
        if (d.getClass().getName().equals(className)) {
            return d;
        }
        // Since we introduced Descriptor.getId(), it is a preferred method of identifying descriptor by a string.
        // To make that migration easier without breaking compatibility, remember the first id match as a fallback.
        if (idMatch == null && d.getId().equals(className)) {
            idMatch = d;
        }
    }
    return idMatch;
}
// Global lookup across every registered Descriptor extension.
public static @CheckForNull Descriptor find(String className) {
return find(ExtensionList.lookup(Descriptor.class),className);
}
/**
* Signals a problem in a submitted form, optionally tied to a specific field.
* Doubles as an {@link HttpResponse} so it can be thrown straight out of a
* form-handling web method.
*/
public static final class FormException extends Exception implements HttpResponse {
private final String formField;
public FormException(String message, String formField) {
super(message);
this.formField = formField;
}
public FormException(String message, Throwable cause, String formField) {
super(message, cause);
this.formField = formField;
}
public FormException(Throwable cause, String formField) {
super(cause);
this.formField = formField;
}
/**
* Which form field contained an error?
*/
public String getFormField() {
return formField;
}
public void generateResponse(StaplerRequest req, StaplerResponse rsp, Object node) throws IOException, ServletException {
if (FormApply.isApply(req)) {
// "Apply" submissions stay on the page: surface the error in the notification bar
FormApply.applyResponse("notificationBar.show(" + quote(getMessage())+ ",notificationBar.ERROR)")
.generateResponse(req, rsp, node);
} else {
// for now, we can't really use the field name that caused the problem.
new Failure(getMessage()).generateResponse(req,rsp,node);
}
}
}
private static final Logger LOGGER = Logger.getLogger(Descriptor.class.getName());
/**
* Special type indicating that {@link Descriptor} describes itself.
* @see Descriptor#Descriptor(Class)
*/
public static final class Self {}
// Sentinel accessor used by subclasses: Descriptor(self()) means "describe my own class".
protected static Class self() { return Self.class; }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.vcs.log.data.index;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.util.Disposer;
import com.intellij.util.Consumer;
import com.intellij.util.indexing.*;
import com.intellij.util.indexing.impl.*;
import com.intellij.util.io.DataExternalizer;
import com.intellij.util.io.EnumeratorIntegerDescriptor;
import com.intellij.util.io.KeyDescriptor;
import com.intellij.vcs.log.VcsFullCommitDetails;
import com.intellij.vcs.log.impl.FatalErrorHandler;
import com.intellij.vcs.log.util.StorageId;
import gnu.trove.TIntHashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.function.ObjIntConsumer;
/**
* A persistent map-reduce index over VCS commit details: maps integer keys
* produced by the supplied {@link DataIndexer} to commit ids, with an optional
* forward index for commit-to-keys lookups. Disposal is tied to the parent
* {@link Disposable}; all public accessors fail with
* {@link ProcessCanceledException} once disposed.
*
* @param <T> value type stored in the index
* @param <D> the commit-details type being indexed
*/
public class VcsLogFullDetailsIndex<T, D extends VcsFullCommitDetails> implements Disposable {
protected static final String INDEX = "index";
@NotNull protected final MyMapReduceIndex myMapReduceIndex;
@NotNull protected final StorageId myStorageId;
@NotNull protected final String myName;
@NotNull protected final DataIndexer<Integer, T, D> myIndexer;
@NotNull private final FatalErrorHandler myFatalErrorHandler;
// volatile: set on dispose and read by checkDisposed() from any thread
private volatile boolean myDisposed = false;
public VcsLogFullDetailsIndex(@NotNull StorageId storageId,
@NotNull String name,
@NotNull DataIndexer<Integer, T, D> indexer,
@NotNull DataExternalizer<T> externalizer,
@NotNull FatalErrorHandler fatalErrorHandler,
@NotNull Disposable disposableParent)
throws IOException {
myName = name;
myStorageId = storageId;
myIndexer = indexer;
myFatalErrorHandler = fatalErrorHandler;
myMapReduceIndex = createMapReduceIndex(externalizer);
// dispose this index when the parent is disposed
Disposer.register(disposableParent, this);
}
@NotNull
private MyMapReduceIndex createMapReduceIndex(@NotNull DataExternalizer<T> dataExternalizer) throws IOException {
MyIndexExtension<T, D> extension = new MyIndexExtension<>(myName, myIndexer, dataExternalizer, myStorageId.getVersion());
ForwardIndex<Integer, T> forwardIndex = createForwardIndex(extension);
return new MyMapReduceIndex(extension, new MyMapIndexStorage<>(myName, myStorageId, dataExternalizer), forwardIndex);
}
/**
* Hook for subclasses that need commit-to-keys lookups; the default index
* has no forward index (null).
*/
@Nullable
protected ForwardIndex<Integer, T> createForwardIndex(@NotNull IndexExtension<Integer, T, D> extension) throws IOException {
return null;
}
/** Returns the union of commit ids matching any of the given keys. */
@NotNull
public TIntHashSet getCommitsWithAnyKey(@NotNull Set<Integer> keys) throws StorageException {
checkDisposed();
TIntHashSet result = new TIntHashSet();
for (Integer key : keys) {
iterateCommitIds(key, result::add);
}
return result;
}
/** Returns the intersection of commit ids matching all of the given keys. */
@NotNull
public TIntHashSet getCommitsWithAllKeys(@NotNull Collection<Integer> keys) throws StorageException {
checkDisposed();
// the value filter only serves as a cancellation checkpoint here
return InvertedIndexUtil.collectInputIdsContainingAllKeys(myMapReduceIndex, keys, (k) -> {
ProgressManager.checkCanceled();
return true;
}, null, null);
}
private void iterateCommitIds(int key, @NotNull Consumer<Integer> consumer) throws StorageException {
ValueContainer<T> data = myMapReduceIndex.getData(key);
data.forEach((id, value) -> {
consumer.consume(id);
return true;
});
}
/** Feeds every (value, commitId) pair stored under the key to the consumer. */
protected void iterateCommitIdsAndValues(int key, @NotNull ObjIntConsumer<? super T> consumer) throws StorageException {
myMapReduceIndex.getData(key).forEach((id, value) -> {
consumer.accept(value, id);
return true;
});
}
/**
* Reverse lookup via the forward index; null when no forward index was
* created (see {@link #createForwardIndex}).
*/
@Nullable
protected <MapIndexType> MapIndexType getKeysForCommit(int commit) throws IOException {
MapBasedForwardIndex<Integer, T, MapIndexType> index = myMapReduceIndex.getForwardIndex();
if (index == null) return null;
return index.getInput(commit);
}
/** Indexes (or re-indexes) a single commit's details. */
public void update(int commitId, @NotNull D details) {
checkDisposed();
myMapReduceIndex.update(commitId, details).compute();
}
public void flush() throws StorageException {
checkDisposed();
myMapReduceIndex.flush();
}
@Override
public void dispose() {
myDisposed = true;
myMapReduceIndex.dispose();
}
// Treat use-after-dispose as cancellation rather than a hard error.
private void checkDisposed() {
if (myDisposed) throw new ProcessCanceledException();
}
private class MyMapReduceIndex extends MapReduceIndex<Integer, T, D> {
MyMapReduceIndex(@NotNull MyIndexExtension<T, D> extension,
@NotNull MyMapIndexStorage<T> mapIndexStorage,
@Nullable ForwardIndex<Integer, T> forwardIndex) {
super(extension, mapIndexStorage, forwardIndex);
}
// Exposes the forward index only when it supports map-based input lookup.
@Nullable
public <MapIndexType> MapBasedForwardIndex<Integer, T, MapIndexType> getForwardIndex() {
if (myForwardIndex instanceof MapBasedForwardIndex) {
return ((MapBasedForwardIndex<Integer, T, MapIndexType>)myForwardIndex);
}
return null;
}
@Override
public void checkCanceled() {
ProgressManager.checkCanceled();
}
@Override
public void requestRebuild(@NotNull Throwable ex) {
// rebuilds are not supported here; route the failure to the error handler
myFatalErrorHandler.consume(this, ex);
}
}
private static class MyMapIndexStorage<T> extends MapIndexStorage<Integer, T> {
MyMapIndexStorage(@NotNull String name, @NotNull StorageId storageId, @NotNull DataExternalizer<T> externalizer)
throws IOException {
super(storageId.getStorageFile(name, true), EnumeratorIntegerDescriptor.INSTANCE, externalizer, 5000, false);
}
@Override
protected void checkCanceled() {
ProgressManager.checkCanceled();
}
}
/** Describes the index (id, indexer, externalizers, version) to the indexing framework. */
private static class MyIndexExtension<T, D> extends IndexExtension<Integer, T, D> {
@NotNull private final IndexId<Integer, T> myID;
@NotNull private final DataIndexer<Integer, T, D> myIndexer;
@NotNull private final DataExternalizer<T> myExternalizer;
private final int myVersion;
MyIndexExtension(@NotNull String name, @NotNull DataIndexer<Integer, T, D> indexer,
@NotNull DataExternalizer<T> externalizer,
int version) {
myID = IndexId.create(name);
myIndexer = indexer;
myExternalizer = externalizer;
myVersion = version;
}
@NotNull
@Override
public IndexId<Integer, T> getName() {
return myID;
}
@NotNull
@Override
public DataIndexer<Integer, T, D> getIndexer() {
return myIndexer;
}
@NotNull
@Override
public KeyDescriptor<Integer> getKeyDescriptor() {
return EnumeratorIntegerDescriptor.INSTANCE;
}
@NotNull
@Override
public DataExternalizer<T> getValueExternalizer() {
return myExternalizer;
}
@Override
public int getVersion() {
return myVersion;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.processing.sortandgroupby.sortdata;
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.AbstractQueue;
import java.util.PriorityQueue;
import java.util.concurrent.Callable;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.CarbonUtilException;
import org.apache.carbondata.processing.sortandgroupby.exception.CarbonSortKeyAndGroupByException;
import org.apache.carbondata.processing.util.RemoveDictionaryUtil;
public class IntermediateFileMerger implements Callable<Void> {
/**
* Logger for this merger.
*/
private static final LogService LOGGER =
LogServiceFactory.getLogService(IntermediateFileMerger.class.getName());
/**
* Min-heap of per-file chunk holders used for the k-way merge.
*/
private AbstractQueue<SortTempFileChunkHolder> recordHolderHeap;
/**
* Number of input files still contributing rows to the merge.
*/
private int fileCounter;
/**
* Output stream of the merged temp file.
*/
private DataOutputStream stream;
/**
* Total number of records across all input files.
*/
private int totalNumberOfRecords;
/**
* In-memory buffer of merged rows (used when compression/prefetch is enabled).
*/
private Object[][] records;
/**
* Number of rows currently buffered in {@link #records}.
*/
private int entryCount;
/**
* Writer for the compressed/prefetch sort temp file; null otherwise.
*/
private TempSortFileWriter writer;
/**
* Capacity of the {@link #records} buffer.
*/
private int totalSize;
private SortParameters mergerParameters;
private File[] intermediateFiles;
private File outPutFile;
/**
* IntermediateFileMerger Constructor
*/
public IntermediateFileMerger(SortParameters mergerParameters, File[] intermediateFiles,
File outPutFile) {
this.mergerParameters = mergerParameters;
this.fileCounter = intermediateFiles.length;
this.intermediateFiles = intermediateFiles;
this.outPutFile = outPutFile;
}
@Override public Void call() throws Exception {
long intermediateMergeStartTime = System.currentTimeMillis();
int fileConterConst = fileCounter;
boolean isFailed = false;
try {
startSorting();
initialize();
while (hasNext()) {
writeDataTofile(next());
}
if (mergerParameters.isSortFileCompressionEnabled() || mergerParameters.isPrefetch()) {
if (entryCount > 0) {
if (entryCount < totalSize) {
Object[][] temp = new Object[entryCount][];
System.arraycopy(records, 0, temp, 0, entryCount);
records = temp;
this.writer.writeSortTempFile(temp);
} else {
this.writer.writeSortTempFile(records);
}
}
}
double intermediateMergeCostTime = (System.currentTimeMillis() -
intermediateMergeStartTime)/1000.0;
LOGGER.info("============================== Intermediate Merge of " + fileConterConst +
" Sort Temp Files Cost Time: " + intermediateMergeCostTime + "(s)");
} catch (Exception e) {
LOGGER.error(e, "Problem while intermediate merging");
isFailed = true;
} finally {
records = null;
CarbonUtil.closeStreams(this.stream);
if (null != writer) {
writer.finish();
}
if (!isFailed) {
try {
finish();
} catch (CarbonSortKeyAndGroupByException e) {
LOGGER.error(e, "Problem while deleting the merge file");
}
} else {
if (outPutFile.delete()) {
LOGGER.error("Problem while deleting the merge file");
}
}
}
return null;
}
/**
* This method is responsible for initializing the out stream
*
* @throws CarbonSortKeyAndGroupByException
*/
private void initialize() throws CarbonSortKeyAndGroupByException {
if (!mergerParameters.isSortFileCompressionEnabled() && !mergerParameters.isPrefetch()) {
try {
this.stream = new DataOutputStream(
new BufferedOutputStream(new FileOutputStream(outPutFile),
mergerParameters.getFileWriteBufferSize()));
this.stream.writeInt(this.totalNumberOfRecords);
} catch (FileNotFoundException e) {
throw new CarbonSortKeyAndGroupByException("Problem while getting the file", e);
} catch (IOException e) {
throw new CarbonSortKeyAndGroupByException("Problem while writing the data to file", e);
}
} else {
writer = TempSortFileWriterFactory.getInstance()
.getTempSortFileWriter(mergerParameters.isSortFileCompressionEnabled(),
mergerParameters.getDimColCount(), mergerParameters.getComplexDimColCount(),
mergerParameters.getMeasureColCount(), mergerParameters.getNoDictionaryCount(),
mergerParameters.getFileWriteBufferSize());
writer.initiaize(outPutFile, totalNumberOfRecords);
if (mergerParameters.isPrefetch()) {
totalSize = mergerParameters.getBufferSize();
} else {
totalSize = mergerParameters.getSortTempFileNoOFRecordsInCompression();
}
}
}
/**
* This method will be used to get the sorted record from file
*
* @return sorted record sorted record
* @throws CarbonSortKeyAndGroupByException
*/
private Object[] getSortedRecordFromFile() throws CarbonSortKeyAndGroupByException {
Object[] row = null;
// poll the top object from heap
// heap maintains binary tree which is based on heap condition that will
// be based on comparator we are passing the heap
// when will call poll it will always delete root of the tree and then
// it does trickel down operation complexity is log(n)
SortTempFileChunkHolder poll = this.recordHolderHeap.poll();
// get the row from chunk
row = poll.getRow();
// check if there no entry present
if (!poll.hasNext()) {
// if chunk is empty then close the stream
poll.closeStream();
// change the file counter
--this.fileCounter;
// reaturn row
return row;
}
// read new row
poll.readRow();
// add to heap
this.recordHolderHeap.add(poll);
// return row
return row;
}
/**
* Below method will be used to start storing process This method will get
* all the temp files present in sort temp folder then it will create the
* record holder heap and then it will read first record from each file and
* initialize the heap
*
* @throws CarbonSortKeyAndGroupByException
*/
private void startSorting() throws CarbonSortKeyAndGroupByException {
LOGGER.info("Number of temp file: " + this.fileCounter);
// create record holder heap
createRecordHolderQueue(intermediateFiles);
// iterate over file list and create chunk holder and add to heap
LOGGER.info("Started adding first record from each file");
SortTempFileChunkHolder sortTempFileChunkHolder = null;
for (File tempFile : intermediateFiles) {
// create chunk holder
sortTempFileChunkHolder =
new SortTempFileChunkHolder(tempFile, mergerParameters.getDimColCount(),
mergerParameters.getComplexDimColCount(), mergerParameters.getMeasureColCount(),
mergerParameters.getFileBufferSize(), mergerParameters.getNoDictionaryCount(),
mergerParameters.getAggType(), mergerParameters.getNoDictionaryDimnesionColumn());
// initialize
sortTempFileChunkHolder.initialize();
sortTempFileChunkHolder.readRow();
this.totalNumberOfRecords += sortTempFileChunkHolder.getEntryCount();
// add to heap
this.recordHolderHeap.add(sortTempFileChunkHolder);
}
LOGGER.info("Heap Size" + this.recordHolderHeap.size());
}
/**
* This method will be used to create the heap which will be used to hold
* the chunk of data
*
* @param listFiles list of temp files
*/
private void createRecordHolderQueue(File[] listFiles) {
// creating record holder heap
this.recordHolderHeap = new PriorityQueue<SortTempFileChunkHolder>(listFiles.length);
}
/**
* This method will be used to get the sorted row
*
* @return sorted row
* @throws CarbonSortKeyAndGroupByException
*/
private Object[] next() throws CarbonSortKeyAndGroupByException {
return getSortedRecordFromFile();
}
/**
* This method will be used to check whether any more element is present or
* not
*
* @return more element is present
*/
private boolean hasNext() {
return this.fileCounter > 0;
}
/**
* Below method will be used to write data to file
*
* @throws CarbonSortKeyAndGroupByException problem while writing
*/
private void writeDataTofile(Object[] row) throws CarbonSortKeyAndGroupByException {
if (mergerParameters.isSortFileCompressionEnabled() || mergerParameters.isPrefetch()) {
if (entryCount == 0) {
records = new Object[totalSize][];
records[entryCount++] = row;
return;
}
records[entryCount++] = row;
if (entryCount == totalSize) {
this.writer.writeSortTempFile(records);
entryCount = 0;
records = new Object[totalSize][];
}
return;
}
try {
int fieldIndex = 0;
char[] aggType = mergerParameters.getAggType();
for (int counter = 0; counter < mergerParameters.getDimColCount(); counter++) {
stream.writeInt((Integer) RemoveDictionaryUtil.getDimension(fieldIndex++, row));
}
// added for high card also
if ((mergerParameters.getNoDictionaryCount() + mergerParameters
.getComplexDimColCount()) > 0) {
stream.write(RemoveDictionaryUtil.getByteArrayForNoDictionaryCols(row));
}
fieldIndex = 0;
for (int counter = 0; counter < mergerParameters.getMeasureColCount(); counter++) {
if (null != RemoveDictionaryUtil.getMeasure(fieldIndex, row)) {
stream.write((byte) 1);
if (aggType[counter] == CarbonCommonConstants.BYTE_VALUE_MEASURE) {
Double val = (Double) RemoveDictionaryUtil.getMeasure(fieldIndex, row);
stream.writeDouble(val);
} else if (aggType[counter] == CarbonCommonConstants.SUM_COUNT_VALUE_MEASURE) {
Double val = (Double) RemoveDictionaryUtil.getMeasure(fieldIndex, row);
stream.writeDouble(val);
} else if (aggType[counter] == CarbonCommonConstants.BIG_INT_MEASURE) {
Long val = (Long) RemoveDictionaryUtil.getMeasure(fieldIndex, row);
stream.writeLong(val);
} else if (aggType[counter] == CarbonCommonConstants.BIG_DECIMAL_MEASURE) {
byte[] bigDecimalInBytes = (byte[]) RemoveDictionaryUtil.getMeasure(fieldIndex, row);
stream.writeInt(bigDecimalInBytes.length);
stream.write(bigDecimalInBytes);
}
} else {
stream.write((byte) 0);
}
fieldIndex++;
}
} catch (IOException e) {
throw new CarbonSortKeyAndGroupByException("Problem while writing the file", e);
}
}
private void finish() throws CarbonSortKeyAndGroupByException {
if (recordHolderHeap != null) {
int size = recordHolderHeap.size();
for (int i = 0; i < size; i++) {
recordHolderHeap.poll().closeStream();
}
}
try {
CarbonUtil.deleteFiles(intermediateFiles);
} catch (CarbonUtilException e) {
throw new CarbonSortKeyAndGroupByException("Problem while deleting the intermediate files");
}
}
}
| |
/*
* Copyright (c) 2002, 2004, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.nio.cs;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import sun.nio.cs.StandardCharsets;
import sun.nio.cs.SingleByteDecoder;
import sun.nio.cs.SingleByteEncoder;
import sun.nio.cs.HistoricallyNamedCharset;
/**
 * Charset provider for ISO-8859-4 (Latin-4, Northern European). Decoding and
 * encoding are table-driven via the SingleByteDecoder/SingleByteEncoder base
 * classes; the literal tables below are exact data and must not be edited.
 */
public class ISO_8859_4
    extends Charset
    implements HistoricallyNamedCharset
{
    public ISO_8859_4() {
        super("ISO-8859-4", StandardCharsets.aliases_ISO_8859_4);
    }
    // Legacy sun.io converter name for this charset.
    public String historicalName() {
        return "ISO8859_4";
    }
    // ISO-8859-4 is a superset of US-ASCII; nothing else is guaranteed.
    public boolean contains(Charset cs) {
        return ((cs.name().equals("US-ASCII"))
                || (cs instanceof ISO_8859_4));
    }
    public CharsetDecoder newDecoder() {
        return new Decoder(this);
    }
    public CharsetEncoder newEncoder() {
        return new Encoder(this);
    }
    /**
     * These accessors are temporarily supplied while sun.io
     * converters co-exist with the sun.nio.cs.{ext} charset coders
     * These facilitate sharing of conversion tables between the
     * two co-existing implementations. When sun.io converters
     * are made extinct these will be unnecessary and should be removed
     */
    public String getDecoderSingleByteMappings() {
        return Decoder.byteToCharTable;
    }
    public short[] getEncoderIndex1() {
        return Encoder.index1;
    }
    public String getEncoderIndex2() {
        return Encoder.index2;
    }
    // byte -> char: table is indexed by (byte + 128), so rows 0x80-0xFF come
    // first, then 0x00-0x7F. Entries 0x00-0x7F map identically to ASCII.
    private static class Decoder extends SingleByteDecoder {
        public Decoder(Charset cs) {
            super(cs, byteToCharTable);
        }
        private final static String byteToCharTable =
            "\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087" +        // 0x80 - 0x87
            "\u0088\u0089\u008A\u008B\u008C\u008D\u008E\u008F" +        // 0x88 - 0x8F
            "\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097" +        // 0x90 - 0x97
            "\u0098\u0099\u009A\u009B\u009C\u009D\u009E\u009F" +        // 0x98 - 0x9F
            "\u00A0\u0104\u0138\u0156\u00A4\u0128\u013B\u00A7" +        // 0xA0 - 0xA7
            "\u00A8\u0160\u0112\u0122\u0166\u00AD\u017D\u00AF" +        // 0xA8 - 0xAF
            "\u00B0\u0105\u02DB\u0157\u00B4\u0129\u013C\u02C7" +        // 0xB0 - 0xB7
            "\u00B8\u0161\u0113\u0123\u0167\u014A\u017E\u014B" +        // 0xB8 - 0xBF
            "\u0100\u00C1\u00C2\u00C3\u00C4\u00C5\u00C6\u012E" +        // 0xC0 - 0xC7
            "\u010C\u00C9\u0118\u00CB\u0116\u00CD\u00CE\u012A" +        // 0xC8 - 0xCF
            "\u0110\u0145\u014C\u0136\u00D4\u00D5\u00D6\u00D7" +        // 0xD0 - 0xD7
            "\u00D8\u0172\u00DA\u00DB\u00DC\u0168\u016A\u00DF" +        // 0xD8 - 0xDF
            "\u0101\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6\u012F" +        // 0xE0 - 0xE7
            "\u010D\u00E9\u0119\u00EB\u0117\u00ED\u00EE\u012B" +        // 0xE8 - 0xEF
            "\u0111\u0146\u014D\u0137\u00F4\u00F5\u00F6\u00F7" +        // 0xF0 - 0xF7
            "\u00F8\u0173\u00FA\u00FB\u00FC\u0169\u016B\u02D9" +        // 0xF8 - 0xFF
            "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007" +        // 0x00 - 0x07
            "\b\t\n\u000B\f\r\u000E\u000F" +    // 0x08 - 0x0F
            "\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017" +        // 0x10 - 0x17
            "\u0018\u0019\u001A\u001B\u001C\u001D\u001E\u001F" +        // 0x18 - 0x1F
            "\u0020\u0021\"\u0023\u0024\u0025\u0026\'" +        // 0x20 - 0x27
            "\u0028\u0029\u002A\u002B\u002C\u002D\u002E\u002F" +        // 0x28 - 0x2F
            "\u0030\u0031\u0032\u0033\u0034\u0035\u0036\u0037" +        // 0x30 - 0x37
            "\u0038\u0039\u003A\u003B\u003C\u003D\u003E\u003F" +        // 0x38 - 0x3F
            "\u0040\u0041\u0042\u0043\u0044\u0045\u0046\u0047" +        // 0x40 - 0x47
            "\u0048\u0049\u004A\u004B\u004C\u004D\u004E\u004F" +        // 0x48 - 0x4F
            "\u0050\u0051\u0052\u0053\u0054\u0055\u0056\u0057" +        // 0x50 - 0x57
            "\u0058\u0059\u005A\u005B\\\u005D\u005E\u005F" +    // 0x58 - 0x5F
            "\u0060\u0061\u0062\u0063\u0064\u0065\u0066\u0067" +        // 0x60 - 0x67
            "\u0068\u0069\u006A\u006B\u006C\u006D\u006E\u006F" +        // 0x68 - 0x6F
            "\u0070\u0071\u0072\u0073\u0074\u0075\u0076\u0077" +        // 0x70 - 0x77
            "\u0078\u0079\u007A\u007B\u007C\u007D\u007E\u007F";         // 0x78 - 0x7F
    }
    // char -> byte: two-level lookup. index1 maps the high byte of a char to
    // an offset into index2; the low byte then selects the encoded byte
    // (\u0000 entries are unmappable, except the genuine NUL at offset 0).
    private static class Encoder extends SingleByteEncoder {
        public Encoder(Charset cs) {
            super(cs, index1, index2, 0xFF00, 0x00FF, 8);
        }
        private final static String index2 =
            "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007" +
            "\b\t\n\u000B\f\r\u000E\u000F" +
            "\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017" +
            "\u0018\u0019\u001A\u001B\u001C\u001D\u001E\u001F" +
            "\u0020\u0021\"\u0023\u0024\u0025\u0026\'" +
            "\u0028\u0029\u002A\u002B\u002C\u002D\u002E\u002F" +
            "\u0030\u0031\u0032\u0033\u0034\u0035\u0036\u0037" +
            "\u0038\u0039\u003A\u003B\u003C\u003D\u003E\u003F" +
            "\u0040\u0041\u0042\u0043\u0044\u0045\u0046\u0047" +
            "\u0048\u0049\u004A\u004B\u004C\u004D\u004E\u004F" +
            "\u0050\u0051\u0052\u0053\u0054\u0055\u0056\u0057" +
            "\u0058\u0059\u005A\u005B\\\u005D\u005E\u005F" +
            "\u0060\u0061\u0062\u0063\u0064\u0065\u0066\u0067" +
            "\u0068\u0069\u006A\u006B\u006C\u006D\u006E\u006F" +
            "\u0070\u0071\u0072\u0073\u0074\u0075\u0076\u0077" +
            "\u0078\u0079\u007A\u007B\u007C\u007D\u007E\u007F" +
            "\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087" +
            "\u0088\u0089\u008A\u008B\u008C\u008D\u008E\u008F" +
            "\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097" +
            "\u0098\u0099\u009A\u009B\u009C\u009D\u009E\u009F" +
            "\u00A0\u0000\u0000\u0000\u00A4\u0000\u0000\u00A7" +
            "\u00A8\u0000\u0000\u0000\u0000\u00AD\u0000\u00AF" +
            "\u00B0\u0000\u0000\u0000\u00B4\u0000\u0000\u0000" +
            "\u00B8\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u00C1\u00C2\u00C3\u00C4\u00C5\u00C6\u0000" +
            "\u0000\u00C9\u0000\u00CB\u0000\u00CD\u00CE\u0000" +
            "\u0000\u0000\u0000\u0000\u00D4\u00D5\u00D6\u00D7" +
            "\u00D8\u0000\u00DA\u00DB\u00DC\u0000\u0000\u00DF" +
            "\u0000\u00E1\u00E2\u00E3\u00E4\u00E5\u00E6\u0000" +
            "\u0000\u00E9\u0000\u00EB\u0000\u00ED\u00EE\u0000" +
            "\u0000\u0000\u0000\u0000\u00F4\u00F5\u00F6\u00F7" +
            "\u00F8\u0000\u00FA\u00FB\u00FC\u0000\u0000\u0000" +
            "\u00C0\u00E0\u0000\u0000\u00A1\u00B1\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u00C8\u00E8\u0000\u0000" +
            "\u00D0\u00F0\u00AA\u00BA\u0000\u0000\u00CC\u00EC" +
            "\u00CA\u00EA\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u00AB\u00BB\u0000\u0000\u0000\u0000" +
            "\u00A5\u00B5\u00CF\u00EF\u0000\u0000\u00C7\u00E7" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u00D3\u00F3" +
            "\u00A2\u0000\u0000\u00A6\u00B6\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u00D1\u00F1\u0000" +
            "\u0000\u0000\u00BD\u00BF\u00D2\u00F2\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u00A3\u00B3" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u00A9\u00B9\u0000\u0000\u0000\u0000\u00AC\u00BC" +
            "\u00DD\u00FD\u00DE\u00FE\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u00D9\u00F9\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u00AE\u00BE\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u00B7" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u00FF\u0000\u00B2\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000" +
            "\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000";
        private final static short index1[] = {
            0, 256, 440, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
            383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383, 383,
        };
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.graphics.shading;
import java.awt.Paint;
import java.io.IOException;
import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.pdmodel.common.COSObjectable;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.common.function.PDFunction;
import org.apache.pdfbox.pdmodel.graphics.color.PDColorSpace;
import org.apache.pdfbox.util.Matrix;
/**
* A Shading Resource.
*/
/**
 * Base class for all shading dictionary resources. A shading defines a colour
 * gradient; concrete subclasses implement the seven shading types of the PDF
 * specification. Accessors lazily read from the wrapped COS dictionary and
 * cache the resulting PD model objects.
 */
public abstract class PDShading implements COSObjectable {
    /**
     * shading type 1 = function based shading.
     */
    public static final int SHADING_TYPE1 = 1;
    /**
     * shading type 2 = axial shading.
     */
    public static final int SHADING_TYPE2 = 2;
    /**
     * shading type 3 = radial shading.
     */
    public static final int SHADING_TYPE3 = 3;
    /**
     * shading type 4 = Free-Form Gouraud-Shaded Triangle Meshes.
     */
    public static final int SHADING_TYPE4 = 4;
    /**
     * shading type 5 = Lattice-Form Gouraud-Shaded Triangle Meshes.
     */
    public static final int SHADING_TYPE5 = 5;
    /**
     * shading type 6 = Coons Patch Meshes.
     */
    public static final int SHADING_TYPE6 = 6;
    /**
     * shading type 7 = Tensor-Product Patch Meshes.
     */
    public static final int SHADING_TYPE7 = 7;

    // Wrapped COS dictionary plus lazily populated caches of its entries.
    private final COSDictionary dictionary;
    private COSArray background;
    private PDRectangle bBox;
    private PDColorSpace colorSpace;
    private PDFunction function;
    private PDFunction[] functionArray;

    /**
     * Creates a shading backed by a fresh, empty dictionary.
     */
    public PDShading() {
        dictionary = new COSDictionary();
    }

    /**
     * Creates a shading backed by an existing dictionary.
     *
     * @param shadingDictionary the dictionary for this shading
     */
    public PDShading(COSDictionary shadingDictionary) {
        dictionary = shadingDictionary;
    }

    /**
     * Returns the underlying COS dictionary.
     *
     * @return the dictionary for this shading
     */
    @Override
    public COSDictionary getCOSObject() {
        return dictionary;
    }

    /**
     * Returns the resource type of this object.
     *
     * @return the type of object that this is
     */
    public String getType() {
        return COSName.SHADING.getName();
    }

    /**
     * Stores the shading type in the dictionary.
     *
     * @param shadingType the new shading type
     */
    public void setShadingType(int shadingType) {
        dictionary.setInt(COSName.SHADING_TYPE, shadingType);
    }

    /**
     * Returns the shading type (one of the SHADING_TYPE* constants).
     *
     * @return the shading type
     */
    public abstract int getShadingType();

    /**
     * Stores the background colour components.
     *
     * @param newBackground the new background
     */
    public void setBackground(COSArray newBackground) {
        background = newBackground;
        dictionary.setItem(COSName.BACKGROUND, newBackground);
    }

    /**
     * Returns the background colour components, reading and caching them from
     * the dictionary on first access.
     *
     * @return the background, or null if none is set
     */
    public COSArray getBackground() {
        if (background != null) {
            return background;
        }
        background = (COSArray) dictionary.getDictionaryObject(COSName.BACKGROUND);
        return background;
    }

    /**
     * Returns the shading's bounding box: left, bottom, right and top edges in
     * the coordinate system of the form.
     *
     * @return the BBox of the form, or null if none is set
     */
    public PDRectangle getBBox() {
        if (bBox != null) {
            return bBox;
        }
        COSArray corners = (COSArray) dictionary.getDictionaryObject(COSName.BBOX);
        if (corners != null) {
            bBox = new PDRectangle(corners);
        }
        return bBox;
    }

    /**
     * Sets (or, when null, removes) the bounding box of this shading.
     *
     * @param newBBox the new BBox
     */
    public void setBBox(PDRectangle newBBox) {
        bBox = newBBox;
        if (newBBox == null) {
            dictionary.removeItem(COSName.BBOX);
        } else {
            dictionary.setItem(COSName.BBOX, newBBox.getCOSArray());
        }
    }

    /**
     * Stores the AntiAlias flag.
     *
     * @param antiAlias the new AntiAlias value
     */
    public void setAntiAlias(boolean antiAlias) {
        dictionary.setBoolean(COSName.ANTI_ALIAS, antiAlias);
    }

    /**
     * Returns the AntiAlias flag (false when absent).
     *
     * @return the AntiAlias value
     */
    public boolean getAntiAlias() {
        return dictionary.getBoolean(COSName.ANTI_ALIAS, false);
    }

    /**
     * Returns the colour space of this shading, resolving and caching it from
     * the /CS or /ColorSpace entry on first access.
     *
     * @return the color space for the shading
     * @throws IOException if there is an error getting the color space
     */
    public PDColorSpace getColorSpace() throws IOException {
        if (colorSpace == null) {
            COSBase csBase = dictionary.getDictionaryObject(COSName.CS, COSName.COLORSPACE);
            colorSpace = PDColorSpace.create(csBase);
        }
        return colorSpace;
    }

    /**
     * Sets (or, when null, removes) the colour space of this shading.
     *
     * @param colorSpace the color space
     */
    public void setColorSpace(PDColorSpace colorSpace) {
        this.colorSpace = colorSpace;
        if (colorSpace == null) {
            dictionary.removeItem(COSName.COLORSPACE);
        } else {
            dictionary.setItem(COSName.COLORSPACE, colorSpace.getCOSObject());
        }
    }

    /**
     * Factory: instantiates the concrete PD model shading matching the
     * /ShadingType entry of the given COS dictionary.
     *
     * @param resourceDictionary the COS shading dictionary
     * @return the newly created shading resources object
     * @throws IOException if the shading type is missing or unknown
     */
    public static PDShading create(COSDictionary resourceDictionary) throws IOException {
        int shadingType = resourceDictionary.getInt(COSName.SHADING_TYPE, 0);
        switch (shadingType) {
            case SHADING_TYPE1:
                return new PDShadingType1(resourceDictionary);
            case SHADING_TYPE2:
                return new PDShadingType2(resourceDictionary);
            case SHADING_TYPE3:
                return new PDShadingType3(resourceDictionary);
            case SHADING_TYPE4:
                return new PDShadingType4(resourceDictionary);
            case SHADING_TYPE5:
                return new PDShadingType5(resourceDictionary);
            case SHADING_TYPE6:
                return new PDShadingType6(resourceDictionary);
            case SHADING_TYPE7:
                return new PDShadingType7(resourceDictionary);
            default:
                throw new IOException("Error: Unknown shading type " + shadingType);
        }
    }

    /**
     * Sets a single colour-conversion function, clearing any cached array.
     *
     * @param newFunction the new function
     */
    public void setFunction(PDFunction newFunction) {
        functionArray = null;
        function = newFunction;
        getCOSObject().setItem(COSName.FUNCTION, newFunction);
    }

    /**
     * Sets an array of colour-conversion functions, clearing both caches.
     *
     * @param newFunctions the new COSArray containing all functions
     */
    public void setFunction(COSArray newFunctions) {
        functionArray = null;
        function = null;
        getCOSObject().setItem(COSName.FUNCTION, newFunctions);
    }

    /**
     * Returns the function used to convert colour values, reading and caching
     * it from the dictionary on first access.
     *
     * @return the function, or null if none is present
     * @throws java.io.IOException if we were not able to create the function
     */
    public PDFunction getFunction() throws IOException {
        if (function != null) {
            return function;
        }
        COSBase fnBase = getCOSObject().getDictionaryObject(COSName.FUNCTION);
        if (fnBase != null) {
            function = PDFunction.create(fnBase);
        }
        return function;
    }

    /**
     * Returns the function(s) of the shading dictionary as an array, whether
     * the /Function entry is a single dictionary or an array.
     *
     * @return an array containing the function(s)
     * @throws IOException if the entry is missing or of the wrong type
     */
    private PDFunction[] getFunctionsArray() throws IOException {
        if (functionArray != null) {
            return functionArray;
        }
        COSBase fnBase = getCOSObject().getDictionaryObject(COSName.FUNCTION);
        if (fnBase instanceof COSDictionary) {
            functionArray = new PDFunction[] { PDFunction.create(fnBase) };
        } else if (fnBase instanceof COSArray) {
            COSArray fnArray = (COSArray) fnBase;
            PDFunction[] created = new PDFunction[fnArray.size()];
            for (int i = 0; i < created.length; i++) {
                created[i] = PDFunction.create(fnArray.get(i));
            }
            functionArray = created;
        } else {
            throw new IOException("mandatory /Function element must be a dictionary or an array");
        }
        return functionArray;
    }

    /**
     * Converts a single input value using the shading's function(s).
     *
     * @param inputValue the input value
     * @return the output values
     * @throws IOException thrown if something went wrong
     */
    public float[] evalFunction(float inputValue) throws IOException {
        return evalFunction(new float[]{inputValue});
    }

    /**
     * Converts the input values using the shading's function(s). With one
     * function its full output is returned; with several, each contributes
     * the first component of its output.
     *
     * @param input the input values
     * @return the output values, clamped to the valid colour range
     * @throws IOException thrown if something went wrong
     */
    public float[] evalFunction(float[] input) throws IOException {
        PDFunction[] functions = getFunctionsArray();
        float[] outputs;
        if (functions.length == 1) {
            outputs = functions[0].eval(input);
        } else {
            outputs = new float[functions.length];
            for (int i = 0; i < functions.length; i++) {
                outputs[i] = functions[i].eval(input)[0];
            }
        }
        // PDF spec: an out-of-range colour component "shall be adjusted to the
        // nearest valid value".
        for (int i = 0; i < outputs.length; ++i) {
            if (outputs[i] < 0) {
                outputs[i] = 0;
            } else if (outputs[i] > 1) {
                outputs[i] = 1;
            }
        }
        return outputs;
    }

    /**
     * Returns an AWT paint which corresponds to this shading.
     *
     * @param matrix the pattern matrix concatenated with that of the parent content stream,
     * this matrix which maps the pattern's internal coordinate system to user space
     * @return an AWT Paint instance
     */
    public abstract Paint toPaint(Matrix matrix);
}
| |
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.spec.SecretKeySpec;
import java.io.*;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.security.*;
import java.util.Objects;
public class ClientConnection implements Runnable {
private Socket mClientSocket;
private DataInputStream mDataInput;
private DataOutputStream dataOutput;
private boolean connectionOnline;
private boolean mPingReceived;
private long mPingSendedTime;
private KeysUtils mKeys;
private boolean mEncryption;
/**
 * Creates a handler for an accepted client socket; the connection starts in
 * the online state.
 */
ClientConnection(Socket socket, KeysUtils keys) {
    mClientSocket = socket;
    mKeys = keys;
    connectionOnline = true;
}
/**
 * Closes the client socket and marks the connection offline so the run()
 * loop terminates.
 */
private void closeConnection() {
    try {
        System.out.println("Connection closed with " + mClientSocket.getInetAddress());
        mClientSocket.close();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // BUG FIX: originally the flag was only cleared after a successful
        // close(); if close() threw, run() kept spinning on a dead socket.
        connectionOnline = false;
    }
}
/**
 * Connection loop: polls the socket every 500 ms, reads 6-byte framed
 * commands when data is available, and otherwise performs ping-based
 * liveness checking. Any I/O failure closes the connection.
 */
public void run() {
    try {
        InputStream sin = mClientSocket.getInputStream();
        OutputStream sout = mClientSocket.getOutputStream();
        mDataInput = new DataInputStream(sin);
        dataOutput = new DataOutputStream(sout);
        mPingSendedTime = System.currentTimeMillis();
        mPingReceived = true;
        while (connectionOnline) {
            try {
                Thread.sleep(500);
            } catch (InterruptedException ignored) {
                // BUG FIX: restore the interrupt status instead of silently
                // swallowing it, so callers can observe the interruption.
                Thread.currentThread().interrupt();
            }
            if (mDataInput.available() > 0) {
                byte[] commandByte = new byte[6];
                // BUG FIX: read() may return fewer than 6 bytes; the protocol
                // frames every command as exactly 6 bytes, so block for all of
                // them with readFully().
                mDataInput.readFully(commandByte);
                commandByte = CriminalUtils.trimBytes(commandByte);
                String command = new String(commandByte, "UTF-8");
                proceedCommand(command);
            } else {
                checkConnection();
            }
        }
    } catch (IOException e) {
        closeConnection();
    }
}
/**
 * Ping-based liveness check. At most every 3 seconds: if the previous ping
 * was answered, send a new one; otherwise declare the connection timed out.
 */
private void checkConnection() throws IOException {
    if (mPingSendedTime + 3000 >= System.currentTimeMillis()) {
        return; // last ping is still within its grace period
    }
    if (!mPingReceived) {
        // No PONG since the last PING: give up on this client.
        connectionOnline = false;
        System.out.println("Connection timeout (client: "+mClientSocket.getInetAddress()+")");
        return;
    }
    mPingReceived = false;
    mPingSendedTime = System.currentTimeMillis();
    sendCommand("PING");
}
private void proceedCommand(String command) throws IOException {
switch (command) {
case "HELLO":
if (mKeys.isEnabled()) {
sendCommand("HELLOK", mKeys.getPublic());
} else {
sendCommand("HELLO");
}
break;
case "PKEY":
receiveClientKey();
break;
case "ENCDIS":
mEncryption = false;
break;
case "PONG":
mPingReceived = true;
break;
case "CRIMES":
sendCrimes(true);
break;
case "ADD":
Crime crime = CriminalUtils.readCrime(mDataInput, mKeys, mEncryption);
if (crime != null) {
CrimesLib.getInstance().addCrime(crime);
}
sendCrimes(true);
break;
case "UPDATE":
Crime updcrime = CriminalUtils.readCrime(mDataInput, mKeys, mEncryption);
if (updcrime != null) {
CrimesLib.getInstance().updateCrime(updcrime);
}
sendCrimes(false);
break;
case "DELETE":
Crime delcrime = CriminalUtils.readCrime(mDataInput, mKeys, mEncryption);
if (delcrime != null) {
CrimesLib.getInstance().deleteCrime(delcrime);
}
sendCrimes(true);
break;
case "BYE":
System.out.println("Client " + mClientSocket.getInetAddress() + " has said goodbye");
mClientSocket.close();
break;
}
}
private void receiveClientKey() {
try {
byte[] lengthHeader = new byte[4];
mDataInput.read(lengthHeader);
int dataSize = ByteBuffer.wrap(lengthHeader).getInt();
byte[] body = new byte[dataSize];
mDataInput.read(body);
body = mKeys.decryptServer(body);
Key key = new SecretKeySpec(body, 0, body.length, "AES");
mKeys.setClientSecret(key);
mEncryption = true;
sendCommand("KTEST", "SUCCESS");
} catch (Exception e) {
e.printStackTrace();
mEncryption = false;
}
}
private void sendCrimes(boolean show) throws IOException {
if (!show) {
sendCommand("CRIMEN");
} else {
sendCommand("CRIMES");
}
for (Crime crime : CrimesLib.getInstance().getCrimes()) {
sendCommand("CRIME", crime);
}
sendCommand("CSEND");
}
private void sendCommand(String command) {
final byte[] bodyBytes;
try {
bodyBytes = command.getBytes("UTF-8");
ByteBuffer headerBuffer = ByteBuffer.allocate(6).put(bodyBytes);
byte[] message = headerBuffer.array();
dataOutput.write(message);
dataOutput.flush();
} catch (Exception e) {
e.printStackTrace();
}
}
private void sendCommand(String command, Crime crime) {
sendCommand(command, (Object) crime);
}
private void sendCommand(String command, Object object) {
try {
byte[] crimeBytes = CriminalUtils.serialize(object);
if (mEncryption) {
sendBytes(command, mKeys.encrypt(crimeBytes));
} else {
sendBytes(command, crimeBytes);
}
} catch (Exception e) {
e.printStackTrace();
}
}
private void sendBytes(String command, byte[] bytes) {
try {
final byte[] bodyBytes = command.getBytes("UTF-8");
ByteBuffer headerBuffer = ByteBuffer.allocate(6).put(bodyBytes);
headerBuffer.position(0);
ByteBuffer lengthBuffer = ByteBuffer.allocate(4).putInt(bytes.length);
lengthBuffer.position(0);
byte[] header = headerBuffer.array();
byte[] message = ByteBuffer.allocate(10 + bytes.length).put(header).put(lengthBuffer.array()).put(bytes).array();
dataOutput.write(message);
if (!Objects.equals(command, "CRIME")) {
dataOutput.flush();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
| |
package kodkod.test.pardinus.temporal;
import kodkod.ast.*;
import kodkod.engine.ltl2fol.InvalidMutableExpressionException;
import kodkod.engine.ltl2fol.LTL2FOLTranslator;
import kodkod.engine.ltl2fol.TemporalTranslator;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import java.util.LinkedHashMap;
import org.junit.BeforeClass;
import static kodkod.engine.ltl2fol.TemporalTranslator.FIRST;
import static kodkod.engine.ltl2fol.TemporalTranslator.TRACE;
import static kodkod.engine.ltl2fol.TemporalTranslator.PREFIX;
/**
* Tests whether the translation of PLTL formulas is done correctly.
*
* As of Pardinus 1.1, traces are always assumed to be infinite.
*
* Assumes past translation with {@link TemporalTranslator#ExplicitUnrolls} true.
*
* @author Eduardo Pessoa, Nuno Macedo // [HASLab] decomposed model finding
*/
public class TemporalTranslatorTests {
private static Relation Process = Relation.unary("Process");
private static Relation toSend = Relation.binary_variable("toSend");
private static Relation elected = Relation.unary_variable("elected");
private static Relation naryRelation = Relation.variable("naryRelation", 4);
private static Relation succ = Relation.binary("succ");
private static Relation pfirst = Relation.unary("pfirst");
private static Relation plast = Relation.unary("plast");
private static Relation pord = Relation.binary_variable("pord");
public TemporalTranslatorTests() {}
@BeforeClass
public static void rightEnc() {
assert TemporalTranslator.ExplicitUnrolls;
}
/* Declarations */
@Test
public final void test() {
Formula initial = (elected.eq(elected.prime()).not()).after();
Formula result = elected.getExpansion().join(FIRST.join(TRACE)).eq(elected.getExpansion().join(FIRST.join(TRACE).join(TRACE))).not();
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
/* Declarations */
@Test
public final void declaration_one() {
Formula initial = elected.in(Process);
Formula result = elected.getExpansion().join(FIRST).in(Process);
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void declaration_two() {
Formula initial = toSend.in(Process.product(Process));
Formula result = toSend.getExpansion().join(FIRST).in(Process.product(Process));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void declaration_three() {
Formula initial = naryRelation.in(Process.product(Process).product(Process).product(Process));
Formula result = naryRelation.getExpansion().join(FIRST).in(Process.product(Process).product(Process).product(Process));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void declaration_totalFunction() {
Formula initial = pord.function(pfirst, plast);
try {
((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString();
assert(false);
} catch (InvalidMutableExpressionException e) {
assert(true);
}
}
@Test
public final void declaration_partialFunction() {
Formula initial = pord.partialFunction(pfirst, plast);
try {
((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString();
assert(false);
} catch (InvalidMutableExpressionException e) {
assert(true);
}
}
@Test
public final void declaration_normal_partialFunction() {
Formula initial = succ.partialFunction(pfirst, plast);
Formula result = succ.partialFunction(pfirst, plast);
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void declaration_normal_function() {
Formula initial = succ.partialFunction(pfirst, plast);
Formula result = succ.partialFunction(pfirst, plast);
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
/* Temporal */
@Test
public final void simple_post_init() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.prime().join(v)).and(v.in(toSend.join(v))).forAll(v.oneOf(Process));
Formula result = (v.in(toSend.getExpansion().join(FIRST.join(TRACE)).join(v)).and(v.in(toSend.getExpansion().join(FIRST).join(v))).forAll(v.oneOf(Process)));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_always() {
Variable v = Variable.unary("p");
Variable t = Variable.unary("t0");
Formula initial = v.in(toSend.join(v)).and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).always();
Formula result = (v.in(toSend.getExpansion().join(t).join(v)).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_next() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).after();
Formula result = (v.in(toSend.getExpansion().join(FIRST.join(TRACE)).join(v)).and(v.in(toSend.getExpansion().join(FIRST.join(TRACE)).join(v))).forAll(v.oneOf(Process)));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_prime() {
Variable v = Variable.unary("p");
Formula initial = (toSend.join(v).prime()).eq(toSend.join(v)).forAll(v.oneOf(Process));
Formula result = (toSend.getExpansion().join(FIRST.join(TRACE)).join(v).eq(toSend.getExpansion().join(FIRST).join(v)).forAll(v.oneOf(Process)));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_double_prime() {
Variable v = Variable.unary("p");
Formula initial = (toSend.join(v).prime()).eq(toSend.prime().join(v).prime()).forAll(v.oneOf(Process));
Formula result = (toSend.getExpansion().join(FIRST.join(TRACE)).join(v).eq(toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE)).join(v)).forAll(v.oneOf(Process)));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_eventually() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).eventually();
Variable t = Variable.unary("t0");
Formula result = v.in(toSend.getExpansion().join(t).join(v)).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_historically() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).and(v.in(toSend.join(v))).forAll(v.oneOf(Process))
.historically();
Variable t = Variable.unary("t0");
Formula result = v.in(toSend.getExpansion().join(t).join(v)).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forAll(t.oneOf(FIRST.join(PREFIX.transpose().reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_once() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).once();
Variable t = Variable.unary("t0");
Formula result = v.in(toSend.getExpansion().join(t).join(v)).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forSome(t.oneOf(FIRST.join(PREFIX.transpose().reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_previous() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).before();
Formula result = FIRST.join(PREFIX.transpose()).some().and(v.in(toSend.getExpansion().join(FIRST.join(PREFIX.transpose())).join(v)).and(v.in(toSend.getExpansion().join(FIRST.join(PREFIX.transpose())).join(v))).forAll(v.oneOf(Process)));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_prime_always() {
Variable v = Variable.unary("p");
Formula initial = v.join(toSend.prime()).eq(v.join(toSend)).always();
Variable t = Variable.unary("t0");
Formula result = (((v.join(toSend.getExpansion().join(t.join(TRACE))).eq(v.join(toSend.getExpansion().join(t))))).forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_post_next_always() {
Variable v = Variable.unary("p");
Formula initial = v.join(toSend.prime()).eq(v.join(toSend)).after().always();
Variable t = Variable.unary("t0");
Formula result = ((v.join(toSend.getExpansion().join(t.join(TRACE).join(TRACE))).eq(v.join(toSend.getExpansion().join(t.join(TRACE))))).forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_post_eventually() {
Variable v = Variable.unary("p");
Formula initial = v.join(toSend.prime()).eq(v.join(toSend)).eventually();
Variable t = Variable.unary("t0");
Formula result = (v.join(toSend.getExpansion().join(t.join(TRACE))).eq(v.join(toSend.getExpansion().join(t)))).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_post_next_eventually() {
Variable v = Variable.unary("p");
Formula initial = v.join(toSend.prime()).eq(v.join(toSend).prime().prime()).after().eventually();
Variable t = Variable.unary("t0");
Formula result = ((v.join(toSend.getExpansion().join(t.join(TRACE).join(TRACE))).eq(v.join(toSend.getExpansion().join(t.join(TRACE).join(TRACE).join(TRACE)))))).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_post_next() {
Formula initial = toSend.prime().eq(toSend).after();
Formula result = (toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE)).eq(toSend.getExpansion().join(FIRST.join(TRACE))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_until() {
Formula initial = Process.join(toSend).some().until(Process.join(toSend).lone());
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Formula f2 = Process.join(toSend.getExpansion().join(t1)).some().forAll(t1.oneOf(upTo(FIRST,t,false)));
Formula f1 = Process.join(toSend.getExpansion().join(t)).lone().and(f2).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
Formula result = f1;
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_since() {
Formula initial = Process.join(toSend).some().since(Process.join(toSend).lone());
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Formula f2 = Process.join(toSend.getExpansion().join(t1)).some().forAll(t1.oneOf(downTo(FIRST,t,false)));
Formula f1 = Process.join(toSend.getExpansion().join(t)).lone().and(f2).forSome(t.oneOf(FIRST.join(PREFIX.transpose().reflexiveClosure())));
Formula result = f1;
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
private Expression upTo(Expression t1, Expression t2, boolean inc2) {
Formula c = t2.in(t1.join(PREFIX.reflexiveClosure()));
Expression exp1 = PREFIX.reflexiveClosure();
Expression exp2 = PREFIX.closure();
Expression exp11 = TRACE.reflexiveClosure();
Expression exp12 = TRACE.closure();
Expression e1 = (t1.join(exp1)).intersection(t2.join(exp2.transpose()));
Expression e21 = (t1.join(exp11)).intersection(t2.join(exp12.transpose()));
Expression e22 = (t2.join(exp1)).intersection(t1.join(exp2.transpose()));
Expression e2 = e21.difference(e22);
Expression e = c.thenElse(e1, e2);
if (inc2) e = e.union(t2);
return e;
}
private Expression downTo(Expression t1, Expression t2, boolean inc2) {
Expression exp1 = PREFIX.reflexiveClosure();
Expression exp2 = PREFIX.closure();
Expression e1 = (t1.join(exp1.transpose())).intersection(t2.join(exp2));
Expression e = e1;
if (inc2) e = e.union(t2);
return e;
}
@Test
public final void simple_release() {
Formula initial = Process.join(toSend).some().releases(Process.join(toSend).lone());
Variable t = Variable.unary("t1");
Variable t1 = Variable.unary("t2");
Variable t2 = Variable.unary("t0");
Formula f2 = Process.join(toSend.getExpansion().join(t1)).lone().forAll(t1.oneOf(upTo(FIRST,t,true)));
Formula f1 = Process.join(toSend.getExpansion().join(t)).some().and(f2).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
Formula f3 = Process.join(toSend.getExpansion().join(t2)).lone().forAll(t2.oneOf(FIRST.join(TRACE.reflexiveClosure())));
Formula result = ((f3)).or(f1);
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_release_post() {
Variable v = Variable.unary("p");
Formula initial = toSend.join(v).eq(toSend.join(v)).releases(Process.join(toSend.prime()).lone()).forAll(v.oneOf(Process));
Variable t = Variable.unary("t1");
Variable t1 = Variable.unary("t2");
Variable t2 = Variable.unary("t0");
Formula f2 = (Process.join(toSend.getExpansion().join(t1.join(TRACE))).lone()).forAll(t1.oneOf(upTo(FIRST,t,true)));
Formula f1 = toSend.getExpansion().join(t).join(v).eq(toSend.getExpansion().join(t).join(v)).and(f2).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
Formula f3 = Process.join(toSend.getExpansion().join(t2.join(TRACE))).lone().forAll(t2.oneOf(FIRST.join(TRACE.reflexiveClosure())));
Formula result = ((f3)).or(f1).forAll(v.oneOf(Process));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
/* Out of the root */
@Test
public final void simple_always_always() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).always().and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).always();
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Formula result = (((v.in(toSend.getExpansion().join(t1).join(v))).forAll(t1.oneOf(t.join(TRACE.reflexiveClosure())))).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_always_eventually() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).always().and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).eventually();
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Formula result = ((v.in(toSend.getExpansion().join(t1).join(v))).forAll(t1.oneOf(t.join(TRACE.reflexiveClosure())))).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_once_always() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).once().and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).always();
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Formula result = ((v.in(toSend.getExpansion().join(t1).join(v))).forSome(t1.oneOf(t.join(PREFIX.transpose().reflexiveClosure()))).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_historically_eventually() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).historically().and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).eventually();
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Formula result = (v.in(toSend.getExpansion().join(t1).join(v))).forAll(t1.oneOf(t.join(PREFIX.transpose().reflexiveClosure()))).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_next_always() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).after().and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).always();
Variable t = Variable.unary("t0");
Formula result = (v.in(toSend.getExpansion().join(t.join(TRACE)).join(v)).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_previous_always() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).before().and(v.in(toSend.join(v))).forAll(v.oneOf(Process)).always();
Variable t = Variable.unary("t0");
Formula result = ((t.join(PREFIX.transpose()).some().and(v.in(toSend.getExpansion().join(t.join(PREFIX.transpose())).join(v)))).and(v.in(toSend.getExpansion().join(t).join(v))).forAll(v.oneOf(Process)).forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_previous_always_eventually() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).before().and(v.in(toSend.join(v)).always().and(v.in(toSend.join(v)))).forAll(v.oneOf(Process)).eventually();
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Formula f1 = t.join(PREFIX.transpose()).some().and(v.in(toSend.getExpansion().join(t.join(PREFIX.transpose())).join(v)));
Formula f2 = ((v.in(toSend.getExpansion().join(t1).join(v))).forAll(t1.oneOf(t.join(TRACE.reflexiveClosure()))));
Formula result = ((f1.and(f2.and(v.in(toSend.getExpansion().join(t).join(v))))).forAll(v.oneOf(Process))).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_next_eventually_eventually() {
Variable v = Variable.unary("p");
Formula initial = v.in(toSend.join(v)).after().and(v.in(toSend.join(v)).eventually().and(v.in(toSend.join(v)))).forAll(v.oneOf(Process)).eventually();
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Formula f1 = (v.in(toSend.getExpansion().join(t.join(TRACE)).join(v)));
Formula f2 = (v.in(toSend.getExpansion().join(t1).join(v))).forSome(t1.oneOf(t.join(TRACE.reflexiveClosure())));
Formula result = ((f1.and(f2.and(v.in(toSend.getExpansion().join(t).join(v))))).forAll(v.oneOf(Process))).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_until_always() {
Formula initial = Process.join(toSend).some().until(Process.join(toSend).lone()).always();
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Variable t2 = Variable.unary("t2");
Formula f2 = Process.join(toSend.getExpansion().join(t2)).some().forAll(t2.oneOf(upTo(t,t1,false)));
Formula f1 = Process.join(toSend.getExpansion().join(t1)).lone().and(f2).forSome(t1.oneOf(t.join(TRACE.reflexiveClosure())));
Formula result = (f1.forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_always_until_eventually() {
Formula initial = Process.join(toSend).some().always().until(Process.join(toSend).lone()).eventually();
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Variable t2 = Variable.unary("t2");
Variable t3 = Variable.unary("t3");
Formula f2 = ((Process.join(toSend.getExpansion().join(t3)).some().forAll(t3.oneOf(t2.join(TRACE.reflexiveClosure()))))).forAll(t2.oneOf(upTo(t,t1,false)));
Formula f1 = Process.join(toSend.getExpansion().join(t1)).lone().and(f2).forSome(t1.oneOf(t.join(TRACE.reflexiveClosure())));
Formula result = f1.forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void simple_release_always() {
Formula initial = Process.join(toSend).some().releases(Process.join(toSend).lone()).always();
Variable t = Variable.unary("t2");
Variable t1 = Variable.unary("t3");
Variable t2 = Variable.unary("t1");
Variable t3 = Variable.unary("t0");
Formula f2 = Process.join(toSend.getExpansion().join(t1)).lone().forAll(t1.oneOf(upTo(t3,t,true)));
Formula f1 = Process.join(toSend.getExpansion().join(t)).some().and(f2).forSome(t.oneOf(t3.join(TRACE.reflexiveClosure())));
Formula f3 = Process.join(toSend.getExpansion().join(t2)).lone().forAll(t2.oneOf(t3.join(TRACE.reflexiveClosure())));
Formula result = (((f3)).or(f1).forAll(t3.oneOf(FIRST.join(TRACE.reflexiveClosure()))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString()); }
@Test
public final void simple_until1_always() {
Formula initial = Process.join(toSend).some().until(Process.join(toSend).prime().lone()).always().and(toSend.one());
Variable t = Variable.unary("t0");
Variable t1 = Variable.unary("t1");
Variable t2 = Variable.unary("t2");
Formula f2 = Process.join(toSend.getExpansion().join(t2)).some().forAll(t2.oneOf(upTo(t,t1,false)));
Formula f1 = ((Process.join(toSend.getExpansion().join(t1.join(TRACE))).lone()).and(f2)).forSome(t1.oneOf(t.join(TRACE.reflexiveClosure())));
Formula result = ((f1.forAll(t.oneOf(FIRST.join(TRACE.reflexiveClosure())))).and(toSend.getExpansion().join(FIRST).one()));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
/* Prime, next and previous */
@Test
public final void nested_primes() {
Formula initial = ((toSend.join(toSend.prime())).prime().in(toSend)).eventually();
Variable t = Variable.unary("t0");
Formula result = (((toSend.getExpansion().join(t.join(TRACE)).join(toSend.getExpansion().join(t.join(TRACE).join(TRACE)))).in(toSend.getExpansion().join(t)))).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void nested_primes2() {
Formula initial = ((toSend.join(toSend.prime())).prime().in(toSend)).and(toSend.prime().prime().prime().in(toSend)).eventually();
Variable t = Variable.unary("t0");
Formula result = ((toSend.getExpansion().join(t.join(TRACE)).join(toSend.getExpansion().join(t.join(TRACE).join(TRACE))).in(toSend.getExpansion().join(t))).and(toSend.getExpansion().join(t.join(TRACE).join(TRACE).join(TRACE)).in(toSend.getExpansion().join(t)))).forSome(t.oneOf(FIRST.join(TRACE.reflexiveClosure())));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void nested_primes3() {
Formula initial = ((toSend.join(toSend.prime())).prime().in(toSend)).and(toSend.prime().prime().prime().in(toSend));
Formula result = ((toSend.getExpansion().join(FIRST.join(TRACE)).join(toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE))).in(toSend.getExpansion().join(FIRST))).and(toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE).join(TRACE)).in(toSend.getExpansion().join(FIRST))));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void nested_next() {
Formula initial = (toSend.in(toSend).after().after()).and(toSend.in(toSend).after()).after();
Formula f1 = (toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE).join(TRACE)).in(toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE).join(TRACE))));
Formula f2 = (toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE)).in(toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE))));
Formula result = (f1.and(f2));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void nested_next_prime() {
Formula initial = (toSend.in(toSend).after().after()).and(toSend.in(toSend.prime()).after()).after();
Formula f1 = ((toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE).join(TRACE)).in(toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE).join(TRACE)))));
Formula f2 = (toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE)).in(toSend.getExpansion().join(FIRST.join(TRACE).join(TRACE).join(TRACE))));
Formula result = (f1.and(f2));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void nested_previous() {
Formula initial = (toSend.in(toSend).before().before()).and(toSend.in(toSend).before()).before();
Formula f1 = FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()).some().and(FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()).join(PREFIX.transpose()).some().and(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()).join(PREFIX.transpose())).in(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()).join(PREFIX.transpose())))));
Formula f2 = FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()).some().and(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(PREFIX.transpose())).in(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()))));
Formula result = FIRST.join(PREFIX.transpose()).some().and(f1.and(f2));
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void nested_previous_next() {
Formula initial = (toSend.in(toSend)).after().before();
Formula f1 = (toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(TRACE)).in(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(TRACE))));
Formula result = FIRST.join(PREFIX.transpose()).some().and(f1);
assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void nested_previous_prime() {
    // LTL input: before(toSend in toSend') — a primed (next-state) variable under 'previous'.
    Formula initial = (toSend.in(toSend.prime())).before();
    // The unprimed side is evaluated at the previous state (one PREFIX^-1 step); the primed
    // side at that state's successor (an extra TRACE step), cancelling back to the current state.
    Formula f1 = (toSend.getExpansion().join(FIRST.join(PREFIX.transpose())).in(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(TRACE))));
    Formula result = FIRST.join(PREFIX.transpose()).some().and(f1);
    // child(1) skips the translator's leading conjunct, comparing only the formula of interest.
    assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
@Test
public final void nested_previous_next_prime() {
    // LTL input: before( after(toSend'' in toSend') and before(toSend'' in toSend') ) —
    // double/single primes combined with both temporal directions under an outer 'previous'.
    Formula initial = ((toSend.prime().prime().in(toSend.prime()).after()).and((toSend.prime().prime().in(toSend.prime()).before()))).before();
    // f1: the 'after' branch — primes add TRACE steps on top of the before/after displacement.
    Formula f1 = (toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(TRACE).join(TRACE).join(TRACE)).in(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(TRACE).join(TRACE))));
    // f2: the inner 'before' branch — two PREFIX^-1 steps, each guarded by a 'some' existence check.
    Formula f2 = FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()).some().and(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()).join(TRACE).join(TRACE)).in(toSend.getExpansion().join(FIRST.join(PREFIX.transpose()).join(PREFIX.transpose()).join(TRACE))));
    Formula result = FIRST.join(PREFIX.transpose()).some().and(f1.and(f2));
    // child(1) skips the translator's leading conjunct, comparing only the formula of interest.
    assertEquals(result.toString(), ((NaryFormula)LTL2FOLTranslator.translate(initial,0,false,new LinkedHashMap<Formula,Formula>())).child(1).toString());
}
}
| |
package org.jabref.logic.exporter;
import java.io.FileOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.PosixFilePermission;
import java.util.EnumSet;
import java.util.Set;
import org.jabref.logic.util.io.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A file output stream that is similar to the standard {@link FileOutputStream}, except that all writes are first
* redirected to a temporary file. When the stream is closed, the temporary file (atomically) replaces the target file.
*
* <p>
* In detail, the strategy is to:
* <ol>
* <li>Write to a temporary file (with .tmp suffix) in the same directory as the destination file.</li>
* <li>Create a backup (with .bak suffix) of the original file (if it exists) in the same directory.</li>
* <li>Move the temporary file to the correct place, overwriting any file that already exists at that location.</li>
* <li>Delete the backup file (if configured to do so).</li>
* </ol>
* If all goes well, no temporary or backup files will remain on disk after closing the stream.
* <p>
* Errors are handled as follows:
* <ol>
* <li>If anything goes wrong while writing to the temporary file, the temporary file will be deleted (leaving the
* original file untouched).</li>
* <li>If anything goes wrong while copying the temporary file to the target file, the backup of the original file is
* kept.</li>
* </ol>
* <p>
* Implementation inspired by code from <a href="https://github.com/martylamb/atomicfileoutputstream/blob/master/src/main/java/com/martiansoftware/io/AtomicFileOutputStream.java">Marty
* Lamb</a> and <a href="https://github.com/apache/zookeeper/blob/master/src/java/main/org/apache/zookeeper/common/AtomicFileOutputStream.java">Apache</a>.
*/
public class AtomicFileOutputStream extends FilterOutputStream {

    private static final Logger LOGGER = LoggerFactory.getLogger(AtomicFileOutputStream.class);

    private static final String TEMPORARY_EXTENSION = ".tmp";
    private static final String BACKUP_EXTENSION = ".bak";

    /**
     * The file we want to create/replace.
     */
    private final Path targetFile;

    /**
     * The file to which writes are redirected; moved over the target file on {@link #close()}.
     */
    private final Path temporaryFile;

    /**
     * Advisory lock on the temporary file, or {@code null} if the underlying stream is not a
     * {@link FileOutputStream} (no channel to lock).
     */
    private final FileLock temporaryFileLock;

    /**
     * A backup of the target file (if it exists), created when the stream is closed.
     */
    private final Path backupFile;

    private final boolean keepBackup;

    /**
     * Creates a new output stream to write to or replace the file at the specified path.
     *
     * @param path       the path of the file to write to or replace
     * @param keepBackup whether to keep the backup file after a successful write process
     * @throws IOException if the temporary file cannot be opened, or if another process/instance
     *                     already holds a lock on it
     */
    public AtomicFileOutputStream(Path path, boolean keepBackup) throws IOException {
        // The superclass constructor must be the first statement, so the temporary path
        // is derived once more below for the field assignment.
        super(Files.newOutputStream(getPathOfTemporaryFile(path)));
        this.targetFile = path;
        this.temporaryFile = getPathOfTemporaryFile(path);
        this.backupFile = getPathOfBackupFile(path);
        this.keepBackup = keepBackup;

        try {
            // Lock files (so that at least not another JabRef instance writes at the same time to the same tmp file)
            if (out instanceof FileOutputStream) {
                temporaryFileLock = ((FileOutputStream) out).getChannel().lock();
            } else {
                temporaryFileLock = null;
            }
        } catch (OverlappingFileLockException exception) {
            throw new IOException("Could not obtain write access to " + temporaryFile + ". Maybe another instance of JabRef is currently writing to the same file?", exception);
        }
    }

    /**
     * Creates a new output stream to write to or replace the file at the specified path. The backup file is deleted when the write was successful.
     *
     * @param path the path of the file to write to or replace
     */
    public AtomicFileOutputStream(Path path) throws IOException {
        this(path, false);
    }

    private static Path getPathOfTemporaryFile(Path targetFile) {
        return FileUtil.addExtension(targetFile, TEMPORARY_EXTENSION);
    }

    private static Path getPathOfBackupFile(Path targetFile) {
        return FileUtil.addExtension(targetFile, BACKUP_EXTENSION);
    }

    /**
     * Returns the path of the backup copy of the original file (may not exist)
     */
    public Path getBackup() {
        return backupFile;
    }

    /**
     * Override for performance reasons: delegates directly to the wrapped stream instead of
     * the byte-by-byte default of {@link FilterOutputStream}. On failure, the temporary file
     * is removed and the lock released before the exception is rethrown.
     */
    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        try {
            out.write(b, off, len);
        } catch (IOException exception) {
            cleanup();
            throw exception;
        }
    }

    /**
     * Closes the write process to the temporary file but does not commit to the target file.
     * Both the temporary and backup files are removed; the original target is left untouched.
     */
    public void abort() {
        try {
            super.close();
            Files.deleteIfExists(temporaryFile);
            Files.deleteIfExists(backupFile);
        } catch (IOException exception) {
            LOGGER.debug("Unable to abort writing to file {}", temporaryFile, exception);
        }
    }

    /**
     * Best-effort removal of the temporary file and release of its lock. Failures are only
     * logged because cleanup runs on error paths where the primary exception matters more.
     */
    private void cleanup() {
        try {
            Files.deleteIfExists(temporaryFile);
        } catch (IOException exception) {
            LOGGER.debug("Unable to delete file {}", temporaryFile, exception);
        }
        try {
            if (temporaryFileLock != null) {
                temporaryFileLock.release();
            }
        } catch (IOException exception) {
            LOGGER.warn("Unable to release lock on file {}", temporaryFile, exception);
        }
    }

    /**
     * Performs the final operations to move the temporary file to its final destination:
     * flush and sync the temporary file, back up the existing target (preserving POSIX
     * permissions where possible), atomically move the temporary file into place, and
     * optionally delete the backup.
     */
    @Override
    public void close() throws IOException {
        try {
            try {
                // Make sure we have written everything to the temporary file
                flush();
                if (out instanceof FileOutputStream) {
                    ((FileOutputStream) out).getFD().sync();
                }
            } catch (IOException exception) {
                // Try to close nonetheless
                super.close();
                throw exception;
            }
            super.close();

            // We successfully wrote everything to the temporary file, lets copy it to the correct place
            // First, make backup of original file and try to save file permissions to restore them later (by default: 664)
            Set<PosixFilePermission> oldFilePermissions = EnumSet.of(PosixFilePermission.OWNER_READ,
                    PosixFilePermission.OWNER_WRITE,
                    PosixFilePermission.GROUP_READ,
                    PosixFilePermission.GROUP_WRITE,
                    PosixFilePermission.OTHERS_READ);
            if (Files.exists(targetFile)) {
                Files.copy(targetFile, backupFile, StandardCopyOption.REPLACE_EXISTING);
                if (FileUtil.IS_POSIX_COMPILANT) {
                    try {
                        oldFilePermissions = Files.getPosixFilePermissions(targetFile);
                    } catch (IOException exception) {
                        LOGGER.warn("Error getting file permissions for file {}.", targetFile, exception);
                    }
                }
            }

            // Move temporary file (replace original if it exists)
            Files.move(temporaryFile, targetFile, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);

            // Restore file permissions
            if (FileUtil.IS_POSIX_COMPILANT) {
                try {
                    Files.setPosixFilePermissions(targetFile, oldFilePermissions);
                } catch (IOException exception) {
                    LOGGER.warn("Error writing file permissions to file {}.", targetFile, exception);
                }
            }

            if (!keepBackup) {
                // Remove backup file
                Files.deleteIfExists(backupFile);
            }
        } finally {
            // Remove temporary file (but not the backup!)
            cleanup();
        }
    }

    @Override
    public void flush() throws IOException {
        try {
            super.flush();
        } catch (IOException exception) {
            cleanup();
            throw exception;
        }
    }

    @Override
    public void write(int b) throws IOException {
        try {
            super.write(b);
        } catch (IOException exception) {
            cleanup();
            throw exception;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken;
import static org.elasticsearch.common.xcontent.XContentParserUtils.throwUnknownField;
public final class SearchHits implements Streamable, ToXContent, Iterable<SearchHit> {

    /**
     * Returns a fresh empty instance.
     */
    public static SearchHits empty() {
        // We shouldn't use static final instance, since that could directly be returned by native transport clients
        return new SearchHits(EMPTY, 0, 0);
    }

    public static final SearchHit[] EMPTY = new SearchHit[0];

    private SearchHit[] hits;

    // NOTE(review): public mutable field kept for backwards compatibility — callers mutate it
    // directly; prefer getTotalHits() for reads.
    public long totalHits;

    private float maxScore;

    SearchHits() {
    }

    public SearchHits(SearchHit[] hits, long totalHits, float maxScore) {
        this.hits = hits;
        this.totalHits = totalHits;
        this.maxScore = maxScore;
    }

    /**
     * Assigns the given shard target to every hit in this result set.
     */
    public void shardTarget(SearchShardTarget shardTarget) {
        for (SearchHit hit : hits) {
            hit.shard(shardTarget);
        }
    }

    /**
     * The total number of hits that matches the search request.
     */
    public long getTotalHits() {
        return totalHits;
    }

    /**
     * The maximum score of this query.
     */
    public float getMaxScore() {
        return maxScore;
    }

    /**
     * The hits of the search request (based on the search type, and from / size provided).
     */
    public SearchHit[] getHits() {
        return this.hits;
    }

    /**
     * Return the hit as the provided position.
     */
    public SearchHit getAt(int position) {
        return hits[position];
    }

    @Override
    public Iterator<SearchHit> iterator() {
        return Arrays.stream(getHits()).iterator();
    }

    public SearchHit[] internalHits() {
        return this.hits;
    }

    static final class Fields {
        static final String HITS = "hits";
        static final String TOTAL = "total";
        static final String MAX_SCORE = "max_score";
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(Fields.HITS);
        builder.field(Fields.TOTAL, totalHits);
        // NaN is not representable in JSON, so it is rendered as a null field (and parsed back
        // to NaN in fromXContent).
        if (Float.isNaN(maxScore)) {
            builder.nullField(Fields.MAX_SCORE);
        } else {
            builder.field(Fields.MAX_SCORE, maxScore);
        }
        builder.field(Fields.HITS);
        builder.startArray();
        for (SearchHit hit : hits) {
            hit.toXContent(builder, params);
        }
        builder.endArray();
        builder.endObject();
        return builder;
    }

    /**
     * Parses a {@code SearchHits} object from its XContent representation (the inverse of
     * {@link #toXContent}). Unknown fields cause an exception.
     */
    public static SearchHits fromXContent(XContentParser parser) throws IOException {
        if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
            parser.nextToken();
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
        }
        XContentParser.Token token = parser.currentToken();
        String currentFieldName = null;
        List<SearchHit> hits = new ArrayList<>();
        long totalHits = 0;
        float maxScore = 0f;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                if (Fields.TOTAL.equals(currentFieldName)) {
                    totalHits = parser.longValue();
                } else if (Fields.MAX_SCORE.equals(currentFieldName)) {
                    maxScore = parser.floatValue();
                } else {
                    throwUnknownField(currentFieldName, parser.getTokenLocation());
                }
            } else if (token == XContentParser.Token.VALUE_NULL) {
                if (Fields.MAX_SCORE.equals(currentFieldName)) {
                    maxScore = Float.NaN; // NaN gets rendered as null-field
                } else {
                    throwUnknownField(currentFieldName, parser.getTokenLocation());
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    hits.add(SearchHit.fromXContent(parser));
                }
            }
        }
        SearchHits searchHits = new SearchHits(hits.toArray(new SearchHit[0]), totalHits,
                maxScore);
        return searchHits;
    }

    public static SearchHits readSearchHits(StreamInput in) throws IOException {
        SearchHits hits = new SearchHits();
        hits.readFrom(in);
        return hits;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        totalHits = in.readVLong();
        maxScore = in.readFloat();
        int size = in.readVInt();
        if (size == 0) {
            hits = EMPTY;
        } else {
            hits = new SearchHit[size];
            for (int i = 0; i < hits.length; i++) {
                hits[i] = SearchHit.readSearchHit(in);
            }
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVLong(totalHits);
        out.writeFloat(maxScore);
        out.writeVInt(hits.length);
        if (hits.length > 0) {
            for (SearchHit hit : hits) {
                hit.writeTo(out);
            }
        }
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        SearchHits other = (SearchHits) obj;
        // Float.compare matches the previous boxed-equals semantics (NaN == NaN, +0.0 != -0.0)
        // without autoboxing the primitives on every comparison.
        return totalHits == other.totalHits
                && Float.compare(maxScore, other.maxScore) == 0
                && Arrays.equals(hits, other.hits);
    }

    @Override
    public int hashCode() {
        return Objects.hash(totalHits, maxScore, Arrays.hashCode(hits));
    }
}
| |
package gr.plushost.prototypeapp.activities;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.res.Configuration;
import android.net.Uri;
import android.support.v4.app.FragmentManager;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.GravityCompat;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.widget.Toolbar;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.GridView;
import android.widget.ListAdapter;
import android.widget.TextView;
import android.widget.Toast;
import com.beardedhen.androidbootstrap.utils.AutoResizeTextView;
import com.github.johnpersano.supertoasts.SuperToast;
import com.github.johnpersano.supertoasts.util.Style;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
import gr.plushost.prototypeapp.R;
import gr.plushost.prototypeapp.adapters.listviews.PopularItemsMainListAdapter;
import gr.plushost.prototypeapp.adapters.viewpagers.BannersImageSlideAdapter;
import gr.plushost.prototypeapp.adapters.listviews.CategoriesListAdapter;
import gr.plushost.prototypeapp.aplications.StoreApplication;
import gr.plushost.prototypeapp.exceptionhandlers.StoreExceptionHandler;
import gr.plushost.prototypeapp.fragments.NavigationDrawerFragment;
import gr.plushost.prototypeapp.indicators.CirclePageIndicator;
import gr.plushost.prototypeapp.items.BannerItem;
import gr.plushost.prototypeapp.items.CategoryItem;
import gr.plushost.prototypeapp.items.MiniProductItem;
import gr.plushost.prototypeapp.listeners.CartMenuItemStuffListener;
import gr.plushost.prototypeapp.network.NoNetworkHandler;
import gr.plushost.prototypeapp.network.ServiceHandler;
import gr.plushost.prototypeapp.util.Helper;
import gr.plushost.prototypeapp.widgets.ExpandableHeightGridView;
import gr.plushost.prototypeapp.widgets.ExpandableHeightListView;
import gr.plushost.prototypeapp.widgets.HorizontalListView;
import gr.plushost.prototypeapp.widgets.LockableScrollView;
import gr.plushost.prototypeapp.widgets.NestedGridView;
import gr.plushost.prototypeapp.widgets.NestedListView;
import gr.plushost.prototypeapp.widgets.SmartViewPager;
public class MainActivity extends DrawerActivity {
Activity act = this;
List<CategoryItem> categoriesList;
ArrayList<String> bannerImgs;
NestedListView listView;
CategoriesListAdapter categoriesListAdapter;
SmartViewPager viewPagerImg;
CirclePageIndicator circlePageIndicator;
TextView searchTextView;
boolean doubleBackToExitPressedOnce;
LockableScrollView lockableScrollView;
List<BannerItem> bannersList;
int hot_number = 0;
TextView ui_hot = null;
NoNetworkHandler noNetworkHandler;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getIntent().getBooleanExtra("EXIT", false)) {
finish();
//overridePendingTransition(0, R.anim.slide_out_bottom);
android.os.Process.killProcess(android.os.Process.myPid());
System.exit(0);
}
noNetworkHandler = new NoNetworkHandler(this);
setContentView(R.layout.activity_main);
Toolbar toolbar = (Toolbar) findViewById(R.id.action_bar);
setSupportActionBar(toolbar);
getSupportActionBar().setLogo(R.drawable.ic_launcher);
FragmentManager fragmentManager = getSupportFragmentManager();
FragmentTransaction fragmentTransaction = fragmentManager.beginTransaction();
NavigationDrawerFragment fragment = new NavigationDrawerFragment();
fragmentTransaction.add(R.id.nav_drawer_fragment, fragment);
fragmentTransaction.commit();
super.set(toolbar, true, fragment);
((TextView) findViewById(R.id.phonetxt)).setText(getResources().getString(R.string.txt_phone_title) + " " + getResources().getString(R.string.store_phone));
searchTextView = (TextView) findViewById(R.id.searchTextView);
searchTextView.setHint(String.format(getResources().getString(R.string.search_text_on_box), getResources().getString(R.string.store_name)));
/*if (!ServiceHandler.isNetworkAvailable(this)) {
AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(this);
alertDialogBuilder.setTitle(getResources().getString(R.string.not_internet_title));
alertDialogBuilder
.setMessage(getResources().getString(R.string.not_internet_msg))
.setCancelable(false)
.setIcon(R.drawable.sing)
.setPositiveButton(getResources().getString(R.string.not_internet_ok_btn), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
dialog.cancel();
android.os.Process.killProcess(android.os.Process.myPid());
System.exit(1);
}
})
.setNegativeButton(getResources().getString(R.string.not_internet_settings_btn), new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
startActivityForResult(new Intent(android.provider.Settings.ACTION_SETTINGS), 0);
android.os.Process.killProcess(android.os.Process.myPid());
System.exit(1);
}
});
AlertDialog alertDialog = alertDialogBuilder.create();
alertDialog.show();
}*/
listView = (NestedListView) findViewById(R.id.list_categories);
viewPagerImg = (SmartViewPager) findViewById(R.id.viewPagerImg);
circlePageIndicator = (CirclePageIndicator) findViewById(R.id.indicatorImg);
lockableScrollView = (LockableScrollView) findViewById(R.id.mainScrollView);
//String jsonMyObject;
/*Bundle extras = getIntent().getExtras();
if (extras != null) {
jsonMyObject = extras.getString("categories_list");
if (new Gson().fromJson(jsonMyObject, new TypeToken<List<CategoryItem>>() {
}.getType()) instanceof ArrayList) {*/
categoriesList = StoreApplication.getInstance().getCategoryItemList(); //(ArrayList<CategoryItem>) new Gson().fromJson(jsonMyObject, new TypeToken<List<CategoryItem>>() {
/*}.getType());
}
jsonMyObject = extras.getString("banners_list");
if (new Gson().fromJson(jsonMyObject, new TypeToken<List<BannerItem>>() {
}.getType()) instanceof ArrayList) {*/
bannersList = StoreApplication.getInstance().getBannerItemList();//(ArrayList<BannerItem>) new Gson().fromJson(jsonMyObject, new TypeToken<List<BannerItem>>() {
/*}.getType());
}
}*/
if (categoriesList.size() > 0) {
categoriesListAdapter = new CategoriesListAdapter(this, categoriesList, -1);
listView.setAdapter(categoriesListAdapter);
Helper.getListViewSize(listView, act);
//listView.setExpanded(true);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
List<CategoryItem> itemList = categoriesListAdapter.getList();
CategoryItem item = itemList.get(position);
if (item.is_parent()) {
Intent i = new Intent(act, CategoriesActivity.class);
//i.putExtra("categories_list", new Gson().toJson(categoriesList));
i.putExtra("parent_id", item.getID());
i.putExtra("category_name", item.getTitle());
startActivity(i);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
} else {
Intent i = new Intent(act, ProductsActivity.class);
i.putExtra("category_id", item.getID());
i.putExtra("category_name", item.getTitle());
startActivity(i);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
}
});
bannerImgs = new ArrayList<>();
for (BannerItem bannerItem : bannersList) {
bannerImgs.add(bannerItem.getImg_url());
}
viewPagerImg.setAdapter(new BannersImageSlideAdapter(this, bannerImgs));
viewPagerImg.runnable(bannerImgs.size(), 7000);
//viewPagerImg.setPageTransformer(true, new ZoomOutSlideTransformer());
circlePageIndicator.setFillColor(Color.parseColor("#FF6600"));
circlePageIndicator.setPageColor(Color.parseColor("#AAEEEEEE"));
circlePageIndicator.setStrokeColor(Color.parseColor("#AAEEEEEE"));
circlePageIndicator.setViewPager(viewPagerImg);
listView.setFocusable(false);
if(StoreApplication.getInstance().getFreeShippingPromotionItem().isHas_free_ship()){
findViewById(R.id.freeShipRel).setVisibility(View.VISIBLE);
((TextView) findViewById(R.id.freeshiptxt2)).setText(String.format(getResources().getString(R.string.txt_freeship_body), StoreApplication.getCurrency_symbol(act) + StoreApplication.getInstance().getFreeShippingPromotionItem().getValue()));
}
else{
findViewById(R.id.freeShipRel).setVisibility(View.GONE);
}
if(noNetworkHandler.showDialog())
new GetCartCount().execute();
}
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
setContentView(R.layout.activity_main);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_main, menu);
menu.findItem(R.id.action_cart).setActionView(R.layout.layout_notification_cart_icon);
final View menu_hotlist = MenuItemCompat.getActionView(menu.findItem(R.id.action_cart));//.getActionView();
ui_hot = (TextView) menu_hotlist.findViewById(R.id.hotlist_hot);
updateHotCount(StoreApplication.getCartCount());
new CartMenuItemStuffListener(menu_hotlist, getResources().getString(R.string.menu_cart_name)) {
@Override
public void onClick(View v) {
if (getSharedPreferences("ShopPrefs", 0).getBoolean("is_connected", false)) {
Intent i = new Intent(act, ShoppingCartActivity.class);
i.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
startActivity(i);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
} else {
Intent i = new Intent(act, LoginActivity.class);
i.putExtra("page", 0);
startActivity(i);
}
}
};
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == R.id.action_settings) {
Intent i = new Intent(act, SettingsActivity.class);
startActivity(i);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
return true;
} else if (id == R.id.action_search) {
Intent i = new Intent(this, SearchActivity.class);
startActivity(i);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
return super.onOptionsItemSelected(item);
}
public void updateHotCount(final int new_hot_number) {
hot_number = new_hot_number;
if (ui_hot == null) return;
runOnUiThread(new Runnable() {
@Override
public void run() {
if (new_hot_number == 0)
ui_hot.setVisibility(View.INVISIBLE);
else {
ui_hot.setVisibility(View.VISIBLE);
if (new_hot_number < 10)
ui_hot.setText(Integer.toString(new_hot_number));
else
ui_hot.setText("9+");
}
}
});
}
public void onResume() {
super.onResume();
viewPagerImg.setSliding(true);
if(noNetworkHandler.showDialog())
new GetCartCount().execute();
}
public void onPause() {
super.onPause();
viewPagerImg.setSliding(false);
}
public class GetCartCount extends AsyncTask<Void, Void, Integer> {
@Override
protected void onPreExecute(){
updateHotCount(StoreApplication.getCartCount());
}
@Override
protected Integer doInBackground(Void... voids) {
String response = StoreApplication.getServiceHandler(act).makeServiceCall(act, true, String.format(act.getResources().getString(R.string.url_cart_count), act.getSharedPreferences("ShopPrefs", 0).getString("store_language", ""), act.getSharedPreferences("ShopPrefs", 0).getString("store_currency", "")), ServiceHandler.GET);
try {
JSONObject jsonObject = new JSONObject(response);
if (jsonObject.getString("code").equals("0x0000")) {
return jsonObject.getJSONObject("info").getInt("cart_items_count");
}
return 0;
} catch (Exception e) {
e.printStackTrace();
}
return 0;
}
@Override
protected void onPostExecute(Integer result) {
StoreApplication.setCartCount(result);
updateHotCount(result);
}
}
@Override
public void onBackPressed() {
if (drawerLayout.isDrawerOpen(GravityCompat.START)) {
drawerLayout.closeDrawer(GravityCompat.START);
} else {
if (doubleBackToExitPressedOnce) {
super.onBackPressed();
finish();
overridePendingTransition(0, R.anim.slide_out_bottom);
/*Intent intent = new Intent(getApplicationContext(), MainActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
intent.putExtra("EXIT", true);
startActivity(intent);*/
return;
}
this.doubleBackToExitPressedOnce = true;
SuperToast.create(this, getResources().getString(R.string.back_toast_warning_msg), SuperToast.Duration.SHORT, Style.getStyle(Style.PURPLE, SuperToast.Animations.POPUP)).show();
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
doubleBackToExitPressedOnce = false;
}
}, 2000);
}
}
public void openSearch(View view) {
Intent i = new Intent(this, SearchActivity.class);
startActivity(i);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openBtn1(View view) {
Intent browserIntent = new Intent(this, WebViewActivity.class);
browserIntent.putExtra("title", act.getResources().getString(R.string.btn_txt_1));
browserIntent.putExtra("url", act.getResources().getString(R.string.btn_url_1));
browserIntent.putExtra("user_agent", "pc");
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openBtn2(View view) {
Intent browserIntent = new Intent(this, WebViewActivity.class);
browserIntent.putExtra("title", act.getResources().getString(R.string.btn_txt_2));
browserIntent.putExtra("url", act.getResources().getString(R.string.btn_url_2));
browserIntent.putExtra("user_agent", "pc");
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openBtn3(View view) {
Intent browserIntent = new Intent(this, WebViewActivity.class);
browserIntent.putExtra("title", act.getResources().getString(R.string.btn_txt_3));
browserIntent.putExtra("url", act.getResources().getString(R.string.btn_url_3));
browserIntent.putExtra("user_agent", "pc");
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openBtn4(View view) {
Intent browserIntent = new Intent(this, WebViewActivity.class);
browserIntent.putExtra("title", act.getResources().getString(R.string.btn_txt_4));
browserIntent.putExtra("url", act.getResources().getString(R.string.btn_url_4));
browserIntent.putExtra("user_agent", "mobile");
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openBtn5(View view) {
Intent browserIntent = new Intent(this, WebViewActivity.class);
browserIntent.putExtra("title", act.getResources().getString(R.string.btn_txt_5));
browserIntent.putExtra("url", act.getResources().getString(R.string.btn_url_5));
browserIntent.putExtra("user_agent", "mobile");
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openOroi(View view) {
Intent browserIntent = new Intent(this, WebViewActivity.class);
browserIntent.putExtra("title", getResources().getString(R.string.txt_btn_terms));
browserIntent.putExtra("url", act.getResources().getString(R.string.txt_url_terms));
browserIntent.putExtra("user_agent", "pc");
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openProsopika(View view) {
Intent browserIntent = new Intent(this, WebViewActivity.class);
browserIntent.putExtra("title", getResources().getString(R.string.txt_btn_privacy));
browserIntent.putExtra("url", act.getResources().getString(R.string.txt_url_privacy));
browserIntent.putExtra("user_agent", "pc");
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openStoreSite(View view){
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getResources().getString(R.string.auth_host)));
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openSocial1(View view){
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getResources().getString(R.string.social_url_1)));
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openSocial2(View view){
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getResources().getString(R.string.social_url_2)));
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openSocial3(View view){
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getResources().getString(R.string.social_url_3)));
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
public void openSocial4(View view){
Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(getResources().getString(R.string.social_url_4)));
startActivity(browserIntent);
overridePendingTransition(R.anim.slide_in_right, R.anim.slide_out_left);
}
}
| |
package net.glowstone.inventory;
import com.google.common.collect.Iterators;
import net.glowstone.GlowServer;
import org.bukkit.Material;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.inventory.*;
import java.io.InputStream;
import java.util.*;
/**
 * Manager for crafting and smelting recipes.
 */
public final class CraftingManager implements Iterable<Recipe> {

    private final List<ShapedRecipe> shapedRecipes = new ArrayList<>();
    private final List<ShapelessRecipe> shapelessRecipes = new ArrayList<>();
    private final List<FurnaceRecipe> furnaceRecipes = new ArrayList<>();
    // Material is an enum, so EnumMap is more compact and faster than HashMap here.
    private final Map<Material, Integer> furnaceFuels = new EnumMap<>(Material.class);

    /**
     * Load the default recipe and fuel lists, then log a summary of what was registered.
     */
    public void initialize() {
        resetRecipes();
        // Report stats
        GlowServer.logger.info("Recipes: " +
                shapedRecipes.size() + " shaped, " +
                shapelessRecipes.size() + " shapeless, " +
                furnaceRecipes.size() + " furnace, " +
                furnaceFuels.size() + " fuels.");
    }

    /**
     * Adds a recipe to the crafting manager.
     * @param recipe The recipe to add.
     * @return Whether adding the recipe was successful (false for unsupported recipe types).
     */
    public boolean addRecipe(Recipe recipe) {
        if (recipe instanceof ShapedRecipe) {
            shapedRecipes.add((ShapedRecipe) recipe);
            return true;
        } else if (recipe instanceof ShapelessRecipe) {
            shapelessRecipes.add((ShapelessRecipe) recipe);
            return true;
        } else if (recipe instanceof FurnaceRecipe) {
            furnaceRecipes.add((FurnaceRecipe) recipe);
            return true;
        } else {
            return false;
        }
    }

    /**
     * Get a furnace recipe from the crafting manager.
     * @param input The furnace input; may be null, in which case no recipe matches.
     * @return The FurnaceRecipe, or null if none is found.
     */
    public FurnaceRecipe getFurnaceRecipe(ItemStack input) {
        for (FurnaceRecipe recipe : furnaceRecipes) {
            if (matchesWildcard(recipe.getInput(), input)) {
                return recipe;
            }
        }
        return null;
    }

    /**
     * Get how long a given fuel material will burn for.
     * @param material The fuel material.
     * @return The time in ticks, or 0 if that material does not burn.
     */
    public int getFuelTime(Material material) {
        // Single map lookup instead of containsKey followed by get.
        return furnaceFuels.getOrDefault(material, 0);
    }

    /**
     * Remove enough items from the given item list to form the given recipe.
     * @param items The items to remove the ingredients from.
     * @param recipe A recipe known to match the items.
     */
    public void removeItems(ItemStack[] items, Recipe recipe) {
        // todo
    }

    /**
     * Get a shaped or shapeless recipe from the crafting manager.
     * @param items An array of items with null being empty slots. Length should be a perfect square.
     * @return The ShapedRecipe or ShapelessRecipe that matches the input, or null if none match.
     * @throws IllegalArgumentException if the array length is not a perfect square
     */
    public Recipe getCraftingRecipe(ItemStack[] items) {
        int size = (int) Math.sqrt(items.length);
        if (size * size != items.length) {
            throw new IllegalArgumentException("ItemStack list was not square (was " + items.length + ")");
        }

        ShapedRecipe result = getShapedRecipe(size, items);
        if (result != null) {
            return result;
        }

        // Shaped recipes also match their horizontal mirror image, so retry with
        // each row of the grid reversed left-to-right.
        ItemStack[] reversedItems = new ItemStack[items.length];
        for (int row = 0; row < size; ++row) {
            for (int col = 0; col < size; ++col) {
                int col2 = size - 1 - col;
                reversedItems[row * size + col] = items[row * size + col2];
            }
        }

        // this check saves the trouble of iterating through all the recipes again
        if (!Arrays.equals(items, reversedItems)) {
            result = getShapedRecipe(size, reversedItems);
            if (result != null) {
                return result;
            }
        }

        return getShapelessRecipe(items);
    }

    /**
     * Find a shaped recipe matching the given crafting grid exactly.
     *
     * @param size the width/height of the (square) grid
     * @param items the grid contents, row-major, null for empty slots
     * @return the first matching recipe, or null
     */
    private ShapedRecipe getShapedRecipe(int size, ItemStack[] items) {
        for (ShapedRecipe recipe : shapedRecipes) {
            Map<Character, ItemStack> ingredients = recipe.getIngredientMap();
            String[] shape = recipe.getShape();

            int rows = shape.length, cols = 0;
            for (String row : shape) {
                if (row.length() > cols) {
                    cols = row.length();
                }
            }
            if (rows == 0 || cols == 0) continue;

            // outer loop: try at each possible starting position
            for (int rStart = 0; rStart <= size - rows; ++rStart) {
                position:
                for (int cStart = 0; cStart <= size - cols; ++cStart) {
                    // inner loop: verify recipe against this position
                    for (int row = 0; row < rows; ++row) {
                        for (int col = 0; col < cols; ++col) {
                            ItemStack given = items[(rStart + row) * size + cStart + col];
                            // rows shorter than the widest row are padded with ' ' (empty)
                            char ingredientChar = shape[row].length() > col ? shape[row].charAt(col) : ' ';
                            ItemStack expected = ingredients.get(ingredientChar);

                            // check for mismatch in presence of an item in that slot at all
                            if (expected == null) {
                                if (given != null) {
                                    continue position;
                                } else {
                                    continue; // good match
                                }
                            } else if (given == null) {
                                continue position;
                            }

                            // check for type and data match
                            if (!matchesWildcard(expected, given)) {
                                continue position;
                            }
                        }
                    }

                    // also check that no items outside the recipe size are present
                    for (int row = 0; row < size; row++) {
                        for (int col = 0; col < size; col++) {
                            // if this position is outside the recipe and non-null, fail
                            if ((row < rStart || row >= rStart + rows || col < cStart || col >= cStart + cols) &&
                                    items[row * size + col] != null) {
                                continue position;
                            }
                        }
                    }

                    // recipe matches and zero items outside the recipe part.
                    return recipe;
                }
            } // end position loop
        } // end recipe loop
        return null;
    }

    /**
     * Find a shapeless recipe whose ingredient multiset exactly matches the given items.
     *
     * @param items the grid contents, null for empty slots
     * @return the first matching recipe, or null
     */
    private ShapelessRecipe getShapelessRecipe(ItemStack[] items) {
        recipe:
        for (ShapelessRecipe recipe : shapelessRecipes) {
            boolean[] accountedFor = new boolean[items.length];

            // Mark empty item slots accounted for
            for (int i = 0; i < items.length; ++i) {
                accountedFor[i] = items[i] == null;
            }

            // Make sure each ingredient in the recipe exists in the inventory
            ingredient:
            for (ItemStack ingredient : recipe.getIngredientList()) {
                for (int i = 0; i < items.length; ++i) {
                    // if this item is not already used and it matches this ingredient...
                    if (!accountedFor[i] && matchesWildcard(ingredient, items[i])) {
                        // ... this item is accounted for and this ingredient is found.
                        accountedFor[i] = true;
                        continue ingredient;
                    }
                }
                // no item matched this ingredient, so the recipe fails
                continue recipe;
            }

            // Make sure inventory has no leftover items
            for (int i = 0; i < items.length; ++i) {
                if (!accountedFor[i]) {
                    continue recipe;
                }
            }

            return recipe;
        }
        return null;
    }

    @Override
    public Iterator<Recipe> iterator() {
        return Iterators.concat(shapedRecipes.iterator(), shapelessRecipes.iterator(), furnaceRecipes.iterator());
    }

    /**
     * Whether the given durability value is the "match any data" wildcard.
     */
    private boolean isWildcard(short data) {
        // old-style wildcards (byte -1) not supported
        return data == Short.MAX_VALUE;
    }

    /**
     * Whether two stacks match by type and durability, honoring the wildcard durability
     * on the expected (recipe-side) stack.
     *
     * @param expected the recipe-side stack; its durability may be the wildcard
     * @param actual the stack being tested
     * @return true on a match; false if either stack is null
     */
    private boolean matchesWildcard(ItemStack expected, ItemStack actual) {
        // Null-safe: a missing stack never matches (previously a null here caused an NPE).
        if (expected == null || actual == null) {
            return false;
        }
        return expected.getType() == actual.getType() && (isWildcard(expected.getDurability()) || expected.getDurability() == actual.getDurability());
    }

    /**
     * Get a list of all recipes for a given item. The stack size is ignored
     * in comparisons. If the durability is -1, it will match any data value.
     * @param result The item whose recipes you want
     * @return The list of recipes
     */
    public List<Recipe> getRecipesFor(ItemStack result) {
        // handling for old-style wildcards
        if (result.getDurability() == -1) {
            result = result.clone();
            result.setDurability(Short.MAX_VALUE);
        }

        List<Recipe> recipes = new LinkedList<>();
        for (Recipe recipe : this) {
            if (matchesWildcard(result, recipe.getResult())) {
                recipes.add(recipe);
            }
        }
        return recipes;
    }

    /**
     * Clear all recipes.
     */
    public void clearRecipes() {
        shapedRecipes.clear();
        shapelessRecipes.clear();
        furnaceRecipes.clear();
        furnaceFuels.clear();
    }

    /**
     * Reset the crafting recipe lists to their default states.
     */
    public void resetRecipes() {
        clearRecipes();
        loadRecipes();

        // Smelting fuels (time is in ticks)
        furnaceFuels.put(Material.COAL, 1600);
        furnaceFuels.put(Material.WOOD, 300);
        furnaceFuels.put(Material.SAPLING, 100);
        furnaceFuels.put(Material.STICK, 100);
        furnaceFuels.put(Material.FENCE, 300);
        furnaceFuels.put(Material.WOOD_STAIRS, 400);
        furnaceFuels.put(Material.TRAP_DOOR, 300);
        furnaceFuels.put(Material.LOG, 300);
        furnaceFuels.put(Material.WORKBENCH, 300);
        furnaceFuels.put(Material.BOOKSHELF, 300);
        furnaceFuels.put(Material.CHEST, 300);
        furnaceFuels.put(Material.JUKEBOX, 300);
        furnaceFuels.put(Material.NOTE_BLOCK, 300);
        furnaceFuels.put(Material.LOCKED_CHEST, 300);
        furnaceFuels.put(Material.LAVA_BUCKET, 20000);
    }

    /**
     * Load default recipes from built-in recipes.yml file.
     */
    @SuppressWarnings("unchecked")
    private void loadRecipes() {
        // Load recipes from recipes.yml file; try-with-resources closes the stream
        // (the previous implementation leaked it).
        try (InputStream in = getClass().getClassLoader().getResourceAsStream("builtin/recipes.yml")) {
            if (in == null) {
                GlowServer.logger.warning("Could not find default recipes on classpath");
                return;
            }
            ConfigurationSection config = YamlConfiguration.loadConfiguration(in);

            // shaped
            for (Map<?, ?> data : config.getMapList("shaped")) {
                ItemStack resultStack = ItemStack.deserialize((Map<String, Object>) data.get("result"));
                ShapedRecipe recipe = new ShapedRecipe(resultStack);
                List<String> shape = (List<String>) data.get("shape");
                recipe.shape(shape.toArray(new String[shape.size()]));
                Map<String, Map<String, Object>> ingreds = (Map<String, Map<String, Object>>) data.get("ingredients");
                for (Map.Entry<String, Map<String, Object>> entry : ingreds.entrySet()) {
                    ItemStack stack = ItemStack.deserialize(entry.getValue());
                    recipe.setIngredient(entry.getKey().charAt(0), stack.getData());
                }
                shapedRecipes.add(recipe);
            }

            // shapeless
            for (Map<?, ?> data : config.getMapList("shapeless")) {
                ItemStack resultStack = ItemStack.deserialize((Map<String, Object>) data.get("result"));
                ShapelessRecipe recipe = new ShapelessRecipe(resultStack);
                List<Map<String, Object>> ingreds = (List<Map<String, Object>>) data.get("ingredients");
                for (Map<String, Object> entry : ingreds) {
                    recipe.addIngredient(ItemStack.deserialize(entry).getData());
                }
                shapelessRecipes.add(recipe);
            }

            // furnace
            for (Map<?, ?> data : config.getMapList("furnace")) {
                ItemStack inputStack = ItemStack.deserialize((Map<String, Object>) data.get("input"));
                ItemStack resultStack = ItemStack.deserialize((Map<String, Object>) data.get("result"));
                furnaceRecipes.add(new FurnaceRecipe(resultStack, inputStack.getData()));
            }
        } catch (IOException e) {
            GlowServer.logger.warning("Could not read default recipes: " + e);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.common.block;
import io.airlift.slice.SliceInput;
import io.airlift.slice.SliceOutput;
import org.openjdk.jol.info.ClassLayout;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.OptionalInt;
import java.util.function.BiConsumer;
import static com.facebook.presto.common.block.BlockUtil.calculateBlockResetSize;
import static com.facebook.presto.common.block.BlockUtil.checkArrayRange;
import static com.facebook.presto.common.block.BlockUtil.checkValidRegion;
import static com.facebook.presto.common.block.BlockUtil.internalPositionInRange;
import static io.airlift.slice.SizeOf.sizeOf;
import static java.lang.Math.max;
import static java.lang.String.format;
/**
 * A {@link BlockBuilder} for single-byte values. Positions are accumulated into parallel
 * {@code values}/{@code valueIsNull} arrays and emitted by {@link #build()} as a
 * {@link ByteArrayBlock}, or as a run-length-encoded all-null block when no non-null
 * value was ever written.
 */
public class ByteArrayBlockBuilder
        implements BlockBuilder
{
    private static final int INSTANCE_SIZE = ClassLayout.parseClass(ByteArrayBlockBuilder.class).instanceSize();
    // Single all-null position, reused to build RLE blocks when every position is null.
    private static final Block NULL_VALUE_BLOCK = new ByteArrayBlock(0, 1, new boolean[] {true}, new byte[1]);

    @Nullable
    private BlockBuilderStatus blockBuilderStatus;
    // true once the arrays have been grown from their zero-length initial state
    private boolean initialized;
    // sizing hint for the first growCapacity() call
    private int initialEntryCount;
    private int positionCount;
    private boolean hasNullValue;
    private boolean hasNonNullValue;

    // it is assumed that these arrays are the same length
    private boolean[] valueIsNull = new boolean[0];
    private byte[] values = new byte[0];

    private long retainedSizeInBytes;

    /**
     * @param blockBuilderStatus optional memory-accounting sink notified of bytes written; may be null
     * @param expectedEntries expected number of positions; clamped to at least 1
     */
    public ByteArrayBlockBuilder(@Nullable BlockBuilderStatus blockBuilderStatus, int expectedEntries)
    {
        this.blockBuilderStatus = blockBuilderStatus;
        this.initialEntryCount = max(expectedEntries, 1);

        updateDataSize();
    }

    @Override
    public BlockBuilder writeByte(int value)
    {
        if (values.length <= positionCount) {
            growCapacity();
        }

        values[positionCount] = (byte) value;

        hasNonNullValue = true;
        positionCount++;
        if (blockBuilderStatus != null) {
            // one byte for the value plus one byte for the null flag
            blockBuilderStatus.addBytes(Byte.BYTES + Byte.BYTES);
        }
        return this;
    }

    @Override
    public BlockBuilder closeEntry()
    {
        // a single-byte entry is complete as soon as it is written; nothing to finalize
        return this;
    }

    @Override
    public BlockBuilder appendNull()
    {
        if (values.length <= positionCount) {
            growCapacity();
        }

        valueIsNull[positionCount] = true;

        hasNullValue = true;
        positionCount++;
        if (blockBuilderStatus != null) {
            // same accounting as writeByte: value byte plus null-flag byte
            blockBuilderStatus.addBytes(Byte.BYTES + Byte.BYTES);
        }
        return this;
    }

    @Override
    public Block build()
    {
        if (!hasNonNullValue) {
            // every position is null: an RLE wrapper over one null position is far cheaper
            return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, positionCount);
        }
        return new ByteArrayBlock(0, positionCount, hasNullValue ? valueIsNull : null, values);
    }

    @Override
    public BlockBuilder newBlockBuilderLike(BlockBuilderStatus blockBuilderStatus)
    {
        return new ByteArrayBlockBuilder(blockBuilderStatus, calculateBlockResetSize(positionCount));
    }

    @Override
    public BlockBuilder newBlockBuilderLike(BlockBuilderStatus blockBuilderStatus, int expectedEntries)
    {
        return new ByteArrayBlockBuilder(blockBuilderStatus, max(calculateBlockResetSize(positionCount), expectedEntries));
    }

    // Grows both parallel arrays: first to initialEntryCount, afterwards by the
    // standard BlockUtil growth policy.
    private void growCapacity()
    {
        int newSize;
        if (initialized) {
            newSize = BlockUtil.calculateNewArraySize(values.length);
        }
        else {
            newSize = initialEntryCount;
            initialized = true;
        }

        valueIsNull = Arrays.copyOf(valueIsNull, newSize);
        values = Arrays.copyOf(values, newSize);
        updateDataSize();
    }

    // Recomputes retainedSizeInBytes after any array (re)allocation.
    private void updateDataSize()
    {
        retainedSizeInBytes = INSTANCE_SIZE + sizeOf(valueIsNull) + sizeOf(values);
        if (blockBuilderStatus != null) {
            retainedSizeInBytes += BlockBuilderStatus.INSTANCE_SIZE;
        }
    }

    @Override
    public long getSizeInBytes()
    {
        return ByteArrayBlock.SIZE_IN_BYTES_PER_POSITION * (long) positionCount;
    }

    @Override
    public long getRegionSizeInBytes(int position, int length)
    {
        return ByteArrayBlock.SIZE_IN_BYTES_PER_POSITION * (long) length;
    }

    @Override
    public OptionalInt fixedSizeInBytesPerPosition()
    {
        return OptionalInt.of(ByteArrayBlock.SIZE_IN_BYTES_PER_POSITION);
    }

    @Override
    public long getPositionsSizeInBytes(boolean[] usedPositions, int usedPositionCount)
    {
        return ByteArrayBlock.SIZE_IN_BYTES_PER_POSITION * (long) usedPositionCount;
    }

    @Override
    public long getRetainedSizeInBytes()
    {
        return retainedSizeInBytes;
    }

    @Override
    public long getEstimatedDataSizeForStats(int position)
    {
        return isNull(position) ? 0 : Byte.BYTES;
    }

    @Override
    public void retainedBytesForEachPart(BiConsumer<Object, Long> consumer)
    {
        consumer.accept(values, sizeOf(values));
        consumer.accept(valueIsNull, sizeOf(valueIsNull));
        consumer.accept(this, (long) INSTANCE_SIZE);
    }

    @Override
    public int getPositionCount()
    {
        return positionCount;
    }

    @Override
    public byte getByte(int position)
    {
        checkReadablePosition(position);
        return values[position];
    }

    @Override
    public boolean mayHaveNull()
    {
        return hasNullValue;
    }

    @Override
    public boolean isNull(int position)
    {
        checkReadablePosition(position);
        return valueIsNull[position];
    }

    @Override
    public void writePositionTo(int position, BlockBuilder blockBuilder)
    {
        checkReadablePosition(position);
        blockBuilder.writeByte(values[position]);
        blockBuilder.closeEntry();
    }

    @Override
    public void writePositionTo(int position, SliceOutput output)
    {
        // wire format: a 0/1 null marker byte, followed by the value byte if present
        if (isNull(position)) {
            output.writeByte(0);
        }
        else {
            output.writeByte(1);
            output.writeByte(values[position]);
        }
    }

    @Override
    public BlockBuilder readPositionFrom(SliceInput input)
    {
        // inverse of writePositionTo(position, SliceOutput)
        boolean isNull = input.readByte() == 0;
        if (isNull) {
            appendNull();
        }
        else {
            writeByte(input.readByte());
            closeEntry();
        }
        return this;
    }

    @Override
    public Block getSingleValueBlock(int position)
    {
        checkReadablePosition(position);
        return new ByteArrayBlock(
                0,
                1,
                valueIsNull[position] ? new boolean[] {true} : null,
                new byte[] {values[position]});
    }

    @Override
    public Block copyPositions(int[] positions, int offset, int length)
    {
        checkArrayRange(positions, offset, length);
        if (!hasNonNullValue) {
            return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
        }
        boolean[] newValueIsNull = null;
        if (hasNullValue) {
            newValueIsNull = new boolean[length];
        }
        byte[] newValues = new byte[length];
        for (int i = 0; i < length; i++) {
            int position = positions[offset + i];
            checkReadablePosition(position);
            if (hasNullValue) {
                newValueIsNull[i] = valueIsNull[position];
            }
            newValues[i] = values[position];
        }
        return new ByteArrayBlock(0, length, newValueIsNull, newValues);
    }

    @Override
    public Block getRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);

        if (!hasNonNullValue) {
            return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
        }
        // shares the backing arrays; only the offset/length window changes
        return new ByteArrayBlock(positionOffset, length, hasNullValue ? valueIsNull : null, values);
    }

    @Override
    public Block copyRegion(int positionOffset, int length)
    {
        checkValidRegion(getPositionCount(), positionOffset, length);

        if (!hasNonNullValue) {
            return new RunLengthEncodedBlock(NULL_VALUE_BLOCK, length);
        }
        boolean[] newValueIsNull = null;
        if (hasNullValue) {
            newValueIsNull = Arrays.copyOfRange(valueIsNull, positionOffset, positionOffset + length);
        }
        byte[] newValues = Arrays.copyOfRange(values, positionOffset, positionOffset + length);
        return new ByteArrayBlock(0, length, newValueIsNull, newValues);
    }

    @Override
    public String getEncodingName()
    {
        return ByteArrayBlockEncoding.NAME;
    }

    @Override
    public String toString()
    {
        return format("ByteArrayBlockBuilder(%d){positionCount=%d}", hashCode(), getPositionCount());
    }

    private void checkReadablePosition(int position)
    {
        if (position < 0 || position >= getPositionCount()) {
            // include the offending values so failures are diagnosable
            // (the previous message carried no context at all)
            throw new IllegalArgumentException(format("position %s is not valid (positionCount %s)", position, getPositionCount()));
        }
    }

    @Override
    public boolean isNullUnchecked(int internalPosition)
    {
        assert mayHaveNull() : "no nulls present";
        assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
        return valueIsNull[internalPosition];
    }

    @Override
    public byte getByteUnchecked(int internalPosition)
    {
        assert internalPositionInRange(internalPosition, getOffsetBase(), getPositionCount());
        return values[internalPosition];
    }

    @Override
    public int getOffsetBase()
    {
        // this builder always writes from the start of its arrays
        return 0;
    }
}
| |
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.connector.mysql;
import com.github.shyiko.mysql.binlog.event.deserialization.AbstractRowsEventDataDeserializer;
import com.github.shyiko.mysql.binlog.event.deserialization.json.JsonBinary;
import io.debezium.DebeziumException;
import io.debezium.annotation.Immutable;
import io.debezium.config.CommonConnectorConfig.BinaryHandlingMode;
import io.debezium.connector.mysql.antlr.MySqlAntlrDdlParser;
import io.debezium.data.Json;
import io.debezium.jdbc.JdbcValueConverters;
import io.debezium.jdbc.TemporalPrecisionMode;
import io.debezium.relational.Column;
import io.debezium.relational.Table;
import io.debezium.relational.ValueConverter;
import io.debezium.time.Year;
import io.debezium.util.Strings;
import org.apache.kafka.connect.data.Decimal;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.source.SourceRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.Duration;
import java.time.LocalDate;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAdjuster;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* MySQL-specific customization of the conversions from JDBC values obtained from the MySQL binlog client library.
* <p>
* This class always uses UTC for the default time zone when converting values without timezone information to values
* that require timezones. This is because MySQL {@code TIMESTAMP} values are always
* <a href="https://dev.mysql.com/doc/refman/5.7/en/datetime.html">stored in UTC</a> (unlike {@code DATETIME} values)
* and are replicated in this form. Meanwhile, the MySQL Binlog Client library will
* {@link AbstractRowsEventDataDeserializer deserialize} these as {@link java.sql.Timestamp} values that have no
* timezone and, therefore, are presumed to be in UTC. When the column is properly marked with a
* {@link Types#TIMESTAMP_WITH_TIMEZONE} type, the converters will need to convert that {@link java.sql.Timestamp}
* value into an {@link OffsetDateTime} using the default time zone, which always is UTC.
*
* @author Randall Hauch
* @see com.github.shyiko.mysql.binlog.event.deserialization.AbstractRowsEventDataDeserializer
*/
@Immutable
public class MySqlValueConverters extends JdbcValueConverters {
/**
 * Callback for reporting errors encountered while parsing column values.
 * The default handler installed by the four-argument constructor rethrows the problem
 * as a {@code DebeziumException}; custom handlers may instead record and continue.
 */
@FunctionalInterface
public interface ParsingErrorHandler {
    /**
     * Report a parsing failure.
     *
     * @param message description of the failure
     * @param exception the underlying cause; may carry parser state
     */
    void error(String message, Exception exception);
}
// Reflective entry points on com.mysql.cj.CharsetMapping; the method was renamed
// (getJavaEncodingForMysqlCharset -> getStaticJavaEncodingForMysqlCharset) in Connector/J 8.0.26,
// so charsetFor(...) probes for both names.
private static final String METHOD_GET_STATIC_JAVA_ENCODING = "getStaticJavaEncodingForMysqlCharset";
private static final String METHOD_GET_JAVA_ENCODING = "getJavaEncodingForMysqlCharset";
private static final String CLASS_CHARSET_MAPPING = "com.mysql.cj.CharsetMapping";
private static final Logger LOGGER = LoggerFactory.getLogger(MySqlValueConverters.class);
/**
 * Used to parse values of TIME columns. Format: 000:00:00.000000.
 */
private static final Pattern TIME_FIELD_PATTERN = Pattern.compile("(\\-?[0-9]*):([0-9]*):([0-9]*)(\\.([0-9]*))?");
/**
 * Used to parse values of DATE columns. Format: 000-00-00.
 */
private static final Pattern DATE_FIELD_PATTERN = Pattern.compile("([0-9]*)-([0-9]*)-([0-9]*)");
/**
 * Used to parse values of TIMESTAMP columns. Format: 000-00-00 00:00:00.000.
 */
private static final Pattern TIMESTAMP_FIELD_PATTERN = Pattern.compile("([0-9]*)-([0-9]*)-([0-9]*) .*");
/**
 * ===================== This is a diff from the original file ===========================
 * Deliberate hook: callers inject the JDBC driver's class loader here so that
 * {@code charsetFor(Column)} can reflectively load 'com.mysql.cj.CharsetMapping' and invoke its
 * getJavaEncodingForMysqlCharset / getStaticJavaEncodingForMysqlCharset static method.
 * NOTE(review): mutable public static state — presumably assigned once during connector startup
 * before any conversion runs; confirm there is no racy reassignment.
 */
public static ClassLoader jdbcClassLoader;
/**
* A utility method that adjusts <a href="https://dev.mysql.com/doc/refman/5.7/en/two-digit-years.html">ambiguous</a>
* 2-digit year values of DATETIME, DATE, and TIMESTAMP types using these MySQL-specific rules:
* <ul>
* <li>Year values in the range 00-69 are converted to 2000-2069.</li>
* <li>Year values in the range 70-99 are converted to 1970-1999.</li>
* </ul>
*
* @param temporal the temporal instance to adjust; may not be null
* @return the possibly adjusted temporal instance; never null
*/
protected static Temporal adjustTemporal(Temporal temporal) {
    // Values that do not carry a year field are returned untouched.
    if (!temporal.isSupported(ChronoField.YEAR)) {
        return temporal;
    }
    int year = temporal.get(ChronoField.YEAR);
    // Two-digit years are disambiguated the way MySQL does: 00-69 -> 20xx, 70-99 -> 19xx.
    if (year >= 0 && year <= 69) {
        return temporal.plus(2000, ChronoUnit.YEARS);
    }
    if (year >= 70 && year <= 99) {
        return temporal.plus(1900, ChronoUnit.YEARS);
    }
    return temporal;
}
private final ParsingErrorHandler parsingErrorHandler;
/**
 * Create a new instance that always uses UTC for the default time zone when converting values without
* timezone information to values that require timezones.
* <p>
*
* @param decimalMode how {@code DECIMAL} and {@code NUMERIC} values should be treated; may be null if
* {@link io.debezium.jdbc.JdbcValueConverters.DecimalMode#PRECISE} is to be used
* @param temporalPrecisionMode temporal precision mode based on {@link io.debezium.jdbc.TemporalPrecisionMode}
* @param bigIntUnsignedMode how {@code BIGINT UNSIGNED} values should be treated; may be null if
* {@link io.debezium.jdbc.JdbcValueConverters.BigIntUnsignedMode#PRECISE} is to be used
* @param binaryMode how binary columns should be represented
*/
public MySqlValueConverters(DecimalMode decimalMode, TemporalPrecisionMode temporalPrecisionMode,
                            BigIntUnsignedMode bigIntUnsignedMode, BinaryHandlingMode binaryMode) {
    // Defaults: identity temporal adjuster (x -> x) and an error handler that
    // escalates every parsing problem as a DebeziumException.
    this(decimalMode, temporalPrecisionMode, bigIntUnsignedMode, binaryMode, x -> x, (message, exception) -> {
        throw new DebeziumException(message, exception);
    });
}
/**
* Create a new instance that always uses UTC for the default time zone when converting values without timezone
* information to values that require timezones.
* <p>
*
* @param decimalMode how {@code DECIMAL} and {@code NUMERIC} values should be treated; may be null if
* {@link io.debezium.jdbc.JdbcValueConverters.DecimalMode#PRECISE} is to be used
* @param temporalPrecisionMode temporal precision mode based on {@link io.debezium.jdbc.TemporalPrecisionMode}
* @param bigIntUnsignedMode how {@code BIGINT UNSIGNED} values should be treated; may be null if
* {@link io.debezium.jdbc.JdbcValueConverters.BigIntUnsignedMode#PRECISE} is to be used
* @param binaryMode how binary columns should be represented
* @param adjuster a temporal adjuster to make a database specific time modification before conversion
* @param parsingErrorHandler for errors during postponed binlog parsing
*/
public MySqlValueConverters(DecimalMode decimalMode, TemporalPrecisionMode temporalPrecisionMode,
                            BigIntUnsignedMode bigIntUnsignedMode, BinaryHandlingMode binaryMode,
                            TemporalAdjuster adjuster, ParsingErrorHandler parsingErrorHandler) {
    // ZoneOffset.UTC is fixed here because MySQL TIMESTAMP values are stored and
    // replicated in UTC (see the class-level javadoc).
    super(decimalMode, temporalPrecisionMode, ZoneOffset.UTC, adjuster, bigIntUnsignedMode, binaryMode);
    this.parsingErrorHandler = parsingErrorHandler;
}
@Override
protected ByteOrder byteOrderOfBitType() {
    // BIT column values are interpreted with big-endian byte order.
    return ByteOrder.BIG_ENDIAN;
}
/**
 * Builds the Kafka Connect schema for a MySQL column: MySQL-specific types (JSON, spatial
 * types, YEAR, ENUM, SET, and widened unsigned integers) are mapped here, and everything
 * else is delegated to the JDBC base class.
 *
 * @param column the column definition; the mapping is driven by its upper-cased type name
 * @return the schema builder for the column's values
 */
@Override
public SchemaBuilder schemaBuilder(Column column) {
    // Handle a few MySQL-specific types based upon how they are handled by the MySQL binlog client ...
    String typeName = column.typeName().toUpperCase();
    if (matches(typeName, "JSON")) {
        return Json.builder();
    }
    if (matches(typeName, "POINT")) {
        return io.debezium.data.geometry.Point.builder();
    }
    if (matches(typeName, "GEOMETRY")
            || matches(typeName, "LINESTRING")
            || matches(typeName, "POLYGON")
            || matches(typeName, "MULTIPOINT")
            || matches(typeName, "MULTILINESTRING")
            || matches(typeName, "MULTIPOLYGON")
            || isGeometryCollection(typeName)) {
        return io.debezium.data.geometry.Geometry.builder();
    }
    if (matches(typeName, "YEAR")) {
        return Year.builder();
    }
    if (matches(typeName, "ENUM")) {
        String commaSeperatedOptions = extractEnumAndSetOptionsAsString(column);
        return io.debezium.data.Enum.builder(commaSeperatedOptions);
    }
    if (matches(typeName, "SET")) {
        String commaSeperatedOptions = extractEnumAndSetOptionsAsString(column);
        return io.debezium.data.EnumSet.builder(commaSeperatedOptions);
    }
    if (matches(typeName, "SMALLINT UNSIGNED") || matches(typeName, "SMALLINT UNSIGNED ZEROFILL")
            || matches(typeName, "INT2 UNSIGNED") || matches(typeName, "INT2 UNSIGNED ZEROFILL")) {
        // In order to capture unsigned SMALLINT 16-bit data source, INT32 will be required to safely capture all valid
        // values
        // Source: https://kafka.apache.org/0102/javadoc/org/apache/kafka/connect/data/Schema.Type.html
        return SchemaBuilder.int32();
    }
    if (matches(typeName, "INT UNSIGNED") || matches(typeName, "INT UNSIGNED ZEROFILL")
            || matches(typeName, "INT4 UNSIGNED") || matches(typeName, "INT4 UNSIGNED ZEROFILL")) {
        // In order to capture unsigned INT 32-bit data source, INT64 will be required to safely capture all valid values
        // Source: https://kafka.apache.org/0102/javadoc/org/apache/kafka/connect/data/Schema.Type.html
        return SchemaBuilder.int64();
    }
    if (matches(typeName, "BIGINT UNSIGNED") || matches(typeName, "BIGINT UNSIGNED ZEROFILL")
            || matches(typeName, "INT8 UNSIGNED") || matches(typeName, "INT8 UNSIGNED ZEROFILL")) {
        switch (super.bigIntUnsignedMode) {
            case LONG:
                return SchemaBuilder.int64();
            case PRECISE:
                // In order to capture unsigned INT 64-bit data source, org.apache.kafka.connect.data.Decimal:Byte will be
                // required to safely capture all valid values with scale of 0
                // Source: https://kafka.apache.org/0102/javadoc/org/apache/kafka/connect/data/Schema.Type.html
                return Decimal.builder(0);
        }
    }
    // Otherwise, let the base class handle it ...
    return super.schemaBuilder(column);
}
/**
 * Produces the {@link ValueConverter} for a MySQL column. Mirrors {@code schemaBuilder(Column)}:
 * MySQL-specific types (JSON, spatial, YEAR, ENUM/SET, unsigned integers) get dedicated
 * converters, character data is decoded with the column's character set, TIME/TIMESTAMP get
 * MySQL-specific pre-processing, and everything else delegates to the JDBC base class.
 *
 * @param column the column definition driving the choice of converter
 * @param fieldDefn the Kafka Connect field the converted values will populate
 * @return the converter for the column's values
 */
@Override
public ValueConverter converter(Column column, Field fieldDefn) {
    // Handle a few MySQL-specific types based upon how they are handled by the MySQL binlog client ...
    String typeName = column.typeName().toUpperCase();
    if (matches(typeName, "JSON")) {
        return (data) -> convertJson(column, fieldDefn, data);
    }
    if (matches(typeName, "GEOMETRY")
            || matches(typeName, "LINESTRING")
            || matches(typeName, "POLYGON")
            || matches(typeName, "MULTIPOINT")
            || matches(typeName, "MULTILINESTRING")
            || matches(typeName, "MULTIPOLYGON")
            || isGeometryCollection(typeName)) {
        return (data -> convertGeometry(column, fieldDefn, data));
    }
    if (matches(typeName, "POINT")) {
        // backwards compatibility
        return (data -> convertPoint(column, fieldDefn, data));
    }
    if (matches(typeName, "YEAR")) {
        return (data) -> convertYearToInt(column, fieldDefn, data);
    }
    if (matches(typeName, "ENUM")) {
        // Build up the character array based upon the column's type ...
        List<String> options = extractEnumAndSetOptions(column);
        return (data) -> convertEnumToString(options, column, fieldDefn, data);
    }
    if (matches(typeName, "SET")) {
        // Build up the character array based upon the column's type ...
        List<String> options = extractEnumAndSetOptions(column);
        return (data) -> convertSetToString(options, column, fieldDefn, data);
    }
    if (matches(typeName, "TINYINT UNSIGNED") || matches(typeName, "TINYINT UNSIGNED ZEROFILL")
            || matches(typeName, "INT1 UNSIGNED") || matches(typeName, "INT1 UNSIGNED ZEROFILL")) {
        // Convert TINYINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
        return (data) -> convertUnsignedTinyint(column, fieldDefn, data);
    }
    if (matches(typeName, "SMALLINT UNSIGNED") || matches(typeName, "SMALLINT UNSIGNED ZEROFILL")
            || matches(typeName, "INT2 UNSIGNED") || matches(typeName, "INT2 UNSIGNED ZEROFILL")) {
        // Convert SMALLINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
        return (data) -> convertUnsignedSmallint(column, fieldDefn, data);
    }
    if (matches(typeName, "MEDIUMINT UNSIGNED") || matches(typeName, "MEDIUMINT UNSIGNED ZEROFILL")
            || matches(typeName, "INT3 UNSIGNED") || matches(typeName, "INT3 UNSIGNED ZEROFILL")
            || matches(typeName, "MIDDLEINT UNSIGNED") || matches(typeName, "MIDDLEINT UNSIGNED ZEROFILL")) {
        // Convert MEDIUMINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
        return (data) -> convertUnsignedMediumint(column, fieldDefn, data);
    }
    if (matches(typeName, "INT UNSIGNED") || matches(typeName, "INT UNSIGNED ZEROFILL")
            || matches(typeName, "INT4 UNSIGNED") || matches(typeName, "INT4 UNSIGNED ZEROFILL")) {
        // Convert INT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
        return (data) -> convertUnsignedInt(column, fieldDefn, data);
    }
    if (matches(typeName, "BIGINT UNSIGNED") || matches(typeName, "BIGINT UNSIGNED ZEROFILL")
            || matches(typeName, "INT8 UNSIGNED") || matches(typeName, "INT8 UNSIGNED ZEROFILL")) {
        switch (super.bigIntUnsignedMode) {
            case LONG:
                return (data) -> convertBigInt(column, fieldDefn, data);
            case PRECISE:
                // Convert BIGINT UNSIGNED internally from SIGNED to UNSIGNED based on the boundary settings
                return (data) -> convertUnsignedBigint(column, fieldDefn, data);
        }
    }

    // We have to convert bytes encoded in the column's character set ...
    switch (column.jdbcType()) {
        case Types.CHAR: // fixed-length
        case Types.VARCHAR: // variable-length
        case Types.LONGVARCHAR: // variable-length
        case Types.CLOB: // variable-length
        case Types.NCHAR: // fixed-length
        case Types.NVARCHAR: // variable-length
        case Types.LONGNVARCHAR: // variable-length
        case Types.NCLOB: // variable-length
        case Types.DATALINK:
        case Types.SQLXML:
            Charset charset = charsetFor(column);
            if (charset != null) {
                logger.debug("Using {} charset by default for column: {}", charset, column);
                return (data) -> convertString(column, fieldDefn, charset, data);
            }
            // no charset mapping found for the column; fall back to UTF-8
            logger.warn("Using UTF-8 charset by default for column without charset: {}", column);
            return (data) -> convertString(column, fieldDefn, StandardCharsets.UTF_8, data);
        case Types.TIME:
            if (adaptiveTimeMicrosecondsPrecisionMode) {
                return data -> convertDurationToMicroseconds(column, fieldDefn, data);
            }
            // pre-process the raw value, then chain into the base-class converter
            return ((ValueConverter) (data -> convertTimestampToLocalDateTime(column, fieldDefn, data)))
                    .and(super.converter(column, fieldDefn));
        case Types.TIMESTAMP:
            return ((ValueConverter) (data -> convertTimestampToLocalDateTime(column, fieldDefn, data)))
                    .and(super.converter(column, fieldDefn));
        default:
            break;
    }

    // Otherwise, let the base class handle it ...
    return super.converter(column, fieldDefn);
}
/**
* Return the {@link Charset} instance with the MySQL-specific character set name used by the given column.
*
* @param column the column in which the character set is used; never null
* @return the Java {@link Charset}, or null if there is no mapping
*/
protected Charset charsetFor(Column column) {
String mySqlCharsetName = column.charsetName();
if (mySqlCharsetName == null) {
logger.warn("Column is missing a character set: {}", column);
return null;
}
// This is a change from the original file, we are using the jdbcClassLoader to invoke the static
// getJavaEncodingForMysqlCharset or getStaticJavaEncodingForMysqlCharset (since mysql connector 8.0.26) method
// Following line is from original file. Instead of getting encoding from CharsetMapping we load the class
// using jdbcClassLoader.
// String encoding = CharsetMapping.getJavaEncodingForMysqlCharset(mySqlCharsetName);
String encoding;
Class<?> charsetMappingClass = null;
try {
charsetMappingClass = jdbcClassLoader.loadClass(CLASS_CHARSET_MAPPING);
} catch (ClassNotFoundException e) {
throw new RuntimeException(String.format("Failed to load class %s: %s", CLASS_CHARSET_MAPPING,
e.getMessage()), e);
}
Method getCharsetMethod = null;
try {
getCharsetMethod = charsetMappingClass.getMethod(METHOD_GET_JAVA_ENCODING, String.class);
} catch (NoSuchMethodException e) {
try {
getCharsetMethod = charsetMappingClass.getDeclaredMethod(METHOD_GET_STATIC_JAVA_ENCODING, String.class);
getCharsetMethod.setAccessible(true);
} catch (NoSuchMethodException noSuchMethodException) {
throw new RuntimeException(String.format("Failed to find method %s or %s for class %s",
METHOD_GET_JAVA_ENCODING, METHOD_GET_STATIC_JAVA_ENCODING,
CLASS_CHARSET_MAPPING));
}
}
try {
encoding = (String) getCharsetMethod.invoke(null, mySqlCharsetName);
} catch (Exception e) {
throw new RuntimeException(String.format("Error while using class loader to invoke '%s.%s' static method",
CLASS_CHARSET_MAPPING, getCharsetMethod.getName()), e);
}
// end change from original file
if (encoding == null) {
logger.warn("Column uses MySQL character set '{}', which has no mapping to a Java character set",
mySqlCharsetName);
} else {
try {
return Charset.forName(encoding);
} catch (IllegalCharsetNameException e) {
logger.error("Unable to load Java charset '{}' for column with MySQL character set '{}'", encoding,
mySqlCharsetName);
}
}
return null;
}
    /**
     * Convert the {@link String} or {@code byte[]} value representing a MySQL {@code JSON} column to a JSON
     * string used in a {@link SourceRecord}.
     *
     * @param column the column in which the value appears
     * @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
     * @param data the data; may be null
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertJson(Column column, Field fieldDefn, Object data) {
        return convertValue(column, fieldDefn, data, "{}", (r) -> {
            if (data instanceof byte[]) {
                // The BinlogReader sees these JSON values as binary encoded, so we use the binlog client library's utility
                // to parse MySQL's internal binary representation into a JSON string, using the standard formatter.
                if (((byte[]) data).length == 0) {
                    // Empty binary payload: null for optional columns, empty JSON object otherwise.
                    r.deliver(column.isOptional() ? null : "{}");
                } else {
                    try {
                        r.deliver(JsonBinary.parseAsString((byte[]) data));
                    } catch (IOException e) {
                        // Malformed binary JSON: report the parse failure, then deliver the fallback value.
                        parsingErrorHandler.error("Failed to parse and read a JSON value on '" + column + "' value " +
                                Arrays.toString((byte[]) data), e);
                        r.deliver(column.isOptional() ? null : "{}");
                    }
                }
            } else if (data instanceof String) {
                // The SnapshotReader sees JSON values as UTF-8 encoded strings.
                r.deliver(data);
            }
        });
    }
/**
* Convert the {@link String} or {@code byte[]} value to a string value used in a {@link SourceRecord}.
*
* @param column the column in which the value appears
* @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
* @param columnCharset the Java character set in which column byte[] values are encoded; may not be null
* @param data the data; may be null
* @return the converted value, or null if the conversion could not be made and the column allows nulls
* @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
*/
protected Object convertString(Column column, Field fieldDefn, Charset columnCharset, Object data) {
return convertValue(column, fieldDefn, data, "", (r) -> {
if (data instanceof byte[]) {
// Decode the binary representation using the given character encoding ...
r.deliver(new String((byte[]) data, columnCharset));
} else if (data instanceof String) {
r.deliver(data);
}
});
}
    /**
     * Converts a value object for a MySQL {@code YEAR}, which appear in the binlog as an integer though returns from
     * the MySQL JDBC driver as either a short or a {@link java.sql.Date}.
     *
     * @param column the column definition describing the {@code data} value; never null
     * @param fieldDefn the field definition; never null
     * @param data the data object to be converted into a year literal integer value; never null
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    @SuppressWarnings("deprecation")
    protected Object convertYearToInt(Column column, Field fieldDefn, Object data) {
        return convertValue(column, fieldDefn, data, 0, (r) -> {
            Object mutData = data;
            if (data instanceof java.time.Year) {
                // The MySQL binlog always returns a Year object ...
                r.deliver(adjustTemporal(java.time.Year.of(((java.time.Year) data).getValue())).get(ChronoField.YEAR));
            } else if (data instanceof java.sql.Date) {
                // MySQL JDBC driver sometimes returns a Java SQL Date object ...
                // year from java.sql.Date is defined as number of years since 1900
                r.deliver(((java.sql.Date) data).getYear() + 1900);
            } else if (data instanceof String) {
                // Parse the string form so the Number branch below performs the delivery.
                mutData = Integer.valueOf((String) data);
            }
            if (mutData instanceof Number) {
                // MySQL JDBC driver sometimes returns a short ...
                // Only reached for String or numeric inputs; Year/Date inputs were delivered above.
                r.deliver(adjustTemporal(java.time.Year.of(((Number) mutData).intValue())).get(ChronoField.YEAR));
            }
        });
    }
/**
* Converts a value object for a MySQL {@code ENUM}, which is represented in the binlog events as an integer value
* containing the index of the enum option. The MySQL JDBC driver returns a string containing the option,
* so this method calculates the same.
*
* @param options the characters that appear in the same order as defined in the column; may not be null
* @param column the column definition describing the {@code data} value; never null
* @param fieldDefn the field definition; never null
* @param data the data object to be converted into an {@code ENUM} literal String value
* @return the converted value, or null if the conversion could not be made and the column allows nulls
* @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
*/
protected Object convertEnumToString(List<String> options, Column column, Field fieldDefn, Object data) {
return convertValue(column, fieldDefn, data, "", (r) -> {
if (data instanceof String) {
// JDBC should return strings ...
r.deliver(data);
} else if (data instanceof Integer) {
if (options != null) {
// The binlog will contain an int with the 1-based index of the option in the enum value ...
int value = ((Integer) data).intValue();
if (value == 0) {
// an invalid value was specified, which corresponds to the empty string '' and an index of 0
r.deliver("");
}
int index = value - 1; // 'options' is 0-based
if (index < options.size() && index >= 0) {
r.deliver(options.get(index));
}
} else {
r.deliver(null);
}
}
});
}
/**
* Converts a value object for a MySQL {@code SET}, which is represented in the binlog events contain a long number
* in which every bit corresponds to a different option. The MySQL JDBC driver returns a string containing the
* comma-separated options, so this method calculates the same.
*
* @param options the characters that appear in the same order as defined in the column; may not be null
* @param column the column definition describing the {@code data} value; never null
* @param fieldDefn the field definition; never null
* @param data the data object to be converted into an {@code SET} literal String value; never null
* @return the converted value, or null if the conversion could not be made and the column allows nulls
* @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
*/
protected Object convertSetToString(List<String> options, Column column, Field fieldDefn, Object data) {
return convertValue(column, fieldDefn, data, "", (r) -> {
if (data instanceof String) {
// JDBC should return strings ...
r.deliver(data);
} else if (data instanceof Long) {
// The binlog will contain a long with the indexes of the options in the set value ...
long indexes = ((Long) data).longValue();
r.deliver(convertSetValue(column, indexes, options));
}
});
}
/**
* Determine if the uppercase form of a column's type exactly matches or begins with the specified prefix.
* Note that this logic works when the column's {@link Column#typeName() type} contains the type name followed by
* parentheses.
*
* @param upperCaseTypeName the upper case form of the column's {@link Column#typeName() type name}
* @param upperCaseMatch the upper case form of the expected type or prefix of the type; may not be null
* @return {@code true} if the type matches the specified type, or {@code false} otherwise
*/
protected boolean matches(String upperCaseTypeName, String upperCaseMatch) {
if (upperCaseTypeName == null) {
return false;
}
return upperCaseMatch.equals(upperCaseTypeName) || upperCaseTypeName.startsWith(upperCaseMatch + "(");
}
/**
* Determine if the uppercase form of a column's type is geometry collection independent of JDBC driver or server
* version.
*
* @param upperCaseTypeName the upper case form of the column's {@link Column#typeName() type name}
* @return {@code true} if the type is geometry collection
*/
protected boolean isGeometryCollection(String upperCaseTypeName) {
if (upperCaseTypeName == null) {
return false;
}
return upperCaseTypeName.equals("GEOMETRYCOLLECTION") || upperCaseTypeName.equals("GEOMCOLLECTION")
|| upperCaseTypeName.endsWith(".GEOMCOLLECTION");
}
    /** Extracts the declared ENUM/SET option literals of the column, in declaration order. */
    protected List<String> extractEnumAndSetOptions(Column column) {
        return MySqlAntlrDdlParser.extractEnumAndSetOptions(column.enumValues());
    }
    /** Same as {@link #extractEnumAndSetOptions(Column)} but joined into a single comma-separated string. */
    protected String extractEnumAndSetOptionsAsString(Column column) {
        return Strings.join(",", extractEnumAndSetOptions(column));
    }
protected String convertSetValue(Column column, long indexes, List<String> options) {
StringBuilder sb = new StringBuilder();
int index = 0;
boolean first = true;
int optionLen = options.size();
while (indexes != 0L) {
if (indexes % 2L != 0) {
if (first) {
first = false;
} else {
sb.append(',');
}
if (index < optionLen) {
sb.append(options.get(index));
} else {
logger.warn("Found unexpected index '{}' on column {}", index, column);
}
}
++index;
indexes = indexes >>> 1;
}
return sb.toString();
}
    /**
     * Convert a value representing a POINT {@code byte[]} value to a Point value used in a {@link SourceRecord}.
     *
     * @param column the column in which the value appears
     * @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
     * @param data the data; may be null
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertPoint(Column column, Field fieldDefn, Object data) {
        final MySqlGeometry empty = MySqlGeometry.createEmpty();
        return convertValue(column, fieldDefn, data,
                io.debezium.data.geometry.Geometry.createValue(fieldDefn.schema(), empty.getWkb(),
                        empty.getSrid()), (r) -> {
                    if (data instanceof byte[]) {
                        // The binlog utility sends a byte array for any Geometry type, we will use our own binaryParse to parse the
                        // byte to WKB, hence to the suitable class
                        MySqlGeometry mySqlGeometry = MySqlGeometry.fromBytes((byte[]) data);
                        if (mySqlGeometry.isPoint()) {
                            r.deliver(io.debezium.data.geometry.Point.createValue(fieldDefn.schema(), mySqlGeometry.getWkb(),
                                    mySqlGeometry.getSrid()));
                        } else {
                            // A non-POINT geometry in a POINT column is a hard error, not a null.
                            throw new ConnectException("Failed to parse and read a value of type POINT on " + column);
                        }
                    }
                });
    }
/**
* Convert the a value representing a GEOMETRY {@code byte[]} value to a Geometry value used in a
* {@link SourceRecord}.
*
* @param column the column in which the value appears
* @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
* @param data the data; may be null
* @return the converted value, or null if the conversion could not be made and the column allows nulls
* @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
*/
protected Object convertGeometry(Column column, Field fieldDefn, Object data) {
final MySqlGeometry empty = MySqlGeometry.createEmpty();
return convertValue(column, fieldDefn, data,
io.debezium.data.geometry.Geometry.createValue(fieldDefn.schema(), empty.getWkb(),
empty.getSrid()), (r) -> {
if (data instanceof byte[]) {
// The binlog utility sends a byte array for any Geometry type, we will use our own binaryParse to parse the
// byte to WKB, hence to the suitable class
if (data instanceof byte[]) {
// The binlog utility sends a byte array for any Geometry type, we will use our own binaryParse to parse the
// byte to WKB, hence to the suitable class
MySqlGeometry mySqlGeometry = MySqlGeometry.fromBytes((byte[]) data);
r.deliver(io.debezium.data.geometry.Geometry.createValue(fieldDefn.schema(), mySqlGeometry.getWkb(),
mySqlGeometry.getSrid()));
}
}
});
}
@Override
protected ByteBuffer toByteBuffer(Column column, byte[] data) {
// DBZ-254 right-pad fixed-length binary column values with 0x00 (zero byte)
if (column.jdbcType() == Types.BINARY && data.length < column.length()) {
data = Arrays.copyOf(data, column.length());
}
return super.toByteBuffer(column, data);
}
/**
* Convert the a value representing a Unsigned TINYINT value to the correct Unsigned TINYINT representation.
*
* @param column the column in which the value appears
* @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
* @param data the data; may be null
*
* @return the converted value, or null if the conversion could not be made and the column allows nulls
*
* @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
*/
protected Object convertUnsignedTinyint(Column column, Field fieldDefn, Object data) {
return convertValue(column, fieldDefn, data, (short) 0, (r) -> {
if (data instanceof Short) {
r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedTinyint((short) data));
} else if (data instanceof Number) {
r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedTinyint(((Number) data).shortValue()));
} else {
// We continue with the original converting method (smallint) since we have an unsigned Tinyint
r.deliver(convertSmallInt(column, fieldDefn, data));
}
});
}
/**
* Convert the a value representing a Unsigned SMALLINT value to the correct Unsigned SMALLINT representation.
*
* @param column the column in which the value appears
* @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
* @param data the data; may be null
*
* @return the converted value, or null if the conversion could not be made and the column allows nulls
*
* @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
*/
protected Object convertUnsignedSmallint(Column column, Field fieldDefn, Object data) {
return convertValue(column, fieldDefn, data, 0, (r) -> {
if (data instanceof Integer) {
r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedSmallint((int) data));
} else if (data instanceof Number) {
r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedSmallint(((Number) data).intValue()));
} else {
// We continue with the original converting method (integer) since we have an unsigned Smallint
r.deliver(convertInteger(column, fieldDefn, data));
}
});
}
    /**
     * Convert a value representing an unsigned MEDIUMINT value to the correct unsigned MEDIUMINT representation.
     *
     * @param column the column in which the value appears
     * @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
     * @param data the data; may be null
     *
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     *
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertUnsignedMediumint(Column column, Field fieldDefn, Object data) {
        return convertValue(column, fieldDefn, data, 0, (r) -> {
            if (data instanceof Integer) {
                r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedMediumint((int) data));
            } else if (data instanceof Number) {
                r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedMediumint(((Number) data).intValue()));
            } else {
                // We continue with the original converting method (integer) since we have an unsigned Medium
                r.deliver(convertInteger(column, fieldDefn, data));
            }
        });
    }
/**
* Convert the a value representing a Unsigned INT value to the correct Unsigned INT representation.
*
* @param column the column in which the value appears
* @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
* @param data the data; may be null
*
* @return the converted value, or null if the conversion could not be made and the column allows nulls
*
* @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
*/
protected Object convertUnsignedInt(Column column, Field fieldDefn, Object data) {
return convertValue(column, fieldDefn, data, 0L, (r) -> {
if (data instanceof Long) {
r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedInteger((long) data));
} else if (data instanceof Number) {
r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedInteger(((Number) data).longValue()));
} else {
// We continue with the original converting method (bigint) since we have an unsigned Integer
r.deliver(convertBigInt(column, fieldDefn, data));
}
});
}
    /**
     * Convert a value representing an unsigned BIGINT value to the correct unsigned BIGINT representation.
     *
     * @param column the column in which the value appears
     * @param fieldDefn the field definition for the {@link SourceRecord}'s {@link Schema}; never null
     * @param data the data; may be null
     *
     * @return the converted value, or null if the conversion could not be made and the column allows nulls
     *
     * @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
     */
    protected Object convertUnsignedBigint(Column column, Field fieldDefn, Object data) {
        return convertValue(column, fieldDefn, data, 0L, (r) -> {
            if (data instanceof BigDecimal) {
                r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedBigint((BigDecimal) data));
            } else if (data instanceof Number) {
                // Route all other numerics through BigDecimal to avoid precision loss above Long.MAX_VALUE.
                r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedBigint(new BigDecimal(((Number) data).toString())));
            } else if (data instanceof String) {
                r.deliver(MySqlUnsignedIntegerConverter.convertUnsignedBigint(new BigDecimal((String) data)));
            } else {
                // Fall back to the generic numeric conversion for anything else.
                r.deliver(convertNumeric(column, fieldDefn, data));
            }
        });
    }
/**
* Converts a value object for an expected type of {@link java.time.Duration} to {@link Long} values that represents
* the time in microseconds.
* <p>
* Per the JDBC specification, databases should return {@link java.sql.Time} instances, but that's not working
* because it can only handle Daytime 00:00:00-23:59:59. We use {@link java.time.Duration} instead that can handle
* the range of -838:59:59.000000 to 838:59:59.000000 of a MySQL TIME type and transfer data as signed INT64 which
* reflects the DB value converted to microseconds.
*
* @param column the column definition describing the {@code data} value; never null
* @param fieldDefn the field definition; never null
* @param data the data object to be converted into a {@link java.time.Duration} type; never null
* @return the converted value, or null if the conversion could not be made and the column allows nulls
* @throws IllegalArgumentException if the value could not be converted but the column does not allow nulls
*/
protected Object convertDurationToMicroseconds(Column column, Field fieldDefn, Object data) {
return convertValue(column, fieldDefn, data, 0L, (r) -> {
try {
if (data instanceof Duration) {
r.deliver(((Duration) data).toNanos() / 1_000);
}
} catch (IllegalArgumentException e) {
}
});
}
    /**
     * Converts a {@link Timestamp} value to its {@link java.time.LocalDateTime} equivalent; any non-Timestamp
     * value is returned unchanged for a chained converter to handle.
     *
     * NOTE(review): the first guard returns null when data is null and the field is NOT optional, which reads
     * inverted; a null input falls through to the instanceof check and returns null anyway, so the guard looks
     * redundant — preserved as-is, confirm intent before changing.
     */
    protected Object convertTimestampToLocalDateTime(Column column, Field fieldDefn, Object data) {
        if (data == null && !fieldDefn.schema().isOptional()) {
            return null;
        }
        if (!(data instanceof Timestamp)) {
            return data;
        }
        return ((Timestamp) data).toLocalDateTime();
    }
    /**
     * Parses a MySQL TIME literal (e.g. {@code "-838:59:59.000000"}) into a {@link Duration}.
     * MySQL TIME may exceed 24 hours and may be negative, which {@link java.sql.Time} cannot represent.
     *
     * @param timeString the textual TIME value; must match {@code TIME_FIELD_PATTERN}
     * @return the parsed duration
     * @throws RuntimeException if the string does not match the expected pattern
     */
    public static Duration stringToDuration(String timeString) {
        Matcher matcher = TIME_FIELD_PATTERN.matcher(timeString);
        if (!matcher.matches()) {
            throw new RuntimeException("Unexpected format for TIME column: " + timeString);
        }
        long hours = Long.parseLong(matcher.group(1));
        long minutes = Long.parseLong(matcher.group(2));
        long seconds = Long.parseLong(matcher.group(3));
        long nanoSeconds = 0;
        String microSecondsString = matcher.group(5);
        if (microSecondsString != null) {
            // Right-pad the fractional digits to 9 places so they are interpreted as nanoseconds.
            nanoSeconds = Long.parseLong(Strings.justifyLeft(microSecondsString, 9, '0'));
        }
        if (hours >= 0) {
            return Duration.ofHours(hours)
                    .plusMinutes(minutes)
                    .plusSeconds(seconds)
                    .plusNanos(nanoSeconds);
        } else {
            // The sign is carried only on the hours group, so subtract the remaining parts.
            return Duration.ofHours(hours)
                    .minusMinutes(minutes)
                    .minusSeconds(seconds)
                    .minusNanos(nanoSeconds);
        }
    }
    /**
     * Parses a MySQL DATE literal into a {@link LocalDate}.
     * MySQL allows "zero" date parts (e.g. {@code 0000-00-00}); such values are logged and mapped to null.
     *
     * @param dateString the textual DATE value; must match {@code DATE_FIELD_PATTERN}
     * @param column the source column, used for logging only
     * @param table the source table, used for logging only
     * @return the parsed date, or null when any of year/month/day is zero
     * @throws RuntimeException if the string does not match the expected pattern
     */
    public static LocalDate stringToLocalDate(String dateString, Column column, Table table) {
        final Matcher matcher = DATE_FIELD_PATTERN.matcher(dateString);
        if (!matcher.matches()) {
            throw new RuntimeException("Unexpected format for DATE column: " + dateString);
        }
        final int year = Integer.parseInt(matcher.group(1));
        final int month = Integer.parseInt(matcher.group(2));
        final int day = Integer.parseInt(matcher.group(3));
        if (year == 0 || month == 0 || day == 0) {
            LOGGER.warn("Invalid value '{}' stored in column '{}' of table '{}' converted to empty value", dateString,
                    column.name(), table.id());
            return null;
        }
        return LocalDate.of(year, month, day);
    }
public static boolean containsZeroValuesInDatePart(String timestampString, Column column, Table table) {
final Matcher matcher = TIMESTAMP_FIELD_PATTERN.matcher(timestampString);
if (!matcher.matches()) {
throw new RuntimeException("Unexpected format for DATE column: " + timestampString);
}
final int year = Integer.parseInt(matcher.group(1));
final int month = Integer.parseInt(matcher.group(2));
final int day = Integer.parseInt(matcher.group(3));
if (year == 0 || month == 0 || day == 0) {
LOGGER.warn("Invalid value '{}' stored in column '{}' of table '{}' converted to empty value", timestampString,
column.name(), table.id());
return true;
}
return false;
}
}
| |
package com.shaubert.contacts;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.Context;
import android.database.ContentObserver;
import android.database.Cursor;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.SystemClock;
import android.provider.ContactsContract;
import android.text.TextUtils;
import android.util.Log;
import com.google.i18n.phonenumbers.PhoneNumberUtil;
import com.google.i18n.phonenumbers.Phonenumber;
import java.util.*;
public class ContactsHelper {
public static final String TAG = ContactsHelper.class.getSimpleName();
public static boolean LOGGING = true;
public enum State {
NOT_INITIALIZED,
INITIALIZING,
INITIALIZED,
ERROR
}
public static final int MAX_FREQUENT_CONTACTS_DEFAULT_VALUE = 10;
private static final String[] CONTACTS_PROJECTION = {
ContactsContract.Contacts._ID,
ContactsContract.Contacts.DISPLAY_NAME_PRIMARY,
ContactsContract.Contacts.PHOTO_ID,
ContactsContract.Contacts.STARRED,
ContactsContract.Contacts.TIMES_CONTACTED,
};
private static final String CONTACTS_WHERE =
"ifnull(" + ContactsContract.Contacts.HAS_PHONE_NUMBER + ", 0) == 1";
private static final String[] PHONES_PROJECTION = {
ContactsContract.CommonDataKinds.Phone.CONTACT_ID,
ContactsContract.CommonDataKinds.Phone.NUMBER,
ContactsContract.CommonDataKinds.Phone.IS_SUPER_PRIMARY
};
private static ContactsHelper instance;
public static ContactsHelper get(Context context) {
if (instance == null) {
instance = new ContactsHelper(context.getApplicationContext());
}
return instance;
}
private Context appContext;
private ContactsStateListener stateListener;
private ExceptionCallback exceptionCallback;
private int maxFrequentContacts = MAX_FREQUENT_CONTACTS_DEFAULT_VALUE;
private Handler handler = new Handler();
    // Observer on the system contacts provider; any external change triggers a cache rebuild.
    // selfChange is true for changes originating from this observer's own registration context,
    // which are skipped to avoid rebuild loops.
    private ContentObserver contactsObserver = new ContentObserver(handler) {
        @Override
        public void onChange(boolean selfChange) {
            if (!selfChange) {
                rebuildCache();
            }
        }
        @Override
        public void onChange(boolean selfChange, Uri uri) {
            // Uri-specific variant (API 16+); same handling as the generic callback.
            if (!selfChange) {
                rebuildCache();
            }
        }
    };
private Map<String, Contact> contactMap = new HashMap<String, Contact>();
private Map<String, Contact> phoneMap = new HashMap<String, Contact>();
private AsyncTask<Void, Void, Boolean> asyncTask;
private State state = State.NOT_INITIALIZED;
    /**
     * Private: use {@link #get(Context)}. Registers a content observer for contact changes and
     * kicks off the initial asynchronous cache build.
     */
    private ContactsHelper(Context appContext) {
        this.appContext = appContext;
        exceptionCallback = new LoggingExceptionsCallback();
        ContentResolver contentResolver = appContext.getContentResolver();
        contentResolver.registerContentObserver(ContactsContract.Contacts.CONTENT_URI, true, contactsObserver);
        rebuildCache();
    }
    /** Sets the listener notified whenever the cache state changes. */
    public void setStateListener(ContactsStateListener stateListener) {
        this.stateListener = stateListener;
    }
    /** Replaces the default logging exception callback. */
    public void setExceptionCallback(ExceptionCallback exceptionCallback) {
        this.exceptionCallback = exceptionCallback;
    }
    /** Limits how many frequent contacts are reported; default is {@link #MAX_FREQUENT_CONTACTS_DEFAULT_VALUE}. */
    public void setMaxFrequentContactsCount(int maxFrequentContacts) {
        this.maxFrequentContacts = maxFrequentContacts;
    }
    /** @return true once the cache has been successfully built at least once. */
    public boolean isInitialized() {
        return state == State.INITIALIZED;
    }
    /** @return the current cache lifecycle state. */
    public State getState() {
        return state;
    }
    /** Updates the state and notifies the registered listener, if any. */
    protected void setState(State state, boolean hasChanges) {
        this.state = state;
        if (stateListener != null) {
            stateListener.onContactsStateChanges(state, hasChanges);
        }
    }
    /** Forwards an exception to the configured callback, if any. */
    private void logException(String message, Exception ex) {
        if (exceptionCallback != null) {
            exceptionCallback.logException(message, ex);
        }
    }
public Set<String> getAllPhones() {
Set<String> result = new HashSet<String>();
for (Contact contact : contactMap.values()) {
result.addAll(contact.getPhones());
}
return result;
}
    /** @return a new mutable list snapshot of all cached contacts. */
    public List<Contact> getAllContacts() {
        return new ArrayList<>(contactMap.values());
    }
    /**
     * Rebuilds the in-memory contact/phone caches asynchronously, cancelling any rebuild already in flight.
     * On success the new maps are swapped in via setContacts; on failure the state becomes ERROR.
     */
    public void rebuildCache() {
        if (asyncTask != null) {
            // Best effort: interrupt a rebuild that is still running so only the newest result wins.
            asyncTask.cancel(true);
        }
        asyncTask = new AsyncTask<Void, Void, Boolean>() {
            // Scratch map filled on the background thread, published in onPostExecute.
            private Map<String, Contact> resultMap = new HashMap<String, Contact>();
            @Override
            protected void onPreExecute() {
                // Only advertise INITIALIZING for the first build (or after an error); later rebuilds are silent.
                if (state == State.NOT_INITIALIZED
                        || state == State.ERROR) {
                    setState(State.INITIALIZING, false);
                }
            }
            @Override
            protected Boolean doInBackground(Void... params) {
                return buildCache(resultMap);
            }
            @Override
            protected void onPostExecute(Boolean result) {
                if (Boolean.TRUE.equals(result)) {
                    setContacts(resultMap);
                } else {
                    setState(State.ERROR, false);
                }
            }
        };
        asyncTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    }
    /**
     * Builds the phone-number -> contact index. When several contacts share a number, the one with a name
     * wins over a nameless one, and the one with a photo wins over a photoless one.
     *
     * NOTE(review): when the existing mapping has no name and the new contact does, the existing contact's
     * name is overwritten AND the mapping is replaced with the new contact — the mutation looks redundant
     * given the replacement; preserved as-is, confirm intent.
     */
    private Map<String, Contact> buildPhoneMap(Map<String, Contact> contactMap) {
        Map<String, Contact> phoneMap = new HashMap<>();
        for (Contact contact : contactMap.values()) {
            for (String phone : contact.getPhones()) {
                Contact existingMapping = phoneMap.get(phone);
                if (existingMapping == null) {
                    // First contact seen for this number.
                    phoneMap.put(phone, contact);
                } else {
                    if (!existingMapping.hasNames() && contact.hasNames()) {
                        // Prefer the contact that actually has a name.
                        existingMapping.setName(contact.getName(NameKind.FULL_NAME));
                        phoneMap.put(phone, contact);
                        continue;
                    }
                    if (existingMapping.getImageUri() == null
                            && contact.getImageUri() != null) {
                        // Otherwise prefer the contact that has a photo.
                        phoneMap.put(phone, contact);
                        continue;
                    }
                }
            }
        }
        return phoneMap;
    }
private void setContacts(Map<String, Contact> resultMap) {
Map<String, Contact> resultPhoneMap = buildPhoneMap(resultMap);
boolean hasChanges = false;
for (Contact contact : resultMap.values()) {
if (!hasChanges) {
Contact oldMapping = contactMap.get(contact.getId());
if (oldMapping != null) {
if (oldMapping.hashCode() != contact.hashCode()) {
hasChanges = true;
}
} else {
hasChanges = true;
}
}
}
contactMap = resultMap;
phoneMap = resultPhoneMap;
setState(State.INITIALIZED, hasChanges);
}
    /**
     * Looks up a contact by phone number, trying the raw string first, then the E164 form, and finally
     * the digits-only national form. Returns null when the cache is not built yet (a rebuild is kicked off)
     * or when no contact matches.
     */
    public Contact getContactByPhone(String phone) {
        if (TextUtils.isEmpty(phone)) {
            return null;
        }
        if (state == State.NOT_INITIALIZED) {
            // Cache not built yet: trigger a build and report a miss for now.
            rebuildCache();
            return null;
        }
        // 1) Exact match on the string as stored.
        Contact contact = phoneMap.get(phone);
        if (contact != null) {
            return contact;
        }
        Phonenumber.PhoneNumber phoneNumber = Phones.parseInternationalPhone(phone);
        if (phoneNumber != null) {
            PhoneNumberUtil phoneNumberUtil = PhoneNumberUtil.getInstance();
            // 2) Normalized international (E164) form, e.g. "+15551234567".
            String formattedPhone = phoneNumberUtil.format(phoneNumber, PhoneNumberUtil.PhoneNumberFormat.E164);
            contact = phoneMap.get(formattedPhone);
            if (contact != null) {
                return contact;
            }
            // 3) National form reduced to digits only.
            formattedPhone = PhoneNumberUtil.normalizeDigitsOnly(
                    phoneNumberUtil.format(phoneNumber, PhoneNumberUtil.PhoneNumberFormat.NATIONAL));
            formattedPhone = Phones.leaveOnlyDigits(formattedPhone);
            contact = phoneMap.get(formattedPhone);
            if (contact != null) {
                return contact;
            }
        }
        return null;
    }
    /**
     * Re-reads a single contact from the contacts provider and swaps the refreshed instance into both
     * caches. On any failure (query error, contact gone) the original contact is returned unchanged.
     */
    public Contact refreshContact(Contact contact) {
        Map<String, Contact> resultMap = new HashMap<>(1);
        ContentResolver resolver = appContext.getContentResolver();
        Cursor cursor = null;
        try {
            cursor = resolver.query(getContactUri(contact.getId()), CONTACTS_PROJECTION, null, null, null);
            if (populateContactsMap(cursor, resultMap) && !resultMap.isEmpty()) {
                // Drop stale phone index entries before inserting the refreshed contact.
                for (String phone : contact.getPhones()) {
                    phoneMap.remove(phone);
                }
                contactMap.remove(contact.getId());
                Contact updContact = resultMap.values().iterator().next();
                contactMap.put(updContact.getId(), updContact);
                for (String phone : updContact.getPhones()) {
                    phoneMap.put(phone, updContact);
                }
                return updContact;
            }
        } catch (Exception e) {
            logException("failed to get contact", e);
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
        // Fall back to the caller's instance when the refresh did not succeed.
        return contact;
    }
    /**
     * Fills {@code resultMap} with all contacts that have a phone number, querying the contacts provider.
     * Runs on a background thread (see rebuildCache).
     *
     * @return true on success; false on query failure or interruption
     */
    private boolean buildCache(Map<String, Contact> resultMap) {
        resultMap.clear();
        ContentResolver resolver = appContext.getContentResolver();
        Cursor cursor = null;
        try {
            cursor = resolver.query(ContactsContract.Contacts.CONTENT_URI, CONTACTS_PROJECTION, CONTACTS_WHERE, null, null);
            return populateContactsMap(cursor, resultMap);
        } catch (Exception e) {
            logException("failed to get contacts", e);
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
        return false;
    }
/**
 * Drains {@code contactsCursor} into {@code resultMap} (keyed by contact id),
 * then runs a second query against the phone table to attach phone numbers,
 * and finally drops contacts that ended up with no valid phone number.
 *
 * Interrupt checks are sprinkled through both cursor loops so a cache rebuild
 * can be cancelled promptly; an interrupted run returns false with
 * {@code resultMap} left partially filled.
 *
 * @param contactsCursor cursor over the contacts table; may be null
 * @param resultMap receives the contacts, keyed by provider id
 * @return true when the scan completed (including the "cursor non-null but
 *         empty" case); false when the cursor was null or the thread was
 *         interrupted mid-scan
 */
private boolean populateContactsMap(Cursor contactsCursor, Map<String, Contact> resultMap) {
    if (contactsCursor != null && contactsCursor.moveToFirst()) {
        long startTime = SystemClock.elapsedRealtime();
        ContentResolver resolver = appContext.getContentResolver();
        int idColumn = contactsCursor.getColumnIndexOrThrow(ContactsContract.Contacts._ID);
        int nameColumn = contactsCursor.getColumnIndexOrThrow(ContactsContract.Contacts.DISPLAY_NAME_PRIMARY);
        int starredColumn = contactsCursor.getColumnIndexOrThrow(ContactsContract.Contacts.STARRED);
        int timesContactedColumn = contactsCursor.getColumnIndexOrThrow(ContactsContract.Contacts.TIMES_CONTACTED);
        int photoIdColumn = contactsCursor.getColumnIndexOrThrow(ContactsContract.Contacts.PHOTO_ID);
        // Pass 1: one Contact per row that has a display name.
        do {
            if (Thread.currentThread().isInterrupted()) {
                return false;
            }
            String name = contactsCursor.getString(nameColumn);
            if (TextUtils.isEmpty(name)) {
                // Nameless rows are not presentable in the UI; skip them.
                continue;
            }
            String id = contactsCursor.getString(idColumn);
            Contact contact = new Contact();
            contact.setId(id);
            contact.setName(name);
            if (contactsCursor.getInt(photoIdColumn) > 0) {
                // A positive PHOTO_ID means a photo row exists for this contact.
                Uri contactUri = getContactUri(id);
                contact.setImageUri(contactUri);
            }
            contact.setStarred(contactsCursor.getInt(starredColumn) > 0);
            contact.setTimesContacted(contactsCursor.getInt(timesContactedColumn));
            resultMap.put(id, contact);
        } while (contactsCursor.moveToNext());
        if (Thread.currentThread().isInterrupted()) {
            return false;
        }
        // Pass 2: fetch all phone rows for the collected ids in one query.
        Cursor phoneCursor = null;
        try {
            // NOTE(review): ids are numeric provider row ids, so string-building
            // the IN clause is injection-safe here; but if every row above was
            // skipped the clause becomes "IN ()", which is malformed SQL — the
            // query then throws and is swallowed by the catch below. Confirm
            // that is acceptable.
            phoneCursor = resolver.query(ContactsContract.CommonDataKinds.Phone.CONTENT_URI,
                    PHONES_PROJECTION,
                    ContactsContract.CommonDataKinds.Phone.CONTACT_ID
                            + " IN (" + TextUtils.join(",", resultMap.keySet()) + ")",
                    null,
                    null);
            if (phoneCursor != null && phoneCursor.moveToFirst()) {
                int contactIdIndex = phoneCursor.getColumnIndexOrThrow(ContactsContract.CommonDataKinds.Phone.CONTACT_ID);
                int phoneIndex = phoneCursor.getColumnIndexOrThrow(ContactsContract.CommonDataKinds.Phone.NUMBER);
                int superPrimaryIndex = phoneCursor.getColumnIndexOrThrow(ContactsContract.CommonDataKinds.Phone.IS_SUPER_PRIMARY);
                do {
                    if (Thread.currentThread().isInterrupted()) {
                        return false;
                    }
                    String contactId = phoneCursor.getString(contactIdIndex);
                    Contact contact = resultMap.get(contactId);
                    if (contact == null) {
                        // Phone row for a contact we filtered out in pass 1.
                        continue;
                    }
                    String contactNumber = phoneCursor.getString(phoneIndex);
                    if (!TextUtils.isEmpty(contactNumber)) {
                        String number = prepareContactNumber(contactNumber);
                        if (isNumberValid(number)) {
                            contact.addPhone(number);
                            if (phoneCursor.getInt(superPrimaryIndex) > 0) {
                                // IS_SUPER_PRIMARY marks the user's preferred number.
                                contact.setDefaultPhone(number);
                            }
                        }
                    }
                } while (phoneCursor.moveToNext());
            }
        } catch (Exception e) {
            logException("failed to get phone numbers from contacts", e);
        } finally {
            if (phoneCursor != null) {
                phoneCursor.close();
            }
        }
        // Contacts without any usable phone number are not callable; drop them.
        for (Iterator<Map.Entry<String, Contact>> iterator = resultMap.entrySet().iterator(); iterator.hasNext();) {
            Map.Entry<String, Contact> entry = iterator.next();
            if (entry.getValue().getPhones().isEmpty()) {
                iterator.remove();
            }
        }
        if (LOGGING) {
            Log.d(TAG, "fetch contacts time = " + (SystemClock.elapsedRealtime() - startTime) + "ms");
        }
        markFrequentContacts(resultMap);
        return true;
    } else {
        // A non-null empty cursor is a successful (empty) result; null means failure.
        return contactsCursor != null;
    }
}
/**
 * Flags up to {@code maxFrequentContacts} contacts as "frequent", choosing
 * them by {@link ContactTimesComparator} order and only flagging contacts
 * that have actually been contacted at least once.
 */
private void markFrequentContacts(Map<String, Contact> resultMap) {
    List<Contact> ranked = new ArrayList<>(resultMap.values());
    Collections.sort(ranked, new ContactTimesComparator());
    int remaining = Math.min(ranked.size(), maxFrequentContacts);
    for (Contact candidate : ranked) {
        if (remaining-- <= 0) {
            break;
        }
        // Never-contacted entries stay non-frequent even inside the top slice.
        if (candidate.getTimesContacted() > 0) {
            candidate.setFrequent(true);
        }
    }
}
/** Builds the provider content URI for a contact from its numeric row id. */
private Uri getContactUri(String id) {
    long rowId = Long.parseLong(id);
    return ContentUris.withAppendedId(ContactsContract.Contacts.CONTENT_URI, rowId);
}
/** A number is usable as a cache key iff it is non-null and non-empty. */
private boolean isNumberValid(String number) {
    return !TextUtils.isEmpty(number);
}
/**
 * Checks whether the contact at {@code contactUri} has a stored photo blob.
 *
 * @return true iff the photo row exists and its PHOTO column is non-null;
 *         false on a missing row or any query failure
 */
public boolean hasImage(Uri contactUri, ContentResolver resolver) {
    Uri photoUri = Uri.withAppendedPath(contactUri, ContactsContract.Contacts.Photo.CONTENT_DIRECTORY);
    String[] projection = { ContactsContract.CommonDataKinds.Photo.PHOTO };
    Cursor photoCursor = null;
    try {
        photoCursor = resolver.query(photoUri, projection, null, null, null);
        boolean hasRow = photoCursor != null && photoCursor.moveToFirst();
        return hasRow && !photoCursor.isNull(0);
    } catch (Exception ex) {
        logException("failed to check if contact has image", ex);
        return false;
    } finally {
        if (photoCursor != null) {
            photoCursor.close();
        }
    }
}
/**
 * Normalizes a raw phone string to digits (plus an optional leading '+').
 *
 * @return the normalized number; the input unchanged when it is null/empty;
 *         or null when the string does not start with a digit or '+'
 */
private String prepareContactNumber(String phone) {
    if (TextUtils.isEmpty(phone)) {
        return phone;
    }
    char first = phone.charAt(0);
    boolean dialable = first == '+' || Character.digit(first, 10) != -1;
    return dialable ? Phones.leaveOnlyDigitsAndPlus(phone) : null;
}
}
| |
/**
* generated by Xtext 2.12.0
*/
package org.etl.sparrow.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
import org.etl.sparrow.Action;
import org.etl.sparrow.SparrowPackage;
import org.etl.sparrow.Try;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Try</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link org.etl.sparrow.impl.TryImpl#getName <em>Name</em>}</li>
* <li>{@link org.etl.sparrow.impl.TryImpl#getAction <em>Action</em>}</li>
* </ul>
*
* @generated
*/
public class TryImpl extends MinimalEObjectImpl.Container implements Try
{
  /**
   * The default value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * A try block is unnamed until the parser assigns a name.
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected static final String NAME_EDEFAULT = null;
  /**
   * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * Holds {@link #NAME_EDEFAULT} until {@link #setName(String)} is called.
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected String name = NAME_EDEFAULT;
  /**
   * The cached value of the '{@link #getAction() <em>Action</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * Lazily created by {@link #getAction()}; remains {@code null} until first access.
   * <!-- end-user-doc -->
   * @see #getAction()
   * @generated
   * @ordered
   */
  protected EList<Action> action;
  /**
   * <!-- begin-user-doc -->
   * Instances are created reflectively by the Sparrow EMF factory.
   * <!-- end-user-doc -->
   * @generated
   */
  protected TryImpl()
  {
    super();
  }
  /**
   * <!-- begin-user-doc -->
   * Returns the static metaclass for reflective access to this object.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return SparrowPackage.Literals.TRY;
  }
  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getName()
  {
    return name;
  }
  /**
   * <!-- begin-user-doc -->
   * Sets the name and, when adapters are attached, broadcasts a SET notification
   * carrying the old and new values.
   * <!-- end-user-doc -->
   * @generated
   */
  public void setName(String newName)
  {
    String oldName = name;
    name = newName;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, SparrowPackage.TRY__NAME, oldName, name));
  }
  /**
   * <!-- begin-user-doc -->
   * Lazily creates the containment list on first access; children added to it
   * are owned by (contained in) this object.
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<Action> getAction()
  {
    if (action == null)
    {
      action = new EObjectContainmentEList<Action>(Action.class, this, SparrowPackage.TRY__ACTION);
    }
    return action;
  }
  /**
   * <!-- begin-user-doc -->
   * Detaches a contained child during bidirectional-reference maintenance.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case SparrowPackage.TRY__ACTION:
        return ((InternalEList<?>)getAction()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflective getter used by the generic EMF API.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case SparrowPackage.TRY__NAME:
        return getName();
      case SparrowPackage.TRY__ACTION:
        return getAction();
    }
    return super.eGet(featureID, resolve, coreType);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflective setter; setting ACTION replaces the whole list contents.
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case SparrowPackage.TRY__NAME:
        setName((String)newValue);
        return;
      case SparrowPackage.TRY__ACTION:
        getAction().clear();
        getAction().addAll((Collection<? extends Action>)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }
  /**
   * <!-- begin-user-doc -->
   * Reflectively restores a feature to its default value.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case SparrowPackage.TRY__NAME:
        setName(NAME_EDEFAULT);
        return;
      case SparrowPackage.TRY__ACTION:
        getAction().clear();
        return;
    }
    super.eUnset(featureID);
  }
  /**
   * <!-- begin-user-doc -->
   * Reports whether a feature currently differs from its default value.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case SparrowPackage.TRY__NAME:
        return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
      case SparrowPackage.TRY__ACTION:
        return action != null && !action.isEmpty();
    }
    return super.eIsSet(featureID);
  }
  /**
   * <!-- begin-user-doc -->
   * Appends the name attribute to the default EMF string form; proxies fall
   * back to the superclass representation.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();
    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (name: ");
    result.append(name);
    result.append(')');
    return result.toString();
  }
} //TryImpl
| |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Copyright (c) 2014 Intel Corporation. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.xwalk.core.internal.xwview.test;
import android.app.Activity;
import android.content.Context;
import android.test.ActivityInstrumentationTestCase2;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.webkit.WebResourceResponse;
import android.widget.FrameLayout;
import java.io.InputStream;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.Callable;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.TimeUnit;
import junit.framework.Assert;
import org.chromium.content.browser.ContentViewCore;
import org.chromium.content.browser.test.util.CallbackHelper;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.ui.gfx.DeviceDisplayInfo;
import org.xwalk.core.internal.XWalkNavigationHistoryInternal;
import org.xwalk.core.internal.XWalkNavigationItemInternal;
import org.xwalk.core.internal.XWalkResourceClientInternal;
import org.xwalk.core.internal.XWalkSettings;
import org.xwalk.core.internal.XWalkUIClientInternal;
import org.xwalk.core.internal.XWalkViewInternal;
import org.xwalk.core.internal.XWalkWebChromeClient;
import static org.chromium.base.test.util.ScalableTimeout.scaleTimeout;
/**
 * Base class for XWalkViewInternal instrumentation tests. {@link #setUp()}
 * creates an XWalkViewInternal attached to the runner activity; the protected
 * helpers run view operations on the UI thread and block the instrumentation
 * thread until page loads, title changes or scale changes complete.
 */
public class XWalkViewInternalTestBase
        extends ActivityInstrumentationTestCase2<XWalkViewInternalTestRunnerActivity> {
    protected final static int WAIT_TIMEOUT_SECONDS = 15;
    private static final long WAIT_TIMEOUT_MS = scaleTimeout(15000);
    private static final int CHECK_INTERVAL = 100;
    private final static String TAG = "XWalkViewInternalTestBase";
    // The view under test; created on the UI thread in setUp().
    private XWalkViewInternal mXWalkViewInternal;
    final TestHelperBridge mTestHelperBridge = new TestHelperBridge();

    /** UI client that forwards page-load and title callbacks to a TestHelperBridge. */
    class TestXWalkUIClientInternalBase extends XWalkUIClientInternal {
        TestHelperBridge mInnerContentsClient;
        public TestXWalkUIClientInternalBase(TestHelperBridge client) {
            super(getXWalkView());
            mInnerContentsClient = client;
        }
        @Override
        public void onPageLoadStarted(XWalkViewInternal view, String url) {
            mInnerContentsClient.onPageStarted(url);
        }
        @Override
        public void onPageLoadStopped(XWalkViewInternal view, String url, LoadStatusInternal status) {
            mInnerContentsClient.onPageFinished(url);
        }
        @Override
        public void onReceivedTitle(XWalkViewInternal view, String title) {
            mInnerContentsClient.onTitleChanged(title);
        }
    }

    /** Convenience UI client bound to the shared mTestHelperBridge. */
    class TestXWalkUIClientInternal extends TestXWalkUIClientInternalBase {
        public TestXWalkUIClientInternal() {
            super(mTestHelperBridge);
        }
    }

    /** Resource client that forwards load/error/intercept callbacks to a TestHelperBridge. */
    class TestXWalkResourceClientBase extends XWalkResourceClientInternal {
        TestHelperBridge mInnerContentsClient;
        public TestXWalkResourceClientBase(TestHelperBridge client) {
            super(mXWalkViewInternal);
            mInnerContentsClient = client;
        }
        @Override
        public void onLoadStarted(XWalkViewInternal view, String url) {
            mInnerContentsClient.onLoadStarted(url);
        }
        @Override
        public void onReceivedLoadError(XWalkViewInternal view, int errorCode,
                String description, String failingUrl) {
            mInnerContentsClient.onReceivedLoadError(errorCode, description, failingUrl);
        }
        @Override
        public WebResourceResponse shouldInterceptLoadRequest(XWalkViewInternal view,
                String url) {
            return mInnerContentsClient.shouldInterceptLoadRequest(url);
        }
    }

    /** Convenience resource client bound to the shared mTestHelperBridge. */
    class TestXWalkResourceClient extends TestXWalkResourceClientBase {
        public TestXWalkResourceClient() {
            super(mTestHelperBridge);
        }
    }

    /**
     * Chrome client that records fullscreen custom-view show/hide events so
     * tests can wait on them.
     */
    class TestXWalkWebChromeClientBase extends XWalkWebChromeClient {
        private CallbackHelper mOnShowCustomViewCallbackHelper = new CallbackHelper();
        private CallbackHelper mOnHideCustomViewCallbackHelper = new CallbackHelper();
        private Activity mActivity = getActivity();
        private View mCustomView;
        private XWalkWebChromeClient.CustomViewCallback mExitCallback;
        public TestXWalkWebChromeClientBase() {
            super(mXWalkViewInternal);
        }
        @Override
        public void onShowCustomView(View view, XWalkWebChromeClient.CustomViewCallback callback) {
            mCustomView = view;
            mExitCallback = callback;
            // Go fullscreen and overlay the custom (e.g. video) view.
            mActivity.getWindow().setFlags(
                    WindowManager.LayoutParams.FLAG_FULLSCREEN,
                    WindowManager.LayoutParams.FLAG_FULLSCREEN);
            mActivity.getWindow().addContentView(view,
                    new FrameLayout.LayoutParams(
                            ViewGroup.LayoutParams.MATCH_PARENT,
                            ViewGroup.LayoutParams.MATCH_PARENT,
                            Gravity.CENTER));
            mOnShowCustomViewCallbackHelper.notifyCalled();
        }
        @Override
        public void onHideCustomView() {
            mActivity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
            mOnHideCustomViewCallbackHelper.notifyCalled();
        }
        public XWalkWebChromeClient.CustomViewCallback getExitCallback() {
            return mExitCallback;
        }
        public View getCustomView() {
            return mCustomView;
        }
        public boolean wasCustomViewShownCalled() {
            return mOnShowCustomViewCallbackHelper.getCallCount() > 0;
        }
        public void waitForCustomViewShown() throws TimeoutException, InterruptedException {
            mOnShowCustomViewCallbackHelper.waitForCallback(0, 1, WAIT_TIMEOUT_SECONDS, TimeUnit.SECONDS);
        }
        public void waitForCustomViewHidden() throws InterruptedException, TimeoutException {
            mOnHideCustomViewCallbackHelper.waitForCallback(0, 1, WAIT_TIMEOUT_SECONDS, TimeUnit.SECONDS);
        }
    }

    /** Installs a UI client on the test view, on the UI thread. */
    void setUIClient(final XWalkUIClientInternal client) {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                getXWalkView().setUIClient(client);
            }
        });
    }

    /** Installs a resource client on the test view, on the UI thread. */
    void setResourceClient(final XWalkResourceClientInternal client) {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                getXWalkView().setResourceClient(client);
            }
        });
    }

    /** Installs a chrome client on the test view, on the UI thread. */
    void setXWalkWebChromeClient(final TestXWalkWebChromeClientBase client) {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkViewInternal.setXWalkWebChromeClient(client);
            }
        });
    }

    /** Two views plus their helper bridges, for multi-view tests. */
    static class ViewPair {
        private final XWalkViewInternal view0;
        private final TestHelperBridge client0;
        private final XWalkViewInternal view1;
        private final TestHelperBridge client1;
        ViewPair(XWalkViewInternal view0, TestHelperBridge client0,
                XWalkViewInternal view1, TestHelperBridge client1) {
            this.view0 = view0;
            this.client0 = client0;
            this.view1 = view1;
            this.client1 = client1;
        }
        XWalkViewInternal getView0() {
            return view0;
        }
        TestHelperBridge getClient0() {
            return client0;
        }
        XWalkViewInternal getView1() {
            return view1;
        }
        TestHelperBridge getClient1() {
            return client1;
        }
    }

    public XWalkViewInternalTestBase() {
        super(XWalkViewInternalTestRunnerActivity.class);
    }

    /** Creates the view under test and wires up the default clients. */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // Must call getActivity() here but not in main thread.
        final Activity activity = getActivity();
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkViewInternal = new XWalkViewInternal(activity, activity);
                getActivity().addView(mXWalkViewInternal);
                mXWalkViewInternal.setUIClient(new TestXWalkUIClientInternal());
                mXWalkViewInternal.setResourceClient(new TestXWalkResourceClient());
            }
        });
    }

    /** Polls until the callable evaluates to true on the UI thread. */
    protected boolean pollOnUiThread(final Callable<Boolean> callable) throws Exception {
        return CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                try {
                    return runTestOnUiThreadAndGetResult(callable);
                } catch (Throwable e) {
                    return false;
                }
            }
        });
    }

    /** Loads a javascript: URL; other schemes are rejected with a warning. */
    protected void loadJavaScriptUrl(final String url) throws Exception {
        if (!url.startsWith("javascript:")) {
            Log.w(TAG, "loadJavascriptUrl only accepts javascript: url");
            return;
        }
        loadUrlAsync(url);
    }

    /** Loads a URL and blocks until onPageFinished fires (or times out). */
    protected void loadUrlSync(final String url) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadUrlAsync(url);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Starts a URL load on the UI thread without waiting for completion. */
    protected void loadUrlAsync(final String url) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkViewInternal.load(url, null);
            }
        });
    }

    /** Loads raw data and blocks until onPageFinished fires (or times out). */
    protected void loadDataSync(final String url, final String data, final String mimeType,
            final boolean isBase64Encoded) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadDataAsync(url, data, mimeType, isBase64Encoded);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /**
     * Starts a data load on the UI thread without waiting.
     * NOTE(review): mimeType/isBase64Encoded are accepted for API symmetry but
     * XWalkViewInternal.load(url, data) takes neither — confirm this is
     * intentional before relying on non-HTML or base64 content.
     */
    protected void loadDataAsync(final String url, final String data, final String mimeType,
            final boolean isBase64Encoded) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkViewInternal.load(url, data);
            }
        });
    }

    /** Like loadUrlSync, but against an arbitrary view/bridge pair. */
    protected void loadUrlSyncByContent(final XWalkViewInternal xWalkViewInternal,
            final TestHelperBridge contentsClient,
            final String url) throws Exception {
        CallbackHelper pageFinishedHelper = contentsClient.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        loadUrlAsyncByContent(xWalkViewInternal, url);
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Like loadUrlAsync, but against an arbitrary view. */
    protected void loadUrlAsyncByContent(final XWalkViewInternal xWalkViewInternal,
            final String url) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                xWalkViewInternal.load(url, null);
            }
        });
    }

    /** Returns the current page title, read on the UI thread. */
    protected String getTitleOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkViewInternal.getTitle();
            }
        });
    }

    /** Runs a callable on the UI thread and blocks for its result. */
    protected <R> R runTestOnUiThreadAndGetResult(Callable<R> callable)
            throws Exception {
        FutureTask<R> task = new FutureTask<R>(callable);
        getInstrumentation().waitForIdleSync();
        getInstrumentation().runOnMainSync(task);
        return task.get();
    }

    /**
     * Reads an instrumentation asset fully into a String.
     *
     * Fixes over the previous version: InputStream.read() may return fewer
     * bytes than requested, so the read is looped until the asset is fully
     * consumed, and the stream is closed in a finally block so it is not
     * leaked when reading throws.
     *
     * @throws RuntimeException wrapping any IOException from the asset read
     */
    protected String getFileContent(String fileName) {
        InputStream inputStream = null;
        try {
            Context context = getInstrumentation().getContext();
            inputStream = context.getAssets().open(fileName);
            int size = inputStream.available();
            byte[] buffer = new byte[size];
            int offset = 0;
            while (offset < size) {
                int read = inputStream.read(buffer, offset, size - offset);
                if (read == -1) {
                    break;
                }
                offset += read;
            }
            return new String(buffer, 0, offset);
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            if (inputStream != null) {
                try {
                    inputStream.close();
                } catch (IOException ignored) {
                    // Best-effort close; the content or the original error
                    // has already propagated.
                }
            }
        }
    }

    /** Returns the title of an arbitrary view, read on the UI thread. */
    protected String getTitleOnUiThreadByContent(final XWalkViewInternal xWalkViewInternal) throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                String title = xWalkViewInternal.getTitle();
                return title;
            }
        });
    }

    /** Returns the settings of an arbitrary view, read on the UI thread. */
    protected XWalkSettings getXWalkSettingsOnUiThreadByContent(
            final XWalkViewInternal xWalkViewInternal) throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<XWalkSettings>() {
            @Override
            public XWalkSettings call() throws Exception {
                return xWalkViewInternal.getSettings();
            }
        });
    }

    /** Creates an extra view on the UI thread, attached to the activity. */
    protected XWalkViewInternal createXWalkViewContainerOnMainSync(
            final Context context,
            final XWalkUIClientInternal uiClient,
            final XWalkResourceClientInternal resourceClient) throws Exception {
        final AtomicReference<XWalkViewInternal> xWalkViewContainer =
                new AtomicReference<XWalkViewInternal>();
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                xWalkViewContainer.set(new XWalkViewInternal(context, getActivity()));
                getActivity().addView(xWalkViewContainer.get());
                xWalkViewContainer.get().setUIClient(uiClient);
                xWalkViewContainer.get().setResourceClient(resourceClient);
            }
        });
        return xWalkViewContainer.get();
    }

    /** Creates two views with their own clients and pairs them up. */
    protected ViewPair createViewsOnMainSync(final TestHelperBridge helperBridge0,
            final TestHelperBridge helperBridge1,
            final XWalkUIClientInternal uiClient0,
            final XWalkUIClientInternal uiClient1,
            final XWalkResourceClientInternal resourceClient0,
            final XWalkResourceClientInternal resourceClient1,
            final Context context) throws Throwable {
        final XWalkViewInternal walkView0 = createXWalkViewContainerOnMainSync(context,
                uiClient0, resourceClient0);
        final XWalkViewInternal walkView1 = createXWalkViewContainerOnMainSync(context,
                uiClient1, resourceClient1);
        final AtomicReference<ViewPair> viewPair = new AtomicReference<ViewPair>();
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                viewPair.set(new ViewPair(walkView0, helperBridge0, walkView1, helperBridge1));
            }
        });
        return viewPair.get();
    }

    /** Loads an asset file as text/html and waits for the page to finish. */
    protected void loadAssetFile(String fileName) throws Exception {
        String fileContent = getFileContent(fileName);
        loadDataSync(fileName, fileContent, "text/html", false);
    }

    /** Loads an asset file and waits for a title-changed callback instead. */
    public void loadAssetFileAndWaitForTitle(String fileName) throws Exception {
        CallbackHelper getTitleHelper = mTestHelperBridge.getOnTitleUpdatedHelper();
        int currentCallCount = getTitleHelper.getCallCount();
        String fileContent = getFileContent(fileName);
        loadDataAsync(fileName, fileContent, "text/html", false);
        getTitleHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    protected XWalkViewInternal getXWalkView() {
        return mXWalkViewInternal;
    }

    /** Runs the given action and waits for the next onPageFinished callback. */
    protected void runTestWaitPageFinished(Runnable runnable) throws Exception {
        CallbackHelper pageFinishedHelper = mTestHelperBridge.getOnPageFinishedHelper();
        int currentCallCount = pageFinishedHelper.getCallCount();
        runnable.run();
        pageFinishedHelper.waitForCallback(currentCallCount, 1, WAIT_TIMEOUT_SECONDS,
                TimeUnit.SECONDS);
    }

    /** Reloads the page with the given mode and waits for it to finish. */
    protected void reloadSync(final int mode) throws Exception {
        runTestWaitPageFinished(new Runnable() {
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkViewInternal.reload(mode);
                    }
                });
            }
        });
    }

    /** Navigates one entry back in history and waits for the load to finish. */
    protected void goBackSync() throws Throwable {
        runTestWaitPageFinished(new Runnable() {
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkViewInternal.getNavigationHistory().navigate(
                                XWalkNavigationHistoryInternal.DirectionInternal.BACKWARD, 1);
                    }
                });
            }
        });
    }

    /** Navigates one entry forward in history and waits for the load to finish. */
    protected void goForwardSync() throws Throwable {
        runTestWaitPageFinished(new Runnable() {
            @Override
            public void run() {
                getInstrumentation().runOnMainSync(new Runnable() {
                    @Override
                    public void run() {
                        mXWalkViewInternal.getNavigationHistory().navigate(
                                XWalkNavigationHistoryInternal.DirectionInternal.FORWARD, 1);
                    }
                });
            }
        });
    }

    protected void clearHistoryOnUiThread() throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkViewInternal.getNavigationHistory().clear();
            }
        });
    }

    protected boolean canGoBackOnUiThread() throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() {
                return mXWalkViewInternal.getNavigationHistory().canGoBack();
            }
        });
    }

    protected boolean canGoForwardOnUiThread() throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() {
                return mXWalkViewInternal.getNavigationHistory().canGoForward();
            }
        });
    }

    protected int historySizeOnUiThread() throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<Integer>() {
            @Override
            public Integer call() {
                return mXWalkViewInternal.getNavigationHistory().size();
            }
        });
    }

    protected boolean hasItemAtOnUiThread(final int index) throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() {
                return mXWalkViewInternal.getNavigationHistory().hasItemAt(index);
            }
        });
    }

    protected XWalkNavigationItemInternal getItemAtOnUiThread(final int index) throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<XWalkNavigationItemInternal>() {
            @Override
            public XWalkNavigationItemInternal call() {
                return mXWalkViewInternal.getNavigationHistory().getItemAt(index);
            }
        });
    }

    protected XWalkNavigationItemInternal getCurrentItemOnUiThread() throws Throwable {
        return runTestOnUiThreadAndGetResult(new Callable<XWalkNavigationItemInternal>() {
            @Override
            public XWalkNavigationItemInternal call() {
                return mXWalkViewInternal.getNavigationHistory().getCurrentItem();
            }
        });
    }

    /** Evaluates JavaScript in the page and blocks for its JSON-encoded result. */
    protected String executeJavaScriptAndWaitForResult(final String code) throws Exception {
        final TestHelperBridge.OnEvaluateJavaScriptResultHelper helper =
                mTestHelperBridge.getOnEvaluateJavaScriptResultHelper();
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                helper.evaluateJavascript(mXWalkViewInternal, code);
            }
        });
        helper.waitUntilHasValue();
        Assert.assertTrue("Failed to retrieve JavaScript evaluation results.", helper.hasValue());
        return helper.getJsonResultAndClear();
    }

    /** Builds a ViewPair with fresh bridges and clients for multi-view tests. */
    protected ViewPair createViews() throws Throwable {
        TestHelperBridge helperBridge0 = new TestHelperBridge();
        TestHelperBridge helperBridge1 = new TestHelperBridge();
        TestXWalkUIClientInternalBase uiClient0 = new TestXWalkUIClientInternalBase(helperBridge0);
        TestXWalkUIClientInternalBase uiClient1 = new TestXWalkUIClientInternalBase(helperBridge1);
        TestXWalkResourceClientBase resourceClient0 =
                new TestXWalkResourceClientBase(helperBridge0);
        TestXWalkResourceClientBase resourceClient1 =
                new TestXWalkResourceClientBase(helperBridge1);
        ViewPair viewPair =
                createViewsOnMainSync(helperBridge0, helperBridge1, uiClient0, uiClient1,
                        resourceClient0, resourceClient1, getActivity());
        return viewPair;
    }

    protected String getUrlOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkViewInternal.getUrl();
            }
        });
    }

    protected void clearCacheOnUiThread(final boolean includeDiskFiles) throws Exception {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkViewInternal.clearCache(includeDiskFiles);
            }
        });
    }

    protected String getAPIVersionOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkViewInternal.getAPIVersion();
            }
        });
    }

    protected String getXWalkVersionOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<String>() {
            @Override
            public String call() throws Exception {
                return mXWalkViewInternal.getXWalkVersion();
            }
        });
    }

    protected ContentViewCore getContentViewCore() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<ContentViewCore>() {
            @Override
            public ContentViewCore call() throws Exception {
                return mXWalkViewInternal.getXWalkContentForTest();
            }
        });
    }

    /** Applies a zoom delta on the UI thread and waits until the scale changes. */
    protected void zoomByOnUiThreadAndWait(final float delta) throws Throwable {
        final float previousScale = getPixelScaleOnUiThread();
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                mXWalkViewInternal.zoomBy(delta);
            }
        });
        // The zoom level is updated asynchronously.
        waitForScaleChange(previousScale);
    }

    protected void waitForScaleChange(final float previousScale) throws Throwable {
        poll(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return previousScale != getPixelScaleOnUiThread();
            }
        });
    }

    protected void waitForScaleToBecome(final float expectedScale) throws Throwable {
        poll(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return expectedScale == getScaleOnUiThread();
            }
        });
    }

    /**
     * Returns pure page scale.
     */
    protected float getScaleOnUiThread() throws Exception {
        final ContentViewCore contentViewCore = getContentViewCore();
        return runTestOnUiThreadAndGetResult(new Callable<Float>() {
            @Override
            public Float call() throws Exception {
                return contentViewCore.getScale();
            }
        });
    }

    /**
     * Returns page scale multiplied by the screen density.
     */
    protected float getPixelScaleOnUiThread() throws Exception {
        final ContentViewCore contentViewCore = getContentViewCore();
        final double dipScale = DeviceDisplayInfo.create(getActivity()).getDIPScale();
        return runTestOnUiThreadAndGetResult(new Callable<Float>() {
            @Override
            public Float call() throws Exception {
                float pixelScale = contentViewCore.getScale() * (float) dipScale;
                return pixelScale;
            }
        });
    }

    /**
     * Returns whether a user can zoom the page in.
     * (Dropped a previously-fetched, unused ContentViewCore local.)
     */
    protected boolean canZoomInOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return mXWalkViewInternal.canZoomIn();
            }
        });
    }

    /**
     * Returns whether a user can zoom the page out.
     * (Dropped a previously-fetched, unused ContentViewCore local.)
     */
    protected boolean canZoomOutOnUiThread() throws Exception {
        return runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return mXWalkViewInternal.canZoomOut();
            }
        });
    }

    /** Zooms one step in (must succeed) and waits for the scale to change. */
    protected void zoomInOnUiThreadAndWait() throws Throwable {
        final float previousScale = getPixelScaleOnUiThread();
        assertTrue(runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return mXWalkViewInternal.zoomIn();
            }
        }));
        // The zoom level is updated asynchronously.
        waitForScaleChange(previousScale);
    }

    /** Zooms one step out (must succeed) and waits for the scale to change. */
    protected void zoomOutOnUiThreadAndWait() throws Throwable {
        final float previousScale = getPixelScaleOnUiThread();
        assertTrue(runTestOnUiThreadAndGetResult(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                return mXWalkViewInternal.zoomOut();
            }
        }));
        // The zoom level is updated asynchronously.
        waitForScaleChange(previousScale);
    }

    /** Polls the callable every CHECK_INTERVAL ms and asserts it becomes true. */
    protected void poll(final Callable<Boolean> callable) throws Exception {
        assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                try {
                    return callable.call();
                } catch (Throwable e) {
                    Log.e(TAG, "Exception while polling.", e);
                    return false;
                }
            }
        }, WAIT_TIMEOUT_MS, CHECK_INTERVAL));
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.rcfile;
import com.facebook.presto.common.block.Block;
import com.facebook.presto.rcfile.binary.BinaryRcFileEncoding;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.slice.DynamicSliceOutput;
import io.airlift.slice.Slice;
import io.airlift.slice.SliceOutput;
import io.airlift.units.DataSize;
import org.testng.annotations.Test;
import java.io.IOException;
import java.util.List;
import static com.facebook.presto.common.type.SmallintType.SMALLINT;
import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.slice.Slices.utf8Slice;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
import static java.lang.Math.toIntExact;
import static java.util.stream.Collectors.toList;
import static org.testng.Assert.assertEquals;
/**
 * Exercises {@link RcFileReader} range semantics with hand-crafted, uncompressed
 * RCFile images: each test writes segments (a file header or sync marker followed
 * by row groups of SMALLINT values) and verifies that reading any byte range
 * returns exactly the rows of the segments that begin inside that range.
 */
public class TestRcFileReaderManual
{
    private static final Slice COLUMN_COUNT_METADATA_KEY = utf8Slice("hive.io.rcfile.column.number");
    private static final Slice RCFILE_MAGIC = utf8Slice("RCF");
    private static final int CURRENT_VERSION = 1;
    // Halves of the 16-byte sync marker written between segments.
    // Renamed from syncFirst/syncSecond: static final constants use UPPER_SNAKE_CASE.
    private static final long SYNC_FIRST = 0x1234_5678_9012_3456L;
    private static final long SYNC_SECOND = 0x7890_1234_5678_9012L;

    @Test
    public void testNoStartSync()
            throws Exception
    {
        SliceOutput output = new DynamicSliceOutput(10 * 1024);
        List<Segment> segments = ImmutableList.of(
                writeSegment(output, ImmutableList.of(ImmutableList.of(0, 2, 3, 4), ImmutableList.of(10, 12, 13))),
                writeSegment(output, ImmutableList.of(ImmutableList.of(20, 22), ImmutableList.of(30, 33), ImmutableList.of(40, 44))),
                writeSegment(output, ImmutableList.of(ImmutableList.of(100, 101, 102))));

        assertFileSegments(output.slice(), segments);
    }

    @Test
    public void testStartSync()
            throws Exception
    {
        SliceOutput output = new DynamicSliceOutput(10 * 1024);
        List<Segment> segments = ImmutableList.of(
                // empty first segment: file begins with a header immediately followed by a sync
                writeSegment(output, ImmutableList.of()),
                writeSegment(output, ImmutableList.of(ImmutableList.of(0, 2, 3, 4), ImmutableList.of(10, 12, 13))),
                writeSegment(output, ImmutableList.of(ImmutableList.of(20, 22), ImmutableList.of(30, 33), ImmutableList.of(40, 44))),
                writeSegment(output, ImmutableList.of(ImmutableList.of(100, 101, 102))));

        assertFileSegments(output.slice(), segments);
    }

    /**
     * Asserts that every meaningful byte range of {@code file} yields exactly the
     * values of the segments starting inside that range.
     */
    private static void assertFileSegments(Slice file, List<Segment> segments)
            throws IOException
    {
        // read whole file
        List<Integer> allValues = segments.stream()
                .map(Segment::getValues)
                .flatMap(List::stream)
                .collect(toList());
        assertEquals(allValues, readValues(file, 0, file.length()));

        for (Segment segment : segments) {
            // whole segment
            assertEquals(segment.getValues(), readValues(file, segment.getOffset(), segment.getLength()));
            // first byte of segment
            assertEquals(segment.getValues(), readValues(file, segment.getOffset(), 1));
            // straddle segment start
            assertEquals(segment.getValues(), readValues(file, segment.getOffset() - 1, 2));
            // regions entirely within the segment (contain no segment start, so no rows)
            assertEquals(ImmutableList.of(), readValues(file, segment.getOffset() + 1, 1));
            assertEquals(ImmutableList.of(), readValues(file, segment.getOffset() + 1, segment.getLength() - 1));

            for (int rowGroupOffset : segment.getRowGroupSegmentOffsets()) {
                // segment header to row group start
                assertEquals(segment.getValues(), readValues(file, segment.getOffset(), rowGroupOffset));
                assertEquals(segment.getValues(), readValues(file, segment.getOffset(), rowGroupOffset - 1));
                assertEquals(segment.getValues(), readValues(file, segment.getOffset(), rowGroupOffset + 1));
                // region from row group start until end of file (row group offset is always inside of the segment since a
                // segment starts with a file header or sync sequence)
                assertEquals(ImmutableList.of(), readValues(file, segment.getOffset() + rowGroupOffset, segment.getLength() - rowGroupOffset));
            }
        }

        // all combinations of segments
        for (int startSegmentIndex = 0; startSegmentIndex < segments.size(); startSegmentIndex++) {
            Segment startSegment = segments.get(startSegmentIndex);
            for (int endSegmentIndex = startSegmentIndex; endSegmentIndex < segments.size(); endSegmentIndex++) {
                Segment endSegment = segments.get(endSegmentIndex);

                List<Integer> segmentsValues = segments.subList(startSegmentIndex, endSegmentIndex + 1).stream()
                        .map(Segment::getValues)
                        .flatMap(List::stream)
                        .collect(toList());
                assertEquals(segmentsValues, readValues(file, startSegment.getOffset(), endSegment.getOffset() + endSegment.getLength() - startSegment.getOffset()));
                assertEquals(segmentsValues, readValues(file, startSegment.getOffset(), endSegment.getOffset() + 1 - startSegment.getOffset()));
                assertEquals(segmentsValues, readValues(file, startSegment.getOffset() - 1, endSegment.getOffset() + 1 + endSegment.getLength() - startSegment.getOffset()));
                assertEquals(segmentsValues, readValues(file, startSegment.getOffset() - 1, endSegment.getOffset() + 1 + 1 - startSegment.getOffset()));
            }
        }
    }

    /**
     * Writes one segment (file header on first write, otherwise a sync marker,
     * followed by the given row groups) and records its layout.
     */
    private static Segment writeSegment(SliceOutput output, List<List<Integer>> rowGroups)
    {
        int offset = output.size();

        // if we are at the beginning of the file write a file header, otherwise write a sync
        if (offset == 0) {
            writeFileHeader(output);
        }
        else {
            writeSync(output);
        }

        ImmutableList.Builder<Integer> rowGroupOffsets = ImmutableList.builder();
        for (List<Integer> rowGroup : rowGroups) {
            // offsets are recorded relative to the segment start
            rowGroupOffsets.add(output.size() - offset);
            writeRowGroup(output, rowGroup);
        }

        int length = output.size() - offset;

        return new Segment(
                rowGroups.stream()
                        .flatMap(List::stream)
                        .collect(toList()),
                offset,
                length,
                rowGroupOffsets.build());
    }

    /** Writes the RCFile header: magic, version, codec flag, metadata, and sync. */
    private static void writeFileHeader(SliceOutput output)
    {
        // write header
        output.writeBytes(RCFILE_MAGIC);
        output.writeByte(CURRENT_VERSION);

        // write codec information (false = uncompressed)
        output.writeBoolean(false);

        // write metadata (which contains just the column count)
        output.writeInt(Integer.reverseBytes(1));
        output.writeByte(COLUMN_COUNT_METADATA_KEY.length());
        output.writeBytes(COLUMN_COUNT_METADATA_KEY);
        output.writeByte(1);
        output.writeByte('1');

        // write sync sequence
        output.writeLong(SYNC_FIRST);
        output.writeLong(SYNC_SECOND);
    }

    /** Writes a sync marker: the -1 sentinel followed by the 16-byte sync sequence. */
    private static void writeSync(SliceOutput output)
    {
        output.writeInt(-1);
        output.writeLong(SYNC_FIRST);
        output.writeLong(SYNC_SECOND);
    }

    /** Writes one uncompressed single-column row group of SMALLINT values. */
    private static void writeRowGroup(SliceOutput output, List<Integer> shortValues)
    {
        // arbitrary limit to assure all lengths write as a simple single vint byte
        checkArgument(shortValues.size() < 32);
        // key section is 4 vint sizes followed by the column data
        int columnLengthsLength = shortValues.size();
        int keySectionLength = 4 + columnLengthsLength;

        int columnDataLength = shortValues.size() * 2;

        // write the sum of the uncompressed key length and compressed value length
        // this number is useless to the reader
        output.writeInt(Integer.reverseBytes(keySectionLength + columnDataLength));

        // key section: uncompressed size
        output.writeInt(Integer.reverseBytes(keySectionLength));
        // key section: compressed size
        output.writeInt(Integer.reverseBytes(keySectionLength));

        // key section: row count
        output.writeByte(shortValues.size());

        // key section: column data compressed size
        output.writeByte(columnDataLength);
        // key section: column data uncompressed size
        output.writeByte(columnDataLength);
        // key section: column lengths uncompressed size
        output.writeByte(columnLengthsLength);
        // key section: column lengths (every SMALLINT is 2 bytes)
        for (int ignored : shortValues) {
            output.write(2);
        }

        // value section: data
        for (int value : shortValues) {
            output.writeShort(Short.reverseBytes((short) value));
        }
    }

    /**
     * Reads all SMALLINT values visible through an {@link RcFileReader} opened
     * over the given byte range (clamped to the file bounds for convenience).
     */
    private static List<Integer> readValues(Slice data, int offset, int length)
            throws IOException
    {
        // to simplify the testing:
        //     change negative offsets to 0
        //     truncate length so it is not off the end of the file
        if (offset < 0) {
            // adjust length to new offset
            length += offset;
            offset = 0;
        }
        if (offset + length > data.length()) {
            length = data.length() - offset;
        }

        RcFileReader reader = new RcFileReader(
                new SliceRcFileDataSource(data),
                new BinaryRcFileEncoding(),
                ImmutableMap.of(0, SMALLINT),
                new BogusRcFileCodecFactory(),
                offset,
                length,
                new DataSize(8, MEGABYTE));

        ImmutableList.Builder<Integer> values = ImmutableList.builder();
        while (reader.advance() >= 0) {
            Block block = reader.readBlock(0);
            for (int position = 0; position < block.getPositionCount(); position++) {
                values.add((int) SMALLINT.getLong(block, position));
            }
        }

        return values.build();
    }

    /** Immutable record of a written segment: its values and byte layout. */
    private static class Segment
    {
        private final List<Integer> values;
        private final int offset;
        private final int length;
        private final List<Integer> rowGroupSegmentOffsets;

        public Segment(List<Integer> values, int offset, int length, List<Integer> rowGroupSegmentOffsets)
        {
            this.values = ImmutableList.copyOf(values);
            this.offset = offset;
            this.length = length;
            this.rowGroupSegmentOffsets = ImmutableList.copyOf(rowGroupSegmentOffsets);
        }

        public List<Integer> getValues()
        {
            return values;
        }

        public int getOffset()
        {
            return offset;
        }

        public int getLength()
        {
            return length;
        }

        public List<Integer> getRowGroupSegmentOffsets()
        {
            return rowGroupSegmentOffsets;
        }
    }

    /** In-memory {@link RcFileDataSource} backed by a {@link Slice}. */
    private static class SliceRcFileDataSource
            implements RcFileDataSource
    {
        private static final RcFileDataSourceId DATA_SOURCE_ID = new RcFileDataSourceId("test");

        private final Slice data;

        public SliceRcFileDataSource(Slice data)
        {
            this.data = data;
        }

        @Override
        public long getReadBytes()
        {
            return 0;
        }

        @Override
        public long getReadTimeNanos()
        {
            return 0;
        }

        @Override
        public long getSize()
        {
            return data.length();
        }

        @Override
        public void readFully(long position, byte[] buffer, int bufferOffset, int bufferLength)
        {
            data.getBytes(toIntExact(position), buffer, bufferOffset, bufferLength);
        }

        @Override
        public void close()
        {
        }

        @Override
        public RcFileDataSourceId getId()
        {
            return DATA_SOURCE_ID;
        }
    }

    /** Codec factory that must never be used: the test files are uncompressed. */
    private static class BogusRcFileCodecFactory
            implements RcFileCodecFactory
    {
        @Override
        public RcFileCompressor createCompressor(String codecName)
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public RcFileDecompressor createDecompressor(String codecName)
        {
            throw new UnsupportedOperationException();
        }
    }
}
| |
/*
* Copyright 2017 Banco Bilbao Vizcaya Argentaria, S.A..
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bbva.arq.devops.ae.mirrorgate.service;
import static com.bbva.arq.devops.ae.mirrorgate.mapper.ReviewMapper.map;
import com.bbva.arq.devops.ae.mirrorgate.dto.ApplicationDTO;
import com.bbva.arq.devops.ae.mirrorgate.dto.ReviewDTO;
import com.bbva.arq.devops.ae.mirrorgate.model.EventType;
import com.bbva.arq.devops.ae.mirrorgate.model.Review;
import com.bbva.arq.devops.ae.mirrorgate.repository.ReviewRepository;
import com.bbva.arq.devops.ae.mirrorgate.support.Platform;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.bson.types.ObjectId;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Service;
@Service
public class ReviewServiceImpl implements ReviewService {

    // The long-term stats window is this many times the short-term window.
    private static final int LONG_TERM_MULTIPLIER = 3;
    // One day in milliseconds; (long) cast prevents int overflow in the product.
    private static final long DAY_IN_MS = (long) 1000 * 60 * 60 * 24;
    // Namespace prefix for reviews created through saveApplicationReview.
    private static final String FB_NAMESPACE = "Mirrorgate/";
    // Suffix of the synthetic per-app "history" document holding accumulated totals.
    private static final String FB_HISTORY_SUFFIX = "_history";

    private final ReviewRepository repository;
    private final EventService eventService;

    @Autowired
    public ReviewServiceImpl(ReviewRepository repository, EventService eventService) {
        this.repository = repository;
        this.eventService = eventService;
    }

    /**
     * Builds per-application rating stats for the given app names: all-time
     * totals taken from the historical documents, plus short-term and long-term
     * averages computed from reviews after the respective cut-off dates.
     */
    @Override
    public List<ApplicationDTO> getAverageRateByAppNames(List<String> names, int daysShortTerm) {
        final List<ApplicationDTO> result = repository.getAppInfoByAppNames(names);
        final List<Review> history = repository.findHistoricalForApps(names);
        //Update merge the history review inside the result
        result.forEach((app) -> {
            final Optional<Review> historyReviewOpt = history.stream()
                .filter((r) -> app.getAppname().equals(r.getAppname()) && app.getPlatform() == r.getPlatform())
                .findFirst();
            if (historyReviewOpt.isPresent()) {
                final Review historyReview = historyReviewOpt.get();
                // total rating = stored average x stored vote count
                app.setVotesTotal(historyReview.getAmount())
                    .setUrl(historyReview.getUrl())
                    .setRatingTotal((long) (historyReview.getStarrating() * historyReview.getAmount()));
            }
        });
        final int daysLongTerm = daysShortTerm * LONG_TERM_MULTIPLIER;
        final Date dateShortTerm = new Date(System.currentTimeMillis() - (daysShortTerm * DAY_IN_MS));
        final Date dateLongTerm = new Date(System.currentTimeMillis() - (daysLongTerm * DAY_IN_MS));
        final List<ApplicationDTO> statsShortTerm = repository
            .getAverageRateByAppNamesAfterTimestamp(names, dateShortTerm.getTime());
        final List<ApplicationDTO> statsLongTerm = repository
            .getAverageRateByAppNamesAfterTimestamp(names, dateLongTerm.getTime());
        result.forEach((app) -> {
            final Optional<ApplicationDTO> appStatsShortTerm = statsShortTerm.stream()
                .filter((stat) ->
                    stat.getAppname().equals(app.getAppname()) && stat.getPlatform().equals(app.getPlatform())
                )
                .findFirst();
            final Optional<ApplicationDTO> appStatsLongTerm = statsLongTerm.stream()
                .filter((stat) ->
                    stat.getAppname().equals(app.getAppname()) && stat.getPlatform().equals(app.getPlatform())
                )
                .findFirst();
            appStatsShortTerm.ifPresent(applicationDTO -> app.setRatingShortTerm(applicationDTO.getRatingShortTerm())
                .setVotesShortTerm(applicationDTO.getVotesShortTerm())
                .setShortTermLength(daysShortTerm));
            //Ugly hack... we use the shortTerm to return the data even if it's for longTerm :-(
            appStatsLongTerm.ifPresent(applicationDTO -> app.setRatingLongTerm(applicationDTO.getRatingShortTerm())
                .setVotesLongTerm(applicationDTO.getVotesShortTerm())
                .setLongTermLength(daysLongTerm));
        });
        return result;
    }

    // Collects the comment ids of all given reviews (used as the saveAll response).
    private List<String> getReviewIds(final Iterable<Review> reviews) {
        final List<String> savedIDs = new ArrayList<>();
        reviews.forEach(request -> savedIDs.add(request.getCommentId()));
        return savedIDs;
    }

    /**
     * Persists a batch of reviews. Entries with a timestamp are single reviews
     * (deduplicated against the DB by commentId); entries without a timestamp
     * carry aggregate history data and update the per-app historical documents.
     *
     * @return the comment ids of ALL input reviews, stored or not
     */
    @Override
    public List<String> saveAll(final Iterable<Review> reviews) {
        List<Review> singleReviews = StreamSupport.stream(reviews.spliterator(), false)
            .filter((r) -> r.getTimestamp() != null).collect(Collectors.toList());
        final List<Review> existingReviews = repository.findAllByCommentIdIn(
            singleReviews.stream().map(Review::getCommentId).collect(Collectors.toList())
        );
        singleReviews = singleReviews.stream()
            .filter((r) -> {
                //We exclude reviews that equal existing one and update the objectId for those
                // different and already in the DB while keeping the new ones
                for (final Review existingReview : existingReviews) {
                    if (existingReview.getCommentId().equals(r.getCommentId())) {
                        if (existingReview.equals(r)) {
                            return false;
                        }
                        // reuse the stored id so save() updates instead of inserting
                        r.setId(existingReview.getId());
                        return true;
                    }
                }
                return true;
            })
            .collect(Collectors.toList());
        final Iterable<Review> newReviews = repository.saveAll(singleReviews);
        eventService.saveEvents(newReviews, EventType.REVIEW);
        final List<Review> historyData = StreamSupport.stream(reviews.spliterator(), false)
            .filter((r) -> r.getTimestamp() == null).collect(Collectors.toList());
        if (! historyData.isEmpty()) {
            List<Review> dbHistoricalReviews = repository.findAllHistorical();
            if (! dbHistoricalReviews.isEmpty()) {
                // NOTE: this filter is deliberately side-effecting — it updates
                // each matched DB document in place and removes the consumed
                // entry from historyData so only brand-new apps remain below.
                dbHistoricalReviews = dbHistoricalReviews.stream().filter((review) -> {
                    final Optional<Review> newDataOpt = historyData.stream()
                        .filter((h) -> review.getAppname().equals(h.getAppname()))
                        .findFirst();
                    if (newDataOpt.isPresent()) {
                        final Review newData = newDataOpt.get();
                        review
                            .setAmount(newData.getAmount())
                            .setStarrating(newData.getStarrating());
                        historyData.remove(newData);
                        return true;
                    }
                    return false;
                }).collect(Collectors.toList());
                repository.saveAll(dbHistoricalReviews);
            }
            if (! historyData.isEmpty()) {
                // remaining entries belong to apps with no historical document yet
                repository.saveAll(historyData);
            }
        }
        return getReviewIds(reviews);
    }

    /**
     * Stores a review entered through the Mirrorgate UI for the given app,
     * emits a REVIEW event, and folds it into the app's history document.
     */
    @Override
    public ReviewDTO saveApplicationReview(final String appId, final ReviewDTO review) {
        final Review toSave = map(review);
        final Authentication auth = SecurityContextHolder.getContext().getAuthentication();
        // current time doubles as both timestamp and (unique enough) comment id
        final long id = System.currentTimeMillis();
        if (auth != null) {
            toSave.setAuthorName((String) auth.getPrincipal());
        }
        toSave.setAppname(FB_NAMESPACE + appId)
            .setTimestamp(id)
            .setCommentId(Long.toString(id))
            .setPlatform(Platform.Unknown);
        final Review savedReview = repository.save(toSave);
        eventService.saveEvent(savedReview, EventType.REVIEW);
        updateHistoryForApplicationReview(toSave);
        return map(savedReview);
    }

    @Override
    public Iterable<Review> getReviewsByObjectId(final List<ObjectId> objectIds) {
        return repository.findAllById(objectIds);
    }

    // Folds one new review into the per-app history document as a running average.
    // synchronized: the read-modify-write of the history document must not interleave.
    private synchronized void updateHistoryForApplicationReview(final Review toSave) {
        final List<Review> historyList = repository.findAllByCommentIdIn(
            Collections.singletonList(toSave.getAppname() + FB_HISTORY_SUFFIX)
        );
        Review history;
        if (! historyList.isEmpty()) {
            history = historyList.get(0);
        } else {
            history = new Review()
                .setPlatform(Platform.Unknown)
                .setAppname(toSave.getAppname())
                .setCommentId(toSave.getAppname() + FB_HISTORY_SUFFIX)
                .setAmount(0);
        }
        // sum of all previous ratings (stored average x stored count)
        final double rating = history.getStarrating() * history.getAmount();
        // setAmount runs first, so the division below intentionally uses the NEW count
        history.setAmount(history.getAmount() + 1)
            .setStarrating((toSave.getStarrating() + rating) / history.getAmount());
        repository.save(history);
    }
}
| |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.wado.mbean;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import javax.management.InstanceNotFoundException;
import javax.management.ListenerNotFoundException;
import javax.management.Notification;
import javax.management.NotificationListener;
import javax.management.ObjectName;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.dcm4che.data.Dataset;
import org.dcm4che.dict.Tags;
import org.dcm4che2.audit.message.ActiveParticipant;
import org.dcm4che2.audit.message.AuditEvent;
import org.dcm4che2.audit.message.AuditMessage;
import org.dcm4che2.audit.message.InstancesTransferredMessage;
import org.dcm4che2.audit.message.ParticipantObject;
import org.dcm4che2.audit.message.ParticipantObjectDescription;
import org.dcm4che2.audit.message.ParticipantObject.IDTypeCode;
import org.dcm4che2.audit.message.ParticipantObject.TypeCode;
import org.dcm4che2.audit.message.ParticipantObject.TypeCodeRole;
import org.dcm4che2.audit.message.ParticipantObjectDescription.SOPClass;
import org.dcm4chex.archive.common.SeriesStored;
import org.dcm4chex.archive.mbean.HttpUserInfo;
import org.dcm4chex.wado.common.WADORequestObject;
import org.dcm4chex.wado.common.WADOResponseObject;
import org.dcm4chex.wado.mbean.cache.WADOCacheImpl;
/**
* @author franz.willer
*
* The MBean to manage the WADO service.
* <p>
* This class use WADOSupport for the WADO methods and WADOCache for caching jpg
* images.
*/
public class WADOService extends AbstractCacheService {

    /** Marker value used by JMX string attributes to represent "not set". */
    private static final String NONE = "NONE";

    private WADOSupport support = new WADOSupport(this.server);

    /** Purges cached study entries whenever a series-stored notification arrives. */
    private final NotificationListener seriesStoredListener =
        new NotificationListener() {
            public void handleNotification(Notification notif, Object handback) {
                SeriesStored seriesStored = (SeriesStored) notif.getUserData();
                onSeriesStored(seriesStored);
            }
        };

    private boolean clearCacheForReceivedSeries = false;

    public WADOService() {
        cache = WADOCacheImpl.getWADOCache();
    }

    /**
     * @return Returns the clientRedirect.
     */
    public boolean isClientRedirect() {
        return cache.isClientRedirect();
    }

    /**
     * @param clientRedirect
     *            The clientRedirect to set.
     */
    public void setClientRedirect(boolean clientRedirect) {
        cache.setClientRedirect(clientRedirect);
    }

    /**
     * @return Returns the redirectCaching.
     */
    public boolean isRedirectCaching() {
        return cache.isRedirectCaching();
    }

    /**
     * @param redirectCaching
     *            The redirectCaching to set.
     */
    public void setRedirectCaching(boolean redirectCaching) {
        cache.setRedirectCaching(redirectCaching);
    }

    public String getImageQuality() {
        return cache.getImageQuality();
    }

    public void setImageQuality(String imageQuality) {
        cache.setImageQuality(imageQuality);
    }

    public String getImageWriterClass() {
        return cache.getImageWriterClass();
    }

    public void setImageWriterClass(String imageWriterClass) {
        cache.setImageWriterClass(imageWriterClass);
    }

    public boolean isForceRGB() {
        return support.isForceRGB();
    }

    public void setForceRGB(boolean forceRGB) {
        support.setForceRGB(forceRGB);
    }

    /**
     * @return Returns the useTransferSyntaxOfFileAsDefault.
     */
    public boolean isUseTransferSyntaxOfFileAsDefault() {
        return support.isUseTransferSyntaxOfFileAsDefault();
    }

    /**
     * Set default transfer syntax option.
     * <p>
     * If true use the TS from file.<br>
     * If false use Explicit VR little Endian (as defined in part 18)
     *
     * @param b
     *            If true use TS from file.
     */
    public void setUseTransferSyntaxOfFileAsDefault(boolean b) {
        support.setUseTransferSyntaxOfFileAsDefault(b);
    }

    public String getSrImageRows() {
        String rows = support.getSrImageRows();
        // reuse the already-fetched value instead of calling the delegate a second time
        return rows == null ? NONE : rows;
    }

    public void setSrImageRows(String srImageRows) {
        support.setSrImageRows( NONE.equals(srImageRows) ? null : srImageRows );
    }

    /**
     * Set URL to XSLT stylesheet that should be used to transform DICOM SR to
     * HTML document.
     *
     * @return
     */
    public String getHtmlXslURL() {
        return support.getHtmlXslURL();
    }

    public void setHtmlXslURL(String htmlXslURL) {
        support.setHtmlXslURL(htmlXslURL);
    }

    /**
     * Set URL to XSLT stylesheet that should be used to transform DICOM SR to
     * XHTML document.
     *
     * @return
     */
    public String getXHtmlXslURL() {
        return support.getXHtmlXslURL();
    }

    public void setXHtmlXslURL(String htmlXslURL) {
        support.setXHtmlXslURL(htmlXslURL);
    }

    /**
     * Set URL to XSLT stylesheet that should be used to transform DICOM SR to
     * xml document.
     *
     * @return
     */
    public String getXmlXslURL() {
        return support.getXmlXslURL();
    }

    public void setXmlXslURL(String xslURL) {
        support.setXmlXslURL(xslURL);
    }

    /**
     * Set URL to XSLT stylesheet that should be used to transform DICOM SR to
     * xml document.
     *
     * @return
     */
    public String getDicomXslURL() {
        return support.getDicomXslURL();
    }

    public void setDicomXslURL(String xslURL) {
        support.setDicomXslURL(xslURL);
    }

    public void clearTemplateCache() {
        support.clearTemplateCache();
    }

    public String getContentTypeDicomXML() {
        return support.getContentTypeDicomXML();
    }

    public void setContentTypeDicomXML(String contentTypeDicomXML) {
        support.setContentTypeDicomXML(contentTypeDicomXML);
    }

    public String getImageSopCuids() throws Exception{
        return map2string(support.getImageSopCuids());
    }

    /**
     * Returns a String with all defined SOP Class UIDs that are used to find
     * text (SR) documents.
     * <p>
     * The uids are separated with line separator.
     *
     * @return SOP Class UIDs used to find text (SR) documents.
     */
    public String getTextSopCuids() {
        Map uids = support.getTextSopCuids();
        return map2string(uids);
    }

    /**
     * Set a list of SOP Class UIDs that are used to find text (SR) documents.
     * <p>
     * The UIDs are separated with line separator.
     *
     * @param sopCuids
     *            String with SOP class UIDs separated with ';'
     */
    public void setTextSopCuids(String sopCuids) {
        support.setTextSopCuids(sopCuids);
    }

    /**
     * Returns a String with all defined SOP Class UIDs that are used to support
     * Video (mpeg2) DICOM objects.
     * <p>
     * The uids are separated with line separator.
     *
     * @return SOP Class UIDs used to support Video DICOM objects.
     */
    public String getVideoSopCuids() {
        Map uids = support.getVideoSopCuids();
        return uids.isEmpty() ? WADOSupport.NONE : map2string(uids);
    }

    /**
     * Set a list of SOP Class UIDs that are used to support
     * Video (mpeg2) DICOM objects.
     * <p>
     * The UIDs are separated with line separator.
     *
     * @param sopCuids
     *            String with SOP class UIDs separated with ';'
     */
    public void setVideoSopCuids(String sopCuids) {
        support.setVideoSopCuids(sopCuids);
    }

    public String getEncapsulatedSopCuids() {
        Map uids = support.getEncapsulatedSopCuids();
        return uids.isEmpty() ? WADOSupport.NONE : map2string(uids);
    }

    public void setEncapsulatedSopCuids(String sopCuids) {
        support.setEncapsulatedSopCuids(sopCuids);
    }

    /**
     * Renders the keys of the given map one per line.
     *
     * @param map map whose keys are SOP Class UIDs; may be null or empty
     * @return the keys separated by the platform line separator, or ""
     */
    private String map2string(Map map) {
        if (map == null || map.isEmpty())
            return "";
        // StringBuilder: no synchronization needed (was StringBuffer).
        // initial size: nrOfUIDs x 32
        StringBuilder sb = new StringBuilder(map.size() << 5);
        // look the separator up once instead of on every iteration
        String lineSeparator = System.getProperty("line.separator", "\n");
        Iterator iter = map.keySet().iterator();
        while (iter.hasNext()) {
            sb.append(iter.next()).append(lineSeparator);
        }
        return sb.toString();
    }

    /**
     * Enables or disables cache purging on received series by (de)registering
     * the series-stored notification listener on the StoreScp service.
     */
    public void setEnableClearCacheForReceivedSeries(boolean b) throws InstanceNotFoundException, ListenerNotFoundException {
        if (b != clearCacheForReceivedSeries) {
            clearCacheForReceivedSeries = b;
            if (b) {
                server.addNotificationListener(getStoreScpServiceName(),
                        seriesStoredListener, SeriesStored.NOTIF_FILTER, null);
            } else {
                server.removeNotificationListener(getStoreScpServiceName(),
                        seriesStoredListener, SeriesStored.NOTIF_FILTER, null);
            }
        }
    }

    public boolean isEnableClearCacheForReceivedSeries() {
        return clearCacheForReceivedSeries;
    }

    /**
     * Getter for the name of the StoreScp Service Name.
     * <p>
     * This bean is used to get list of Image SOP Classs UID.
     *
     * @return Name of the MBean
     */
    public ObjectName getStoreScpServiceName() {
        return support.getStoreScpServiceName();
    }

    public void setStoreScpServiceName(ObjectName name) {
        support.setStoreScpServiceName(name);
    }

    /**
     * Set the name of the AuditLogger MBean.
     * <p>
     * This bean is used to create Audit Logs.
     *
     * @param name
     *            The Audit Logger Name to set.
     */
    public void setAuditLoggerName(ObjectName name) {
        support.setAuditLoggerName(name);
    }

    /**
     * Get the name of the AuditLogger MBean.
     * <p>
     * This bean is used to create Audit Logs.
     *
     * @return Returns the name of the Audit Logger MBean.
     */
    public ObjectName getAuditLoggerName() {
        return support.getAuditLoggerName();
    }

    public ObjectName getQueryRetrieveScpName() {
        return support.getQueryRetrieveScpName();
    }

    public void setQueryRetrieveScpName(ObjectName name) {
        support.setQueryRetrieveScpName(name);
    }

    /**
     * Lists the hosts for which audit logging is disabled, one per line.
     * "ALL" means logging is disabled everywhere (null set); NONE means no host
     * is excluded (empty set).
     */
    public String getDisabledAuditLogHosts() {
        Set s = support.getDisabledAuditLogHosts();
        if (s == null)
            return "ALL";
        if (s.isEmpty())
            return NONE;
        // StringBuilder: no synchronization needed (was StringBuffer)
        StringBuilder sb = new StringBuilder(s.size() << 4);
        String lineSeparator = System.getProperty("line.separator", "\n");
        for (Iterator it = s.iterator(); it.hasNext();) {
            sb.append(it.next()).append(lineSeparator);
        }
        return sb.toString();
    }

    /**
     * Sets the disabled-audit-log host list from its string form; accepts the
     * same "ALL"/NONE markers produced by {@link #getDisabledAuditLogHosts()}.
     */
    public void setDisabledAuditLogHosts(String disabledAuditLogHosts) {
        if ("ALL".equals(disabledAuditLogHosts)) {
            support.setDisabledAuditLogHosts(null);
        } else {
            Set disabledHosts = new HashSet();
            if (!NONE.equals(disabledAuditLogHosts)) {
                StringTokenizer st = new StringTokenizer(disabledAuditLogHosts,
                        "\r\n;");
                while (st.hasMoreTokens()) {
                    disabledHosts.add(st.nextElement());
                }
            }
            support.setDisabledAuditLogHosts(disabledHosts);
        }
    }

    public boolean isDisableDNS() {
        return support.isDisableDNS();
    }

    /**
     * @param disableDNS
     *            the disableDNS to set
     */
    public void setDisableDNS(boolean disableDNS) {
        support.setDisableDNS(disableDNS);
    }

    public boolean isDisableCache() {
        return support.isDisableCache();
    }

    public void setDisableCache(boolean disableCache) {
        support.setDisableCache(disableCache);
    }

    /**
     * Get the requested DICOM object as File packed in a WADOResponseObject.
     * <p>
     *
     * @param reqVO
     *            The request parameters packed in an value object.
     *
     * @return The value object containing the retrieved object or an error.
     * @throws Exception
     */
    public WADOResponseObject getWADOObject(WADORequestObject reqVO) throws Exception {
        long t1 = System.currentTimeMillis();
        WADOResponseObject resp = support.getWADOObject(reqVO);
        if (support.isAuditLogEnabled(reqVO)) {
            if (support.isAuditLogIHEYr4() && resp.getPatInfo() != null) {
                support.logInstancesSent(reqVO, resp);
            } else {
                log.debug("Suppress (IHEYr4) audit log! No patient info available!");
            }
            logExport(reqVO, resp);
        } else {
            log.debug("Suppress audit log! Disabled for host:"
                    + reqVO.getRemoteHost());
        }
        long t2 = System.currentTimeMillis();
        log.debug("getWADOObject(): " + (t2 - t1) + "ms");
        return resp;
    }

    /**
     * Emits an ATNA InstancesTransferred audit message for the WADO export;
     * no-op when the legacy IHEYr4 log format is active. Failures are logged,
     * never propagated, so auditing cannot break object delivery.
     */
    private void logExport(WADORequestObject reqObj, WADOResponseObject resp) {
        if (support.isAuditLogIHEYr4())
            return;
        try {
            HttpUserInfo userInfo = new HttpUserInfo(reqObj.getRequest(),
                    AuditMessage.isEnableDNSLookups());
            String user = userInfo.getUserId();
            String destHost = userInfo.getHostName();
            InstancesTransferredMessage msg = new InstancesTransferredMessage(
                    InstancesTransferredMessage.EXECUTE);
            msg.setOutcomeIndicator(resp.getReturnCode() == HttpServletResponse.SC_OK ? AuditEvent.OutcomeIndicator.SUCCESS
                    : AuditEvent.OutcomeIndicator.MINOR_FAILURE);
            msg.addSourceProcess(AuditMessage.getProcessID(), AuditMessage
                    .getLocalAETitles(), AuditMessage.getProcessName(),
                    AuditMessage.getLocalHostName(), false);
            ParticipantObject obj = new ParticipantObject(reqObj.getRequest()
                    .getRequestURL().toString(), IDTypeCode.URI);
            obj.setParticipantObjectTypeCode(TypeCode.SYSTEM);
            obj.setParticipantObjectTypeCodeRole(TypeCodeRole.DATA_REPOSITORY);
            msg.addParticipantObject(obj);
            msg.addDestinationProcess(destHost, null, null, destHost,
                    user == null);
            if (user != null) {
                ActiveParticipant ap = ActiveParticipant.createActivePerson(
                        user, null, user, null, true);
                msg.addActiveParticipant(ap);
            }
            Dataset ds = resp.getPatInfo();
            if (ds != null) {
                msg.addPatient(ds.getString(Tags.PatientID), ds
                        .getString(Tags.PatientName));
                ParticipantObjectDescription desc = new ParticipantObjectDescription();
                SOPClass sopClass = new SOPClass(ds.getString(Tags.SOPClassUID));
                sopClass.setNumberOfInstances(1);
                desc.addSOPClass(sopClass);
                msg.addStudy(ds.getString(Tags.StudyInstanceUID), desc);
            } else {
                // no patient info in the response: log with placeholders
                msg.addPatient("unknown_patid", "unknown_pn");
                msg.addStudy(reqObj.getStudyUID(), null);
            }
            msg.validate();
            Logger.getLogger("auditlog").info(msg);
        } catch (Exception e) {
            log.warn("Audit Log failed:", e);
        }
    }

    /** Removes all cached entries of the study the stored series belongs to. */
    private void onSeriesStored(SeriesStored seriesStored) {
        Dataset ian = seriesStored.getIAN();
        String studyIUID = ian.getString(Tags.StudyInstanceUID);
        log.info("SeriesStored! remove cached entries for seriesStored:"+seriesStored);
        cache.purgeStudy(studyIUID);
    }

    public boolean isRenderOverlays() {
        return support.isRenderOverlays();
    }

    public void setRenderOverlays(boolean b) {
        support.setRenderOverlays(b);
    }
}
| |
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/*
* Copyright (C) 2002-2013 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.doubles;
import it.unimi.dsi.fastutil.objects.ObjectSet;
import it.unimi.dsi.fastutil.objects.ObjectSets;
import it.unimi.dsi.fastutil.objects.ReferenceCollection;
import it.unimi.dsi.fastutil.objects.ReferenceCollections;
import it.unimi.dsi.fastutil.objects.ReferenceSets;
import java.util.Map;
/** A class providing static methods and objects that do useful things with type-specific maps.
*
* @see it.unimi.dsi.fastutil.Maps
* @see java.util.Collections
*/
public class Double2ReferenceMaps {
 // NOTE(review): this appears to be fastutil template-generated code (see the
 // "Generic definitions" markers above) — changes normally belong in the .drv template.
 // Static-utility holder: no instances.
 private Double2ReferenceMaps() {}
 /** An immutable class representing an empty type-specific map.
  *
  * <P>This class may be useful to implement your own in case you subclass
  * a type-specific map.
  */
 public static class EmptyMap <V> extends Double2ReferenceFunctions.EmptyFunction <V> implements Double2ReferenceMap <V>, java.io.Serializable, Cloneable {
  private static final long serialVersionUID = -7046029254386353129L;
  protected EmptyMap() {}
  public boolean containsValue( final Object v ) { return false; }
  public void putAll( final Map<? extends Double, ? extends V> m ) { throw new UnsupportedOperationException(); }
  @SuppressWarnings("unchecked")
  public ObjectSet<Double2ReferenceMap.Entry <V> > double2ReferenceEntrySet() { return ObjectSets.EMPTY_SET; }
  @SuppressWarnings("unchecked")
  public DoubleSet keySet() { return DoubleSets.EMPTY_SET; }
  @SuppressWarnings("unchecked")
  public ReferenceCollection <V> values() { return ReferenceSets.EMPTY_SET; }
  // Preserve the singleton property across deserialization and cloning.
  private Object readResolve() { return EMPTY_MAP; }
  public Object clone() { return EMPTY_MAP; }
  public boolean isEmpty() { return true; }
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public ObjectSet<Map.Entry<Double, V>> entrySet() { return (ObjectSet)double2ReferenceEntrySet(); }
  public int hashCode() { return 0; }
  // An empty map equals any empty java.util.Map.
  public boolean equals( final Object o ) {
   if ( ! ( o instanceof Map ) ) return false;
   return ((Map<?,?>)o).isEmpty();
  }
  public String toString() { return "{}"; }
 }
 /** An empty type-specific map (immutable). It is serializable and cloneable. */
 @SuppressWarnings("rawtypes")
 public static final EmptyMap EMPTY_MAP = new EmptyMap();
 /** An immutable class representing a type-specific singleton map.
  *
  * <P>This class may be useful to implement your own in case you subclass
  * a type-specific map.
  */
 public static class Singleton <V> extends Double2ReferenceFunctions.Singleton <V> implements Double2ReferenceMap <V>, java.io.Serializable, Cloneable {
  private static final long serialVersionUID = -7046029254386353129L;
  // View caches, created lazily on first access (volatile for visibility).
  protected transient volatile ObjectSet<Double2ReferenceMap.Entry <V> > entries;
  protected transient volatile DoubleSet keys;
  protected transient volatile ReferenceCollection <V> values;
  protected Singleton( final double key, final V value ) {
   super( key, value );
  }
  // Reference-map semantics: values are compared by identity (==), not equals().
  public boolean containsValue( final Object v ) { return ( (value) == (v) ); }
  public void putAll( final Map<? extends Double, ? extends V> m ) { throw new UnsupportedOperationException(); }
  public ObjectSet<Double2ReferenceMap.Entry <V> > double2ReferenceEntrySet() { if ( entries == null ) entries = ObjectSets.singleton( (Double2ReferenceMap.Entry <V>)new SingletonEntry() ); return entries; }
  public DoubleSet keySet() { if ( keys == null ) keys = DoubleSets.singleton( key ); return keys; }
  public ReferenceCollection <V> values() { if ( values == null ) values = ReferenceSets.singleton( value ); return values; }
  protected class SingletonEntry implements Double2ReferenceMap.Entry <V>, Map.Entry<Double,V> {
   public Double getKey() { return (Double.valueOf(Singleton.this.key)); }
   public V getValue() { return (Singleton.this.value); }
   public double getDoubleKey() { return Singleton.this.key; }
   public V setValue( final V value ) { throw new UnsupportedOperationException(); }
   public boolean equals( final Object o ) {
    if (!(o instanceof Map.Entry)) return false;
    Map.Entry<?,?> e = (Map.Entry<?,?>)o;
    // Primitive == on the double key (so NaN keys never compare equal) and identity on the value.
    return ( (Singleton.this.key) == (((((Double)(e.getKey())).doubleValue()))) ) && ( (Singleton.this.value) == ((e.getValue())) );
   }
   public int hashCode() { return it.unimi.dsi.fastutil.HashCommon.double2int(Singleton.this.key) ^ ( (Singleton.this.value) == null ? 0 : System.identityHashCode(Singleton.this.value) ); }
   public String toString() { return Singleton.this.key + "->" + Singleton.this.value; }
  }
  public boolean isEmpty() { return false; }
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public ObjectSet<Map.Entry<Double, V>> entrySet() { return (ObjectSet)double2ReferenceEntrySet(); }
  public int hashCode() { return it.unimi.dsi.fastutil.HashCommon.double2int(key) ^ ( (value) == null ? 0 : System.identityHashCode(value) ); }
  public boolean equals( final Object o ) {
   if ( o == this ) return true;
   if ( ! ( o instanceof Map ) ) return false;
   Map<?,?> m = (Map<?,?>)o;
   if ( m.size() != 1 ) return false;
   return entrySet().iterator().next().equals( m.entrySet().iterator().next() );
  }
  public String toString() { return "{" + key + "=>" + value + "}"; }
 }
 /** Returns a type-specific immutable map containing only the specified pair. The returned map is serializable and cloneable.
  *
  * <P>Note that albeit the returned map is immutable, its default return value may be changed.
  *
  * @param key the only key of the returned map.
  * @param value the only value of the returned map.
  * @return a type-specific immutable map containing just the pair <code>&lt;key,value&gt;</code>.
  */
 public static <V> Double2ReferenceMap <V> singleton( final double key, V value ) {
  return new Singleton <V>( key, value );
 }
 /** Returns a type-specific immutable map containing only the specified pair. The returned map is serializable and cloneable.
  *
  * <P>Note that albeit the returned map is immutable, its default return value may be changed.
  *
  * @param key the only key of the returned map.
  * @param value the only value of the returned map.
  * @return a type-specific immutable map containing just the pair <code>&lt;key,value&gt;</code>.
  */
 public static <V> Double2ReferenceMap <V> singleton( final Double key, final V value ) {
  return new Singleton <V>( ((key).doubleValue()), (value) );
 }
 /** A synchronized wrapper class for maps. */
 public static class SynchronizedMap <V> extends Double2ReferenceFunctions.SynchronizedFunction <V> implements Double2ReferenceMap <V>, java.io.Serializable {
  private static final long serialVersionUID = -7046029254386353129L;
  protected final Double2ReferenceMap <V> map;
  // View caches, created lazily on first access (volatile for visibility).
  protected transient volatile ObjectSet<Double2ReferenceMap.Entry <V> > entries;
  protected transient volatile DoubleSet keys;
  protected transient volatile ReferenceCollection <V> values;
  protected SynchronizedMap( final Double2ReferenceMap <V> m, final Object sync ) {
   super( m, sync );
   this.map = m;
  }
  protected SynchronizedMap( final Double2ReferenceMap <V> m ) {
   super( m );
   this.map = m;
  }
  public int size() { synchronized( sync ) { return map.size(); } }
  public boolean containsKey( final double k ) { synchronized( sync ) { return map.containsKey( k ); } }
  public boolean containsValue( final Object v ) { synchronized( sync ) { return map.containsValue( v ); } }
  public V defaultReturnValue() { synchronized( sync ) { return map.defaultReturnValue(); } }
  public void defaultReturnValue( final V defRetValue ) { synchronized( sync ) { map.defaultReturnValue( defRetValue ); } }
  public V put( final double k, final V v ) { synchronized( sync ) { return map.put( k, v ); } }
  //public void putAll( final MAP KEY_VALUE_EXTENDS_GENERIC c ) { synchronized( sync ) { map.putAll( c ); } }
  public void putAll( final Map<? extends Double, ? extends V> m ) { synchronized( sync ) { map.putAll( m ); } }
  public ObjectSet<Double2ReferenceMap.Entry <V> > double2ReferenceEntrySet() { if ( entries == null ) entries = ObjectSets.synchronize( map.double2ReferenceEntrySet(), sync ); return entries; }
  public DoubleSet keySet() { if ( keys == null ) keys = DoubleSets.synchronize( map.keySet(), sync ); return keys; }
  // NOTE(review): the wrapper is returned without being assigned to the cache field,
  // so a fresh synchronized view is built on every call — quirk of the generated template.
  public ReferenceCollection <V> values() { if ( values == null ) return ReferenceCollections.synchronize( map.values(), sync ); return values; }
  public void clear() { synchronized( sync ) { map.clear(); } }
  public String toString() { synchronized( sync ) { return map.toString(); } }
  public V put( final Double k, final V v ) { synchronized( sync ) { return map.put( k, v ); } }
  public V remove( final double k ) { synchronized( sync ) { return map.remove( k ); } }
  public V get( final double k ) { synchronized( sync ) { return map.get( k ); } }
  public boolean containsKey( final Object ok ) { synchronized( sync ) { return map.containsKey( ok ); } }
  public boolean isEmpty() { synchronized( sync ) { return map.isEmpty(); } }
  public ObjectSet<Map.Entry<Double, V>> entrySet() { synchronized( sync ) { return map.entrySet(); } }
  public int hashCode() { synchronized( sync ) { return map.hashCode(); } }
  public boolean equals( final Object o ) { synchronized( sync ) { return map.equals( o ); } }
 }
 /** Returns a synchronized type-specific map backed by the given type-specific map.
  *
  * @param m the map to be wrapped in a synchronized map.
  * @return a synchronized view of the specified map.
  * @see java.util.Collections#synchronizedMap(Map)
  */
 public static <V> Double2ReferenceMap <V> synchronize( final Double2ReferenceMap <V> m ) { return new SynchronizedMap <V>( m ); }
 /** Returns a synchronized type-specific map backed by the given type-specific map, using an assigned object to synchronize.
  *
  * @param m the map to be wrapped in a synchronized map.
  * @param sync an object that will be used to synchronize the access to the map.
  * @return a synchronized view of the specified map.
  * @see java.util.Collections#synchronizedMap(Map)
  */
 public static <V> Double2ReferenceMap <V> synchronize( final Double2ReferenceMap <V> m, final Object sync ) { return new SynchronizedMap <V>( m, sync ); }
 /** An unmodifiable wrapper class for maps. */
 public static class UnmodifiableMap <V> extends Double2ReferenceFunctions.UnmodifiableFunction <V> implements Double2ReferenceMap <V>, java.io.Serializable {
  private static final long serialVersionUID = -7046029254386353129L;
  protected final Double2ReferenceMap <V> map;
  // View caches, created lazily on first access (volatile for visibility).
  protected transient volatile ObjectSet<Double2ReferenceMap.Entry <V> > entries;
  protected transient volatile DoubleSet keys;
  protected transient volatile ReferenceCollection <V> values;
  protected UnmodifiableMap( final Double2ReferenceMap <V> m ) {
   super( m );
   this.map = m;
  }
  public int size() { return map.size(); }
  public boolean containsKey( final double k ) { return map.containsKey( k ); }
  public boolean containsValue( final Object v ) { return map.containsValue( v ); }
  public V defaultReturnValue() { throw new UnsupportedOperationException(); }
  public void defaultReturnValue( final V defRetValue ) { throw new UnsupportedOperationException(); }
  public V put( final double k, final V v ) { throw new UnsupportedOperationException(); }
  //public void putAll( final MAP KEY_VALUE_EXTENDS_GENERIC c ) { throw new UnsupportedOperationException(); }
  public void putAll( final Map<? extends Double, ? extends V> m ) { throw new UnsupportedOperationException(); }
  public ObjectSet<Double2ReferenceMap.Entry <V> > double2ReferenceEntrySet() { if ( entries == null ) entries = ObjectSets.unmodifiable( map.double2ReferenceEntrySet() ); return entries; }
  public DoubleSet keySet() { if ( keys == null ) keys = DoubleSets.unmodifiable( map.keySet() ); return keys; }
  // NOTE(review): same non-caching quirk as SynchronizedMap.values() — the field is never assigned.
  public ReferenceCollection <V> values() { if ( values == null ) return ReferenceCollections.unmodifiable( map.values() ); return values; }
  public void clear() { throw new UnsupportedOperationException(); }
  public String toString() { return map.toString(); }
  public V remove( final double k ) { throw new UnsupportedOperationException(); }
  public V get( final double k ) { return map.get( k ); }
  public boolean containsKey( final Object ok ) { return map.containsKey( ok ); }
  public V remove( final Object k ) { throw new UnsupportedOperationException(); }
  public V get( final Object k ) { return map.get( k ); }
  public boolean isEmpty() { return map.isEmpty(); }
  public ObjectSet<Map.Entry<Double, V>> entrySet() { return ObjectSets.unmodifiable( map.entrySet() ); }
 }
 /** Returns an unmodifiable type-specific map backed by the given type-specific map.
  *
  * @param m the map to be wrapped in an unmodifiable map.
  * @return an unmodifiable view of the specified map.
  * @see java.util.Collections#unmodifiableMap(Map)
  */
 public static <V> Double2ReferenceMap <V> unmodifiable( final Double2ReferenceMap <V> m ) { return new UnmodifiableMap <V>( m ); }
}
| |
// Generated from /home/simon/PhD/src/jim/jim/src/uk/ac/open/crc/jim/parser/java17/Java.g4 by ANTLR 4.1
package uk.ac.open.crc.jim.parser.java17;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
/**
* This interface defines a complete generic visitor for a parse tree produced
* by {@link JavaParser}.
*
* @param <T> The return type of the visit operation. Use {@link Void} for
* operations with no return type.
*/
public interface JavaVisitor<T> extends ParseTreeVisitor<T> {
	// NOTE: generated by ANTLR 4.1 from Java.g4 (see header above) — do not edit by
	// hand; regenerate from the grammar instead.
	/**
	 * Visit a parse tree produced by {@link JavaParser#innerCreator}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitInnerCreator(@NotNull JavaParser.InnerCreatorContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#genericMethodDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitGenericMethodDeclaration(@NotNull JavaParser.GenericMethodDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#expressionList}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitExpressionList(@NotNull JavaParser.ExpressionListContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeDeclaration(@NotNull JavaParser.TypeDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#forUpdate}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitForUpdate(@NotNull JavaParser.ForUpdateContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotation}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotation(@NotNull JavaParser.AnnotationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#enumConstant}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitEnumConstant(@NotNull JavaParser.EnumConstantContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#importDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitImportDeclaration(@NotNull JavaParser.ImportDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotationMethodOrConstantRest}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotationMethodOrConstantRest(@NotNull JavaParser.AnnotationMethodOrConstantRestContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#enumConstantName}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitEnumConstantName(@NotNull JavaParser.EnumConstantNameContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#finallyBlock}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitFinallyBlock(@NotNull JavaParser.FinallyBlockContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#variableDeclarators}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitVariableDeclarators(@NotNull JavaParser.VariableDeclaratorsContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#elementValuePairs}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitElementValuePairs(@NotNull JavaParser.ElementValuePairsContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#interfaceMethodDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitInterfaceMethodDeclaration(@NotNull JavaParser.InterfaceMethodDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#interfaceBodyDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitInterfaceBodyDeclaration(@NotNull JavaParser.InterfaceBodyDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#enumConstants}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitEnumConstants(@NotNull JavaParser.EnumConstantsContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#catchClause}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitCatchClause(@NotNull JavaParser.CatchClauseContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#constantExpression}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitConstantExpression(@NotNull JavaParser.ConstantExpressionContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#enumDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitEnumDeclaration(@NotNull JavaParser.EnumDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#explicitGenericInvocationSuffix}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitExplicitGenericInvocationSuffix(@NotNull JavaParser.ExplicitGenericInvocationSuffixContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeParameter}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeParameter(@NotNull JavaParser.TypeParameterContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#enumBodyDeclarations}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitEnumBodyDeclarations(@NotNull JavaParser.EnumBodyDeclarationsContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeBound}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeBound(@NotNull JavaParser.TypeBoundContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#statementExpression}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitStatementExpression(@NotNull JavaParser.StatementExpressionContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#variableInitializer}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitVariableInitializer(@NotNull JavaParser.VariableInitializerContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#block}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitBlock(@NotNull JavaParser.BlockContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#genericInterfaceMethodDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitGenericInterfaceMethodDeclaration(@NotNull JavaParser.GenericInterfaceMethodDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#localVariableDeclarationStatement}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitLocalVariableDeclarationStatement(@NotNull JavaParser.LocalVariableDeclarationStatementContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#superSuffix}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitSuperSuffix(@NotNull JavaParser.SuperSuffixContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#fieldDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitFieldDeclaration(@NotNull JavaParser.FieldDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#formalParameterList}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitFormalParameterList(@NotNull JavaParser.FormalParameterListContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#explicitGenericInvocation}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitExplicitGenericInvocation(@NotNull JavaParser.ExplicitGenericInvocationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#parExpression}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitParExpression(@NotNull JavaParser.ParExpressionContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#switchLabel}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitSwitchLabel(@NotNull JavaParser.SwitchLabelContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeParameters}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeParameters(@NotNull JavaParser.TypeParametersContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#qualifiedName}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitQualifiedName(@NotNull JavaParser.QualifiedNameContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#classDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitClassDeclaration(@NotNull JavaParser.ClassDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotationConstantRest}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotationConstantRest(@NotNull JavaParser.AnnotationConstantRestContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeName}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeName(@NotNull JavaParser.TypeNameContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#arguments}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitArguments(@NotNull JavaParser.ArgumentsContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#constructorBody}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitConstructorBody(@NotNull JavaParser.ConstructorBodyContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#formalParameters}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitFormalParameters(@NotNull JavaParser.FormalParametersContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeArgument}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeArgument(@NotNull JavaParser.TypeArgumentContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#forInit}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitForInit(@NotNull JavaParser.ForInitContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#variableDeclarator}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitVariableDeclarator(@NotNull JavaParser.VariableDeclaratorContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotationTypeDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotationTypeDeclaration(@NotNull JavaParser.AnnotationTypeDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#expression}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitExpression(@NotNull JavaParser.ExpressionContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#resources}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitResources(@NotNull JavaParser.ResourcesContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#formalParameter}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitFormalParameter(@NotNull JavaParser.FormalParameterContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#type}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitType(@NotNull JavaParser.TypeContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#elementValueArrayInitializer}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitElementValueArrayInitializer(@NotNull JavaParser.ElementValueArrayInitializerContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotationName}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotationName(@NotNull JavaParser.AnnotationNameContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#enhancedForControl}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitEnhancedForControl(@NotNull JavaParser.EnhancedForControlContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotationMethodRest}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotationMethodRest(@NotNull JavaParser.AnnotationMethodRestContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#primary}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitPrimary(@NotNull JavaParser.PrimaryContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#classBody}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitClassBody(@NotNull JavaParser.ClassBodyContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#classOrInterfaceModifier}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitClassOrInterfaceModifier(@NotNull JavaParser.ClassOrInterfaceModifierContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#defaultValue}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitDefaultValue(@NotNull JavaParser.DefaultValueContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#variableModifier}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitVariableModifier(@NotNull JavaParser.VariableModifierContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#constDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitConstDeclaration(@NotNull JavaParser.ConstDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#createdName}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitCreatedName(@NotNull JavaParser.CreatedNameContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#interfaceDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitInterfaceDeclaration(@NotNull JavaParser.InterfaceDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#packageDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitPackageDeclaration(@NotNull JavaParser.PackageDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#constantDeclarator}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitConstantDeclarator(@NotNull JavaParser.ConstantDeclaratorContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#catchType}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitCatchType(@NotNull JavaParser.CatchTypeContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeArguments}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeArguments(@NotNull JavaParser.TypeArgumentsContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#classCreatorRest}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitClassCreatorRest(@NotNull JavaParser.ClassCreatorRestContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#modifier}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitModifier(@NotNull JavaParser.ModifierContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#statement}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitStatement(@NotNull JavaParser.StatementContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#interfaceBody}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitInterfaceBody(@NotNull JavaParser.InterfaceBodyContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#packageOrTypeName}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitPackageOrTypeName(@NotNull JavaParser.PackageOrTypeNameContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#classBodyDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitClassBodyDeclaration(@NotNull JavaParser.ClassBodyDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#lastFormalParameter}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitLastFormalParameter(@NotNull JavaParser.LastFormalParameterContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#forControl}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitForControl(@NotNull JavaParser.ForControlContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeList}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeList(@NotNull JavaParser.TypeListContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#localVariableDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitLocalVariableDeclaration(@NotNull JavaParser.LocalVariableDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#variableDeclaratorId}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitVariableDeclaratorId(@NotNull JavaParser.VariableDeclaratorIdContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#compilationUnit}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitCompilationUnit(@NotNull JavaParser.CompilationUnitContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#elementValue}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitElementValue(@NotNull JavaParser.ElementValueContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#classOrInterfaceType}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitClassOrInterfaceType(@NotNull JavaParser.ClassOrInterfaceTypeContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#typeArgumentsOrDiamond}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitTypeArgumentsOrDiamond(@NotNull JavaParser.TypeArgumentsOrDiamondContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotationTypeElementDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotationTypeElementDeclaration(@NotNull JavaParser.AnnotationTypeElementDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#blockStatement}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitBlockStatement(@NotNull JavaParser.BlockStatementContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotationTypeBody}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotationTypeBody(@NotNull JavaParser.AnnotationTypeBodyContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#qualifiedNameList}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitQualifiedNameList(@NotNull JavaParser.QualifiedNameListContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#creator}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitCreator(@NotNull JavaParser.CreatorContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#memberDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitMemberDeclaration(@NotNull JavaParser.MemberDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#methodDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitMethodDeclaration(@NotNull JavaParser.MethodDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#annotationTypeElementRest}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitAnnotationTypeElementRest(@NotNull JavaParser.AnnotationTypeElementRestContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#resourceSpecification}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitResourceSpecification(@NotNull JavaParser.ResourceSpecificationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#constructorDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitConstructorDeclaration(@NotNull JavaParser.ConstructorDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#resource}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitResource(@NotNull JavaParser.ResourceContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#elementValuePair}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitElementValuePair(@NotNull JavaParser.ElementValuePairContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#methodBody}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitMethodBody(@NotNull JavaParser.MethodBodyContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#arrayInitializer}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitArrayInitializer(@NotNull JavaParser.ArrayInitializerContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#nonWildcardTypeArgumentsOrDiamond}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitNonWildcardTypeArgumentsOrDiamond(@NotNull JavaParser.NonWildcardTypeArgumentsOrDiamondContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#primitiveType}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitPrimitiveType(@NotNull JavaParser.PrimitiveTypeContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#nonWildcardTypeArguments}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitNonWildcardTypeArguments(@NotNull JavaParser.NonWildcardTypeArgumentsContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#arrayCreatorRest}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitArrayCreatorRest(@NotNull JavaParser.ArrayCreatorRestContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#interfaceMemberDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitInterfaceMemberDeclaration(@NotNull JavaParser.InterfaceMemberDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#genericConstructorDeclaration}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitGenericConstructorDeclaration(@NotNull JavaParser.GenericConstructorDeclarationContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#literal}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitLiteral(@NotNull JavaParser.LiteralContext ctx);
	/**
	 * Visit a parse tree produced by {@link JavaParser#switchBlockStatementGroup}.
	 * @param ctx the parse tree
	 * @return the visitor result
	 */
	T visitSwitchBlockStatementGroup(@NotNull JavaParser.SwitchBlockStatementGroupContext ctx);
}
| |
package org.basex.util.ft;
import java.util.*;
import org.basex.util.*;
/**
* German stemming algorithm, derived from the Apache Lucene project and the report
* "Development of a Stemmer for the Greek Language" by Georgios Ntais.
*
* @author BaseX Team 2005-22, BSD License
* @author Christian Gruen
*/
final class GreekStemmer extends InternalStemmer {
/**
 * Constructor.
 * @param fti full-text iterator supplying the tokens to be stemmed
 */
GreekStemmer(final FTIterator fti) {
  super(fti);
}
@Override
// Factory method: the language argument is ignored because this stemmer
// only handles Greek (see languages()).
GreekStemmer get(final Language lang, final FTIterator fti) {
  return new GreekStemmer(fti);
}
@Override
// Reports the single supported language code (ISO 639-1 "el", Greek).
Collection<Language> languages() {
  return collection("el");
}
@Override
// Stems a Greek token: decodes the UTF-8 bytes into code points, runs the
// ordered rule chain, and re-encodes the surviving prefix.
// NOTE(review): code points are narrowed to char; assumes Greek input stays
// within the BMP — TODO confirm for exotic inputs.
protected byte[] stem(final byte[] word) {
  int ln = 0;
  final int wl = word.length;
  final char[] s = new char[wl];
  // decode UTF-8: cl() yields the byte length of each code point, cp() its value
  for(int i = 0; i < wl; i += Token.cl(word, i)) {
    s[ln++] = (char) Token.cp(word, i);
  }
  // words shorter than 4 letters are returned unstemmed
  if(ln < 4) return word;
  final int olen = ln;
  // "short rules": if it hits one of these, it skips the "long list"
  int l = rule0(s, ln);
  l = rule1(s, l);
  l = rule2(s, l);
  l = rule3(s, l);
  l = rule4(s, l);
  l = rule5(s, l);
  l = rule6(s, l);
  l = rule7(s, l);
  l = rule8(s, l);
  l = rule9(s, l);
  l = rule10(s, l);
  l = rule11(s, l);
  l = rule12(s, l);
  l = rule13(s, l);
  l = rule14(s, l);
  l = rule15(s, l);
  l = rule16(s, l);
  l = rule17(s, l);
  l = rule18(s, l);
  l = rule19(s, l);
  l = rule20(s, l);
  // rule 21 only fires if no earlier rule removed anything
  if(l == olen) l = rule21(s, l);
  // "long list"
  l = rule22(s, l);
  // rebuild the stem; l << 1 presizes for 2-byte UTF-8 Greek letters
  final TokenBuilder tb = new TokenBuilder(l << 1);
  for(int i = 0; i < l; i++) tb.add(s[i]);
  return tb.finish();
}
/**
 * Applies rule 0: strips irregular noun endings, longest suffixes first.
 * Each guard checks that the word is strictly longer than the suffix so a
 * non-empty stem remains.
 * @param s characters
 * @param l length
 * @return new length
 */
private static int rule0(final char[] s, final int l) {
  if(l > 9 && (
    e(s, l, "\u03ba\u03b1\u03b8\u03b5\u03c3\u03c4\u03c9\u03c4\u03bf\u03c3") ||
    e(s, l, "\u03ba\u03b1\u03b8\u03b5\u03c3\u03c4\u03c9\u03c4\u03c9\u03bd")))
    return l - 4;
  if(l > 8 && (
    e(s, l, "\u03b3\u03b5\u03b3\u03bf\u03bd\u03bf\u03c4\u03bf\u03c3") ||
    e(s, l, "\u03b3\u03b5\u03b3\u03bf\u03bd\u03bf\u03c4\u03c9\u03bd")))
    return l - 4;
  if(l > 8 &&
    e(s, l, "\u03ba\u03b1\u03b8\u03b5\u03c3\u03c4\u03c9\u03c4\u03b1"))
    return l - 3;
  if(l > 7 && (
    e(s, l, "\u03c4\u03b1\u03c4\u03bf\u03b3\u03b9\u03bf\u03c5") ||
    e(s, l, "\u03c4\u03b1\u03c4\u03bf\u03b3\u03b9\u03c9\u03bd")))
    return l - 4;
  if(l > 7 &&
    e(s, l, "\u03b3\u03b5\u03b3\u03bf\u03bd\u03bf\u03c4\u03b1"))
    return l - 3;
  if(l > 7 &&
    e(s, l, "\u03ba\u03b1\u03b8\u03b5\u03c3\u03c4\u03c9\u03c3"))
    return l - 2;
  // fix: parenthesize the alternatives. Without the parentheses, && bound
  // tighter than ||, so the "l > 6" guard applied only to the first suffix.
  // All suffixes here are 7 characters long, so the guard was implied by
  // e() itself, but the grouping now matches the sibling conditions and
  // removes the latent precedence bug.
  if(l > 6 && (
    e(s, l, "\u03c3\u03ba\u03b1\u03b3\u03b9\u03bf\u03c5") ||
    e(s, l, "\u03c3\u03ba\u03b1\u03b3\u03b9\u03c9\u03bd") ||
    e(s, l, "\u03bf\u03bb\u03bf\u03b3\u03b9\u03bf\u03c5") ||
    e(s, l, "\u03bf\u03bb\u03bf\u03b3\u03b9\u03c9\u03bd") ||
    e(s, l, "\u03ba\u03c1\u03b5\u03b1\u03c4\u03bf\u03c3") ||
    e(s, l, "\u03ba\u03c1\u03b5\u03b1\u03c4\u03c9\u03bd") ||
    e(s, l, "\u03c0\u03b5\u03c1\u03b1\u03c4\u03bf\u03c3") ||
    e(s, l, "\u03c0\u03b5\u03c1\u03b1\u03c4\u03c9\u03bd") ||
    e(s, l, "\u03c4\u03b5\u03c1\u03b1\u03c4\u03bf\u03c3") ||
    e(s, l, "\u03c4\u03b5\u03c1\u03b1\u03c4\u03c9\u03bd")))
    return l - 4;
  if(l > 6 &&
    e(s, l, "\u03c4\u03b1\u03c4\u03bf\u03b3\u03b9\u03b1"))
    return l - 3;
  if(l > 6 &&
    e(s, l, "\u03b3\u03b5\u03b3\u03bf\u03bd\u03bf\u03c3"))
    return l - 2;
  if(l > 5 && (
    e(s, l, "\u03c6\u03b1\u03b3\u03b9\u03bf\u03c5") ||
    e(s, l, "\u03c6\u03b1\u03b3\u03b9\u03c9\u03bd") ||
    e(s, l, "\u03c3\u03bf\u03b3\u03b9\u03bf\u03c5") ||
    e(s, l, "\u03c3\u03bf\u03b3\u03b9\u03c9\u03bd")))
    return l - 4;
  if(l > 5 && (
    e(s, l, "\u03c3\u03ba\u03b1\u03b3\u03b9\u03b1") ||
    e(s, l, "\u03bf\u03bb\u03bf\u03b3\u03b9\u03b1") ||
    e(s, l, "\u03ba\u03c1\u03b5\u03b1\u03c4\u03b1") ||
    e(s, l, "\u03c0\u03b5\u03c1\u03b1\u03c4\u03b1") ||
    e(s, l, "\u03c4\u03b5\u03c1\u03b1\u03c4\u03b1")))
    return l - 3;
  if(l > 4 && (
    e(s, l, "\u03c6\u03b1\u03b3\u03b9\u03b1") ||
    e(s, l, "\u03c3\u03bf\u03b3\u03b9\u03b1") ||
    e(s, l, "\u03c6\u03c9\u03c4\u03bf\u03c3") ||
    e(s, l, "\u03c6\u03c9\u03c4\u03c9\u03bd")))
    return l - 3;
  if(l > 4 && (
    e(s, l, "\u03ba\u03c1\u03b5\u03b1\u03c3") ||
    e(s, l, "\u03c0\u03b5\u03c1\u03b1\u03c3") ||
    e(s, l, "\u03c4\u03b5\u03c1\u03b1\u03c3")))
    return l - 2;
  if(l > 3 &&
    e(s, l, "\u03c6\u03c9\u03c4\u03b1"))
    return l - 2;
  if(l > 2 &&
    e(s, l, "\u03c6\u03c9\u03c3"))
    return l - 1;
  return l;
}
/**
 * Applies rule 1: strips -αδεσ/-αδων, restoring -αδ unless the remaining
 * stem is one of the known exceptions.
 * @param s characters
 * @param l length
 * @return new length
 */
private static int rule1(final char[] s, final int l) {
  // bail out early unless one of the two suffixes is present
  if(l <= 4 || !(e(s, l, "\u03b1\u03b4\u03b5\u03c3") ||
                 e(s, l, "\u03b1\u03b4\u03c9\u03bd"))) return l;
  final int stem = l - 4;
  final boolean exception =
    e(s, stem, "\u03bf\u03ba") ||
    e(s, stem, "\u03bc\u03b1\u03bc") ||
    e(s, stem, "\u03bc\u03b1\u03bd") ||
    e(s, stem, "\u03bc\u03c0\u03b1\u03bc\u03c0") ||
    e(s, stem, "\u03c0\u03b1\u03c4\u03b5\u03c1") ||
    e(s, stem, "\u03b3\u03b9\u03b1\u03b3\u03b9") ||
    e(s, stem, "\u03bd\u03c4\u03b1\u03bd\u03c4") ||
    e(s, stem, "\u03ba\u03c5\u03c1") ||
    e(s, stem, "\u03b8\u03b5\u03b9") ||
    e(s, stem, "\u03c0\u03b5\u03b8\u03b5\u03c1");
  // exceptions keep the bare stem; otherwise add back -\u03b1\u03b4
  return exception ? stem : stem + 2;
}
/**
* Applies rule 2.
* @param s characters
* @param l length
* @return new length
*/
private static int rule2(final char[] s, final int l) {
int len = l;
if(len > 4 && (
e(s, len, "\u03b5\u03b4\u03b5\u03c3") ||
e(s, len, "\u03b5\u03b4\u03c9\u03bd"))) {
len -= 4;
if(e(s, len, "\u03bf\u03c0") ||
e(s, len, "\u03b9\u03c0") ||
e(s, len, "\u03b5\u03bc\u03c0") ||
e(s, len, "\u03c5\u03c0") ||
e(s, len, "\u03b3\u03b7\u03c0") ||
e(s, len, "\u03b4\u03b1\u03c0") ||
e(s, len, "\u03ba\u03c1\u03b1\u03c3\u03c0") ||
e(s, len, "\u03bc\u03b9\u03bb"))
len += 2; // add back
}
return len;
}
/**
* Applies rule 3.
* @param s characters
* @param l length
* @return new length
*/
private static int rule3(final char[] s, final int l) {
int len = l;
if(len > 5 && (
e(s, len, "\u03bf\u03c5\u03b4\u03b5\u03c3") ||
e(s, len, "\u03bf\u03c5\u03b4\u03c9\u03bd"))) {
len -= 5;
if(e(s, len, "\u03b1\u03c1\u03ba") ||
e(s, len, "\u03ba\u03b1\u03bb\u03b9\u03b1\u03ba") ||
e(s, len, "\u03c0\u03b5\u03c4\u03b1\u03bb") ||
e(s, len, "\u03bb\u03b9\u03c7") ||
e(s, len, "\u03c0\u03bb\u03b5\u03be") ||
e(s, len, "\u03c3\u03ba") ||
e(s, len, "\u03c3") ||
e(s, len, "\u03c6\u03bb") ||
e(s, len, "\u03c6\u03c1") ||
e(s, len, "\u03b2\u03b5\u03bb") ||
e(s, len, "\u03bb\u03bf\u03c5\u03bb") ||
e(s, len, "\u03c7\u03bd") || e(s, len, "\u03c3\u03c0") ||
e(s, len, "\u03c4\u03c1\u03b1\u03b3") ||
e(s, len, "\u03c6\u03b5"))
len += 3; // add back
}
return len;
}
/** String arrays. */
private static final String[] EXC4 = {
"\u03b8", "\u03b4", "\u03b5\u03bb", "\u03b3\u03b1\u03bb", "\u03bd", "\u03c0",
"\u03b9\u03b4", "\u03c0\u03b1\u03c1"
};
/**
* Applies rule 4.
* @param s characters
* @param l length
* @return new length
*/
private static int rule4(final char[] s, final int l) {
int len = l;
if(len > 3 && (
e(s, len, "\u03b5\u03c9\u03c3") ||
e(s, len, "\u03b5\u03c9\u03bd"))) {
len -= 3;
if(c(EXC4, s, len)) len++; // add back -\u03b5
}
return len;
}
/**
* Applies rule 5.
* @param s characters
* @param l length
* @return new length
*/
private static int rule5(final char[] s, final int l) {
int len = l;
if(len > 2 && e(s, len, "\u03b9\u03b1")) {
len -= 2;
if(ev(s, len)) len++; // add back -\u03b9
} else if(len > 3 && (
e(s, len, "\u03b9\u03bf\u03c5") ||
e(s, len, "\u03b9\u03c9\u03bd"))) {
len -= 3;
if(ev(s, len)) len++; // add back -\u03b9
}
return len;
}
/** String arrays. */
private static final String[] EXC6 = {
"\u03b1\u03bb", "\u03b1\u03b4", "\u03b5\u03bd\u03b4", "\u03b1\u03bc\u03b1\u03bd",
"\u03b1\u03bc\u03bc\u03bf\u03c7\u03b1\u03bb", "\u03b7\u03b8",
"\u03b1\u03bd\u03b7\u03b8", "\u03b1\u03bd\u03c4\u03b9\u03b4", "\u03c6\u03c5\u03c3",
"\u03b2\u03c1\u03c9\u03bc", "\u03b3\u03b5\u03c1", "\u03b5\u03be\u03c9\u03b4",
"\u03ba\u03b1\u03bb\u03c0", "\u03ba\u03b1\u03bb\u03bb\u03b9\u03bd",
"\u03ba\u03b1\u03c4\u03b1\u03b4", "\u03bc\u03bf\u03c5\u03bb",
"\u03bc\u03c0\u03b1\u03bd", "\u03bc\u03c0\u03b1\u03b3\u03b9\u03b1\u03c4",
"\u03bc\u03c0\u03bf\u03bb", "\u03bc\u03c0\u03bf\u03c3", "\u03bd\u03b9\u03c4",
"\u03be\u03b9\u03ba", "\u03c3\u03c5\u03bd\u03bf\u03bc\u03b7\u03bb",
"\u03c0\u03b5\u03c4\u03c3", "\u03c0\u03b9\u03c4\u03c3",
"\u03c0\u03b9\u03ba\u03b1\u03bd\u03c4", "\u03c0\u03bb\u03b9\u03b1\u03c4\u03c3",
"\u03c0\u03bf\u03c3\u03c4\u03b5\u03bb\u03bd", "\u03c0\u03c1\u03c9\u03c4\u03bf\u03b4",
"\u03c3\u03b5\u03c1\u03c4", "\u03c3\u03c5\u03bd\u03b1\u03b4",
"\u03c4\u03c3\u03b1\u03bc", "\u03c5\u03c0\u03bf\u03b4",
"\u03c6\u03b9\u03bb\u03bf\u03bd", "\u03c6\u03c5\u03bb\u03bf\u03b4",
"\u03c7\u03b1\u03c3"
};
/**
* Applies rule 6.
* @param s characters
* @param l length
* @return new length
*/
private static int rule6(final char[] s, final int l) {
int len = l;
boolean rem = false;
if(len > 3 && (
e(s, len, "\u03b9\u03ba\u03b1") ||
e(s, len, "\u03b9\u03ba\u03bf"))) {
len -= 3;
rem = true;
} else if(len > 4 && (
e(s, len, "\u03b9\u03ba\u03bf\u03c5") ||
e(s, len, "\u03b9\u03ba\u03c9\u03bd"))) {
len -= 4;
rem = true;
}
if(rem && (ev(s, len) || c(EXC6, s, len))) len += 2; // add back -\u03b9\u03ba
return len;
}
/** String arrays. */
private static final String[] EXC7 = {
"\u03b1\u03bd\u03b1\u03c0", "\u03b1\u03c0\u03bf\u03b8", "\u03b1\u03c0\u03bf\u03ba",
"\u03b1\u03c0\u03bf\u03c3\u03c4", "\u03b2\u03bf\u03c5\u03b2", "\u03be\u03b5\u03b8",
"\u03bf\u03c5\u03bb", "\u03c0\u03b5\u03b8", "\u03c0\u03b9\u03ba\u03c1",
"\u03c0\u03bf\u03c4", "\u03c3\u03b9\u03c7", "\u03c7"
};
/**
* Applies rule 7.
* @param s characters
* @param l length
* @return new length
*/
private static int rule7(final char[] s, final int l) {
int len = l;
if(len == 5 && e(s, len, "\u03b1\u03b3\u03b1\u03bc\u03b5")) return 5 - 1;
if(len > 7 && e(s, len, "\u03b7\u03b8\u03b7\u03ba\u03b1\u03bc\u03b5"))
len -= 7;
else if(len > 6 && e(s, len, "\u03bf\u03c5\u03c3\u03b1\u03bc\u03b5"))
len -= 6;
else if(len > 5 && (
e(s, len, "\u03b1\u03b3\u03b1\u03bc\u03b5") ||
e(s, len, "\u03b7\u03c3\u03b1\u03bc\u03b5") ||
e(s, len, "\u03b7\u03ba\u03b1\u03bc\u03b5"))) len -= 5;
if(len > 3 && e(s, len, "\u03b1\u03bc\u03b5")) {
len -= 3;
if(c(EXC7, s, len)) len += 2; // add back -\u03b1\u03bc
}
return len;
}
/** String arrays. */
private static final String[] EXC8A = {
"\u03c4\u03c1", "\u03c4\u03c3"
};
/** String arrays. */
private static final String[] EXC8B = {
"\u03b2\u03b5\u03c4\u03b5\u03c1", "\u03b2\u03bf\u03c5\u03bb\u03ba",
"\u03b2\u03c1\u03b1\u03c7\u03bc", "\u03b3",
"\u03b4\u03c1\u03b1\u03b4\u03bf\u03c5\u03bc", "\u03b8",
"\u03ba\u03b1\u03bb\u03c0\u03bf\u03c5\u03b6", "\u03ba\u03b1\u03c3\u03c4\u03b5\u03bb",
"\u03ba\u03bf\u03c1\u03bc\u03bf\u03c1", "\u03bb\u03b1\u03bf\u03c0\u03bb",
"\u03bc\u03c9\u03b1\u03bc\u03b5\u03b8", "\u03bc",
"\u03bc\u03bf\u03c5\u03c3\u03bf\u03c5\u03bb\u03bc", "\u03bd", "\u03bf\u03c5\u03bb",
"\u03c0", "\u03c0\u03b5\u03bb\u03b5\u03ba", "\u03c0\u03bb",
"\u03c0\u03bf\u03bb\u03b9\u03c3", "\u03c0\u03bf\u03c1\u03c4\u03bf\u03bb",
"\u03c3\u03b1\u03c1\u03b1\u03ba\u03b1\u03c4\u03c3", "\u03c3\u03bf\u03c5\u03bb\u03c4",
"\u03c4\u03c3\u03b1\u03c1\u03bb\u03b1\u03c4", "\u03bf\u03c1\u03c6",
"\u03c4\u03c3\u03b9\u03b3\u03b3", "\u03c4\u03c3\u03bf\u03c0",
"\u03c6\u03c9\u03c4\u03bf\u03c3\u03c4\u03b5\u03c6", "\u03c7",
"\u03c8\u03c5\u03c7\u03bf\u03c0\u03bb", "\u03b1\u03b3", "\u03bf\u03c1\u03c6",
"\u03b3\u03b1\u03bb", "\u03b3\u03b5\u03c1", "\u03b4\u03b5\u03ba",
"\u03b4\u03b9\u03c0\u03bb", "\u03b1\u03bc\u03b5\u03c1\u03b9\u03ba\u03b1\u03bd",
"\u03bf\u03c5\u03c1", "\u03c0\u03b9\u03b8", "\u03c0\u03bf\u03c5\u03c1\u03b9\u03c4",
"\u03c3", "\u03b6\u03c9\u03bd\u03c4", "\u03b9\u03ba", "\u03ba\u03b1\u03c3\u03c4",
"\u03ba\u03bf\u03c0", "\u03bb\u03b9\u03c7", "\u03bb\u03bf\u03c5\u03b8\u03b7\u03c1",
"\u03bc\u03b1\u03b9\u03bd\u03c4", "\u03bc\u03b5\u03bb", "\u03c3\u03b9\u03b3",
"\u03c3\u03c0", "\u03c3\u03c4\u03b5\u03b3", "\u03c4\u03c1\u03b1\u03b3",
"\u03c4\u03c3\u03b1\u03b3", "\u03c6", "\u03b5\u03c1", "\u03b1\u03b4\u03b1\u03c0",
"\u03b1\u03b8\u03b9\u03b3\u03b3", "\u03b1\u03bc\u03b7\u03c7",
"\u03b1\u03bd\u03b9\u03ba", "\u03b1\u03bd\u03bf\u03c1\u03b3",
"\u03b1\u03c0\u03b7\u03b3", "\u03b1\u03c0\u03b9\u03b8",
"\u03b1\u03c4\u03c3\u03b9\u03b3\u03b3", "\u03b2\u03b1\u03c3",
"\u03b2\u03b1\u03c3\u03ba", "\u03b2\u03b1\u03b8\u03c5\u03b3\u03b1\u03bb",
"\u03b2\u03b9\u03bf\u03bc\u03b7\u03c7", "\u03b2\u03c1\u03b1\u03c7\u03c5\u03ba",
"\u03b4\u03b9\u03b1\u03c4", "\u03b4\u03b9\u03b1\u03c6",
"\u03b5\u03bd\u03bf\u03c1\u03b3", "\u03b8\u03c5\u03c3",
"\u03ba\u03b1\u03c0\u03bd\u03bf\u03b2\u03b9\u03bf\u03bc\u03b7\u03c7",
"\u03ba\u03b1\u03c4\u03b1\u03b3\u03b1\u03bb", "\u03ba\u03bb\u03b9\u03b2",
"\u03ba\u03bf\u03b9\u03bb\u03b1\u03c1\u03c6", "\u03bb\u03b9\u03b2",
"\u03bc\u03b5\u03b3\u03bb\u03bf\u03b2\u03b9\u03bf\u03bc\u03b7\u03c7",
"\u03bc\u03b9\u03ba\u03c1\u03bf\u03b2\u03b9\u03bf\u03bc\u03b7\u03c7",
"\u03bd\u03c4\u03b1\u03b2", "\u03be\u03b7\u03c1\u03bf\u03ba\u03bb\u03b9\u03b2",
"\u03bf\u03bb\u03b9\u03b3\u03bf\u03b4\u03b1\u03bc",
"\u03bf\u03bb\u03bf\u03b3\u03b1\u03bb", "\u03c0\u03b5\u03bd\u03c4\u03b1\u03c1\u03c6",
"\u03c0\u03b5\u03c1\u03b7\u03c6", "\u03c0\u03b5\u03c1\u03b9\u03c4\u03c1",
"\u03c0\u03bb\u03b1\u03c4", "\u03c0\u03bf\u03bb\u03c5\u03b4\u03b1\u03c0",
"\u03c0\u03bf\u03bb\u03c5\u03bc\u03b7\u03c7", "\u03c3\u03c4\u03b5\u03c6",
"\u03c4\u03b1\u03b2", "\u03c4\u03b5\u03c4", "\u03c5\u03c0\u03b5\u03c1\u03b7\u03c6",
"\u03c5\u03c0\u03bf\u03ba\u03bf\u03c0",
"\u03c7\u03b1\u03bc\u03b7\u03bb\u03bf\u03b4\u03b1\u03c0",
"\u03c8\u03b7\u03bb\u03bf\u03c4\u03b1\u03b2"
};
/**
 * Applies rule 8: strips past-tense plural endings, restoring -αγαν or -αν
 * for the listed exception stems.
 * @param s characters
 * @param l length
 * @return new length
 */
private static int rule8(final char[] s, final int l) {
  boolean rem = false;
  int len = l;
  if(len > 8 && e(s, len, "\u03b9\u03bf\u03c5\u03bd\u03c4\u03b1\u03bd\u03b5")) {
    len -= 8;
    rem = true;
  // fix: the three groups below were missing the parentheses around their
  // || alternatives, so the length guard only covered the first suffix
  // (&& binds tighter than ||). A word exactly equal to one of the later
  // suffixes (e.g. a 7-letter word ending the whole word) was stemmed to
  // length 0, i.e. an empty stem. The parenthesized form matches the
  // upstream Lucene GreekStemmer.
  } else if(len > 7 && (
      e(s, len, "\u03b9\u03bf\u03bd\u03c4\u03b1\u03bd\u03b5") ||
      e(s, len, "\u03bf\u03c5\u03bd\u03c4\u03b1\u03bd\u03b5") ||
      e(s, len, "\u03b7\u03b8\u03b7\u03ba\u03b1\u03bd\u03b5"))) {
    len -= 7;
    rem = true;
  } else if(len > 6 && (
      e(s, len, "\u03b9\u03bf\u03c4\u03b1\u03bd\u03b5") ||
      e(s, len, "\u03bf\u03bd\u03c4\u03b1\u03bd\u03b5") ||
      e(s, len, "\u03bf\u03c5\u03c3\u03b1\u03bd\u03b5"))) {
    len -= 6;
    rem = true;
  } else if(len > 5 && (
      e(s, len, "\u03b1\u03b3\u03b1\u03bd\u03b5") ||
      e(s, len, "\u03b7\u03c3\u03b1\u03bd\u03b5") ||
      e(s, len, "\u03bf\u03c4\u03b1\u03bd\u03b5") ||
      e(s, len, "\u03b7\u03ba\u03b1\u03bd\u03b5"))) {
    len -= 5;
    rem = true;
  }
  if(rem && c(EXC8A, s, len)) {
    // add -\u03b1\u03b3\u03b1\u03bd (we removed > 4 chars so it is safe)
    len += 4;
    s[len - 4] = '\u03b1';
    s[len - 3] = '\u03b3';
    s[len - 2] = '\u03b1';
    s[len - 1] = '\u03bd';
  }
  if(len > 3 && e(s, len, "\u03b1\u03bd\u03b5")) {
    len -= 3;
    if(ey(s, len) || c(EXC8B, s, len)) {
      len += 2; // add back -\u03b1\u03bd
    }
  }
  return len;
}
/** String arrays. */
private static final String[] EXC9 = {
"\u03b1\u03b2\u03b1\u03c1", "\u03b2\u03b5\u03bd", "\u03b5\u03bd\u03b1\u03c1",
"\u03b1\u03b2\u03c1", "\u03b1\u03b4", "\u03b1\u03b8", "\u03b1\u03bd",
"\u03b1\u03c0\u03bb", "\u03b2\u03b1\u03c1\u03bf\u03bd", "\u03bd\u03c4\u03c1",
"\u03c3\u03ba", "\u03ba\u03bf\u03c0", "\u03bc\u03c0\u03bf\u03c1",
"\u03bd\u03b9\u03c6", "\u03c0\u03b1\u03b3",
"\u03c0\u03b1\u03c1\u03b1\u03ba\u03b1\u03bb", "\u03c3\u03b5\u03c1\u03c0",
"\u03c3\u03ba\u03b5\u03bb", "\u03c3\u03c5\u03c1\u03c6", "\u03c4\u03bf\u03ba",
"\u03c5", "\u03b4", "\u03b5\u03bc", "\u03b8\u03b1\u03c1\u03c1", "\u03b8"
};
/**
* Applies rule 9.
* @param s characters
* @param l length
* @return new length
*/
private static int rule9(final char[] s, final int l) {
int len = l;
if(len > 5 && e(s, len, "\u03b7\u03c3\u03b5\u03c4\u03b5")) len -= 5;
if(len > 3 && e(s, len, "\u03b5\u03c4\u03b5")) {
len -= 3;
if(c(EXC9, s, len) || ey(s, len) ||
e(s, len, "\u03bf\u03b4") ||
e(s, len, "\u03b1\u03b9\u03c1") ||
e(s, len, "\u03c6\u03bf\u03c1") ||
e(s, len, "\u03c4\u03b1\u03b8") ||
e(s, len, "\u03b4\u03b9\u03b1\u03b8") ||
e(s, len, "\u03c3\u03c7") ||
e(s, len, "\u03b5\u03bd\u03b4") ||
e(s, len, "\u03b5\u03c5\u03c1") ||
e(s, len, "\u03c4\u03b9\u03b8") ||
e(s, len, "\u03c5\u03c0\u03b5\u03c1\u03b8") ||
e(s, len, "\u03c1\u03b1\u03b8") ||
e(s, len, "\u03b5\u03bd\u03b8") ||
e(s, len, "\u03c1\u03bf\u03b8") ||
e(s, len, "\u03c3\u03b8") ||
e(s, len, "\u03c0\u03c5\u03c1") ||
e(s, len, "\u03b1\u03b9\u03bd") ||
e(s, len, "\u03c3\u03c5\u03bd\u03b4") ||
e(s, len, "\u03c3\u03c5\u03bd") ||
e(s, len, "\u03c3\u03c5\u03bd\u03b8") ||
e(s, len, "\u03c7\u03c9\u03c1") ||
e(s, len, "\u03c0\u03bf\u03bd") ||
e(s, len, "\u03b2\u03c1") ||
e(s, len, "\u03ba\u03b1\u03b8") ||
e(s, len, "\u03b5\u03c5\u03b8") ||
e(s, len, "\u03b5\u03ba\u03b8") ||
e(s, len, "\u03bd\u03b5\u03c4") ||
e(s, len, "\u03c1\u03bf\u03bd") ||
e(s, len, "\u03b1\u03c1\u03ba") ||
e(s, len, "\u03b2\u03b1\u03c1") ||
e(s, len, "\u03b2\u03bf\u03bb") ||
e(s, len, "\u03c9\u03c6\u03b5\u03bb")) {
len += 2; // add back -\u03b5\u03c4
}
}
return len;
}
/**
* Applies rule 10.
* @param s characters
* @param l length
* @return new length
*/
private static int rule10(final char[] s, final int l) {
int len = l;
if(len > 5 && (
e(s, len, "\u03bf\u03bd\u03c4\u03b1\u03c3") ||
e(s, len, "\u03c9\u03bd\u03c4\u03b1\u03c3"))) {
len -= 5;
if(len == 3 && e(s, len, "\u03b1\u03c1\u03c7")) {
len += 3; // add back *\u03bd\u03c4
s[len - 3] = '\u03bf';
}
if(e(s, len, "\u03ba\u03c1\u03b5")) {
len += 3; // add back *\u03bd\u03c4
s[len - 3] = '\u03c9';
}
}
return len;
}
/**
* Applies rule 11.
* @param s characters
* @param l length
* @return new length
*/
private static int rule11(final char[] s, final int l) {
int len = l;
if(len > 6 && e(s, len, "\u03bf\u03bc\u03b1\u03c3\u03c4\u03b5")) {
len -= 6;
if(len == 2 && e(s, len, "\u03bf\u03bd")) {
len += 5; // add back -\u03bf\u03bc\u03b1\u03c3\u03c4
}
} else if(len > 7 && e(s, len, "\u03b9\u03bf\u03bc\u03b1\u03c3\u03c4\u03b5")) {
len -= 7;
if(len == 2 && e(s, len, "\u03bf\u03bd")) {
len += 5;
s[len - 5] = '\u03bf';
s[len - 4] = '\u03bc';
s[len - 3] = '\u03b1';
s[len - 2] = '\u03c3';
s[len - 1] = '\u03c4';
}
}
return len;
}
/** String arrays. */
private static final String[] EXC12A = {
"\u03c0", "\u03b1\u03c0", "\u03c3\u03c5\u03bc\u03c0",
"\u03b1\u03c3\u03c5\u03bc\u03c0", "\u03b1\u03ba\u03b1\u03c4\u03b1\u03c0",
"\u03b1\u03bc\u03b5\u03c4\u03b1\u03bc\u03c6"
};
/** String arrays. */
private static final String[] EXC12B = {
"\u03b1\u03bb", "\u03b1\u03c1", "\u03b5\u03ba\u03c4\u03b5\u03bb", "\u03b6", "\u03bc",
"\u03be", "\u03c0\u03b1\u03c1\u03b1\u03ba\u03b1\u03bb", "\u03b1\u03c1",
"\u03c0\u03c1\u03bf", "\u03bd\u03b9\u03c3"
};
/**
* Applies rule 12.
* @param s characters
* @param l length
* @return new length
*/
private static int rule12(final char[] s, final int l) {
int len = l;
if(len > 5 && e(s, len, "\u03b9\u03b5\u03c3\u03c4\u03b5")) {
len -= 5;
if(c(EXC12A, s, len)) len += 4; // add back -\u03b9\u03b5\u03c3\u03c4
}
if(len > 4 && e(s, len, "\u03b5\u03c3\u03c4\u03b5")) {
len -= 4;
if(c(EXC12B, s, len)) len += 3; // add back -\u03b5\u03c3\u03c4
}
return len;
}
/** String arrays. */
private static final String[] EXC13 = {
"\u03b4\u03b9\u03b1\u03b8", "\u03b8",
"\u03c0\u03b1\u03c1\u03b1\u03ba\u03b1\u03c4\u03b1\u03b8",
"\u03c0\u03c1\u03bf\u03c3\u03b8", "\u03c3\u03c5\u03bd\u03b8"
};
/**
* Applies rule 13.
* @param s characters
* @param l length
* @return new length
*/
private static int rule13(final char[] s, final int l) {
int len = l;
if(len > 6 && e(s, len, "\u03b7\u03b8\u03b7\u03ba\u03b5\u03c3")) {
len -= 6;
} else if(len > 5 && (
e(s, len, "\u03b7\u03b8\u03b7\u03ba\u03b1") ||
e(s, len, "\u03b7\u03b8\u03b7\u03ba\u03b5"))) {
len -= 5;
}
boolean rem = false;
if(len > 4 && e(s, len, "\u03b7\u03ba\u03b5\u03c3")) {
len -= 4;
rem = true;
} else if(len > 3 && (
e(s, len, "\u03b7\u03ba\u03b1") ||
e(s, len, "\u03b7\u03ba\u03b5"))) {
len -= 3;
rem = true;
}
if(rem && (c(EXC13, s, len) ||
e(s, len, "\u03c3\u03ba\u03c9\u03bb") ||
e(s, len, "\u03c3\u03ba\u03bf\u03c5\u03bb") ||
e(s, len, "\u03bd\u03b1\u03c1\u03b8") ||
e(s, len, "\u03c3\u03c6") ||
e(s, len, "\u03bf\u03b8") ||
e(s, len, "\u03c0\u03b9\u03b8"))) {
len += 2; // add back the -\u03b7\u03ba
}
return len;
}
/** String arrays. */
private static final String[] EXC14 = {
"\u03c6\u03b1\u03c1\u03bc\u03b1\u03ba", "\u03c7\u03b1\u03b4", "\u03b1\u03b3\u03ba",
"\u03b1\u03bd\u03b1\u03c1\u03c1", "\u03b2\u03c1\u03bf\u03bc",
"\u03b5\u03ba\u03bb\u03b9\u03c0", "\u03bb\u03b1\u03bc\u03c0\u03b9\u03b4",
"\u03bb\u03b5\u03c7", "\u03bc", "\u03c0\u03b1\u03c4", "\u03c1", "\u03bb",
"\u03bc\u03b5\u03b4", "\u03bc\u03b5\u03c3\u03b1\u03b6",
"\u03c5\u03c0\u03bf\u03c4\u03b5\u03b9\u03bd", "\u03b1\u03bc", "\u03b1\u03b9\u03b8",
"\u03b1\u03bd\u03b7\u03ba", "\u03b4\u03b5\u03c3\u03c0\u03bf\u03b6",
"\u03b5\u03bd\u03b4\u03b9\u03b1\u03c6\u03b5\u03c1", "\u03b4\u03b5",
"\u03b4\u03b5\u03c5\u03c4\u03b5\u03c1\u03b5\u03c5",
"\u03ba\u03b1\u03b8\u03b1\u03c1\u03b5\u03c5", "\u03c0\u03bb\u03b5",
"\u03c4\u03c3\u03b1"
};
/**
* Applies rule 14.
* @param s characters
* @param l length
* @return new length
*/
private static int rule14(final char[] s, final int l) {
int len = l;
boolean rem = false;
if(len > 5 && e(s, len, "\u03bf\u03c5\u03c3\u03b5\u03c3")) {
len -= 5;
rem = true;
} else if(len > 4 && (
e(s, len, "\u03bf\u03c5\u03c3\u03b1") ||
e(s, len, "\u03bf\u03c5\u03c3\u03b5"))) {
len -= 4;
rem = true;
}
if(rem && (c(EXC14, s, len) || ev(s, len) ||
e(s, len, "\u03c0\u03bf\u03b4\u03b1\u03c1") ||
e(s, len, "\u03b2\u03bb\u03b5\u03c0") ||
e(s, len, "\u03c0\u03b1\u03bd\u03c4\u03b1\u03c7") ||
e(s, len, "\u03c6\u03c1\u03c5\u03b4") ||
e(s, len, "\u03bc\u03b1\u03bd\u03c4\u03b9\u03bb") ||
e(s, len, "\u03bc\u03b1\u03bb\u03bb") ||
e(s, len, "\u03ba\u03c5\u03bc\u03b1\u03c4") ||
e(s, len, "\u03bb\u03b1\u03c7") ||
e(s, len, "\u03bb\u03b7\u03b3") ||
e(s, len, "\u03c6\u03b1\u03b3") ||
e(s, len, "\u03bf\u03bc") ||
e(s, len, "\u03c0\u03c1\u03c9\u03c4"))) {
len += 3; // add back -\u03bf\u03c5\u03c3
}
return len;
}
/** String arrays. */
private static final String[] EXC15A = {
"\u03b1\u03b2\u03b1\u03c3\u03c4", "\u03c0\u03bf\u03bb\u03c5\u03c6",
"\u03b1\u03b4\u03b7\u03c6", "\u03c0\u03b1\u03bc\u03c6", "\u03c1",
"\u03b1\u03c3\u03c0", "\u03b1\u03c6", "\u03b1\u03bc\u03b1\u03bb",
"\u03b1\u03bc\u03b1\u03bb\u03bb\u03b9", "\u03b1\u03bd\u03c5\u03c3\u03c4",
"\u03b1\u03c0\u03b5\u03c1", "\u03b1\u03c3\u03c0\u03b1\u03c1",
"\u03b1\u03c7\u03b1\u03c1", "\u03b4\u03b5\u03c1\u03b2\u03b5\u03bd",
"\u03b4\u03c1\u03bf\u03c3\u03bf\u03c0", "\u03be\u03b5\u03c6",
"\u03bd\u03b5\u03bf\u03c0", "\u03bd\u03bf\u03bc\u03bf\u03c4",
"\u03bf\u03bb\u03bf\u03c0", "\u03bf\u03bc\u03bf\u03c4",
"\u03c0\u03c1\u03bf\u03c3\u03c4", "\u03c0\u03c1\u03bf\u03c3\u03c9\u03c0\u03bf\u03c0",
"\u03c3\u03c5\u03bc\u03c0", "\u03c3\u03c5\u03bd\u03c4", "\u03c4",
"\u03c5\u03c0\u03bf\u03c4", "\u03c7\u03b1\u03c1", "\u03b1\u03b5\u03b9\u03c0",
"\u03b1\u03b9\u03bc\u03bf\u03c3\u03c4", "\u03b1\u03bd\u03c5\u03c0",
"\u03b1\u03c0\u03bf\u03c4", "\u03b1\u03c1\u03c4\u03b9\u03c0",
"\u03b4\u03b9\u03b1\u03c4", "\u03b5\u03bd", "\u03b5\u03c0\u03b9\u03c4",
"\u03ba\u03c1\u03bf\u03ba\u03b1\u03bb\u03bf\u03c0",
"\u03c3\u03b9\u03b4\u03b7\u03c1\u03bf\u03c0", "\u03bb", "\u03bd\u03b1\u03c5",
"\u03bf\u03c5\u03bb\u03b1\u03bc", "\u03bf\u03c5\u03c1", "\u03c0", "\u03c4\u03c1",
"\u03bc"
};
/** String arrays. */
private static final String[] EXC15B = {
"\u03c8\u03bf\u03c6", "\u03bd\u03b1\u03c5\u03bb\u03bf\u03c7"
};
/**
* Applies rule 15.
* @param s characters
* @param l length
* @return new length
*/
private static int rule15(final char[] s, final int l) {
int len = l;
boolean rem = false;
if(len > 4 && e(s, len, "\u03b1\u03b3\u03b5\u03c3")) {
len -= 4;
rem = true;
} else if(len > 3 && (
e(s, len, "\u03b1\u03b3\u03b1") ||
e(s, len, "\u03b1\u03b3\u03b5"))) {
len -= 3;
rem = true;
}
if(rem) {
final boolean cond1 = c(EXC15A, s, len) ||
e(s, len, "\u03bf\u03c6") ||
e(s, len, "\u03c0\u03b5\u03bb") ||
e(s, len, "\u03c7\u03bf\u03c1\u03c4") ||
e(s, len, "\u03bb\u03bb") ||
e(s, len, "\u03c3\u03c6") ||
e(s, len, "\u03c1\u03c0") ||
e(s, len, "\u03c6\u03c1") ||
e(s, len, "\u03c0\u03c1") ||
e(s, len, "\u03bb\u03bf\u03c7") ||
e(s, len, "\u03c3\u03bc\u03b7\u03bd");
final boolean cond2 = c(EXC15B, s, len) ||
e(s, len, "\u03ba\u03bf\u03bb\u03bb");
if(cond1 && !cond2) len += 2; // add back -\u03b1\u03b3
}
return len;
}
/** String arrays. */
private static final String[] EXC16 = {
"\u03bd", "\u03c7\u03b5\u03c1\u03c3\u03bf\u03bd",
"\u03b4\u03c9\u03b4\u03b5\u03ba\u03b1\u03bd", "\u03b5\u03c1\u03b7\u03bc\u03bf\u03bd",
"\u03bc\u03b5\u03b3\u03b1\u03bb\u03bf\u03bd", "\u03b5\u03c0\u03c4\u03b1\u03bd"
};
/**
* Applies rule 16.
* @param s characters
* @param l length
* @return new length
*/
private static int rule16(final char[] s, final int l) {
int len = l;
boolean rem = false;
if(len > 4 && e(s, len, "\u03b7\u03c3\u03bf\u03c5")) {
len -= 4;
rem = true;
} else if(len > 3 && (
e(s, len, "\u03b7\u03c3\u03b5") ||
e(s, len, "\u03b7\u03c3\u03b1"))) {
len -= 3;
rem = true;
}
if(rem && c(EXC16, s, len)) len += 2; // add back -\u03b7\u03c3
return len;
}
/** String arrays. */
private static final String[] EXC17 = {
"\u03b1\u03c3\u03b2", "\u03c3\u03b2", "\u03b1\u03c7\u03c1", "\u03c7\u03c1",
"\u03b1\u03c0\u03bb", "\u03b1\u03b5\u03b9\u03bc\u03bd",
"\u03b4\u03c5\u03c3\u03c7\u03c1", "\u03b5\u03c5\u03c7\u03c1",
"\u03ba\u03bf\u03b9\u03bd\u03bf\u03c7\u03c1", "\u03c0\u03b1\u03bb\u03b9\u03bc\u03c8"
};
/**
* Applies rule 17.
* @param s characters
* @param l length
* @return new length
*/
private static int rule17(final char[] s, final int l) {
int len = l;
if(len > 4 && e(s, len, "\u03b7\u03c3\u03c4\u03b5")) {
len -= 4;
if(c(EXC17, s, len)) len += 3; // add back the -\u03b7\u03c3\u03c4
}
return len;
}
/** String arrays. */
private static final String[] EXC18 = {
"\u03bd", "\u03c1", "\u03c3\u03c0\u03b9",
"\u03c3\u03c4\u03c1\u03b1\u03b2\u03bf\u03bc\u03bf\u03c5\u03c4\u03c3",
"\u03ba\u03b1\u03ba\u03bf\u03bc\u03bf\u03c5\u03c4\u03c3", "\u03b5\u03be\u03c9\u03bd"
};
/**
* Applies rule 18.
* @param s characters
* @param l length
* @return new length
*/
private static int rule18(final char[] s, final int l) {
boolean rem = false;
int len = l;
if(len > 6 && (
e(s, len, "\u03b7\u03c3\u03bf\u03c5\u03bd\u03b5") ||
e(s, len, "\u03b7\u03b8\u03bf\u03c5\u03bd\u03b5"))) {
len -= 6;
rem = true;
} else if(len > 4 && e(s, len, "\u03bf\u03c5\u03bd\u03b5")) {
len -= 4;
rem = true;
}
if(rem && c(EXC18, s, len)) {
len += 3;
s[len - 3] = '\u03bf';
s[len - 2] = '\u03c5';
s[len - 1] = '\u03bd';
}
return len;
}
/** String arrays. */
private static final String[] EXC19 = {
"\u03c0\u03b1\u03c1\u03b1\u03c3\u03bf\u03c5\u03c3", "\u03c6", "\u03c7",
"\u03c9\u03c1\u03b9\u03bf\u03c0\u03bb", "\u03b1\u03b6",
"\u03b1\u03bb\u03bb\u03bf\u03c3\u03bf\u03c5\u03c3", "\u03b1\u03c3\u03bf\u03c5\u03c3"
};
/**
* Applies rule 19.
* @param s characters
* @param l length
* @return new length
*/
private static int rule19(final char[] s, final int l) {
int len = l;
boolean rem = false;
if(len > 6 && (
e(s, len, "\u03b7\u03c3\u03bf\u03c5\u03bc\u03b5") ||
e(s, len, "\u03b7\u03b8\u03bf\u03c5\u03bc\u03b5"))) {
len -= 6;
rem = true;
} else if(len > 4 && e(s, len, "\u03bf\u03c5\u03bc\u03b5")) {
len -= 4;
rem = true;
}
if(rem && c(EXC19, s, len)) {
len += 3;
s[len - 3] = '\u03bf';
s[len - 2] = '\u03c5';
s[len - 1] = '\u03bc';
}
return len;
}
/**
 * Applies rule 20: shortens -ματων/-ματοσ to -μ and -ματα to -ματ... i.e.
 * reduces the neuter -ματ- endings by 3 resp. 2 characters.
 * @param s characters
 * @param l length
 * @return new length
 */
private static int rule20(final char[] s, final int l) {
  if(l > 5 && (e(s, l, "\u03bc\u03b1\u03c4\u03c9\u03bd") ||
               e(s, l, "\u03bc\u03b1\u03c4\u03bf\u03c3"))) return l - 3;
  if(l > 4 && e(s, l, "\u03bc\u03b1\u03c4\u03b1")) return l - 2;
  return l;
}
/**
* Applies rule 21.
* @param s characters
* @param l length
* @return new length
*/
private static int rule21(final char[] s, final int l) {
if(l > 9 && e(s, l, "\u03b9\u03bf\u03bd\u03c4\u03bf\u03c5\u03c3\u03b1\u03bd"))
return l - 9;
if(l > 8 && (
e(s, l, "\u03b9\u03bf\u03bc\u03b1\u03c3\u03c4\u03b1\u03bd") ||
e(s, l, "\u03b9\u03bf\u03c3\u03b1\u03c3\u03c4\u03b1\u03bd") ||
e(s, l, "\u03b9\u03bf\u03c5\u03bc\u03b1\u03c3\u03c4\u03b5") ||
e(s, l, "\u03bf\u03bd\u03c4\u03bf\u03c5\u03c3\u03b1\u03bd"))) return l - 8;
if(l > 7 && (
e(s, l, "\u03b9\u03b5\u03bc\u03b1\u03c3\u03c4\u03b5") ||
e(s, l, "\u03b9\u03b5\u03c3\u03b1\u03c3\u03c4\u03b5") ||
e(s, l, "\u03b9\u03bf\u03bc\u03bf\u03c5\u03bd\u03b1") ||
e(s, l, "\u03b9\u03bf\u03c3\u03b1\u03c3\u03c4\u03b5") ||
e(s, l, "\u03b9\u03bf\u03c3\u03bf\u03c5\u03bd\u03b1") ||
e(s, l, "\u03b9\u03bf\u03c5\u03bd\u03c4\u03b1\u03b9") ||
e(s, l, "\u03b9\u03bf\u03c5\u03bd\u03c4\u03b1\u03bd") ||
e(s, l, "\u03b7\u03b8\u03b7\u03ba\u03b1\u03c4\u03b5") ||
e(s, l, "\u03bf\u03bc\u03b1\u03c3\u03c4\u03b1\u03bd") ||
e(s, l, "\u03bf\u03c3\u03b1\u03c3\u03c4\u03b1\u03bd") ||
e(s, l, "\u03bf\u03c5\u03bc\u03b1\u03c3\u03c4\u03b5"))) return l - 7;
if(l > 6 && (
e(s, l, "\u03b9\u03bf\u03bc\u03bf\u03c5\u03bd") ||
e(s, l, "\u03b9\u03bf\u03bd\u03c4\u03b1\u03bd") ||
e(s, l, "\u03b9\u03bf\u03c3\u03bf\u03c5\u03bd") ||
e(s, l, "\u03b7\u03b8\u03b5\u03b9\u03c4\u03b5") ||
e(s, l, "\u03b7\u03b8\u03b7\u03ba\u03b1\u03bd") ||
e(s, l, "\u03bf\u03bc\u03bf\u03c5\u03bd\u03b1") ||
e(s, l, "\u03bf\u03c3\u03b1\u03c3\u03c4\u03b5") ||
e(s, l, "\u03bf\u03c3\u03bf\u03c5\u03bd\u03b1") ||
e(s, l, "\u03bf\u03c5\u03bd\u03c4\u03b1\u03b9") ||
e(s, l, "\u03bf\u03c5\u03bd\u03c4\u03b1\u03bd") ||
e(s, l, "\u03bf\u03c5\u03c3\u03b1\u03c4\u03b5"))) return l - 6;
if(l > 5 && (
e(s, l, "\u03b1\u03b3\u03b1\u03c4\u03b5") ||
e(s, l, "\u03b9\u03b5\u03bc\u03b1\u03b9") ||
e(s, l, "\u03b9\u03b5\u03c4\u03b1\u03b9") ||
e(s, l, "\u03b9\u03b5\u03c3\u03b1\u03b9") ||
e(s, l, "\u03b9\u03bf\u03c4\u03b1\u03bd") ||
e(s, l, "\u03b9\u03bf\u03c5\u03bc\u03b1") ||
e(s, l, "\u03b7\u03b8\u03b5\u03b9\u03c3") ||
e(s, l, "\u03b7\u03b8\u03bf\u03c5\u03bd") ||
e(s, l, "\u03b7\u03ba\u03b1\u03c4\u03b5") ||
e(s, l, "\u03b7\u03c3\u03b1\u03c4\u03b5") ||
e(s, l, "\u03b7\u03c3\u03bf\u03c5\u03bd") ||
e(s, l, "\u03bf\u03bc\u03bf\u03c5\u03bd") ||
e(s, l, "\u03bf\u03bd\u03c4\u03b1\u03b9") ||
e(s, l, "\u03bf\u03bd\u03c4\u03b1\u03bd") ||
e(s, l, "\u03bf\u03c3\u03bf\u03c5\u03bd") ||
e(s, l, "\u03bf\u03c5\u03bc\u03b1\u03b9") ||
e(s, l, "\u03bf\u03c5\u03c3\u03b1\u03bd"))) return l - 5;
if(l > 4 && (
e(s, l, "\u03b1\u03b3\u03b1\u03bd") ||
e(s, l, "\u03b1\u03bc\u03b1\u03b9") ||
e(s, l, "\u03b1\u03c3\u03b1\u03b9") ||
e(s, l, "\u03b1\u03c4\u03b1\u03b9") ||
e(s, l, "\u03b5\u03b9\u03c4\u03b5") ||
e(s, l, "\u03b5\u03c3\u03b1\u03b9") ||
e(s, l, "\u03b5\u03c4\u03b1\u03b9") ||
e(s, l, "\u03b7\u03b4\u03b5\u03c3") ||
e(s, l, "\u03b7\u03b4\u03c9\u03bd") ||
e(s, l, "\u03b7\u03b8\u03b5\u03b9") ||
e(s, l, "\u03b7\u03ba\u03b1\u03bd") ||
e(s, l, "\u03b7\u03c3\u03b1\u03bd") ||
e(s, l, "\u03b7\u03c3\u03b5\u03b9") ||
e(s, l, "\u03b7\u03c3\u03b5\u03c3") ||
e(s, l, "\u03bf\u03bc\u03b1\u03b9") ||
e(s, l, "\u03bf\u03c4\u03b1\u03bd"))) return l - 4;
if(l > 3 && (
e(s, l, "\u03b1\u03b5\u03b9") ||
e(s, l, "\u03b5\u03b9\u03c3") ||
e(s, l, "\u03b7\u03b8\u03c9") ||
e(s, l, "\u03b7\u03c3\u03c9") ||
e(s, l, "\u03bf\u03c5\u03bd") ||
e(s, l, "\u03bf\u03c5\u03c3"))) return l - 3;
if(l > 2 && (
e(s, l, "\u03b1\u03bd") ||
e(s, l, "\u03b1\u03c3") ||
e(s, l, "\u03b1\u03c9") ||
e(s, l, "\u03b5\u03b9") ||
e(s, l, "\u03b5\u03c3") ||
e(s, l, "\u03b7\u03c3") ||
e(s, l, "\u03bf\u03b9") ||
e(s, l, "\u03bf\u03c3") ||
e(s, l, "\u03bf\u03c5") ||
e(s, l, "\u03c5\u03c3") ||
e(s, l, "\u03c9\u03bd"))) return l - 2;
if(l > 1 && ev(s, l)) return l - 1;
return l;
}
/**
 * Applies rule 22: strips comparative/superlative endings
 * (-εστερ/-εστατ and -οτερ/-οτατ/-υτερ/-υτατ/-ωτερ/-ωτατ).
 * @param s characters
 * @param l length
 * @return new length
 */
private static int rule22(final char[] s, final int l) {
  // 5-character endings
  for(final String suffix : new String[] {
      "\u03b5\u03c3\u03c4\u03b5\u03c1", "\u03b5\u03c3\u03c4\u03b1\u03c4" }) {
    if(e(s, l, suffix)) return l - 5;
  }
  // 4-character endings
  for(final String suffix : new String[] {
      "\u03bf\u03c4\u03b5\u03c1", "\u03bf\u03c4\u03b1\u03c4",
      "\u03c5\u03c4\u03b5\u03c1", "\u03c5\u03c4\u03b1\u03c4",
      "\u03c9\u03c4\u03b5\u03c1", "\u03c9\u03c4\u03b1\u03c4" }) {
    if(e(s, l, suffix)) return l - 4;
  }
  return l;
}
/**
 * Checks if the first {@code l} characters of {@code s} end with the given
 * suffix.
 * @param s characters
 * @param l number of valid characters in {@code s}
 * @param suf suffix to test
 * @return {@code true} if the suffix matches
 */
private static boolean e(final char[] s, final int l, final String suf) {
  final int sl = suf.length();
  // a suffix longer than the word can never match
  if(sl > l) return false;
  final int off = l - sl;
  for(int i = 0; i < sl; i++) {
    if(s[off + i] != suf.charAt(i)) return false;
  }
  return true;
}
/**
 * Checks if the specified characters end with a Greek vowel
 * (\u03b1, \u03b5, \u03b7, \u03b9, \u03bf, \u03c5, \u03c9).
 * @param s characters
 * @param l length
 * @return result of check
 */
private static boolean ev(final char[] s, final int l) {
  // membership test against the vowel set; false for an empty prefix
  return l > 0 &&
    "\u03b1\u03b5\u03b7\u03b9\u03bf\u03c5\u03c9".indexOf(s[l - 1]) >= 0;
}
/**
 * Checks if the specified characters end with a Greek vowel, excluding
 * upsilon (\u03c5).
 * @param s characters
 * @param l length
 * @return result of check
 */
private static boolean ey(final char[] s, final int l) {
  // Same technique as ev(), with upsilon removed from the vowel set.
  return l != 0 && "\u03b1\u03b5\u03b7\u03b9\u03bf\u03c9".indexOf(s[l - 1]) != -1;
}
/**
 * Checks if the first {@code l} characters of {@code s} exactly match one
 * of the strings in the array.
 * @param strings string array
 * @param s characters
 * @param l length of characters
 * @return result of check
 */
private static boolean c(final String[] strings, final char[] s, final int l) {
  outer:
  for(final String cand : strings) {
    // Only candidates of exactly the right length can match.
    if(cand.length() != l) continue;
    for(int i = 0; i < l; i++) {
      if(cand.charAt(i) != s[i]) continue outer;
    }
    return true;
  }
  return false;
}
}
| |
/*
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: blah@cliffano.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.cliffano.swaggyjenkins.model;
import java.util.Objects;
import java.util.Arrays;
import com.cliffano.swaggyjenkins.model.CauseAction;
import com.cliffano.swaggyjenkins.model.EmptyChangeLogSet;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * FreeStyleBuild
 *
 * Gson-serialized model of one Jenkins free-style build, generated from the
 * Swaggy Jenkins OpenAPI specification. Every property is nullable; each one
 * has a fluent setter (returns {@code this}) in addition to the plain
 * getter/setter pair.
 */
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2022-02-13T02:15:08.056011Z[Etc/UTC]")
public class FreeStyleBuild {
  // JSON property names referenced by the @SerializedName annotations below.
  // Keep in sync with the OpenAPI document; do not edit by hand (generated).
  public static final String SERIALIZED_NAME_PROPERTY_CLASS = "_class";
  @SerializedName(SERIALIZED_NAME_PROPERTY_CLASS)
  private String propertyClass;

  public static final String SERIALIZED_NAME_NUMBER = "number";
  @SerializedName(SERIALIZED_NAME_NUMBER)
  private Integer number;

  public static final String SERIALIZED_NAME_URL = "url";
  @SerializedName(SERIALIZED_NAME_URL)
  private String url;

  public static final String SERIALIZED_NAME_ACTIONS = "actions";
  @SerializedName(SERIALIZED_NAME_ACTIONS)
  // null until first set/added, so an absent JSON field stays absent on re-serialization
  private List<CauseAction> actions = null;

  public static final String SERIALIZED_NAME_BUILDING = "building";
  @SerializedName(SERIALIZED_NAME_BUILDING)
  private Boolean building;

  public static final String SERIALIZED_NAME_DESCRIPTION = "description";
  @SerializedName(SERIALIZED_NAME_DESCRIPTION)
  private String description;

  public static final String SERIALIZED_NAME_DISPLAY_NAME = "displayName";
  @SerializedName(SERIALIZED_NAME_DISPLAY_NAME)
  private String displayName;

  public static final String SERIALIZED_NAME_DURATION = "duration";
  @SerializedName(SERIALIZED_NAME_DURATION)
  private Integer duration;

  public static final String SERIALIZED_NAME_ESTIMATED_DURATION = "estimatedDuration";
  @SerializedName(SERIALIZED_NAME_ESTIMATED_DURATION)
  private Integer estimatedDuration;

  public static final String SERIALIZED_NAME_EXECUTOR = "executor";
  @SerializedName(SERIALIZED_NAME_EXECUTOR)
  private String executor;

  public static final String SERIALIZED_NAME_FULL_DISPLAY_NAME = "fullDisplayName";
  @SerializedName(SERIALIZED_NAME_FULL_DISPLAY_NAME)
  private String fullDisplayName;

  public static final String SERIALIZED_NAME_ID = "id";
  @SerializedName(SERIALIZED_NAME_ID)
  private String id;

  public static final String SERIALIZED_NAME_KEEP_LOG = "keepLog";
  @SerializedName(SERIALIZED_NAME_KEEP_LOG)
  private Boolean keepLog;

  public static final String SERIALIZED_NAME_QUEUE_ID = "queueId";
  @SerializedName(SERIALIZED_NAME_QUEUE_ID)
  private Integer queueId;

  public static final String SERIALIZED_NAME_RESULT = "result";
  @SerializedName(SERIALIZED_NAME_RESULT)
  private String result;

  public static final String SERIALIZED_NAME_TIMESTAMP = "timestamp";
  @SerializedName(SERIALIZED_NAME_TIMESTAMP)
  // NOTE(review): Jenkins build timestamps are epoch milliseconds, which do not
  // fit in a 32-bit Integer — the spec declares this as "integer"; confirm upstream.
  private Integer timestamp;

  public static final String SERIALIZED_NAME_BUILT_ON = "builtOn";
  @SerializedName(SERIALIZED_NAME_BUILT_ON)
  private String builtOn;

  public static final String SERIALIZED_NAME_CHANGE_SET = "changeSet";
  @SerializedName(SERIALIZED_NAME_CHANGE_SET)
  private EmptyChangeLogSet changeSet;

  public FreeStyleBuild() {
  }

  // Fluent setter for propertyClass; returns this for chaining.
  public FreeStyleBuild propertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
    return this;
  }

  /**
   * Get propertyClass
   * @return propertyClass
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getPropertyClass() {
    return propertyClass;
  }

  public void setPropertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
  }

  // Fluent setter for number; returns this for chaining.
  public FreeStyleBuild number(Integer number) {
    this.number = number;
    return this;
  }

  /**
   * Get number
   * @return number
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Integer getNumber() {
    return number;
  }

  public void setNumber(Integer number) {
    this.number = number;
  }

  // Fluent setter for url; returns this for chaining.
  public FreeStyleBuild url(String url) {
    this.url = url;
    return this;
  }

  /**
   * Get url
   * @return url
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getUrl() {
    return url;
  }

  public void setUrl(String url) {
    this.url = url;
  }

  // Fluent setter for actions; replaces the whole list.
  public FreeStyleBuild actions(List<CauseAction> actions) {
    this.actions = actions;
    return this;
  }

  // Appends one action, lazily creating the backing list on first use.
  public FreeStyleBuild addActionsItem(CauseAction actionsItem) {
    if (this.actions == null) {
      this.actions = new ArrayList<CauseAction>();
    }
    this.actions.add(actionsItem);
    return this;
  }

  /**
   * Get actions
   * @return actions
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public List<CauseAction> getActions() {
    return actions;
  }

  public void setActions(List<CauseAction> actions) {
    this.actions = actions;
  }

  // Fluent setter for building; returns this for chaining.
  public FreeStyleBuild building(Boolean building) {
    this.building = building;
    return this;
  }

  /**
   * Get building
   * @return building
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Boolean getBuilding() {
    return building;
  }

  public void setBuilding(Boolean building) {
    this.building = building;
  }

  // Fluent setter for description; returns this for chaining.
  public FreeStyleBuild description(String description) {
    this.description = description;
    return this;
  }

  /**
   * Get description
   * @return description
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getDescription() {
    return description;
  }

  public void setDescription(String description) {
    this.description = description;
  }

  // Fluent setter for displayName; returns this for chaining.
  public FreeStyleBuild displayName(String displayName) {
    this.displayName = displayName;
    return this;
  }

  /**
   * Get displayName
   * @return displayName
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getDisplayName() {
    return displayName;
  }

  public void setDisplayName(String displayName) {
    this.displayName = displayName;
  }

  // Fluent setter for duration; returns this for chaining.
  public FreeStyleBuild duration(Integer duration) {
    this.duration = duration;
    return this;
  }

  /**
   * Get duration
   * @return duration
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Integer getDuration() {
    return duration;
  }

  public void setDuration(Integer duration) {
    this.duration = duration;
  }

  // Fluent setter for estimatedDuration; returns this for chaining.
  public FreeStyleBuild estimatedDuration(Integer estimatedDuration) {
    this.estimatedDuration = estimatedDuration;
    return this;
  }

  /**
   * Get estimatedDuration
   * @return estimatedDuration
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Integer getEstimatedDuration() {
    return estimatedDuration;
  }

  public void setEstimatedDuration(Integer estimatedDuration) {
    this.estimatedDuration = estimatedDuration;
  }

  // Fluent setter for executor; returns this for chaining.
  public FreeStyleBuild executor(String executor) {
    this.executor = executor;
    return this;
  }

  /**
   * Get executor
   * @return executor
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getExecutor() {
    return executor;
  }

  public void setExecutor(String executor) {
    this.executor = executor;
  }

  // Fluent setter for fullDisplayName; returns this for chaining.
  public FreeStyleBuild fullDisplayName(String fullDisplayName) {
    this.fullDisplayName = fullDisplayName;
    return this;
  }

  /**
   * Get fullDisplayName
   * @return fullDisplayName
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getFullDisplayName() {
    return fullDisplayName;
  }

  public void setFullDisplayName(String fullDisplayName) {
    this.fullDisplayName = fullDisplayName;
  }

  // Fluent setter for id; returns this for chaining.
  public FreeStyleBuild id(String id) {
    this.id = id;
    return this;
  }

  /**
   * Get id
   * @return id
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getId() {
    return id;
  }

  public void setId(String id) {
    this.id = id;
  }

  // Fluent setter for keepLog; returns this for chaining.
  public FreeStyleBuild keepLog(Boolean keepLog) {
    this.keepLog = keepLog;
    return this;
  }

  /**
   * Get keepLog
   * @return keepLog
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Boolean getKeepLog() {
    return keepLog;
  }

  public void setKeepLog(Boolean keepLog) {
    this.keepLog = keepLog;
  }

  // Fluent setter for queueId; returns this for chaining.
  public FreeStyleBuild queueId(Integer queueId) {
    this.queueId = queueId;
    return this;
  }

  /**
   * Get queueId
   * @return queueId
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Integer getQueueId() {
    return queueId;
  }

  public void setQueueId(Integer queueId) {
    this.queueId = queueId;
  }

  // Fluent setter for result; returns this for chaining.
  public FreeStyleBuild result(String result) {
    this.result = result;
    return this;
  }

  /**
   * Get result
   * @return result
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getResult() {
    return result;
  }

  public void setResult(String result) {
    this.result = result;
  }

  // Fluent setter for timestamp; returns this for chaining.
  public FreeStyleBuild timestamp(Integer timestamp) {
    this.timestamp = timestamp;
    return this;
  }

  /**
   * Get timestamp
   * @return timestamp
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public Integer getTimestamp() {
    return timestamp;
  }

  public void setTimestamp(Integer timestamp) {
    this.timestamp = timestamp;
  }

  // Fluent setter for builtOn; returns this for chaining.
  public FreeStyleBuild builtOn(String builtOn) {
    this.builtOn = builtOn;
    return this;
  }

  /**
   * Get builtOn
   * @return builtOn
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public String getBuiltOn() {
    return builtOn;
  }

  public void setBuiltOn(String builtOn) {
    this.builtOn = builtOn;
  }

  // Fluent setter for changeSet; returns this for chaining.
  public FreeStyleBuild changeSet(EmptyChangeLogSet changeSet) {
    this.changeSet = changeSet;
    return this;
  }

  /**
   * Get changeSet
   * @return changeSet
  **/
  @javax.annotation.Nullable
  @ApiModelProperty(value = "")
  public EmptyChangeLogSet getChangeSet() {
    return changeSet;
  }

  public void setChangeSet(EmptyChangeLogSet changeSet) {
    this.changeSet = changeSet;
  }

  // Value equality over every serialized property (null-safe via Objects.equals).
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    FreeStyleBuild freeStyleBuild = (FreeStyleBuild) o;
    return Objects.equals(this.propertyClass, freeStyleBuild.propertyClass) &&
        Objects.equals(this.number, freeStyleBuild.number) &&
        Objects.equals(this.url, freeStyleBuild.url) &&
        Objects.equals(this.actions, freeStyleBuild.actions) &&
        Objects.equals(this.building, freeStyleBuild.building) &&
        Objects.equals(this.description, freeStyleBuild.description) &&
        Objects.equals(this.displayName, freeStyleBuild.displayName) &&
        Objects.equals(this.duration, freeStyleBuild.duration) &&
        Objects.equals(this.estimatedDuration, freeStyleBuild.estimatedDuration) &&
        Objects.equals(this.executor, freeStyleBuild.executor) &&
        Objects.equals(this.fullDisplayName, freeStyleBuild.fullDisplayName) &&
        Objects.equals(this.id, freeStyleBuild.id) &&
        Objects.equals(this.keepLog, freeStyleBuild.keepLog) &&
        Objects.equals(this.queueId, freeStyleBuild.queueId) &&
        Objects.equals(this.result, freeStyleBuild.result) &&
        Objects.equals(this.timestamp, freeStyleBuild.timestamp) &&
        Objects.equals(this.builtOn, freeStyleBuild.builtOn) &&
        Objects.equals(this.changeSet, freeStyleBuild.changeSet);
  }

  // Must include exactly the fields compared in equals(), in any order.
  @Override
  public int hashCode() {
    return Objects.hash(propertyClass, number, url, actions, building, description, displayName, duration, estimatedDuration, executor, fullDisplayName, id, keepLog, queueId, result, timestamp, builtOn, changeSet);
  }

  // Multi-line debug representation; nested values are indented by toIndentedString().
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class FreeStyleBuild {\n");
    sb.append("    propertyClass: ").append(toIndentedString(propertyClass)).append("\n");
    sb.append("    number: ").append(toIndentedString(number)).append("\n");
    sb.append("    url: ").append(toIndentedString(url)).append("\n");
    sb.append("    actions: ").append(toIndentedString(actions)).append("\n");
    sb.append("    building: ").append(toIndentedString(building)).append("\n");
    sb.append("    description: ").append(toIndentedString(description)).append("\n");
    sb.append("    displayName: ").append(toIndentedString(displayName)).append("\n");
    sb.append("    duration: ").append(toIndentedString(duration)).append("\n");
    sb.append("    estimatedDuration: ").append(toIndentedString(estimatedDuration)).append("\n");
    sb.append("    executor: ").append(toIndentedString(executor)).append("\n");
    sb.append("    fullDisplayName: ").append(toIndentedString(fullDisplayName)).append("\n");
    sb.append("    id: ").append(toIndentedString(id)).append("\n");
    sb.append("    keepLog: ").append(toIndentedString(keepLog)).append("\n");
    sb.append("    queueId: ").append(toIndentedString(queueId)).append("\n");
    sb.append("    result: ").append(toIndentedString(result)).append("\n");
    sb.append("    timestamp: ").append(toIndentedString(timestamp)).append("\n");
    sb.append("    builtOn: ").append(toIndentedString(builtOn)).append("\n");
    sb.append("    changeSet: ").append(toIndentedString(changeSet)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line). Returns the literal "null" for null input.
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.internal.tree.mxml;
import java.util.EnumSet;
import org.apache.flex.compiler.common.ISourceLocation;
import org.apache.flex.compiler.definitions.IDefinition;
import org.apache.flex.compiler.definitions.ITypeDefinition;
import org.apache.flex.compiler.internal.mxml.MXMLDialect.TextParsingFlags;
import org.apache.flex.compiler.internal.parsing.ISourceFragment;
import org.apache.flex.compiler.internal.semantics.SemanticUtils;
import org.apache.flex.compiler.internal.tree.as.NodeBase;
import org.apache.flex.compiler.mxml.IMXMLTagData;
import org.apache.flex.compiler.mxml.IMXMLTextData;
import org.apache.flex.compiler.problems.ICompilerProblem;
import org.apache.flex.compiler.problems.ImplicitCoercionToUnrelatedTypeProblem;
import org.apache.flex.compiler.projects.ICompilerProject;
import org.apache.flex.compiler.tree.as.IASNode;
import org.apache.flex.compiler.tree.as.IExpressionNode;
import org.apache.flex.compiler.tree.mxml.IMXMLExpressionNode;
abstract class MXMLExpressionNodeBase extends MXMLInstanceNode implements IMXMLExpressionNode
{
    /**
     * Text-parsing flags applied to the child text of every expression tag:
     * databinding expressions and compiler directives are both allowed.
     */
    protected static final EnumSet<TextParsingFlags> FLAGS = EnumSet.of(
            TextParsingFlags.ALLOW_BINDING,
            TextParsingFlags.ALLOW_COMPILER_DIRECTIVE);

    /**
     * The kind of expression a concrete subclass represents
     * (see {@link #getExpressionType()}).
     */
    protected static enum ExpressionType
    {
        BOOLEAN,
        INT,
        UINT,
        NUMBER,
        STRING,
        CLASS,
        FUNCTION,
        REGEXP
    }

    /**
     * Constructor
     *
     * @param parent The parent node of this node, or <code>null</code> if there
     * is no parent.
     */
    MXMLExpressionNodeBase(NodeBase parent)
    {
        super(parent);
    }

    /**
     * The LiteralNode or MXMLDataBindingNode which represents the ActionScript
     * value for this node.
     */
    private IASNode expressionNode;

    /**
     * Returns the single expression child for index 0, or <code>null</code>
     * for any other index.
     */
    @Override
    public IASNode getChild(int i)
    {
        return i == 0 ? expressionNode : null;
    }

    /**
     * Returns 1 when an expression child has been set, otherwise 0.
     */
    @Override
    public int getChildCount()
    {
        return expressionNode == null ? 0 : 1;
    }

    /**
     * Returns the expression child node, or <code>null</code> if none has
     * been set.
     */
    @Override
    public IASNode getExpressionNode()
    {
        return expressionNode;
    }

    /**
     * Sets the expression child and, when non-null, reparents it to this node.
     */
    void setExpressionNode(NodeBase value)
    {
        this.expressionNode = value;
        if (value != null)
            value.setParent(this);
    }

    /**
     * Returns the kind of expression this node represents; implemented by
     * each concrete subclass.
     */
    public abstract ExpressionType getExpressionType();

    /**
     * This initialization method is used when implicit <int> etc. nodes are
     * created, such as for property values.
     */
    public void initialize(MXMLTreeBuilder builder, ISourceLocation location,
                           String type, NodeBase expressionNode)
    {
        setLocation(location);
        setClassReference(builder.getProject(), type);
        setExpressionNode(expressionNode);
    }

    /**
     * Accumulates a whitespace text unit onto the node info for later parsing.
     */
    @Override
    protected void processChildWhitespaceUnit(MXMLTreeBuilder builder, IMXMLTagData tag,
                                              IMXMLTextData text, MXMLNodeInfo info)
    {
        // Whitespace is kept as a source fragment; it may be significant
        // inside an expression.
        accumulateTextFragments(builder, text, info);
    }

    /**
     * Accumulates a non-whitespace text unit onto the node info and marks the
     * tag as having dual (text plus tag) content.
     */
    @Override
    protected void processChildNonWhitespaceUnit(MXMLTreeBuilder builder, IMXMLTagData tag,
                                                 IMXMLTextData text, MXMLNodeInfo info)
    {
        info.hasDualContent = true;
        accumulateTextFragments(builder, text, info);
    }

    /**
     * Processes the source fragments that were gathered on the
     * {@link MXMLNodeInfo} from the child text units of a
     * <code>&lt;Boolean&gt;</code>, <code>&lt;int&gt;</code>,
     * <code>&lt;uint&gt;</code>, <code>&lt;Number&gt;</code>, or
     * <code>&lt;String&gt;</code> tag.
     * <p>
     * They get parsed to create a child node which is a databinding node, a
     * compiler directive node, or a literal node of the tag's type.
     */
    protected NodeBase createExpressionNodeFromFragments(MXMLTreeBuilder builder,
                                                         IMXMLTagData tag,
                                                         MXMLNodeInfo info,
                                                         Object defaultValue)
    {
        ITypeDefinition type = builder.getBuiltinType(getName());

        ISourceFragment[] fragments = info.getSourceFragments();
        // Fall back to the tag's child-unit location when no fragments
        // carried a location of their own.
        ISourceLocation location = info.getSourceLocation();
        if (location == null)
            location = tag.getLocationOfChildUnits();

        MXMLClassDefinitionNode classNode =
                (MXMLClassDefinitionNode)getClassDefinitionNode();

        return builder.createExpressionNode(
                null, type, fragments, location, FLAGS, defaultValue, classNode);
    }

    /**
     * Processes the source fragments that were gathered on the
     * {@link MXMLNodeInfo} from the child text units of a
     * <code>&lt;Class&gt;</code>, <code>&lt;Function&gt;</code>, or
     * <code>&lt;RegExp&gt;</code> tag.
     * <p>
     * They get parsed to create the child node. TODO: Check for
     * databindings and compiler directives.
     */
    protected NodeBase parseExpressionNodeFromFragments(MXMLTreeBuilder builder,
                                                        IMXMLTagData tag,
                                                        MXMLNodeInfo info,
                                                        Object defaultValue)
    {
        ITypeDefinition type = builder.getBuiltinType(getName());

        ISourceFragment[] fragments = info.getSourceFragments();
        // Fall back to the tag's child-unit location when no fragments
        // carried a location of their own.
        ISourceLocation location = info.getSourceLocation();
        if (location == null)
            location = tag.getLocationOfChildUnits();

        MXMLClassDefinitionNode classNode =
                (MXMLClassDefinitionNode)getClassDefinitionNode();

        return builder.parseExpressionNode(
                type, fragments, location, FLAGS, defaultValue, classNode, true);
    }

    /**
     * Reports an {@link ImplicitCoercionToUnrelatedTypeProblem} on the builder
     * if the resolved type of the child expression cannot be converted to
     * <code>expectedType</code>. Does nothing when there is no expression
     * child or its type cannot be resolved.
     */
    protected void checkExpressionType(MXMLTreeBuilder builder, IDefinition expectedType)
    {
        IExpressionNode expressionNode = (IExpressionNode)getExpressionNode();
        if (expressionNode != null)
        {
            ICompilerProject project = builder.getProject();
            IDefinition exprType = expressionNode.resolveType(project);
            if (exprType != null)
            {
                if (!SemanticUtils.isValidTypeConversion(expectedType, exprType, project, builder.getCompilationUnit().isInvisible()))
                {
                    ICompilerProblem problem = new ImplicitCoercionToUnrelatedTypeProblem(
                            expressionNode, exprType.getBaseName(), expectedType.getBaseName());
                    builder.addProblem(problem);
                }
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.server.metadata;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.dataImpl.KeyExtent;
import org.apache.accumulo.core.metadata.StoredTabletFile;
import org.apache.accumulo.core.metadata.TabletFile;
import org.apache.accumulo.core.metadata.schema.Ample;
import org.apache.accumulo.core.metadata.schema.DataFileValue;
import org.apache.accumulo.core.metadata.schema.MetadataSchema;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.DataFileColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ScanFileColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataSchema.TabletsSection.ServerColumnFamily;
import org.apache.accumulo.core.metadata.schema.MetadataTime;
import org.apache.accumulo.core.metadata.schema.TabletMetadata.LocationType;
import org.apache.accumulo.core.tabletserver.log.LogEntry;
import org.apache.accumulo.fate.FateTxId;
import org.apache.accumulo.fate.zookeeper.ZooLock;
import org.apache.accumulo.server.ServerContext;
import org.apache.hadoop.io.Text;
import com.google.common.base.Preconditions;
public abstract class TabletMutatorBase implements Ample.TabletMutator {
private final ServerContext context;
private final KeyExtent extent;
private final Mutation mutation;
protected AutoCloseable closeAfterMutate;
private boolean updatesEnabled = true;
protected TabletMutatorBase(ServerContext ctx, KeyExtent extent) {
this.extent = extent;
this.context = ctx;
mutation = new Mutation(extent.getMetadataEntry());
}
@Override
public Ample.TabletMutator putPrevEndRow(Text per) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
TabletsSection.TabletColumnFamily.PREV_ROW_COLUMN.put(mutation,
KeyExtent.encodePrevEndRow(extent.getPrevEndRow()));
return this;
}
@Override
public Ample.TabletMutator putDirName(String dirName) {
ServerColumnFamily.validateDirCol(dirName);
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
TabletsSection.ServerColumnFamily.DIRECTORY_COLUMN.put(mutation, new Value(dirName));
return this;
}
@Override
public Ample.TabletMutator putFile(TabletFile path, DataFileValue dfv) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.put(DataFileColumnFamily.NAME, path.getMetaInsertText(), new Value(dfv.encode()));
return this;
}
@Override
public Ample.TabletMutator deleteFile(StoredTabletFile path) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.putDelete(DataFileColumnFamily.NAME, path.getMetaUpdateDeleteText());
return this;
}
@Override
public Ample.TabletMutator putScan(TabletFile path) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.put(ScanFileColumnFamily.NAME, path.getMetaInsertText(), new Value(new byte[0]));
return this;
}
@Override
public Ample.TabletMutator deleteScan(StoredTabletFile path) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.putDelete(ScanFileColumnFamily.NAME, path.getMetaUpdateDeleteText());
return this;
}
@Override
public Ample.TabletMutator putCompactionId(long compactionId) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
TabletsSection.ServerColumnFamily.COMPACT_COLUMN.put(mutation,
new Value(Long.toString(compactionId)));
return this;
}
@Override
public Ample.TabletMutator putFlushId(long flushId) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
TabletsSection.ServerColumnFamily.FLUSH_COLUMN.put(mutation, new Value(Long.toString(flushId)));
return this;
}
@Override
public Ample.TabletMutator putTime(MetadataTime time) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
TabletsSection.ServerColumnFamily.TIME_COLUMN.put(mutation, new Value(time.encode()));
return this;
}
private String getLocationFamily(LocationType type) {
switch (type) {
case CURRENT:
return TabletsSection.CurrentLocationColumnFamily.STR_NAME;
case FUTURE:
return TabletsSection.FutureLocationColumnFamily.STR_NAME;
case LAST:
return TabletsSection.LastLocationColumnFamily.STR_NAME;
default:
throw new IllegalArgumentException();
}
}
@Override
public Ample.TabletMutator putLocation(Ample.TServer tsi, LocationType type) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.put(getLocationFamily(type), tsi.getSession(), tsi.getLocation().toString());
return this;
}
@Override
public Ample.TabletMutator deleteLocation(Ample.TServer tsi, LocationType type) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.putDelete(getLocationFamily(type), tsi.getSession());
return this;
}
@Override
public Ample.TabletMutator putZooLock(ZooLock zooLock) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
TabletsSection.ServerColumnFamily.LOCK_COLUMN.put(mutation,
new Value(zooLock.getLockID().serialize(context.getZooKeeperRoot() + "/")));
return this;
}
@Override
public Ample.TabletMutator putWal(LogEntry logEntry) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.put(logEntry.getColumnFamily(), logEntry.getColumnQualifier(), logEntry.getValue());
return this;
}
@Override
public Ample.TabletMutator deleteWal(LogEntry logEntry) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.putDelete(logEntry.getColumnFamily(), logEntry.getColumnQualifier());
return this;
}
@Override
public Ample.TabletMutator deleteWal(String wal) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.putDelete(MetadataSchema.TabletsSection.LogColumnFamily.STR_NAME, wal);
return this;
}
@Override
public Ample.TabletMutator putBulkFile(TabletFile bulkref, long tid) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.put(TabletsSection.BulkFileColumnFamily.NAME, bulkref.getMetaInsertText(),
new Value(FateTxId.formatTid(tid)));
return this;
}
@Override
public Ample.TabletMutator deleteBulkFile(Ample.FileMeta bulkref) {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
mutation.putDelete(TabletsSection.BulkFileColumnFamily.NAME, bulkref.meta());
return this;
}
@Override
public Ample.TabletMutator putChopped() {
Preconditions.checkState(updatesEnabled, "Cannot make updates after calling mutate.");
TabletsSection.ChoppedColumnFamily.CHOPPED_COLUMN.put(mutation, new Value("chopped"));
return this;
}
protected Mutation getMutation() {
updatesEnabled = false;
return mutation;
}
public void setCloseAfterMutate(AutoCloseable closeable) {
this.closeAfterMutate = closeable;
}
}
| |
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.portlet.om;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.jasig.portal.EntityIdentifier;
import org.jasig.portal.IBasicEntity;
import org.jasig.portal.io.xml.IPortalData;
/**
* A portlet definition is equivalent to a published ChannelDefinition.
*
* @author Eric Dalquist
*/
public interface IPortletDefinition extends IBasicEntity, IPortalData {
public static final String EDITABLE_PARAM = "editable";
public static final String CONFIGURABLE_PARAM = "configurable";
public static final String HAS_HELP_PARAM = "hasHelp";
public static final String HAS_ABOUT_PARAM = "hasAbout";
/**
* The name of the portlet parameter that if present represents an alternative
* URL that ought to be used to "maximize" the defined portlet.
*
* This is useful for portlets that when maximized ought to instead be the external URL
* or web application that they're representing in the portal.
*/
public static final String ALT_MAX_LINK_PARAM = "alternativeMaximizedLink";
/**
 * A portlet parameter that specifies a target for the flyout, e.g. _blank
*/
public static final String TARGET_PARAM = "target";
/**
* @return The unique identifier for this portlet definition.
*/
public IPortletDefinitionId getPortletDefinitionId();
/**
* @return The List of PortletPreferences, will not be null
*/
public List<IPortletPreference> getPortletPreferences();
/**
 * @param portletPreferences The List of PortletPreferences; passing null clears the preferences (they are stored internally as an empty list)
* @return true if the portlet preferences changed
*/
public boolean setPortletPreferences(List<IPortletPreference> portletPreferences);
/** @return the current lifecycle state of this portlet definition */
public PortletLifecycleState getLifecycleState();
/** @return the functional name (fname) that uniquely identifies this portlet definition */
public String getFName();
/** @return the name of this portlet definition */
public String getName();
/** @return the description of this portlet definition */
public String getDescription();
/** @return the key of the portlet descriptor backing this definition */
public IPortletDescriptorKey getPortletDescriptorKey();
/** @return the title of this portlet definition */
public String getTitle();
/**
 * @return Default timeout in ms, -1 means no timeout.
 */
public int getTimeout();
/**
 * @return Optional timeout for action requests in ms, if null {@link #getTimeout()} should be used, -1 means no timeout.
 */
public Integer getActionTimeout();
/**
 * @return Optional timeout for event requests in ms, if null {@link #getTimeout()} should be used, -1 means no timeout.
 */
public Integer getEventTimeout();
/**
 * @return Optional timeout for render requests in ms, if null {@link #getTimeout()} should be used, -1 means no timeout.
 */
public Integer getRenderTimeout();
/**
 * @return Optional timeout for resource requests in ms, if null {@link #getTimeout()} should be used, -1 means no timeout.
 */
public Integer getResourceTimeout();
/** @return the portlet (channel) type of this definition */
public IPortletType getType();
/** @return the id of the publishing user */
public int getPublisherId();
/** @return the id of the approving user */
public int getApproverId();
/** @return the date this definition was published */
public Date getPublishDate();
/** @return the date this definition was approved */
public Date getApprovalDate();
/** @return the id of the expiring user */
public int getExpirerId();
/** @return the date this definition expires */
public Date getExpirationDate();
/**
 * @return a READ-ONLY copy of the parameters
 */
public Set<IPortletDefinitionParameter> getParameters();
/** @return the parameter registered under the given key; presumably null when absent -- TODO confirm against implementations */
public IPortletDefinitionParameter getParameter(String key);
/** @return an unmodifiable map of the parameters, keyed by parameter name */
public Map<String, IPortletDefinitionParameter> getParametersAsUnmodifiableMap();
// I18n
/** @return the name localized for the given locale */
public String getName(String locale);
/** @return the description localized for the given locale */
public String getDescription(String locale);
/** @return the title localized for the given locale */
public String getTitle(String locale);
/**
 * Returns the alternative maximized link (URL) associated with this portlet definition,
 * or null if none.
 *
 * Syntactic sugar for parsing potential alternative maximized link as a preferable alternative
 * to directly parsing the portlet parameters elsewhere.
 *
 * @return String representing alternative max URL, or null if none.
 *
 * @since uPortal 4.2
 */
public String getAlternativeMaximizedLink();
/**
 * Syntactic sugar for getting the target parameter from the portlet parameters.
 * @return the target tab/window
 */
public String getTarget();
// Setter methods
/** @param fname the functional name to set */
public void setFName(String fname);
/** @param name the name to set */
public void setName(String name);
/** @param descr the description to set */
public void setDescription(String descr);
/** @param title the title to set */
public void setTitle(String title);
/**
 * @param timeout The default timeout value in ms, -1 means no timeout.
 */
public void setTimeout(int timeout);
/**
 * @param actionTimeout Optional timeout for action requests in ms, if null {@link #getTimeout()} will be used, -1 means no timeout.
 */
public void setActionTimeout(Integer actionTimeout);
/**
 * @param eventTimeout Optional timeout for event requests in ms, if null {@link #getTimeout()} will be used, -1 means no timeout.
 */
public void setEventTimeout(Integer eventTimeout);
/**
 * @param renderTimeout Optional timeout for render requests in ms, if null {@link #getTimeout()} will be used, -1 means no timeout.
 */
public void setRenderTimeout(Integer renderTimeout);
/**
 * @param resourceTimeout Optional timeout for resource requests in ms, if null {@link #getTimeout()} will be used, -1 means no timeout.
 */
public void setResourceTimeout(Integer resourceTimeout);
/** @param channelType the type to set for this portlet definition */
public void setType(IPortletType channelType);
/** @param publisherId the id of the publishing user */
public void setPublisherId(int publisherId);
/** @param approvalId the id of the approving user */
public void setApproverId(int approvalId);
/** @param publishDate the publish date to set */
public void setPublishDate(Date publishDate);
/** @param approvalDate the approval date to set */
public void setApprovalDate(Date approvalDate);
/** @param expirerId the id of the expiring user */
public void setExpirerId(int expirerId);
/** @param expirationDate the expiration date to set */
public void setExpirationDate(Date expirationDate);
/** @param parameters the full set of parameters to set on this definition */
public void setParameters(Set<IPortletDefinitionParameter> parameters);
/** Adds (or replaces -- TODO confirm) the localized title for the given locale. */
public void addLocalizedTitle(String locale, String chanTitle);
/** Adds (or replaces -- TODO confirm) the localized name for the given locale. */
public void addLocalizedName(String locale, String chanName);
/** Adds (or replaces -- TODO confirm) the localized description for the given locale. */
public void addLocalizedDescription(String locale, String chanDesc);
/**
 * @return a portlet rating
 */
public Double getRating();
/**
 * @param rating sets portlet rating
 */
public void setRating(Double rating);
/**
 * @return Number of users that rated this portlet
 */
public Long getUsersRated();
/**
 * @param usersRated sets number of users that rated this portlet
 */
public void setUsersRated(Long usersRated);
/**
 * Implementation required by IBasicEntity interface.
 *
 * @return EntityIdentifier
 */
@Override
public EntityIdentifier getEntityIdentifier();
/**
 * Adds a parameter to this channel definition
 *
 * @param parameter
 * the channel parameter to add
 */
public void addParameter(IPortletDefinitionParameter parameter);
/** Convenience overload: adds a parameter with the given name and value to this channel definition. */
public void addParameter(String name, String value);
/**
 * Removes a parameter from this channel definition
 *
 * @param parameter
 * the channel parameter to remove
 */
public void removeParameter(IPortletDefinitionParameter parameter);
/**
 * Removes a parameter from this channel definition
 *
 * @param name
 * the parameter name
 */
public void removeParameter(String name);
/**
 * @return Hash code based only on the fname of the portlet definition
 */
@Override
public int hashCode();
/**
 * Equals must be able to compare against any other {@link IPortletDefinition} and
 * the comparison must only use the fname
 */
@Override
public boolean equals(Object o);
}
| |
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.pipeline;
import com.hazelcast.function.FunctionEx;
import com.hazelcast.function.SupplierEx;
import com.hazelcast.jet.config.ProcessingGuarantee;
import com.hazelcast.jet.core.EventTimePolicy;
import com.hazelcast.jet.core.ProcessorMetaSupplier;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.Session;
import static com.hazelcast.internal.util.Preconditions.checkNotNull;
import static com.hazelcast.jet.core.processor.SourceProcessors.streamJmsQueueP;
import static com.hazelcast.jet.core.processor.SourceProcessors.streamJmsTopicP;
import static com.hazelcast.jet.impl.util.Util.checkSerializable;
import static java.util.Objects.requireNonNull;
/**
 * See {@link Sources#jmsQueueBuilder} or {@link Sources#jmsTopicBuilder}.
 *
 * @since Jet 3.0
 */
public final class JmsSourceBuilder {

    private final SupplierEx<? extends ConnectionFactory> factorySupplier;
    private final boolean isTopic;

    private FunctionEx<? super ConnectionFactory, ? extends Connection> connectionFn;
    private FunctionEx<? super Session, ? extends MessageConsumer> consumerFn;
    private FunctionEx<? super Message, ?> messageIdFn = Message::getJMSMessageID;

    private String username;
    private String password;
    private String destinationName;
    private ProcessingGuarantee maxGuarantee = ProcessingGuarantee.EXACTLY_ONCE;
    private boolean isSharedConsumer;

    /**
     * Use {@link Sources#jmsQueueBuilder} or {@link Sources#jmsTopicBuilder}.
     * <p>
     * The given function must be stateless.
     */
    JmsSourceBuilder(@Nonnull SupplierEx<? extends ConnectionFactory> factorySupplier, boolean isTopic) {
        checkSerializable(factorySupplier, "factorySupplier");
        this.factorySupplier = checkNotNull(factorySupplier);
        this.isTopic = isTopic;
    }

    /**
     * Sets the connection parameters. If {@link #connectionFn(FunctionEx)} is
     * set, these parameters are ignored.
     *
     * @return this instance for fluent API
     */
    @Nonnull
    public JmsSourceBuilder connectionParams(@Nullable String username, @Nullable String password) {
        this.username = username;
        this.password = password;
        return this;
    }

    /**
     * Sets the function which creates the connection using the connection
     * factory.
     * <p>
     * If not provided, this function is used:
     * <pre>
     * connectionFn = factory -> username != null || password != null
     *         ? factory.createConnection(username, password)
     *         : factory.createConnection()
     * </pre>
     * The user name and password set with {@link #connectionParams} are used.
     * <p>
     * The given function must be stateless.
     *
     * @return this instance for fluent API
     */
    @Nonnull
    public JmsSourceBuilder connectionFn(
            @Nullable FunctionEx<? super ConnectionFactory, ? extends Connection> connectionFn
    ) {
        checkSerializable(connectionFn, "connectionFn");
        this.connectionFn = connectionFn;
        return this;
    }

    /**
     * Sets the name of the destination (name of the topic or queue). If {@link
     * #consumerFn(FunctionEx)} is provided, this parameter is ignored.
     *
     * @return this instance for fluent API
     */
    @Nonnull
    public JmsSourceBuilder destinationName(@Nullable String destinationName) {
        this.destinationName = destinationName;
        return this;
    }

    /**
     * Sets the function which creates the message consumer from session.
     * <p>
     * If not provided, {@code Session#createConsumer(destinationName)} is used
     * to create the consumer. See {@link #destinationName(String)}.
     * <p>
     * If you're consuming a topic and you create a shared consumer, make
     * sure to also call {@link #sharedConsumer(boolean) sharedConsumer(true)}.
     * <p>
     * The given function must be stateless.
     *
     * @return this instance for fluent API
     */
    @Nonnull
    public JmsSourceBuilder consumerFn(
            @Nullable FunctionEx<? super Session, ? extends MessageConsumer> consumerFn
    ) {
        checkSerializable(consumerFn, "consumerFn");
        this.consumerFn = consumerFn;
        return this;
    }

    /**
     * Configures the function to extract IDs from the messages, if
     * exactly-once guarantee is used. If a lower guarantee is used, this
     * function is not used.
     * <p>
     * Make sure the function returns non-null for every message, or the job
     * will fail. The returned object should also implement {@code equals()}
     * and {@code hashCode()} methods. If you don't have a unique message ID,
     * {@linkplain #maxGuarantee(ProcessingGuarantee) reduce the guarantee} to
     * at-least-once.
     * <p>
     * The default is to use {@code Message.getJMSMessageID()}.
     * <p>
     * The given function must be stateless.
     *
     * @return this instance for fluent API
     */
    @Nonnull
    public JmsSourceBuilder messageIdFn(@Nonnull FunctionEx<? super Message, ?> messageIdFn) {
        this.messageIdFn = checkNotNull(messageIdFn);
        return this;
    }

    /**
     * Sets the maximum processing guarantee for the source. You can use it to
     * reduce the guarantee of this source compared to the job's guarantee. If
     * you configure a stronger guarantee than the job has, the job's guarantee
     * will be used. Use it if you want to avoid the overhead of acknowledging
     * the messages or storing IDs of seen messages, if you can tolerate
     * duplicated or missed messages.
     * <p>
     * If the processing guarantee is NONE, the processor will consume the
     * messages in {@link Session#DUPS_OK_ACKNOWLEDGE} mode. If the processing
     * guarantee is other than NONE, the processor will acknowledge messages in
     * transactions in the 2nd phase of the snapshot, that is after all
     * downstream stages fully processed the messages. Additionally, if the
     * processing guarantee is EXACTLY_ONCE, the processor will store
     * {@linkplain #messageIdFn(FunctionEx) message IDs} of the unacknowledged
     * messages to the snapshot. Should the job fail after the snapshot was
     * successful, but before Jet managed to acknowledge the messages, the
     * stored IDs will be used to filter out the re-delivered messages.
     * <p>
     * If you use a non-durable consumer with a topic, the guarantee will not
     * work since the broker doesn't store the messages at all. You can also
     * set the max-guarantee to NONE in this case - the acknowledge operation
     * is ignored anyway. If you didn't specify your own {@link
     * #consumerFn(FunctionEx)}, a non-durable consumer is created for a topic
     * by default.
     * <p>
     * The default is {@link ProcessingGuarantee#EXACTLY_ONCE}, which means
     * that the source's guarantee will match the job's guarantee.
     *
     * @return this instance for fluent API
     */
    @Nonnull
    public JmsSourceBuilder maxGuarantee(@Nonnull ProcessingGuarantee guarantee) {
        maxGuarantee = checkNotNull(guarantee);
        return this;
    }

    /**
     * Specifies whether the MessageConsumer of the JMS topic is shared, that
     * is whether {@code createSharedConsumer()} or {@code
     * createSharedDurableConsumer()} was used to create it in the {@link
     * #consumerFn(FunctionEx)}.
     * <p>
     * If the consumer is not shared, only a single processor on a single
     * member will connect to the broker to receive the messages. If you set
     * this parameter to {@code true} for a non-shared consumer, all messages
     * will be emitted on every member, leading to duplicate processing.
     * <p>
     * A consumer for a queue is always assumed to be shared, regardless of
     * this setting.
     * <p>
     * The default value is {@code false}.
     *
     * @return this instance for fluent API
     */
    @Nonnull
    public JmsSourceBuilder sharedConsumer(boolean isSharedConsumer) {
        this.isSharedConsumer = isSharedConsumer;
        return this;
    }

    /**
     * Creates and returns the JMS {@link StreamSource} with the supplied
     * components and the projection function {@code projectionFn}.
     * <p>
     * The given function must be stateless.
     *
     * @param projectionFn the function which creates output object from each
     *     message
     * @param <T> the type of the items the source emits
     */
    @Nonnull
    public <T> StreamSource<T> build(@Nonnull FunctionEx<? super Message, ? extends T> projectionFn) {
        // Copy builder state into effectively-final locals so the lambdas below
        // capture plain values instead of this builder instance.
        String usernameLocal = username;
        String passwordLocal = password;
        String destinationLocal = destinationName;
        ProcessingGuarantee maxGuaranteeLocal = maxGuarantee;
        @SuppressWarnings("UnnecessaryLocalVariable")
        boolean isTopicLocal = isTopic;

        if (connectionFn == null) {
            connectionFn = factory -> requireNonNull(usernameLocal != null || passwordLocal != null
                    ? factory.createConnection(usernameLocal, passwordLocal)
                    : factory.createConnection());
        }
        if (consumerFn == null) {
            checkNotNull(destinationLocal, "neither consumerFn nor destinationName set");
            consumerFn = session -> session.createConsumer(isTopicLocal
                    ? session.createTopic(destinationLocal)
                    : session.createQueue(destinationLocal));
            if (isTopic) {
                // the user didn't specify a custom consumerFn and we know we're using a non-durable consumer
                // for a topic - there's no point in using any guarantee, see `maxGuarantee`
                maxGuaranteeLocal = ProcessingGuarantee.NONE;
            }
        }
        ProcessingGuarantee maxGuaranteeFinal = maxGuaranteeLocal;

        FunctionEx<? super ConnectionFactory, ? extends Connection> connectionFnLocal = connectionFn;
        @SuppressWarnings("UnnecessaryLocalVariable")
        SupplierEx<? extends ConnectionFactory> factorySupplierLocal = factorySupplier;
        SupplierEx<? extends Connection> newConnectionFn =
                () -> connectionFnLocal.apply(factorySupplierLocal.get());
        FunctionEx<? super Session, ? extends MessageConsumer> consumerFnLocal = consumerFn;
        boolean isSharedConsumerLocal = isSharedConsumer;
        FunctionEx<? super Message, ?> messageIdFnLocal = messageIdFn;

        FunctionEx<EventTimePolicy<? super T>, ProcessorMetaSupplier> metaSupplierFactory =
                policy -> isTopicLocal
                        ? streamJmsTopicP(destinationLocal, isSharedConsumerLocal, maxGuaranteeFinal, policy,
                                newConnectionFn, consumerFnLocal, messageIdFnLocal, projectionFn)
                        : streamJmsQueueP(destinationLocal, maxGuaranteeFinal, policy, newConnectionFn, consumerFnLocal,
                                messageIdFnLocal, projectionFn);
        return Sources.streamFromProcessorWithWatermarks(sourceName(), true, metaSupplierFactory);
    }

    /**
     * Convenience for {@link JmsSourceBuilder#build(FunctionEx)}.
     */
    @Nonnull
    public StreamSource<Message> build() {
        return build(message -> message);
    }

    /** Builds a descriptive source name, e.g. {@code jmsTopicSource(myTopic)}. */
    private String sourceName() {
        return (isTopic ? "jmsTopicSource(" : "jmsQueueSource(")
                + (destinationName == null ? "?" : destinationName) + ')';
    }
}
| |
/**
* $Id$
* $URL$
* SiteEntityProvider.java - entity-broker - Jun 29, 2008 8:35:55 AM - azeckoski
**************************************************************************
* Copyright (c) 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.webcomponents.permissions;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.sakaiproject.authz.api.AuthzGroup;
import org.sakaiproject.authz.api.AuthzGroupService;
import org.sakaiproject.authz.api.AuthzPermissionException;
import org.sakaiproject.authz.api.FunctionManager;
import org.sakaiproject.authz.api.GroupNotDefinedException;
import org.sakaiproject.authz.api.Role;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.entitybroker.EntityView;
import org.sakaiproject.entitybroker.entityprovider.EntityProvider;
import org.sakaiproject.entitybroker.entityprovider.annotations.EntityCustomAction;
import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutable;
import org.sakaiproject.entitybroker.entityprovider.capabilities.Outputable;
import org.sakaiproject.entitybroker.entityprovider.extension.ActionReturn;
import org.sakaiproject.entitybroker.entityprovider.extension.Formats;
import org.sakaiproject.entitybroker.util.AbstractEntityProvider;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.site.api.Group;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SiteService;
/**
* Creates a provider for dealing with permissions
*
* @author Adrian Fish (adrian.r.fish@gmail.com)
*/
@Slf4j
@Setter
public class PermissionsEntityProvider extends AbstractEntityProvider implements EntityProvider, ActionsExecutable, Outputable {
private SiteService siteService;
private AuthzGroupService authzGroupService;
private FunctionManager functionManager;
private ServerConfigurationService serverConfigurationService;
private SecurityService securityService;
public static String PREFIX = "permissions";
public String getEntityPrefix() {
return PREFIX;
}
public String[] getHandledOutputFormats() {
return new String[] { Formats.JSON };
}
@EntityCustomAction(action = "getPerms", viewKey = EntityView.VIEW_SHOW)
public ActionReturn handleGet(EntityView view, Map<String, Object> params) {
final String siteId = view.getEntityReference().getId();
// expects permissions/siteId/getPerms[/:TOOL:]
String tool = view.getPathSegment(3);
String userId = developerHelperService.getCurrentUserId();
if (!securityService.isSuperUser(userId) && !authzGroupService.isAllowed(userId, SiteService.SECURE_UPDATE_SITE, "/site/" + siteId)) {
throw new SecurityException("This action (getPerms) is not allowed.");
}
String groupRef = (String) params.get("ref");
try {
AuthzGroup authzGroup = authzGroupService.getAuthzGroup(groupRef);
Site site = getSiteById(view.getEntityReference().getId());
Set<Role> roles = authzGroup.getRoles();
Map<String, Set<String>> on = new HashMap<>();
for (Role role : roles) {
Set<String> functions = role.getAllowedFunctions();
Set<String> filteredFunctions = new TreeSet<>();
if (tool != null) {
for (String function : functions) {
if (function.startsWith(tool)) {
filteredFunctions.add(function);
}
}
} else {
filteredFunctions = functions;
}
on.put(role.getId(), filteredFunctions);
}
Map<String, String> roleNameMappings
= roles.stream().collect(
Collectors.toMap(Role::getId, r -> authzGroupService.getRoleName(r.getId())));
List<String> available = functionManager.getRegisteredFunctions(tool);
Map<String, Object> data = new HashMap<>();
data.put("on", on);
data.put("available", available);
data.put("roleNameMappings", roleNameMappings);
List<PermissionGroup> groups = site.getGroups().stream().map(PermissionGroup::new).collect(Collectors.toList());
data.put("groups", groups);
return new ActionReturn(data, null, Formats.JSON);
} catch (GroupNotDefinedException gnde) {
throw new IllegalArgumentException("No realm defined for ref " + groupRef + ".");
}
}
@EntityCustomAction(action="setPerms", viewKey=EntityView.VIEW_EDIT)
public String handleSet(EntityReference entityRef, Map<String, Object> params) {
String userId = developerHelperService.getCurrentUserId();
if (userId == null) {
throw new SecurityException(
"This action (setPerms) is not accessible to anon and there is no current user.");
}
String siteId = entityRef.getId();
Site site = getSiteById(siteId);
List<String> userMutableFunctions = functionManager.getRegisteredUserMutableFunctions();
boolean admin = developerHelperService.isUserAdmin(developerHelperService.getCurrentUserReference());
String groupRef = (String) params.get("ref");
try {
AuthzGroup authzGroup = authzGroupService.getAuthzGroup(groupRef);
boolean changed = false;
for (String name : params.keySet()) {
if (!name.contains(":")) {
continue;
}
String value = (String) params.get(name);
String roleId = name.substring(0, name.indexOf(":"));
Role role = authzGroup.getRole(roleId);
if (role == null) {
throw new IllegalArgumentException("Invalid role id '" + roleId
+ "' provided in POST parameters.");
}
String function = name.substring(name.indexOf(":") + 1);
// Only change this function if registered as userMutable
if (admin || userMutableFunctions.contains(function)) {
if ("true".equals(value)) {
role.allowFunction(function);
} else {
role.disallowFunction(function);
}
} else {
throw new SecurityException("The function " + function
+ " cannot be updated by the current user.");
}
changed = true;
}
if (changed) {
try {
authzGroupService.save(authzGroup);
} catch (AuthzPermissionException ape) {
throw new SecurityException("The permissions for this site (" + siteId
+ ") cannot be updated by the current user.");
}
}
} catch (GroupNotDefinedException gnde) {
throw new IllegalArgumentException("No realm defined for ref " + groupRef + ".");
}
return "SUCCESS";
}
private Site getSiteById(String siteId) {
Site site;
try {
site = siteService.getSite(siteId);
} catch (IdUnusedException e) {
throw new IllegalArgumentException("Cannot find site by siteId: " + siteId, e);
}
return site;
}
public class PermissionGroup {
public String reference;
public String title;
public PermissionGroup(Group group) {
this.reference = group.getReference();
this.title = group.getTitle();
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.cassandra;
import com.datastax.driver.core.utils.Bytes;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.Connector;
import io.trino.spi.connector.ConnectorMetadata;
import io.trino.spi.connector.ConnectorRecordSetProvider;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorSplit;
import io.trino.spi.connector.ConnectorSplitManager;
import io.trino.spi.connector.ConnectorSplitSource;
import io.trino.spi.connector.ConnectorTableHandle;
import io.trino.spi.connector.ConnectorTableMetadata;
import io.trino.spi.connector.ConnectorTransactionHandle;
import io.trino.spi.connector.Constraint;
import io.trino.spi.connector.DynamicFilter;
import io.trino.spi.connector.RecordCursor;
import io.trino.spi.connector.SchemaNotFoundException;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.SchemaTablePrefix;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.type.Type;
import io.trino.spi.type.VarcharType;
import io.trino.testing.TestingConnectorContext;
import io.trino.testing.TestingConnectorSession;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static com.google.common.base.Preconditions.checkArgument;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static io.trino.plugin.cassandra.CassandraTestingUtils.TABLE_ALL_TYPES;
import static io.trino.plugin.cassandra.CassandraTestingUtils.TABLE_DELETE_DATA;
import static io.trino.plugin.cassandra.CassandraTestingUtils.createTestTables;
import static io.trino.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.UNGROUPED_SCHEDULING;
import static io.trino.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DateTimeEncoding.packDateTimeWithZone;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.RealType.REAL;
import static io.trino.spi.type.TimeZoneKey.UTC_KEY;
import static io.trino.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static io.trino.spi.type.VarbinaryType.VARBINARY;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public class TestCassandraConnector
{
protected static final String INVALID_DATABASE = "totally_invalid_database";
private static final Date DATE = new Date();
private static final ConnectorSession SESSION = TestingConnectorSession.builder()
.setPropertyMetadata(new CassandraSessionProperties(new CassandraClientConfig()).getSessionProperties())
.build();
private CassandraServer server;
protected String database;
protected SchemaTableName table;
protected SchemaTableName tableForDelete;
private ConnectorMetadata metadata;
private ConnectorSplitManager splitManager;
private ConnectorRecordSetProvider recordSetProvider;
@BeforeClass
public void setup()
throws Exception
{
this.server = new CassandraServer();
String keyspace = "test_connector";
createTestTables(server.getSession(), keyspace, DATE);
CassandraConnectorFactory connectorFactory = new CassandraConnectorFactory();
Connector connector = connectorFactory.create("test", ImmutableMap.of(
"cassandra.contact-points", server.getHost(),
"cassandra.native-protocol-port", Integer.toString(server.getPort())),
new TestingConnectorContext());
metadata = connector.getMetadata(CassandraTransactionHandle.INSTANCE);
assertInstanceOf(metadata, CassandraMetadata.class);
splitManager = connector.getSplitManager();
assertInstanceOf(splitManager, CassandraSplitManager.class);
recordSetProvider = connector.getRecordSetProvider();
assertInstanceOf(recordSetProvider, CassandraRecordSetProvider.class);
database = keyspace;
table = new SchemaTableName(database, TABLE_ALL_TYPES.toLowerCase(ENGLISH));
tableForDelete = new SchemaTableName(database, TABLE_DELETE_DATA.toLowerCase(ENGLISH));
}
@AfterClass(alwaysRun = true)
public void tearDown()
{
server.close();
}
@Test
public void testGetDatabaseNames()
{
List<String> databases = metadata.listSchemaNames(SESSION);
assertTrue(databases.contains(database.toLowerCase(ENGLISH)));
}
@Test
public void testGetTableNames()
{
List<SchemaTableName> tables = metadata.listTables(SESSION, Optional.of(database));
assertTrue(tables.contains(table));
}
// disabled until metadata manager is updated to handle invalid catalogs and schemas
@Test(enabled = false, expectedExceptions = SchemaNotFoundException.class)
public void testGetTableNamesException()
{
metadata.listTables(SESSION, Optional.of(INVALID_DATABASE));
}
@Test
public void testListUnknownSchema()
{
assertNull(metadata.getTableHandle(SESSION, new SchemaTableName("totally_invalid_database_name", "dual")));
assertEquals(metadata.listTables(SESSION, Optional.of("totally_invalid_database_name")), ImmutableList.of());
assertEquals(metadata.listTableColumns(SESSION, new SchemaTablePrefix("totally_invalid_database_name", "dual")), ImmutableMap.of());
}
@Test
public void testGetRecords()
{
ConnectorTableHandle tableHandle = getTableHandle(table);
ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(SESSION, tableHandle);
List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(SESSION, tableHandle).values());
Map<String, Integer> columnIndex = indexColumns(columnHandles);
ConnectorTransactionHandle transaction = CassandraTransactionHandle.INSTANCE;
tableHandle = metadata.applyFilter(SESSION, tableHandle, Constraint.alwaysTrue()).get().getHandle();
List<ConnectorSplit> splits = getAllSplits(splitManager.getSplits(transaction, SESSION, tableHandle, UNGROUPED_SCHEDULING, DynamicFilter.EMPTY));
long rowNumber = 0;
for (ConnectorSplit split : splits) {
CassandraSplit cassandraSplit = (CassandraSplit) split;
long completedBytes = 0;
try (RecordCursor cursor = recordSetProvider.getRecordSet(transaction, SESSION, cassandraSplit, tableHandle, columnHandles).cursor()) {
while (cursor.advanceNextPosition()) {
try {
assertReadFields(cursor, tableMetadata.getColumns());
}
catch (RuntimeException e) {
throw new RuntimeException("row " + rowNumber, e);
}
rowNumber++;
String keyValue = cursor.getSlice(columnIndex.get("key")).toStringUtf8();
assertTrue(keyValue.startsWith("key "));
int rowId = Integer.parseInt(keyValue.substring(4));
assertEquals(keyValue, "key " + rowId);
assertEquals(Bytes.toHexString(cursor.getSlice(columnIndex.get("typebytes")).getBytes()), format("0x%08X", rowId));
// VARINT is returned as a string
assertEquals(cursor.getSlice(columnIndex.get("typeinteger")).toStringUtf8(), String.valueOf(rowId));
assertEquals(cursor.getLong(columnIndex.get("typelong")), 1000 + rowId);
assertEquals(cursor.getSlice(columnIndex.get("typeuuid")).toStringUtf8(), format("00000000-0000-0000-0000-%012d", rowId));
assertEquals(cursor.getLong(columnIndex.get("typetimestamp")), packDateTimeWithZone(DATE.getTime(), UTC_KEY));
long newCompletedBytes = cursor.getCompletedBytes();
assertTrue(newCompletedBytes >= completedBytes);
completedBytes = newCompletedBytes;
}
}
}
assertEquals(rowNumber, 9);
}
@Test
public void testExecuteDelete()
{
assertNumberOfRows(tableForDelete, 15);
CassandraTableHandle handle1 = getTableHandle(Optional.of(List.of(createPartition(1, 1))), "");
metadata.executeDelete(SESSION, handle1);
assertNumberOfRows(tableForDelete, 12);
CassandraTableHandle handle2 = getTableHandle(Optional.of(List.of(createPartition(1, 2))), "clust_one='clust_one_2'");
metadata.executeDelete(SESSION, handle2);
assertNumberOfRows(tableForDelete, 11);
CassandraTableHandle handle3 = getTableHandle(Optional.of(List.of(createPartition(1, 2), createPartition(2, 2))), "");
metadata.executeDelete(SESSION, handle3);
assertNumberOfRows(tableForDelete, 7);
}
private static void assertReadFields(RecordCursor cursor, List<ColumnMetadata> schema)
{
for (int columnIndex = 0; columnIndex < schema.size(); columnIndex++) {
ColumnMetadata column = schema.get(columnIndex);
if (!cursor.isNull(columnIndex)) {
Type type = column.getType();
if (BOOLEAN.equals(type)) {
cursor.getBoolean(columnIndex);
}
else if (INTEGER.equals(type)) {
cursor.getLong(columnIndex);
}
else if (BIGINT.equals(type)) {
cursor.getLong(columnIndex);
}
else if (TIMESTAMP_WITH_TIME_ZONE.equals(type)) {
cursor.getLong(columnIndex);
}
else if (DOUBLE.equals(type)) {
cursor.getDouble(columnIndex);
}
else if (REAL.equals(type)) {
cursor.getLong(columnIndex);
}
else if (type instanceof VarcharType || VARBINARY.equals(type)) {
try {
cursor.getSlice(columnIndex);
}
catch (RuntimeException e) {
throw new RuntimeException("column " + column, e);
}
}
else {
fail("Unknown primitive type " + columnIndex);
}
}
}
}
private ConnectorTableHandle getTableHandle(SchemaTableName tableName)
{
ConnectorTableHandle handle = metadata.getTableHandle(SESSION, tableName);
checkArgument(handle != null, "table not found: %s", tableName);
return handle;
}
private static List<ConnectorSplit> getAllSplits(ConnectorSplitSource splitSource)
{
ImmutableList.Builder<ConnectorSplit> splits = ImmutableList.builder();
while (!splitSource.isFinished()) {
splits.addAll(getFutureValue(splitSource.getNextBatch(NOT_PARTITIONED, 1000)).getSplits());
}
return splits.build();
}
private static ImmutableMap<String, Integer> indexColumns(List<ColumnHandle> columnHandles)
{
ImmutableMap.Builder<String, Integer> index = ImmutableMap.builder();
int i = 0;
for (ColumnHandle columnHandle : columnHandles) {
String name = ((CassandraColumnHandle) columnHandle).getName();
index.put(name, i);
i++;
}
return index.build();
}
private CassandraTableHandle getTableHandle(Optional<List<CassandraPartition>> partitions, String clusteringKeyPredicates)
{
CassandraTableHandle handle = (CassandraTableHandle) getTableHandle(tableForDelete);
return new CassandraTableHandle(handle.getSchemaName(), handle.getTableName(), partitions, clusteringKeyPredicates);
}
/**
 * Builds a CassandraPartition pinned to the two partition-key columns
 * (partition_one BIGINT, partition_two INT) with the given values.
 */
private CassandraPartition createPartition(long value1, long value2)
{
    CassandraColumnHandle partitionOne = new CassandraColumnHandle("partition_one", 1, CassandraType.BIGINT, true, false, false, false);
    CassandraColumnHandle partitionTwo = new CassandraColumnHandle("partition_two", 2, CassandraType.INT, true, false, false, false);
    TupleDomain<ColumnHandle> constraint = TupleDomain.withColumnDomains(ImmutableMap.of(
            partitionOne, Domain.singleValue(BIGINT, value1),
            partitionTwo, Domain.singleValue(INTEGER, value2)));
    String partitionId = format("partition_one=%d AND partition_two=%d", value1, value2);
    return new CassandraPartition(new byte[0], partitionId, constraint, true);
}
/**
 * Asserts the row count of the given table by issuing COUNT(*) directly
 * through the backing Cassandra session.
 */
private void assertNumberOfRows(SchemaTableName tableName, int rowsCount)
{
    CassandraSession cassandraSession = server.getSession();
    long actualCount = cassandraSession.execute("SELECT COUNT(*) FROM " + tableName).all().get(0).getLong(0);
    assertEquals(actualCount, rowsCount);
}
}
| |
package messageimpl;
/**
 * Builds the chatbot response text for the "growth of LPC paid cases" intent.
 *
 * <p>The wording is selected from a fixed set of templates based on which filters
 * the user supplied: channel/sub-channel, zone, region (or circle), cluster (or
 * GO/office) and reporting period (MTD/YTD). Growth figures arrive pre-formatted
 * as strings and are interpolated verbatim.</p>
 */
public class GrowthLPCPaidCases
{
    /**
     * Returns the response sentence for the LPC-paid-cases growth intent.
     *
     * @param channel sales channel; "MLI" (the company-wide default) is treated as "no channel"
     * @param period reporting period; "Monthly" means unspecified, "FTD" is mapped to "YTD",
     *        anything else is upper-cased (e.g. "mtd" -> "MTD")
     * @param userzone zone filter, or "" when not supplied
     * @param user_region region filter, or ""; overridden by {@code user_circle} when present
     * @param real_tim_timstamp unused here; kept for signature compatibility with callers
     * @param user_circle circle filter; when non-empty replaces the region label
     * @param grth_lpc_paid_cases_ytd YTD growth percentage (pre-formatted)
     * @param prev_lpc_paid_cases_ytd last year's YTD figure
     * @param lpc_paid_cases_ytd_growth current YTD figure
     * @param grth_lpc_paid_cases_mtd MTD growth percentage (pre-formatted)
     * @param prev_lpc_paid_cases_mtd last year's same-month MTD figure
     * @param lpc_paid_cases_mtd_growth current MTD figure
     * @param subchannel sub-channel filter; when non-empty replaces the channel label
     * @param user_clusters cluster filter; overridden by {@code user_go} when present
     * @param user_go GO (office) filter; when non-empty replaces the cluster label
     * @return the assembled response text (never null)
     */
    public static String growthLPCPaidcasesIntent(String channel, String period, String userzone, String user_region,
            String real_tim_timstamp, String user_circle, String grth_lpc_paid_cases_ytd,
            String prev_lpc_paid_cases_ytd, String lpc_paid_cases_ytd_growth, String grth_lpc_paid_cases_mtd,
            String prev_lpc_paid_cases_mtd, String lpc_paid_cases_mtd_growth, String subchannel, String user_clusters, String user_go)
    {
        String finalresponse = "";
        // Normalise inputs: blank means "not supplied".
        if ("MLI".equalsIgnoreCase(channel)) {
            channel = "";
        }
        if ("Monthly".equalsIgnoreCase(period)) {
            period = "";
        }
        else {
            if ("FTD".equalsIgnoreCase(period)) {
                period = "YTD";
            }
            else {
                period = period.toUpperCase();
            }
        }
        // A circle overrides the region label, a GO (office) overrides the cluster
        // label, and a sub-channel overrides the channel label.
        if (!"".equalsIgnoreCase(user_circle)) {
            user_region = "Circle " + user_circle;
        }
        if (!"".equalsIgnoreCase(user_go)) {
            user_clusters = "Office " + user_go;
        }
        if (!"".equalsIgnoreCase(subchannel)) {
            channel = subchannel;
        }
        // Template selection: one branch per combination of supplied filters.
        // Order of checks: channel, zone, region, cluster, period.
        if ("".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && "".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && "".equalsIgnoreCase(period)) {
            // No filters at all: company-wide (MLI) combined YTD + MTD summary.
            finalresponse = "MLI has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on YTD basis, last year same time we had clocked "
                    + prev_lpc_paid_cases_ytd + ". LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today MTD business Growth of "
                    + grth_lpc_paid_cases_mtd + "% on MTD basis, last year same month we have clocked "
                    + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today."
                    + " If you want to see the Zone/region wise business numbers, please specIfy the same.";
        }
        else if (!"".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && "".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && "".equalsIgnoreCase(period)) {
            // Channel only.
            finalresponse = channel + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on YTD basis, last year same time we had clocked "
                    + prev_lpc_paid_cases_ytd + ". LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today MTD business Growth of "
                    + grth_lpc_paid_cases_mtd + " % on MTD basis, last year same month we have clocked "
                    + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today."
                    + " If you want to see the Zone/region wise business numbers, please specIfy the same.";
        }
        else if (!"".equalsIgnoreCase(channel) && !"".equalsIgnoreCase(userzone) && "".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && "".equalsIgnoreCase(period)) {
            // Channel + zone.
            finalresponse = "Zone " + userzone + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on YTD basis, last year same time we had clocked "
                    + prev_lpc_paid_cases_ytd + ". LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today MTD business Growth of "
                    + grth_lpc_paid_cases_mtd + " % on MTD basis, last year same month we have clocked "
                    + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today."
                    + " If you want to see the Zone/region wise business numbers, please specIfy the same.";
        }
        else if (!"".equalsIgnoreCase(channel) && !"".equalsIgnoreCase(userzone) && !"".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && "".equalsIgnoreCase(period)) {
            // Channel + zone + region.
            finalresponse = "" + user_region + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on YTD basis, last year same time we had clocked "
                    + prev_lpc_paid_cases_ytd + ". LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today MTD business Growth of "
                    + grth_lpc_paid_cases_mtd + "% on MTD basis, last year same month we have clocked "
                    + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today."
                    + " If you want to see the Zone/region wise business numbers, please specIfy the same.";
        }
        else if (!"".equalsIgnoreCase(channel) && !"".equalsIgnoreCase(userzone) && !"".equalsIgnoreCase(user_region)
                && !"".equalsIgnoreCase(user_clusters) && "".equalsIgnoreCase(period)) {
            // Channel + zone + region + cluster: cluster is the most specific label.
            finalresponse = "" + user_clusters + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on YTD basis, last year same time we had clocked "
                    + prev_lpc_paid_cases_ytd + ". LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today MTD business Growth of "
                    + grth_lpc_paid_cases_mtd + "% on MTD basis, last year same month we have clocked "
                    + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today."
                    + ".";
        }
        else if (!"".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && !"".equalsIgnoreCase(user_region)
                && !"".equalsIgnoreCase(user_clusters) && "".equalsIgnoreCase(period)) {
            // Channel + region + cluster (no zone).
            finalresponse = "" + user_clusters + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on YTD basis, last year same time we had clocked "
                    + prev_lpc_paid_cases_ytd + ". LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today MTD business Growth of "
                    + grth_lpc_paid_cases_mtd + "% on MTD basis, last year same month we have clocked "
                    + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today."
                    + ".";
        }
        else if (!"".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && !"".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && "".equalsIgnoreCase(period)) {
            // Channel + region.
            finalresponse = "" + user_region + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on YTD basis, last year same time we had clocked "
                    + prev_lpc_paid_cases_ytd + ". LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today MTD business Growth of "
                    + grth_lpc_paid_cases_mtd + "% on MTD basis, last year same month we have clocked "
                    + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today."
                    + " If you want to see the Zone/region wise business numbers, please specIfy the same.";
        }
        else if (!"".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && "".equalsIgnoreCase(user_region)
                && !"".equalsIgnoreCase(user_clusters) && "".equalsIgnoreCase(period)) {
            // Channel + cluster.
            finalresponse = "" + user_clusters + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on YTD basis, last year same time we had clocked "
                    + prev_lpc_paid_cases_ytd + ". LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today MTD business Growth of "
                    + grth_lpc_paid_cases_mtd + "% on MTD basis, last year same month we have clocked "
                    + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today."
                    + ".";
        }
        else if (!"".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && "".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && !"".equalsIgnoreCase(period)) {
            // Channel + explicit period: report only the requested period's figures.
            if ("MTD".equalsIgnoreCase(period)) {
                finalresponse = channel + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_mtd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
            else {
                finalresponse = channel + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_ytd + " of LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
        }
        else if (!"".equalsIgnoreCase(channel) && !"".equalsIgnoreCase(userzone) && "".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && !"".equalsIgnoreCase(period)) {
            // Channel + zone + explicit period.
            // BUG FIX: the original tested "MTD".equalsIgnoreCase(channel), so MTD requests
            // for a zone always fell through to the YTD figures. Test the period instead.
            if ("MTD".equalsIgnoreCase(period)) {
                finalresponse = "Zone " + userzone + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_mtd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
            else {
                finalresponse = "Zone " + userzone + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_ytd + " of LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
        }
        else if (!"".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && !"".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && !"".equalsIgnoreCase(period)) {
            // Channel + region + explicit period.
            if ("MTD".equalsIgnoreCase(period)) {
                finalresponse = "" + user_region + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_mtd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
            else {
                finalresponse = "" + user_region + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_ytd + " of LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
        }
        // NOTE(review): this branch's condition is identical to the branch directly above,
        // so it is unreachable as written (the channel-labelled variant never fires).
        // Retained verbatim pending confirmation of the intended condition.
        else if (!"".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && !"".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && !"".equalsIgnoreCase(period)) {
            if ("MTD".equalsIgnoreCase(period)) {
                finalresponse = "" + channel + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_mtd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
            else {
                finalresponse = "" + channel + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_ytd + " of LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
        }
        else if (!"".equalsIgnoreCase(channel) && "".equalsIgnoreCase(userzone) && "".equalsIgnoreCase(user_region)
                && !"".equalsIgnoreCase(user_clusters) && !"".equalsIgnoreCase(period)) {
            // Channel + cluster + explicit period.
            if ("MTD".equalsIgnoreCase(period)) {
                finalresponse = "" + user_clusters + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_mtd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today "
                        + ".";
            }
            else {
                finalresponse = "" + user_clusters + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_ytd + " of LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today "
                        + ".";
            }
        }
        else if (!"".equalsIgnoreCase(channel) && !"".equalsIgnoreCase(userzone) && !"".equalsIgnoreCase(user_region)
                && "".equalsIgnoreCase(user_clusters) && !"".equalsIgnoreCase(period)) {
            // Channel + zone + region + explicit period.
            if ("MTD".equalsIgnoreCase(period)) {
                finalresponse = "" + user_region + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_mtd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
            else {
                finalresponse = "" + user_region + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_ytd + " of LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
        }
        else if (!"".equalsIgnoreCase(channel) && !"".equalsIgnoreCase(userzone) && !"".equalsIgnoreCase(user_region)
                && !"".equalsIgnoreCase(user_clusters) && !"".equalsIgnoreCase(period)) {
            // Every filter supplied: cluster label + explicit period.
            if ("MTD".equalsIgnoreCase(period)) {
                finalresponse = "" + user_clusters + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_mtd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today "
                        + ".";
            }
            else {
                finalresponse = "" + user_clusters + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_ytd + " of LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today "
                        + ".";
            }
        }
        else {
            // Fallback: channel label + period-specific figures.
            if ("MTD".equalsIgnoreCase(period)) {
                finalresponse = channel + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_mtd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_mtd + " of LPC paid cases as compared to " + lpc_paid_cases_mtd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
            else {
                finalresponse = channel + " has witnessed LPC paid cases growth of " + grth_lpc_paid_cases_ytd + "% on " + period + " basis, last year same time we had clocked "
                        + prev_lpc_paid_cases_ytd + " of LPC paid cases as compared to " + lpc_paid_cases_ytd_growth + " today "
                        + ". If you want to see the Zone/region wise business numbers, please specIfy the same.";
            }
        }
        return finalresponse;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.redis.internal.commands.executor.set;
import static org.apache.geode.redis.RedisCommandArgumentsTestHelper.assertAtLeastNArgs;
import static org.apache.geode.redis.internal.RedisConstants.ERROR_WRONG_SLOT;
import static org.apache.geode.redis.internal.RedisConstants.ERROR_WRONG_TYPE;
import static org.apache.geode.test.dunit.rules.RedisClusterStartupRule.BIND_ADDRESS;
import static org.apache.geode.test.dunit.rules.RedisClusterStartupRule.REDIS_CLIENT_TIMEOUT;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static redis.clients.jedis.Protocol.Command.SDIFF;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.JedisCluster;
import org.apache.geode.redis.ConcurrentLoopingThreads;
import org.apache.geode.redis.RedisIntegrationTest;
/**
 * Integration tests for the Redis {@code SDIFF} command executed through a
 * {@link JedisCluster}. Concrete subclasses supply the server port via
 * {@link RedisIntegrationTest#getPort()}.
 */
public abstract class AbstractSDiffIntegrationTest implements RedisIntegrationTest {
  private JedisCluster jedis;
  // All keys share the {tag1} hash tag so they hash to the same cluster slot.
  private static final String SET_KEY = "{tag1}setkey";
  private static final String NON_EXISTENT_SET_KEY = "{tag1}nonExistentSet";

  @Before
  public void setUp() {
    jedis = new JedisCluster(new HostAndPort(BIND_ADDRESS, getPort()), REDIS_CLIENT_TIMEOUT);
  }

  @After
  public void tearDown() {
    flushAll();
    jedis.close();
  }

  @Test
  public void sdiffErrors_givenTooFewArguments() {
    assertAtLeastNArgs(jedis, SDIFF, 1);
  }

  @Test
  public void sdiff_returnsAllValuesInSet() {
    String[] values = createKeyValuesSet();
    assertThat(jedis.sdiff(SET_KEY)).containsExactlyInAnyOrder(values);
  }

  @Test
  public void sdiff_withSetsFromDifferentSlots_returnsCrossSlotError() {
    // {tag2} hashes to a different slot than {tag1}, so SDIFF must be rejected.
    String setKeyDifferentSlot = "{tag2}set2";
    jedis.sadd(SET_KEY, "member1");
    jedis.sadd(setKeyDifferentSlot, "member2");
    assertThatThrownBy(() -> jedis.sendCommand(SET_KEY, SDIFF, SET_KEY, setKeyDifferentSlot))
        .hasMessage(ERROR_WRONG_SLOT);
  }

  @Test
  public void sdiffWithNonExistentSet_returnsEmptySet() {
    assertThat(jedis.sdiff(NON_EXISTENT_SET_KEY)).isEmpty();
  }

  @Test
  public void sdiffWithMultipleNonExistentSet_returnsEmptySet() {
    assertThat(jedis.sdiff("{tag1}nonExistentSet1", "{tag1}nonExistentSet2")).isEmpty();
  }

  @Test
  public void sdiffWithNonExistentSetAndSet_returnsEmptySet() {
    // The first key is the base of the diff; a non-existent base yields an empty result.
    createKeyValuesSet();
    assertThat(jedis.sdiff(NON_EXISTENT_SET_KEY, SET_KEY)).isEmpty();
  }

  @Test
  public void sdiffWithSetAndNonExistentSet_returnsAllValuesInSet() {
    String[] values = createKeyValuesSet();
    assertThat(jedis.sdiff(SET_KEY, NON_EXISTENT_SET_KEY))
        .containsExactlyInAnyOrder(values);
  }

  @Test
  public void sdiffWithSetsWithDifferentValues_returnsFirstSetValues() {
    String[] firstValues = createKeyValuesSet();
    String[] secondValues = new String[] {"windows", "microsoft", "linux"};
    jedis.sadd("{tag1}setkey2", secondValues);
    assertThat(jedis.sdiff(SET_KEY, "{tag1}setkey2")).containsExactlyInAnyOrder(firstValues);
  }

  @Test
  public void sdiffWithSetsWithSomeSharedValues_returnsDiffOfSets() {
    createKeyValuesSet();
    String[] secondValues = new String[] {"apple", "bottoms", "boots", "fur", "peach"};
    jedis.sadd("{tag1}setkey2", secondValues);
    Set<String> result =
        jedis.sdiff(SET_KEY, "{tag1}setkey2");
    String[] expected = new String[] {"orange", "plum", "pear"};
    assertThat(result).containsExactlyInAnyOrder(expected);
  }

  @Test
  public void sdiffWithSetsWithAllSharedValues_returnsEmptySet() {
    String[] values = createKeyValuesSet();
    jedis.sadd("{tag1}setkey2", values);
    assertThat(jedis.sdiff(SET_KEY, "{tag1}setkey2")).isEmpty();
  }

  @Test
  public void sdiffWithMultipleSets_returnsDiffOfSets() {
    String[] values = createKeyValuesSet();
    String[] secondValues = new String[] {"apple", "bottoms", "boots", "fur", "peach"};
    String[] thirdValues = new String[] {"queen", "opera", "boho", "orange"};
    jedis.sadd("{tag1}setkey2", secondValues);
    jedis.sadd("{tag1}setkey3", thirdValues);
    String[] expected = new String[] {"pear", "plum"};
    assertThat(jedis.sdiff(SET_KEY, "{tag1}setkey2", "{tag1}setkey3"))
        .containsExactlyInAnyOrder(expected);
  }

  @Test
  public void sdiffSetsNotModified_returnSetValues() {
    String[] firstValues = createKeyValuesSet();
    String[] secondValues = new String[] {"apple", "bottoms", "boots", "fur", "peach"};
    jedis.sadd("{tag1}setkey2", secondValues);
    jedis.sdiff(SET_KEY, "{tag1}setkey2");
    assertThat(jedis.smembers(SET_KEY)).containsExactlyInAnyOrder(firstValues);
    assertThat(jedis.smembers("{tag1}setkey2")).containsExactlyInAnyOrder(secondValues);
  }

  @Test
  public void sdiffNonExistentSetsNotModified_returnEmptySet() {
    jedis.sdiff(NON_EXISTENT_SET_KEY, "{tag1}nonExisistent2");
    assertThat(jedis.smembers(NON_EXISTENT_SET_KEY)).isEmpty();
    assertThat(jedis.smembers("{tag1}nonExisistent2")).isEmpty();
  }

  @Test
  public void sdiffNonExistentSetAndSetNotModified_returnEmptySetAndSetValues() {
    String[] firstValues = createKeyValuesSet();
    jedis.sdiff(NON_EXISTENT_SET_KEY, SET_KEY);
    // Fixed: the original asserted assertThat(...isEmpty()) on a boolean,
    // which never fails regardless of the set's contents.
    assertThat(jedis.smembers(NON_EXISTENT_SET_KEY)).isEmpty();
    assertThat(jedis.smembers(SET_KEY)).containsExactlyInAnyOrder(firstValues);
  }

  @Test
  public void sdiffSetAndNonExistentSetNotModified_returnSetValueAndEmptySet() {
    String[] firstValues = createKeyValuesSet();
    jedis.sdiff(SET_KEY, NON_EXISTENT_SET_KEY);
    assertThat(jedis.smembers(SET_KEY)).containsExactlyInAnyOrder(firstValues);
    // Fixed: same boolean-assertion bug as above.
    assertThat(jedis.smembers(NON_EXISTENT_SET_KEY)).isEmpty();
  }

  @Test
  public void sdiff_withNonSetKeyAsFirstKey_returnsWrongTypeError() {
    String stringKey = "{tag1}ding";
    jedis.set(stringKey, "dong");
    String[] members = createKeyValuesSet();
    String secondSetKey = "{tag1}secondKey";
    jedis.sadd(secondSetKey, members);
    assertThatThrownBy(() -> jedis.sdiff(stringKey, SET_KEY, secondSetKey))
        .hasMessage(ERROR_WRONG_TYPE);
  }

  @Test
  public void sdiff_withNonSetKeyAsThirdKey_returnsWrongTypeError() {
    String stringKey = "{tag1}ding";
    jedis.set(stringKey, "dong");
    String[] members = createKeyValuesSet();
    String secondSetKey = "{tag1}secondKey";
    jedis.sadd(secondSetKey, members);
    assertThatThrownBy(() -> jedis.sdiff(SET_KEY, secondSetKey, stringKey))
        .hasMessage(ERROR_WRONG_TYPE);
  }

  @Test
  public void sdiff_withNonSetKeyAsThirdKeyAndNonExistentSetAsFirstKey_returnsWrongTypeError() {
    String stringKey = "{tag1}ding";
    jedis.set(stringKey, "dong");
    jedis.sadd(SET_KEY, "member");
    assertThatThrownBy(() -> jedis.sdiff(NON_EXISTENT_SET_KEY, SET_KEY, stringKey))
        .hasMessage(ERROR_WRONG_TYPE);
  }

  @Test
  public void ensureSetConsistency_whenRunningConcurrently() {
    String[] values = new String[] {"pear", "apple", "plum", "orange", "peach"};
    Set<String> valuesList = new HashSet<>(Arrays.asList(values));
    jedis.sadd("{tag1}firstset", values);
    jedis.sadd("{tag1}secondset", values);
    final AtomicReference<Set<String>> sdiffResultReference = new AtomicReference<>();
    // Race SREM against SDIFF: the diff must be either empty (SREM lost) or the
    // full value set (SREM won) — never a partial view.
    new ConcurrentLoopingThreads(1000,
        i -> jedis.srem("{tag1}secondset", values),
        i -> sdiffResultReference.set(jedis.sdiff("{tag1}firstset", "{tag1}secondset")))
            .runWithAction(() -> {
              assertThat(sdiffResultReference).satisfiesAnyOf(
                  sdiffResult -> assertThat(sdiffResult.get()).isEmpty(),
                  sdiffResult -> assertThat(sdiffResult.get())
                      .containsExactlyInAnyOrderElementsOf(valuesList));
              jedis.sadd("{tag1}secondset", values);
            });
  }

  /** Populates SET_KEY with a fixed fruit set and returns the members added. */
  private String[] createKeyValuesSet() {
    String[] values = new String[] {"pear", "apple", "plum", "orange", "peach"};
    jedis.sadd(SET_KEY, values);
    return values;
  }
}
| |
/* Copyright 2004-2005 Graeme Rocher
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.web.mapping;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.groovy.grails.commons.GrailsControllerClass;
import org.codehaus.groovy.grails.validation.ConstrainedProperty;
import org.springframework.core.style.ToStringCreator;
import com.googlecode.concurrentlinkedhashmap.ConcurrentLinkedHashMap;
import com.googlecode.concurrentlinkedhashmap.Weigher;
/**
* Default implementation of the UrlMappingsHolder interface that takes a list of mappings and
* then sorts them according to their precedence rules as defined in the implementation of Comparable.
*
* @see org.codehaus.groovy.grails.web.mapping.UrlMapping
* @see Comparable
*
* @author Graeme Rocher
* @since 0.4
*/
@SuppressWarnings({"serial","rawtypes"})
public class DefaultUrlMappingsHolder implements UrlMappingsHolder {
// Logger used for reverse-mapping and match diagnostics (debug level only).
private static final transient Log LOG = LogFactory.getLog(DefaultUrlMappingsHolder.class);
// Default upper bound on the weighted size of the two match caches below.
private static final int DEFAULT_MAX_WEIGHTED_CAPACITY = 5000;
private int maxWeightedCacheCapacity = DEFAULT_MAX_WEIGHTED_CAPACITY;
// Cache of URI -> first matching UrlMappingInfo, populated by match(String).
private Map<String, UrlMappingInfo> cachedMatches;
// Cache of URI -> list of matching UrlMappingInfo; presumably consulted by
// matchAll(String) — TODO confirm (that method is defined further down).
private Map<String, List<UrlMappingInfo>> cachedListMatches;
// Weigher charging one unit per cached list element plus one for the entry,
// so larger match lists consume proportionally more cache capacity.
private enum CustomListWeigher implements Weigher<List<UrlMappingInfo>> {
INSTANCE;
public int weightOf(List<UrlMappingInfo> values) {
return values.size() + 1;
}
}
// Mutable source list; initialize() copies it into the mappings array.
private List<UrlMapping> urlMappings = new ArrayList<UrlMapping>();
private UrlMapping[] mappings;
private List excludePatterns;
// Reverse-lookup indexes built by initialize():
// (controller, action, param names) -> mapping, name -> mapping,
// and (controller, action) -> sorted set of keys.
private Map<UrlMappingKey, UrlMapping> mappingsLookup = new HashMap<UrlMappingKey, UrlMapping>();
private Map<String, UrlMapping> namedMappings = new HashMap<String, UrlMapping>();
private UrlMappingsList mappingsListLookup = new UrlMappingsList();
// Parameter-name sets used as fallbacks during reverse lookup.
private Set<String> DEFAULT_CONTROLLER_PARAMS = new HashSet<String>() {{
add(UrlMapping.CONTROLLER);
add(UrlMapping.ACTION);
}};
private Set<String> DEFAULT_ACTION_PARAMS = new HashSet<String>() {{
add(UrlMapping.ACTION);
}};
private UrlCreatorCache urlCreatorCache;
// capacity of the UrlCreatoreCache is the estimated number of char's stored in cached objects
private int urlCreatorMaxWeightedCacheCapacity = 160000;
/**
 * Creates a holder with no exclude patterns; initializes immediately.
 */
public DefaultUrlMappingsHolder(List<UrlMapping> mappings) {
this(mappings, null, false);
}
/**
 * Creates a holder with the given exclude patterns; initializes immediately.
 */
public DefaultUrlMappingsHolder(List<UrlMapping> mappings, List excludePatterns) {
this(mappings, excludePatterns, false);
}
/**
 * @param mappings the URL mappings to hold
 * @param excludePatterns patterns excluded from mapping (may be null)
 * @param doNotCallInit when true the caller must invoke initialize() itself
 */
public DefaultUrlMappingsHolder(List<UrlMapping> mappings, List excludePatterns, boolean doNotCallInit) {
urlMappings = mappings;
this.excludePatterns = excludePatterns;
if (!doNotCallInit) {
initialize();
}
}
/**
 * Sorts the mappings and builds all caches and reverse-lookup indexes.
 * Must be called before the holder is used (constructors call it unless
 * doNotCallInit was passed).
 */
public void initialize() {
sortMappings();
// Bounded LRU-style caches for forward matching.
cachedMatches = new ConcurrentLinkedHashMap.Builder<String, UrlMappingInfo>()
.maximumWeightedCapacity(maxWeightedCacheCapacity)
.build();
cachedListMatches = new ConcurrentLinkedHashMap.Builder<String, List<UrlMappingInfo>>()
.maximumWeightedCapacity(maxWeightedCacheCapacity)
.weigher(CustomListWeigher.INSTANCE)
.build();
if (urlCreatorMaxWeightedCacheCapacity > 0) {
urlCreatorCache = new UrlCreatorCache(urlCreatorMaxWeightedCacheCapacity);
}
mappings = urlMappings.toArray(new UrlMapping[urlMappings.size()]);
// Build the reverse-lookup indexes: by name, by (controller, action, params),
// and the per-(controller, action) key lists.
for (UrlMapping mapping : mappings) {
String mappingName = mapping.getMappingName();
if (mappingName != null) {
namedMappings.put(mappingName, mapping);
}
// Controller/action may be non-String (e.g. a closure); index only String names.
String controllerName = mapping.getControllerName() instanceof String ? mapping.getControllerName().toString() : null;
String actionName = mapping.getActionName() instanceof String ? mapping.getActionName().toString() : null;
ConstrainedProperty[] params = mapping.getConstraints();
Set<String> requiredParams = new HashSet<String>();
int optionalIndex = -1;
// Leading non-nullable constraints are required params; the first nullable
// one marks where the optional params begin (assumes optionals are trailing).
for (int j = 0; j < params.length; j++) {
ConstrainedProperty param = params[j];
if (!param.isNullable()) {
requiredParams.add(param.getPropertyName());
}
else {
optionalIndex = j;
break;
}
}
UrlMappingKey key = new UrlMappingKey(controllerName, actionName, requiredParams);
mappingsLookup.put(key, mapping);
UrlMappingsListKey listKey = new UrlMappingsListKey(controllerName, actionName);
mappingsListLookup.put(listKey, key);
if (LOG.isDebugEnabled()) {
LOG.debug("Reverse mapping: " + key + " -> " + mapping);
}
// Also register a key for every prefix of the optional params, so lookups
// supplying any number of the optionals still resolve to this mapping.
Set<String> requiredParamsAndOptionals = new HashSet<String>(requiredParams);
if (optionalIndex > -1) {
for (int j = optionalIndex; j < params.length; j++) {
ConstrainedProperty param = params[j];
requiredParamsAndOptionals.add(param.getPropertyName());
key = new UrlMappingKey(controllerName, actionName, new HashSet<String>(requiredParamsAndOptionals));
mappingsLookup.put(key, mapping);
listKey = new UrlMappingsListKey(controllerName, actionName);
mappingsListLookup.put(listKey, key);
if (LOG.isDebugEnabled()) {
LOG.debug("Reverse mapping: " + key + " -> " + mapping);
}
}
}
}
}
/**
 * Sorts urlMappings by precedence (highest first). ResponseCodeUrlMapping
 * instances are excluded from the comparison-based sort and re-appended before
 * the final reversal.
 */
@SuppressWarnings("unchecked")
private void sortMappings() {
    // Extract response-code mappings first; they must not take part in the sort.
    List<ResponseCodeUrlMapping> responseCodeMappings = new ArrayList<ResponseCodeUrlMapping>();
    for (Iterator<UrlMapping> it = urlMappings.iterator(); it.hasNext();) {
        UrlMapping candidate = it.next();
        if (candidate instanceof ResponseCodeUrlMapping) {
            responseCodeMappings.add((ResponseCodeUrlMapping) candidate);
            it.remove();
        }
    }
    // Sort ascending, append the response-code mappings, then reverse so the
    // highest-precedence entries end up first.
    Collections.sort(urlMappings);
    urlMappings.addAll(responseCodeMappings);
    Collections.reverse(urlMappings);
}
/**
 * @return the mappings array built by initialize(), ordered highest precedence first
 */
public UrlMapping[] getUrlMappings() {
return mappings;
}
/**
 * @return the configured exclude patterns (may be null when none were supplied)
 */
public List getExcludePatterns() {
return excludePatterns;
}
/**
 * Looks up (and caches, when the creator cache is enabled) a UrlCreator for the
 * given controller, action and parameters.
 *
 * @see UrlMappingsHolder#getReverseMapping(String, String, java.util.Map)
 */
public UrlCreator getReverseMapping(final String controller, final String action, Map params) {
    if (params == null) params = Collections.EMPTY_MAP;
    if (urlCreatorCache == null) {
        // cache is disabled
        return resolveUrlCreator(controller, action, params);
    }
    UrlCreatorCache.ReverseMappingKey cacheKey = urlCreatorCache.createKey(controller, action, params);
    UrlCreator creator = urlCreatorCache.lookup(cacheKey);
    if (creator == null) {
        creator = urlCreatorCache.putAndDecorate(cacheKey, resolveUrlCreator(controller, action, params));
    }
    // preserve previous side-effect, remove mappingName from params
    params.remove("mappingName");
    return creator;
}
/**
 * Resolves a UrlCreator for the given controller/action/params via a series of
 * progressively less specific lookups. NOTE: removes the "mappingName" entry
 * from {@code params} as a side effect.
 */
@SuppressWarnings("unchecked")
private UrlCreator resolveUrlCreator(final String controller,
final String action, Map params) {
UrlMapping mapping = null;
// 1) Explicit named mapping, when the caller supplied params.mappingName.
mapping = namedMappings.get(params.remove("mappingName"));
if (mapping == null) {
// 2) Best match by controller/action and parameter names.
mapping = lookupMapping(controller, action, params);
}
if (mapping == null || (mapping instanceof ResponseCodeUrlMapping)) {
// 3) Exact controller/action key with no required params.
mapping = mappingsLookup.get(new UrlMappingKey(controller, action, Collections.EMPTY_SET));
}
if (mapping == null || (mapping instanceof ResponseCodeUrlMapping)) {
// 4) Controller-only keys: first with the caller's params folded into the
// default action params, then with the defaults alone.
Set<String> lookupParams = new HashSet<String>(DEFAULT_ACTION_PARAMS);
Set<String> paramKeys = new HashSet<String>(params.keySet());
paramKeys.removeAll(lookupParams);
lookupParams.addAll(paramKeys);
mapping = mappingsLookup.get(new UrlMappingKey(controller, null, lookupParams));
if (mapping == null) {
lookupParams.removeAll(paramKeys);
mapping = mappingsLookup.get(new UrlMappingKey(controller, null, lookupParams));
}
}
if (mapping == null || (mapping instanceof ResponseCodeUrlMapping)) {
// 5) Fully generic keys (no controller/action), same two-step param strategy.
Set<String> lookupParams = new HashSet<String>(DEFAULT_CONTROLLER_PARAMS);
Set<String> paramKeys = new HashSet<String>(params.keySet());
paramKeys.removeAll(lookupParams);
lookupParams.addAll(paramKeys);
mapping = mappingsLookup.get(new UrlMappingKey(null, null, lookupParams));
if (mapping == null) {
lookupParams.removeAll(paramKeys);
mapping = mappingsLookup.get(new UrlMappingKey(null, null, lookupParams));
}
}
UrlCreator creator;
if (mapping == null || (mapping instanceof ResponseCodeUrlMapping)) {
// No reverse mapping found: fall back to the default /controller/action URL.
creator=new DefaultUrlCreator(controller, action);
} else {
creator=mapping;
}
return creator;
}
/**
 * Uses the reverse-mapping indexes to look up a UrlMapping for the given
 * controller, action and params. Candidate mappings are listed by controller
 * and action only; scanning from most to least specific, the first candidate
 * whose required parameter names are all present in {@code params} wins.
 *
 * @param controller The controller name
 * @param action The action name
 * @param params The params
 * @return A UrlMapping instance or null if none matches
 */
@SuppressWarnings("unchecked")
protected UrlMapping lookupMapping(String controller, String action, Map params) {
final UrlMappingsListKey lookupKey = new UrlMappingsListKey(controller, action);
SortedSet mappingKeysSet = mappingsListLookup.get(lookupKey);
final String actionName = lookupKey.action;
boolean secondAttempt = false;
final boolean isIndexAction = GrailsControllerClass.INDEX_ACTION.equals(actionName);
if (null == mappingKeysSet && actionName != null) {
// No candidates for this exact action: retry with the action wildcarded.
lookupKey.action=null;
mappingKeysSet = mappingsListLookup.get(lookupKey);
secondAttempt = true;
}
if (null == mappingKeysSet) return null;
UrlMappingKey[] mappingKeys = (UrlMappingKey[]) mappingKeysSet.toArray(new UrlMappingKey[mappingKeysSet.size()]);
// Iterate from the end of the sorted set so the most specific keys are tried first.
for (int i = mappingKeys.length; i > 0; i--) {
UrlMappingKey mappingKey = mappingKeys[i - 1];
if (params.keySet().containsAll(mappingKey.paramNames)) {
final UrlMapping mapping = mappingsLookup.get(mappingKey);
// On the wildcarded retry, only accept the mapping if the requested action
// can still be inferred at runtime (index action, runtime action variable,
// or a RESTful default action).
if (canInferAction(actionName, secondAttempt, isIndexAction, mapping)) {
return mapping;
}
if (!secondAttempt) {
return mapping;
}
}
}
return null;
}
/**
 * Returns true when, on a second-attempt lookup (action dropped), the action
 * can still be inferred: it is the default index action, the mapping captures
 * the action as a runtime variable, or a RESTful mapping covers the action name.
 */
private boolean canInferAction(String actionName, boolean secondAttempt, boolean indexAction, UrlMapping mapping) {
    if (!secondAttempt) {
        return false;
    }
    return indexAction
            || mapping.hasRuntimeVariable(GrailsControllerClass.ACTION)
            || (mapping.isRestfulMapping() && UrlMappingEvaluator.DEFAULT_REST_MAPPING.containsValue(actionName));
}
/**
 * @see org.codehaus.groovy.grails.web.mapping.UrlMappingsHolder#match(String)
 */
public UrlMappingInfo match(String uri) {
    // Serve previously matched URIs straight from the cache.
    if (cachedMatches.containsKey(uri)) {
        return cachedMatches.get(uri);
    }
    for (UrlMapping mapping : mappings) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Attempting to match URI [" + uri + "] with pattern [" + mapping.getUrlData().getUrlPattern() + "]");
        }
        UrlMappingInfo matchInfo = mapping.match(uri);
        if (matchInfo != null) {
            // Cache the first successful match and stop searching.
            cachedMatches.put(uri, matchInfo);
            return matchInfo;
        }
    }
    return null;
}
/**
 * Collects every mapping that matches the given URI, caching the list of
 * matches per URI.
 */
public UrlMappingInfo[] matchAll(String uri) {
    List<UrlMappingInfo> accumulated;
    if (cachedListMatches.containsKey(uri)) {
        accumulated = cachedListMatches.get(uri);
    }
    else {
        accumulated = new ArrayList<UrlMappingInfo>();
        for (UrlMapping mapping : mappings) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Attempting to match URI [" + uri + "] with pattern [" + mapping.getUrlData().getUrlPattern() + "]");
            }
            UrlMappingInfo current = mapping.match(uri);
            if (current == null) {
                continue;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Matched URI [" + uri + "] with pattern [" + mapping.getUrlData().getUrlPattern() + "], adding to posibilities");
            }
            accumulated.add(current);
        }
        cachedListMatches.put(uri, accumulated);
    }
    return accumulated.toArray(new UrlMappingInfo[accumulated.size()]);
}
/**
 * Matches all mappings for the given URI. The HTTP method is currently
 * ignored; this delegates straight to {@link #matchAll(String)}.
 */
public UrlMappingInfo[] matchAll(String uri, String httpMethod) {
    return matchAll(uri);
}
/**
 * Finds the first response-code mapping (without a declared exception type)
 * that matches the given HTTP response code.
 */
public UrlMappingInfo matchStatusCode(int responseCode) {
    for (UrlMapping mapping : mappings) {
        if (!(mapping instanceof ResponseCodeUrlMapping)) {
            continue;
        }
        ResponseCodeUrlMapping codeMapping = (ResponseCodeUrlMapping) mapping;
        // Mappings bound to an exception type are handled by the Throwable overload.
        if (codeMapping.getExceptionType() != null) {
            continue;
        }
        UrlMappingInfo matchInfo = codeMapping.match(responseCode);
        if (matchInfo != null) {
            return matchInfo;
        }
    }
    return null;
}
/**
 * Finds the first response-code mapping that matches both the given HTTP
 * response code and whose declared exception type matches the thrown exception.
 */
public UrlMappingInfo matchStatusCode(int responseCode, Throwable e) {
    for (UrlMapping mapping : mappings) {
        if (!(mapping instanceof ResponseCodeUrlMapping)) {
            continue;
        }
        ResponseCodeUrlMapping codeMapping = (ResponseCodeUrlMapping) mapping;
        final UrlMappingInfo matchInfo = codeMapping.match(responseCode);
        // Only a mapping whose exception type covers the thrown exception qualifies.
        if (matchInfo != null
                && codeMapping.getExceptionType() != null
                && codeMapping.getExceptionType().isInstance(e)) {
            return matchInfo;
        }
    }
    return null;
}
@Override
public String toString() {
    // PrintWriter.println is used deliberately so the platform line separator
    // matches the original output.
    StringWriter buffer = new StringWriter();
    PrintWriter out = new PrintWriter(buffer);
    out.println("URL Mappings");
    out.println("------------");
    for (UrlMapping mapping : mappings) {
        out.println(mapping);
    }
    out.flush();
    return buffer.toString();
}
/**
 * A class used as a key to lookup a UrlMapping based on controller, action and
 * parameter names. Keys with more parameter names sort after keys with fewer,
 * so the most specific key is last in a sorted set.
 */
@SuppressWarnings("unchecked")
class UrlMappingKey implements Comparable {
    String controller;
    String action;
    Set<String> paramNames = Collections.EMPTY_SET;

    public UrlMappingKey(String controller, String action, Set<String> paramNames) {
        this.controller = controller;
        this.action = action;
        this.paramNames = paramNames;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        UrlMappingKey that = (UrlMappingKey) o;
        // Null-safe symmetric comparison. Previously a null action/controller on
        // this side matched ANY value on the other side, which made equals()
        // asymmetric and violated the Object.equals contract.
        if (action != null ? !action.equals(that.action) : that.action != null) return false;
        if (controller != null ? !controller.equals(that.controller) : that.controller != null) return false;
        if (!paramNames.equals(that.paramNames)) return false;
        return true;
    }

    @Override
    public int hashCode() {
        int result;
        result = (controller != null ? controller.hashCode() : 0);
        result = 31 * result + (action != null ? action.hashCode() : 0);
        result = 31 * result + paramNames.hashCode();
        return result;
    }

    @Override
    public String toString() {
        return new ToStringCreator(this).append("controller", controller)
                .append("action",action)
                .toString().equals("") ? "" : new ToStringCreator(this).append("controller", controller)
                .append("action",action)
                .append("params", paramNames)
                .toString();
    }

    public int compareTo(Object o) {
        final int BEFORE = -1;
        final int EQUAL = 0;
        final int AFTER = 1;
        //this optimization is usually worthwhile, and can always be added
        if (this == o) return EQUAL;
        final UrlMappingKey other = (UrlMappingKey) o;
        // Fewer parameter names sorts first.
        if (paramNames.size() < other.paramNames.size()) return BEFORE;
        if (paramNames.size() > other.paramNames.size()) return AFTER;
        // Guard BOTH sides against null: the previous code threw a
        // NullPointerException when this side's component was non-null but the
        // other side's was null. A null component compares as equal, matching
        // the original behavior for a null on this side.
        int comparison = (controller != null && other.controller != null)
                ? controller.compareTo(other.controller) : EQUAL;
        if (comparison != EQUAL) return comparison;
        comparison = (action != null && other.action != null)
                ? action.compareTo(other.action) : EQUAL;
        if (comparison != EQUAL) return comparison;
        return EQUAL;
    }
}
/**
 * A class used as a key to lookup all UrlMappings based on only controller and action.
 */
class UrlMappingsListKey {
    String controller;
    String action;

    public UrlMappingsListKey(String controller, String action) {
        this.controller = controller;
        this.action = action;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        UrlMappingsListKey that = (UrlMappingsListKey) o;
        // Null-safe symmetric comparison. Previously a null action/controller on
        // this side matched ANY value on the other side, which made equals()
        // asymmetric and violated the Object.equals contract.
        if (action != null ? !action.equals(that.action) : that.action != null) return false;
        if (controller != null ? !controller.equals(that.controller) : that.controller != null) return false;
        return true;
    }

    @Override
    public int hashCode() {
        int result;
        result = (controller != null ? controller.hashCode() : 0);
        result = 31 * result + (action != null ? action.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return new ToStringCreator(this).append("controller", controller)
                .append("action",action)
                .toString();
    }
}
class UrlMappingsList {
    // Maps a controller/action key to the sorted set of full mapping keys
    // registered under it.
    private Map<UrlMappingsListKey, SortedSet<UrlMappingKey>> lookup =
            new HashMap<UrlMappingsListKey, SortedSet<UrlMappingKey>>();

    public void put(UrlMappingsListKey key, UrlMappingKey mapping) {
        // Lazily create the bucket on first insert for this key.
        SortedSet<UrlMappingKey> bucket = lookup.get(key);
        if (bucket == null) {
            bucket = new TreeSet<UrlMappingKey>();
            lookup.put(key, bucket);
        }
        bucket.add(mapping);
    }

    public SortedSet<UrlMappingKey> get(UrlMappingsListKey key) {
        return lookup.get(key);
    }
}
/** Setter for maxWeightedCacheCapacity. */
public void setMaxWeightedCacheCapacity(int maxWeightedCacheCapacity) {
    this.maxWeightedCacheCapacity = maxWeightedCacheCapacity;
}

/** Setter for urlCreatorMaxWeightedCacheCapacity. */
public void setUrlCreatorMaxWeightedCacheCapacity(int urlCreatorMaxWeightedCacheCapacity) {
    this.urlCreatorMaxWeightedCacheCapacity = urlCreatorMaxWeightedCacheCapacity;
}
}
| |
/*
* File: AbstractPairwiseMultipleHypothesisComparison.java
* Authors: Kevin R. Dixon
* Company: Sandia National Laboratories
* Project: Cognitive Foundry
*
* Copyright Jun 1, 2011, Sandia Corporation.
* Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
* license for use of this work by or on behalf of the U.S. Government.
* Export of this program may require a license from the United States
* Government. See CopyrightHistory.txt for complete details.
*
*/
package gov.sandia.cognition.statistics.method;
import gov.sandia.cognition.collection.CollectionUtil;
import gov.sandia.cognition.math.matrix.Matrix;
import gov.sandia.cognition.math.matrix.MatrixFactory;
import gov.sandia.cognition.util.ObjectUtil;
import java.util.ArrayList;
import java.util.Collection;
/**
 * A multiple-hypothesis comparison algorithm based on making multiple
 * pair-wise null-hypothesis comparisons.
 * @param <StatisticType>
 * Type of statistic returned by the test
 * @author Kevin R. Dixon
 * @since 3.3.0
 */
public abstract class AbstractPairwiseMultipleHypothesisComparison<StatisticType extends AbstractPairwiseMultipleHypothesisComparison.Statistic>
    extends AbstractMultipleHypothesisComparison<Collection<? extends Number>, StatisticType>
{

    /**
     * Default pair-wise confidence test: Student's Paired t-test.
     * (An alternative would be WilcoxonSignedRankConfidence.INSTANCE.)
     */
    public static final NullHypothesisEvaluator<Collection<? extends Number>> DEFAULT_PAIRWISE_TEST =
        StudentTConfidence.INSTANCE;

    /**
     * Confidence test used for pair-wise null-hypothesis tests.
     */
    protected NullHypothesisEvaluator<Collection<? extends Number>> pairwiseTest;

    /**
     * Creates a new AbstractPairwiseMultipleHypothesisComparison.
     * @param pairwiseTest
     * Confidence test used for pair-wise null-hypothesis tests.
     */
    public AbstractPairwiseMultipleHypothesisComparison(
        final NullHypothesisEvaluator<Collection<? extends Number>> pairwiseTest)
    {
        this.setPairwiseTest(pairwiseTest);
    }

    @Override
    public AbstractPairwiseMultipleHypothesisComparison<StatisticType> clone()
    {
        @SuppressWarnings("unchecked")
        final AbstractPairwiseMultipleHypothesisComparison<StatisticType> copy =
            (AbstractPairwiseMultipleHypothesisComparison<StatisticType>) super.clone();
        copy.setPairwiseTest( ObjectUtil.cloneSafe( this.getPairwiseTest() ) );
        return copy;
    }

    /**
     * Getter for pairwiseTest
     * @return
     * Confidence test used for pair-wise null-hypothesis tests.
     */
    public NullHypothesisEvaluator<Collection<? extends Number>> getPairwiseTest()
    {
        return this.pairwiseTest;
    }

    /**
     * Setter for pairwiseTest
     * @param pairwiseTest
     * Confidence test used for pair-wise null-hypothesis tests.
     */
    public void setPairwiseTest(
        final NullHypothesisEvaluator<Collection<? extends Number>> pairwiseTest)
    {
        this.pairwiseTest = pairwiseTest;
    }

    /**
     * Result from a pairwise multiple-comparison statistic.
     */
    public static abstract class Statistic
        extends AbstractMultipleHypothesisComparison.Statistic
    {

        /**
         * Results from the pair-wise confidence tests.
         */
        protected ArrayList<ArrayList<ConfidenceStatistic>> pairwiseTestStatistics;

        /**
         * Creates a new instance of Statistic
         * @param data
         * Data from each treatment to consider
         * @param uncompensatedAlpha
         * Uncompensated alpha (p-value threshold) for the multiple comparison
         * test
         * @param pairwiseTest
         * Confidence test used for pair-wise null-hypothesis tests.
         */
        public Statistic(
            final Collection<? extends Collection<? extends Number>> data,
            final double uncompensatedAlpha,
            final NullHypothesisEvaluator<Collection<? extends Number>> pairwiseTest )
        {
            this.treatmentCount = data.size();
            this.uncompensatedAlpha = uncompensatedAlpha;
            this.computePairwiseTestResults(data, pairwiseTest);
        }

        @Override
        public AbstractPairwiseMultipleHypothesisComparison.Statistic clone()
        {
            final AbstractPairwiseMultipleHypothesisComparison.Statistic copy =
                (AbstractPairwiseMultipleHypothesisComparison.Statistic) super.clone();
            copy.pairwiseTestStatistics = ObjectUtil.cloneSmartElementsAsArrayList(
                this.getPairwiseTestStatistics() );
            return copy;
        }

        /**
         * Computes the pair-wise confidence test results
         * @param data
         * Data from each treatment to consider
         * @param pairwiseTest
         * Confidence test used for pair-wise null-hypothesis tests.
         */
        protected void computePairwiseTestResults(
            final Collection<? extends Collection<? extends Number>> data,
            final NullHypothesisEvaluator<Collection<? extends Number>> pairwiseTest )
        {
            final ArrayList<? extends Collection<? extends Number>> treatments =
                CollectionUtil.asArrayList(data);
            final int numTreatments = treatments.size();
            final Matrix testStats =
                MatrixFactory.getDefault().createMatrix(numTreatments, numTreatments);
            final Matrix pValues =
                MatrixFactory.getDefault().createMatrix(numTreatments, numTreatments);

            // Pre-fill an NxN table of nulls so symmetric entries can be set
            // directly by index below.
            final ArrayList<ArrayList<ConfidenceStatistic>> comparisons =
                new ArrayList<ArrayList<ConfidenceStatistic>>( numTreatments );
            for( int row = 0; row < numTreatments; row++ )
            {
                final ArrayList<ConfidenceStatistic> rowStats =
                    new ArrayList<ConfidenceStatistic>( numTreatments );
                for( int col = 0; col < numTreatments; col++ )
                {
                    rowStats.add( null );
                }
                comparisons.add( rowStats );
            }

            for( int i = 0; i < numTreatments; i++ )
            {
                // Comparisons to ourselves are perfect
                testStats.setElement(i, i, 0.0);
                pValues.setElement(i, i, 1.0);
                final Collection<? extends Number> treatmentI = treatments.get(i);
                for( int j = i+1; j < numTreatments; j++ )
                {
                    final ConfidenceStatistic comparison =
                        pairwiseTest.evaluateNullHypothesis(treatmentI, treatments.get(j));
                    final double p = comparison.getNullHypothesisProbability();
                    final double z = comparison.getTestStatistic();
                    // The matrices and the statistic table are symmetric.
                    testStats.setElement(i, j, z);
                    testStats.setElement(j, i, z);
                    pValues.setElement(i, j, p);
                    pValues.setElement(j, i, p);
                    comparisons.get(i).set(j, comparison);
                    comparisons.get(j).set(i, comparison);
                }
            }
            this.testStatistics = testStats;
            this.nullHypothesisProbabilities = pValues;
            this.pairwiseTestStatistics = comparisons;
        }

        /**
         * Getter for pairwiseTestStatistics
         * @return
         * Results from the pair-wise confidence tests.
         */
        public ArrayList<ArrayList<ConfidenceStatistic>> getPairwiseTestStatistics()
        {
            return this.pairwiseTestStatistics;
        }

        @Override
        public boolean acceptNullHypothesis(
            int i,
            int j)
        {
            // Accept when the pairwise p-value meets or exceeds the corrected threshold.
            return this.getNullHypothesisProbability(i, j) >= this.getAdjustedAlpha(i, j);
        }

        /**
         * Gets the adjusted alpha (p-value threshold) for the given comparison
         * @param i
         * First treatment to compare
         * @param j
         * Second treatment to compare
         * @return
         * Adjusted alpha (p-value threshold) for the given comparison
         */
        public abstract double getAdjustedAlpha(
            int i,
            int j );
    }
}
| |
/*
* Copyright 2014 The Ideal Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
package ideal.showcase.coach.webforms;
import ideal.library.elements.*;
import javax.annotation.Nullable;
import ideal.library.texts.*;
import ideal.runtime.elements.*;
import ideal.runtime.texts.*;
import ideal.runtime.channels.*;
import ideal.machine.elements.*;
import ideal.machine.channels.*;
import ideal.development.elements.*;
import ideal.development.values.*;
import ideal.showcase.coach.reflections.*;
import ideal.showcase.coach.forms.*;
import ideal.showcase.coach.forms.select_input.option;
import java.io.*;
public class view_renderer extends widget_visitor<String> {
private final render_context context;
private final input_id_generator id_generator;
public view_renderer(render_context context) {
this.context = context;
this.id_generator = new input_id_generator();
}
public String render(widget the_widget) {
return visit(the_widget);
}
@Override
public String visit_label(label the_label) {
return escape_html(the_label.content);
}
@Override
public String visit_html_text(html_text the_html_text) {
string_writer the_writer = new string_writer();
markup_formatter out = new markup_formatter(the_writer);
out.write(the_html_text.the_text);
return utilities.s(the_writer.elements());
}
@Override
public String visit_raw_html(raw_html the_raw_html) {
return utilities.s(the_raw_html.content);
}
@Override
public String visit_bold(bold the_bold) {
return "<b>" + escape_html(the_bold.content) + "</b>";
}
@Override
public String visit_javascript(javascript the_javascript) {
return "<script language='JavaScript'>\n" +
"// <![CDATA[\n" +
the_javascript.content + "\n" +
"// ]]>\n" +
"</script>\n";
}
@Override
public String visit_div(div the_div) {
return "<div class='" + escape_html(the_div.style) + "'>" + render(the_div.content) + "</div>";
}
@Override
public String visit_text_input(text_input the_text_input) {
return "<input type=\"text\" name=\"" + id_generator.next_escaped_id() +
"\" value=\"" + escape_html(the_text_input.model.get().unwrap()) + "\" />\n";
}
@Override
public String visit_textarea_input(textarea_input the_textarea_input) {
return "<textarea rows=\"30\" cols=\"80\" name=\"" +
id_generator.next_escaped_id() + "\">" +
escape_html(the_textarea_input.model.get().unwrap()) + "</textarea>\n";
}
@Override
public String visit_select_input(select_input the_select_input) {
StringBuilder sb = new StringBuilder();
sb.append("<select name=\"" + id_generator.next_escaped_id() + "\">\n");
readonly_list<option> the_options = the_select_input.the_options;
for (int i = 0; i < the_options.size(); ++i) {
option opt = the_options.get(i);
sb.append("<option value=\"" + i + "\"");
if (runtime_util.values_equal(opt.value, the_select_input.model.get())) {
sb.append(" selected=\"selected\"");
}
sb.append(">");
sb.append(escape_html(opt.name));
sb.append("</option>\n");
}
sb.append("</select>\n");
return sb.toString();
}
@Override
public String visit_form(form the_form) {
StringBuilder sb = new StringBuilder();
sb.append(render_form_start(the_form));
sb.append(render(the_form.content));
sb.append(render_form_end());
return sb.toString();
}
public String render_form_start(form the_form) {
return "<form action=\"" + escape_html(the_form.action) +
"\" method=\"" + form.FORM_METHOD + "\">\n";
}
public String render_form_end() {
return "</form>\n";
}
public String render_hidden_input(identifier the_name, string value) {
return "<input type=\"hidden\" name=\"" + escape_html(the_name) +
"\" value=\"" + escape_html(utilities.s(value)) + "\" />\n";
}
@Override
public String visit_vbox(vbox the_vbox) {
StringBuilder sb = new StringBuilder("<table>\n");
readonly_list<widget> rows = the_vbox.rows;
for (int i = 0; i < rows.size(); ++i) {
widget row = rows.get(i);
sb.append("<tr>");
if (row instanceof hbox) {
sb.append(render(row));
} else {
sb.append("<td>\n");
sb.append(render(row));
sb.append("</td>\n");
}
sb.append("</tr>\n");
}
sb.append("</table>\n");
return sb.toString();
}
@Override
public String visit_hbox(hbox the_hbox) {
StringBuilder sb = new StringBuilder();
readonly_list<widget> columns = the_hbox.columns;
for (int i = 0; i < columns.size(); ++i) {
widget column = columns.get(i);
sb.append("<td>");
sb.append(render(column));
sb.append("</td>\n");
}
return sb.toString();
}
@Override
public String visit_link(link the_link) {
StringBuilder sb = new StringBuilder("<a href=\"");
sb.append(escape_html(context.to_uri(the_link.target)));
sb.append("\">");
sb.append(escape_html(the_link.name));
sb.append("</a>");
return sb.toString();
}
@Override
public String visit_simple_link(simple_link the_simple_link) {
StringBuilder sb = new StringBuilder("<a href=\"");
sb.append(escape_html(the_simple_link.href));
sb.append("\">");
sb.append(escape_html(the_simple_link.name));
sb.append("</a>");
return sb.toString();
}
@Override
public String visit_button(button the_button) {
return "<input type=\"submit\" name=\"" + escape_html(context.to_button_id(the_button.target)) +
"\" value=\"" + escape_html(the_button.name) + "\" />\n";
}
private static String escape_html(identifier id) {
return escape_html(id.to_string());
}
public static String escape_html(string s) {
return utilities.s(runtime_util.escape_markup(s));
}
public static String escape_html(String s) {
return escape_html(new base_string(s));
}
}
| |
package com.github.alezhka.wavesj.utils.crypto.primitives.curve25519;
public class sc_reduce {
//CONVERT #include "sc.h"
//CONVERT #include "long.h"
//CONVERT #include "crypto_uint32.h"
//CONVERT #include "long.h"
public static long load_3(byte[] in, int index) {
long result;
result = ((long) in[index + 0]) & 0xFF;
result |= (((long) in[index + 1]) << 8) & 0xFF00;
result |= (((long) in[index + 2]) << 16) & 0xFF0000;
return result;
}
public static long load_4(byte[] in, int index) {
long result;
result = (((long) in[index + 0]) & 0xFF);
result |= ((((long) in[index + 1]) << 8) & 0xFF00);
result |= ((((long) in[index + 2]) << 16) & 0xFF0000);
result |= ((((long) in[index + 3]) << 24) & 0xFF000000L);
return result;
}
/*
Input:
s[0]+256*s[1]+...+256^63*s[63] = s
Output:
s[0]+256*s[1]+...+256^31*s[31] = s mod l
where l = 2^252 + 27742317777372353535851937790883648493.
Overwrites s in place.
*/
public static void sc_reduce(byte[] s) {
long s0 = 2097151 & load_3(s, 0);
long s1 = 2097151 & (load_4(s, 2) >>> 5);
long s2 = 2097151 & (load_3(s, 5) >>> 2);
long s3 = 2097151 & (load_4(s, 7) >>> 7);
long s4 = 2097151 & (load_4(s, 10) >>> 4);
long s5 = 2097151 & (load_3(s, 13) >>> 1);
long s6 = 2097151 & (load_4(s, 15) >>> 6);
long s7 = 2097151 & (load_3(s, 18) >>> 3);
long s8 = 2097151 & load_3(s, 21);
long s9 = 2097151 & (load_4(s, 23) >>> 5);
long s10 = 2097151 & (load_3(s, 26) >>> 2);
long s11 = 2097151 & (load_4(s, 28) >>> 7);
long s12 = 2097151 & (load_4(s, 31) >>> 4);
long s13 = 2097151 & (load_3(s, 34) >>> 1);
long s14 = 2097151 & (load_4(s, 36) >>> 6);
long s15 = 2097151 & (load_3(s, 39) >>> 3);
long s16 = 2097151 & load_3(s, 42);
long s17 = 2097151 & (load_4(s, 44) >>> 5);
long s18 = 2097151 & (load_3(s, 47) >>> 2);
long s19 = 2097151 & (load_4(s, 49) >>> 7);
long s20 = 2097151 & (load_4(s, 52) >>> 4);
long s21 = 2097151 & (load_3(s, 55) >>> 1);
long s22 = 2097151 & (load_4(s, 57) >>> 6);
long s23 = (load_4(s, 60) >>> 3);
long carry0;
long carry1;
long carry2;
long carry3;
long carry4;
long carry5;
long carry6;
long carry7;
long carry8;
long carry9;
long carry10;
long carry11;
long carry12;
long carry13;
long carry14;
long carry15;
long carry16;
s11 += s23 * 666643;
s12 += s23 * 470296;
s13 += s23 * 654183;
s14 -= s23 * 997805;
s15 += s23 * 136657;
s16 -= s23 * 683901;
s23 = 0;
s10 += s22 * 666643;
s11 += s22 * 470296;
s12 += s22 * 654183;
s13 -= s22 * 997805;
s14 += s22 * 136657;
s15 -= s22 * 683901;
s22 = 0;
s9 += s21 * 666643;
s10 += s21 * 470296;
s11 += s21 * 654183;
s12 -= s21 * 997805;
s13 += s21 * 136657;
s14 -= s21 * 683901;
s21 = 0;
s8 += s20 * 666643;
s9 += s20 * 470296;
s10 += s20 * 654183;
s11 -= s20 * 997805;
s12 += s20 * 136657;
s13 -= s20 * 683901;
s20 = 0;
s7 += s19 * 666643;
s8 += s19 * 470296;
s9 += s19 * 654183;
s10 -= s19 * 997805;
s11 += s19 * 136657;
s12 -= s19 * 683901;
s19 = 0;
s6 += s18 * 666643;
s7 += s18 * 470296;
s8 += s18 * 654183;
s9 -= s18 * 997805;
s10 += s18 * 136657;
s11 -= s18 * 683901;
s18 = 0;
carry6 = (s6 + (1 << 20)) >> 21;
s7 += carry6;
s6 -= carry6 << 21;
carry8 = (s8 + (1 << 20)) >> 21;
s9 += carry8;
s8 -= carry8 << 21;
carry10 = (s10 + (1 << 20)) >> 21;
s11 += carry10;
s10 -= carry10 << 21;
carry12 = (s12 + (1 << 20)) >> 21;
s13 += carry12;
s12 -= carry12 << 21;
carry14 = (s14 + (1 << 20)) >> 21;
s15 += carry14;
s14 -= carry14 << 21;
carry16 = (s16 + (1 << 20)) >> 21;
s17 += carry16;
s16 -= carry16 << 21;
carry7 = (s7 + (1 << 20)) >> 21;
s8 += carry7;
s7 -= carry7 << 21;
carry9 = (s9 + (1 << 20)) >> 21;
s10 += carry9;
s9 -= carry9 << 21;
carry11 = (s11 + (1 << 20)) >> 21;
s12 += carry11;
s11 -= carry11 << 21;
carry13 = (s13 + (1 << 20)) >> 21;
s14 += carry13;
s13 -= carry13 << 21;
carry15 = (s15 + (1 << 20)) >> 21;
s16 += carry15;
s15 -= carry15 << 21;
s5 += s17 * 666643;
s6 += s17 * 470296;
s7 += s17 * 654183;
s8 -= s17 * 997805;
s9 += s17 * 136657;
s10 -= s17 * 683901;
s17 = 0;
s4 += s16 * 666643;
s5 += s16 * 470296;
s6 += s16 * 654183;
s7 -= s16 * 997805;
s8 += s16 * 136657;
s9 -= s16 * 683901;
s16 = 0;
s3 += s15 * 666643;
s4 += s15 * 470296;
s5 += s15 * 654183;
s6 -= s15 * 997805;
s7 += s15 * 136657;
s8 -= s15 * 683901;
s15 = 0;
s2 += s14 * 666643;
s3 += s14 * 470296;
s4 += s14 * 654183;
s5 -= s14 * 997805;
s6 += s14 * 136657;
s7 -= s14 * 683901;
s14 = 0;
s1 += s13 * 666643;
s2 += s13 * 470296;
s3 += s13 * 654183;
s4 -= s13 * 997805;
s5 += s13 * 136657;
s6 -= s13 * 683901;
s13 = 0;
s0 += s12 * 666643;
s1 += s12 * 470296;
s2 += s12 * 654183;
s3 -= s12 * 997805;
s4 += s12 * 136657;
s5 -= s12 * 683901;
s12 = 0;
carry0 = (s0 + (1 << 20)) >> 21;
s1 += carry0;
s0 -= carry0 << 21;
carry2 = (s2 + (1 << 20)) >> 21;
s3 += carry2;
s2 -= carry2 << 21;
carry4 = (s4 + (1 << 20)) >> 21;
s5 += carry4;
s4 -= carry4 << 21;
carry6 = (s6 + (1 << 20)) >> 21;
s7 += carry6;
s6 -= carry6 << 21;
carry8 = (s8 + (1 << 20)) >> 21;
s9 += carry8;
s8 -= carry8 << 21;
carry10 = (s10 + (1 << 20)) >> 21;
s11 += carry10;
s10 -= carry10 << 21;
carry1 = (s1 + (1 << 20)) >> 21;
s2 += carry1;
s1 -= carry1 << 21;
carry3 = (s3 + (1 << 20)) >> 21;
s4 += carry3;
s3 -= carry3 << 21;
carry5 = (s5 + (1 << 20)) >> 21;
s6 += carry5;
s5 -= carry5 << 21;
carry7 = (s7 + (1 << 20)) >> 21;
s8 += carry7;
s7 -= carry7 << 21;
carry9 = (s9 + (1 << 20)) >> 21;
s10 += carry9;
s9 -= carry9 << 21;
carry11 = (s11 + (1 << 20)) >> 21;
s12 += carry11;
s11 -= carry11 << 21;
s0 += s12 * 666643;
s1 += s12 * 470296;
s2 += s12 * 654183;
s3 -= s12 * 997805;
s4 += s12 * 136657;
s5 -= s12 * 683901;
s12 = 0;
carry0 = s0 >> 21;
s1 += carry0;
s0 -= carry0 << 21;
carry1 = s1 >> 21;
s2 += carry1;
s1 -= carry1 << 21;
carry2 = s2 >> 21;
s3 += carry2;
s2 -= carry2 << 21;
carry3 = s3 >> 21;
s4 += carry3;
s3 -= carry3 << 21;
carry4 = s4 >> 21;
s5 += carry4;
s4 -= carry4 << 21;
carry5 = s5 >> 21;
s6 += carry5;
s5 -= carry5 << 21;
carry6 = s6 >> 21;
s7 += carry6;
s6 -= carry6 << 21;
carry7 = s7 >> 21;
s8 += carry7;
s7 -= carry7 << 21;
carry8 = s8 >> 21;
s9 += carry8;
s8 -= carry8 << 21;
carry9 = s9 >> 21;
s10 += carry9;
s9 -= carry9 << 21;
carry10 = s10 >> 21;
s11 += carry10;
s10 -= carry10 << 21;
carry11 = s11 >> 21;
s12 += carry11;
s11 -= carry11 << 21;
s0 += s12 * 666643;
s1 += s12 * 470296;
s2 += s12 * 654183;
s3 -= s12 * 997805;
s4 += s12 * 136657;
s5 -= s12 * 683901;
s12 = 0;
carry0 = s0 >> 21;
s1 += carry0;
s0 -= carry0 << 21;
carry1 = s1 >> 21;
s2 += carry1;
s1 -= carry1 << 21;
carry2 = s2 >> 21;
s3 += carry2;
s2 -= carry2 << 21;
carry3 = s3 >> 21;
s4 += carry3;
s3 -= carry3 << 21;
carry4 = s4 >> 21;
s5 += carry4;
s4 -= carry4 << 21;
carry5 = s5 >> 21;
s6 += carry5;
s5 -= carry5 << 21;
carry6 = s6 >> 21;
s7 += carry6;
s6 -= carry6 << 21;
carry7 = s7 >> 21;
s8 += carry7;
s7 -= carry7 << 21;
carry8 = s8 >> 21;
s9 += carry8;
s8 -= carry8 << 21;
carry9 = s9 >> 21;
s10 += carry9;
s9 -= carry9 << 21;
carry10 = s10 >> 21;
s11 += carry10;
s10 -= carry10 << 21;
s[0] = (byte) (s0 >> 0);
s[1] = (byte) (s0 >> 8);
s[2] = (byte) ((s0 >> 16) | (s1 << 5));
s[3] = (byte) (s1 >> 3);
s[4] = (byte) (s1 >> 11);
s[5] = (byte) ((s1 >> 19) | (s2 << 2));
s[6] = (byte) (s2 >> 6);
s[7] = (byte) ((s2 >> 14) | (s3 << 7));
s[8] = (byte) (s3 >> 1);
s[9] = (byte) (s3 >> 9);
s[10] = (byte) ((s3 >> 17) | (s4 << 4));
s[11] = (byte) (s4 >> 4);
s[12] = (byte) (s4 >> 12);
s[13] = (byte) ((s4 >> 20) | (s5 << 1));
s[14] = (byte) (s5 >> 7);
s[15] = (byte) ((s5 >> 15) | (s6 << 6));
s[16] = (byte) (s6 >> 2);
s[17] = (byte) (s6 >> 10);
s[18] = (byte) ((s6 >> 18) | (s7 << 3));
s[19] = (byte) (s7 >> 5);
s[20] = (byte) (s7 >> 13);
s[21] = (byte) (s8 >> 0);
s[22] = (byte) (s8 >> 8);
s[23] = (byte) ((s8 >> 16) | (s9 << 5));
s[24] = (byte) (s9 >> 3);
s[25] = (byte) (s9 >> 11);
s[26] = (byte) ((s9 >> 19) | (s10 << 2));
s[27] = (byte) (s10 >> 6);
s[28] = (byte) ((s10 >> 14) | (s11 << 7));
s[29] = (byte) (s11 >> 1);
s[30] = (byte) (s11 >> 9);
s[31] = (byte) (s11 >> 17);
}
}
| |
/*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.primitives.impl;
import com.google.common.collect.Sets;
import net.kuujo.copycat.resource.internal.AbstractResource;
import net.kuujo.copycat.resource.internal.ResourceManager;
import net.kuujo.copycat.state.StateMachine;
import net.kuujo.copycat.state.internal.DefaultStateMachine;
import net.kuujo.copycat.util.concurrent.Futures;
import net.kuujo.copycat.util.function.TriConsumer;
import org.onlab.util.Match;
import org.onosproject.store.service.Versioned;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.function.Consumer;
import java.util.function.Supplier;
/**
* Default database.
*/
public class DefaultDatabase extends AbstractResource<Database> implements Database {
private final StateMachine<DatabaseState<String, byte[]>> stateMachine;
private DatabaseProxy<String, byte[]> proxy;
private final Set<Consumer<StateMachineUpdate>> consumers = Sets.newCopyOnWriteArraySet();
private final TriConsumer<String, Object, Object> watcher = new InternalStateMachineWatcher();
@SuppressWarnings({"unchecked", "rawtypes"})
public DefaultDatabase(ResourceManager context) {
    super(context);
    // Create the replicated state machine that backs this database, loading
    // classes through this class's own loader.
    this.stateMachine = new DefaultStateMachine(context,
            DatabaseState.class,
            DefaultDatabaseState.class,
            DefaultDatabase.class.getClassLoader());
    // Register the update watcher when the state machine starts...
    this.stateMachine.addStartupTask(() -> {
        stateMachine.registerWatcher(watcher);
        return CompletableFuture.completedFuture(null);
    });
    // ...and unregister it again on shutdown.
    this.stateMachine.addShutdownTask(() -> {
        stateMachine.unregisterWatcher(watcher);
        return CompletableFuture.completedFuture(null);
    });
}
/**
 * Guards an operation against use of a closed database.
 *
 * @param supplier invoked to produce the result when the database is open
 * @param <T> the future result type
 * @return the supplier's future, or a future failed with an
 *         {@code IllegalStateException} when the database is closed
 */
protected <T> CompletableFuture<T> checkOpen(Supplier<CompletableFuture<T>> supplier) {
    return proxy != null
            ? supplier.get()
            : Futures.exceptionalFuture(new IllegalStateException("Database closed"));
}
// Metadata queries, delegated to the backing proxy and guarded by checkOpen.
@Override
public CompletableFuture<Set<String>> maps() {
    return checkOpen(() -> proxy.maps());
}

@Override
public CompletableFuture<Map<String, Long>> counters() {
    return checkOpen(() -> proxy.counters());
}
// Map operations, delegated to the backing proxy and guarded by checkOpen.
@Override
public CompletableFuture<Integer> mapSize(String mapName) {
    return checkOpen(() -> proxy.mapSize(mapName));
}

@Override
public CompletableFuture<Boolean> mapIsEmpty(String mapName) {
    return checkOpen(() -> proxy.mapIsEmpty(mapName));
}

@Override
public CompletableFuture<Boolean> mapContainsKey(String mapName, String key) {
    return checkOpen(() -> proxy.mapContainsKey(mapName, key));
}

@Override
public CompletableFuture<Boolean> mapContainsValue(String mapName, byte[] value) {
    return checkOpen(() -> proxy.mapContainsValue(mapName, value));
}

@Override
public CompletableFuture<Versioned<byte[]>> mapGet(String mapName, String key) {
    return checkOpen(() -> proxy.mapGet(mapName, key));
}

// Conditional update: the value/version matchers express the precondition.
@Override
public CompletableFuture<Result<UpdateResult<String, byte[]>>> mapUpdate(
        String mapName, String key, Match<byte[]> valueMatch, Match<Long> versionMatch, byte[] value) {
    return checkOpen(() -> proxy.mapUpdate(mapName, key, valueMatch, versionMatch, value));
}

@Override
public CompletableFuture<Result<Void>> mapClear(String mapName) {
    return checkOpen(() -> proxy.mapClear(mapName));
}

@Override
public CompletableFuture<Set<String>> mapKeySet(String mapName) {
    return checkOpen(() -> proxy.mapKeySet(mapName));
}

@Override
public CompletableFuture<Collection<Versioned<byte[]>>> mapValues(String mapName) {
    return checkOpen(() -> proxy.mapValues(mapName));
}

@Override
public CompletableFuture<Set<Map.Entry<String, Versioned<byte[]>>>> mapEntrySet(String mapName) {
    return checkOpen(() -> proxy.mapEntrySet(mapName));
}
@Override
public CompletableFuture<Long> counterGet(String counterName) {
return checkOpen(() -> proxy.counterGet(counterName));
}
@Override
public CompletableFuture<Long> counterAddAndGet(String counterName, long delta) {
    // Adds delta and completes with the updated counter value.
    return checkOpen(() -> proxy.counterAddAndGet(counterName, delta));
}
@Override
public CompletableFuture<Long> counterGetAndAdd(String counterName, long delta) {
    // Adds delta and completes with the counter value prior to the addition.
    return checkOpen(() -> proxy.counterGetAndAdd(counterName, delta));
}
@Override
public CompletableFuture<Void> counterSet(String counterName, long value) {
    // Unconditionally sets the named counter to the given value.
    return checkOpen(() -> proxy.counterSet(counterName, value));
}
@Override
public CompletableFuture<Boolean> counterCompareAndSet(String counterName, long expectedValue, long update) {
    // Compare-and-set on the named counter; true when the swap was applied.
    return checkOpen(() -> proxy.counterCompareAndSet(counterName, expectedValue, update));
}
@Override
public CompletableFuture<Long> queueSize(String queueName) {
    // Number of entries in the named queue.
    return checkOpen(() -> proxy.queueSize(queueName));
}
@Override
public CompletableFuture<Void> queuePush(String queueName, byte[] entry) {
    // Appends an entry to the named queue.
    return checkOpen(() -> proxy.queuePush(queueName, entry));
}
@Override
public CompletableFuture<byte[]> queuePop(String queueName) {
    // Removes and returns the head entry of the named queue.
    return checkOpen(() -> proxy.queuePop(queueName));
}
@Override
public CompletableFuture<byte[]> queuePeek(String queueName) {
    // Returns the head entry of the named queue without removing it.
    return checkOpen(() -> proxy.queuePeek(queueName));
}
@Override
public CompletableFuture<CommitResponse> prepareAndCommit(Transaction transaction) {
    // Single-shot prepare+commit of the transaction, delegated to the proxy.
    return checkOpen(() -> proxy.prepareAndCommit(transaction));
}
@Override
public CompletableFuture<Boolean> prepare(Transaction transaction) {
    // Prepare phase of the two-phase commit; true when the transaction can commit.
    return checkOpen(() -> proxy.prepare(transaction));
}
@Override
public CompletableFuture<CommitResponse> commit(Transaction transaction) {
    // Commit phase of the two-phase commit.
    return checkOpen(() -> proxy.commit(transaction));
}
@Override
public CompletableFuture<Boolean> rollback(Transaction transaction) {
    // Rolls back a previously prepared transaction.
    return checkOpen(() -> proxy.rollback(transaction));
}
/**
 * Opens the database: runs the registered startup tasks, opens the backing
 * state machine, then creates the client proxy used by all data operations.
 *
 * @return a future completed with this database once it is ready for use
 */
@Override
@SuppressWarnings("unchecked")
public synchronized CompletableFuture<Database> open() {
    return runStartupTasks()
        .thenCompose(v -> stateMachine.open())
        .thenRun(() -> {
            this.proxy = stateMachine.createProxy(DatabaseProxy.class, this.getClass().getClassLoader());
        })
        // Complete with this instance rather than null: the declared return type
        // is CompletableFuture<Database>, so callers should receive the opened
        // database to chain on, not a null reference.
        .thenApply(v -> this);
}
@Override
public synchronized CompletableFuture<Void> close() {
    // Null the proxy first so concurrent operations fail fast through
    // checkOpen ("Database closed") while the state machine shuts down.
    proxy = null;
    return stateMachine.close()
        .thenCompose(v -> runShutdownTasks());
}
@Override
public int hashCode() {
    // Consistent with equals(): identity is derived from the database name only.
    return name().hashCode();
}
/**
 * Two databases are equal when they are both {@code Database} instances
 * sharing the same name (consistent with {@link #hashCode()}).
 */
@Override
public boolean equals(Object other) {
    return (other instanceof Database) && name().equals(((Database) other).name());
}
@Override
public void registerConsumer(Consumer<StateMachineUpdate> consumer) {
    // Registers a listener that will receive state-machine update notifications.
    consumers.add(consumer);
}
@Override
public void unregisterConsumer(Consumer<StateMachineUpdate> consumer) {
    // Removes a previously registered update listener.
    consumers.remove(consumer);
}
/**
 * Bridges raw state-machine notifications (operation name, input, output)
 * to all registered {@code StateMachineUpdate} consumers.
 */
private class InternalStateMachineWatcher implements TriConsumer<String, Object, Object> {
    @Override
    public void accept(String name, Object input, Object output) {
        // Build the update once, then fan it out to every registered listener.
        final StateMachineUpdate update = new StateMachineUpdate(name, input, output);
        for (Consumer<StateMachineUpdate> listener : consumers) {
            listener.accept(update);
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.tools;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.UnknownHostException;
import java.util.*;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import org.apache.commons.cli.*;
import com.datastax.driver.core.AuthProvider;
import com.datastax.driver.core.PlainTextAuthProvider;
import com.datastax.driver.core.SSLOptions;
import javax.net.ssl.SSLContext;
import org.apache.cassandra.config.*;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.SSTableLoader;
import org.apache.cassandra.security.SSLFactory;
import org.apache.cassandra.streaming.*;
import org.apache.cassandra.utils.JVMStabilityInspector;
import org.apache.cassandra.utils.NativeSSTableLoaderClient;
import org.apache.cassandra.utils.OutputHandler;
public class BulkLoader
{
// Tool name shown in usage output.
private static final String TOOL_NAME = "sstableloader";
// General CLI option names.
private static final String VERBOSE_OPTION = "verbose";
private static final String HELP_OPTION = "help";
private static final String NOPROGRESS_OPTION = "no-progress";
// Cluster connection / authentication options.
private static final String IGNORE_NODES_OPTION = "ignore";
private static final String INITIAL_HOST_ADDRESS_OPTION = "nodes";
private static final String NATIVE_PORT_OPTION = "port";
private static final String USER_OPTION = "username";
private static final String PASSWD_OPTION = "password";
private static final String AUTH_PROVIDER_OPTION = "auth-provider";
// Streaming throughput limits (Mbits/s).
private static final String THROTTLE_MBITS = "throttle";
private static final String INTER_DC_THROTTLE_MBITS = "inter-dc-throttle";
/* client encryption options */
private static final String SSL_TRUSTSTORE = "truststore";
private static final String SSL_TRUSTSTORE_PW = "truststore-password";
private static final String SSL_KEYSTORE = "keystore";
private static final String SSL_KEYSTORE_PW = "keystore-password";
private static final String SSL_PROTOCOL = "ssl-protocol";
private static final String SSL_ALGORITHM = "ssl-alg";
private static final String SSL_STORE_TYPE = "store-type";
private static final String SSL_CIPHER_SUITES = "ssl-ciphers";
// Miscellaneous tuning / configuration options.
private static final String CONNECTIONS_PER_HOST = "connections-per-host";
private static final String CONFIG_PATH = "conf-path";
/**
 * Entry point for sstableloader: parses/validates options, streams the
 * sstables in the given directory to the cluster, and exits with 0 on
 * success or 1 on any failure.
 */
public static void main(String args[])
{
    Config.setClientMode(true);
    LoaderOptions options = LoaderOptions.parseArgs(args).validateArguments();
    OutputHandler handler = new OutputHandler.SystemOutput(options.verbose, options.debug);
    SSTableLoader loader = new SSTableLoader(
            options.directory,
            new ExternalClient(
                    options.hosts,
                    options.nativePort,
                    options.authProvider,
                    options.storagePort,
                    options.sslStoragePort,
                    options.serverEncOptions,
                    buildSSLOptions((EncryptionOptions.ClientEncryptionOptions)options.encOptions)),
            handler,
            options.connectionsPerHost);
    // Apply throttle settings globally before streaming starts.
    DatabaseDescriptor.setStreamThroughputOutboundMegabitsPerSec(options.throttle);
    DatabaseDescriptor.setInterDCStreamThroughputOutboundMegabitsPerSec(options.interDcThrottle);
    StreamResultFuture future = null;
    ProgressIndicator indicator = new ProgressIndicator();
    try
    {
        // The progress indicator is only attached when progress output is wanted.
        if (options.noProgress)
        {
            future = loader.stream(options.ignores);
        }
        else
        {
            future = loader.stream(options.ignores, indicator);
        }
    }
    catch (Exception e)
    {
        JVMStabilityInspector.inspectThrowable(e);
        System.err.println(e.getMessage());
        if (e.getCause() != null)
            System.err.println(e.getCause());
        e.printStackTrace(System.err);
        System.exit(1);
    }
    try
    {
        // Block until all streaming sessions complete (or fail).
        future.get();
        if (!options.noProgress)
            indicator.printSummary(options.connectionsPerHost);
        // Give sockets time to gracefully close
        Thread.sleep(1000);
        System.exit(0); // We need that to stop non daemonized threads
    }
    catch (Exception e)
    {
        System.err.println("Streaming to the following hosts failed:");
        System.err.println(loader.getFailedHosts());
        e.printStackTrace(System.err);
        System.exit(1);
    }
}
// Stream event handler that renders per-host/per-session transfer progress
// on a single console line and prints a final summary.
static class ProgressIndicator implements StreamEventHandler
{
    private long start;          // nanoTime when the indicator was created
    private long lastProgress;   // total bytes reported at the previous event
    private long lastTime;       // nanoTime of the previous event
    private int peak = 0;        // highest average transfer rate seen (MB/s)
    private int totalFiles = 0;  // total files to send, accumulated once
    private final Multimap<InetAddress, SessionInfo> sessionsByHost = HashMultimap.create();
    public ProgressIndicator()
    {
        start = lastTime = System.nanoTime();
    }
    public void onSuccess(StreamState finalState)
    {
    }
    public void onFailure(Throwable t)
    {
    }
    // Synchronized: events may arrive from multiple streaming sessions.
    public synchronized void handleStreamEvent(StreamEvent event)
    {
        if (event.eventType == StreamEvent.Type.STREAM_PREPARED)
        {
            // Track the new session so its progress is included from now on.
            SessionInfo session = ((StreamEvent.SessionPreparedEvent) event).session;
            sessionsByHost.put(session.peer, session);
        }
        else if (event.eventType == StreamEvent.Type.FILE_PROGRESS || event.eventType == StreamEvent.Type.STREAM_COMPLETE)
        {
            ProgressInfo progressInfo = null;
            if (event.eventType == StreamEvent.Type.FILE_PROGRESS)
            {
                progressInfo = ((StreamEvent.ProgressEvent) event).progress;
            }
            long time = System.nanoTime();
            long deltaTime = time - lastTime;
            // "\r" rewrites the same console line on each update.
            StringBuilder sb = new StringBuilder();
            sb.append("\rprogress: ");
            long totalProgress = 0;
            long totalSize = 0;
            // totalFiles is only accumulated on the first pass through the sessions.
            boolean updateTotalFiles = totalFiles == 0;
            // recalculate progress across all sessions in all hosts and display
            for (InetAddress peer : sessionsByHost.keySet())
            {
                sb.append("[").append(peer).append("]");
                for (SessionInfo session : sessionsByHost.get(peer))
                {
                    long size = session.getTotalSizeToSend();
                    long current = 0;
                    int completed = 0;
                    // Apply the incoming progress update to the session it belongs to.
                    if (progressInfo != null && session.peer.equals(progressInfo.peer) && (session.sessionIndex == progressInfo.sessionIndex))
                    {
                        session.updateProgress(progressInfo);
                    }
                    for (ProgressInfo progress : session.getSendingFiles())
                    {
                        if (progress.isCompleted())
                            completed++;
                        current += progress.currentBytes;
                    }
                    totalProgress += current;
                    totalSize += size;
                    sb.append(session.sessionIndex).append(":");
                    sb.append(completed).append("/").append(session.getTotalFilesToSend());
                    // Guard against division by zero for empty sessions.
                    sb.append(" ").append(String.format("%-3d", size == 0 ? 100L : current * 100L / size)).append("% ");
                    if (updateTotalFiles)
                        totalFiles += session.getTotalFilesToSend();
                }
            }
            lastTime = time;
            long deltaProgress = totalProgress - lastProgress;
            lastProgress = totalProgress;
            sb.append("total: ").append(totalSize == 0 ? 100L : totalProgress * 100L / totalSize).append("% ");
            // Instantaneous rate since the previous event, then running average/peak.
            sb.append(String.format("%-3d", mbPerSec(deltaProgress, deltaTime))).append("MB/s");
            int average = mbPerSec(totalProgress, (time - start));
            if (average > peak)
                peak = average;
            sb.append("(avg: ").append(average).append(" MB/s)");
            System.out.print(sb.toString());
        }
    }
    // Converts a byte count over a nanosecond interval to MB/s (MB = 1024*1024 bytes).
    private int mbPerSec(long bytes, long timeInNano)
    {
        double bytesPerNano = ((double)bytes) / timeInNano;
        return (int)((bytesPerNano * 1000 * 1000 * 1000) / (1024 * 1024));
    }
    // Prints final transfer statistics after streaming has completed.
    private void printSummary(int connectionsPerHost)
    {
        long end = System.nanoTime();
        long durationMS = ((end - start) / (1000000));
        int average = mbPerSec(lastProgress, (end - start));
        StringBuilder sb = new StringBuilder();
        sb.append("\nSummary statistics: \n");
        sb.append(String.format("   %-30s: %-10d%n", "Connections per host: ", connectionsPerHost));
        sb.append(String.format("   %-30s: %-10d%n", "Total files transferred: ", totalFiles));
        sb.append(String.format("   %-30s: %-10d%n", "Total bytes transferred: ", lastProgress));
        sb.append(String.format("   %-30s: %-10d%n", "Total duration (ms): ", durationMS));
        sb.append(String.format("   %-30s: %-10d%n", "Average transfer rate (MB/s): ", + average));
        sb.append(String.format("   %-30s: %-10d%n", "Peak transfer rate (MB/s): ", + peak));
        System.out.println(sb.toString());
    }
}
/**
 * Builds the driver SSLOptions from the client encryption settings, or
 * returns {@code null} when client-to-node encryption is disabled.
 */
private static SSLOptions buildSSLOptions(EncryptionOptions.ClientEncryptionOptions clientEncryptionOptions)
{
    if (!clientEncryptionOptions.enabled)
        return null;
    final SSLContext context;
    try
    {
        context = SSLFactory.createSSLContext(clientEncryptionOptions, true);
    }
    catch (IOException e)
    {
        // Without a valid SSL context the tool cannot connect securely; abort.
        throw new RuntimeException("Could not create SSL Context.", e);
    }
    return new SSLOptions(context, clientEncryptionOptions.cipher_suites);
}
// Loader client that carries the storage/SSL-storage ports and server
// encryption options needed to build the streaming connection factory.
static class ExternalClient extends NativeSSTableLoaderClient
{
    private final int storagePort;
    private final int sslStoragePort;
    private final EncryptionOptions.ServerEncryptionOptions serverEncOptions;
    public ExternalClient(Set<InetAddress> hosts,
                          int port,
                          AuthProvider authProvider,
                          int storagePort,
                          int sslStoragePort,
                          EncryptionOptions.ServerEncryptionOptions serverEncryptionOptions,
                          SSLOptions sslOptions)
    {
        super(hosts, port, authProvider, sslOptions);
        this.storagePort = storagePort;
        this.sslStoragePort = sslStoragePort;
        this.serverEncOptions = serverEncryptionOptions;
    }
    @Override
    public StreamConnectionFactory getConnectionFactory()
    {
        // Connections for bulk load use the storage ports, not the native port.
        return new BulkLoadConnectionFactory(storagePort, sslStoragePort, serverEncOptions, false);
    }
}
static class LoaderOptions
{
public final File directory;       // sstable directory to stream from
public boolean debug;
public boolean verbose;
public boolean noProgress;
public int nativePort = 9042;      // default CQL native transport port
public String user;
public String passwd;
public String authProviderName;    // optional custom AuthProvider class name
public AuthProvider authProvider;
public int throttle = 0;           // 0 = unlimited (Mbits/s)
public int interDcThrottle = 0;    // 0 = unlimited (Mbits/s)
public int storagePort;
public int sslStoragePort;
public EncryptionOptions encOptions = new EncryptionOptions.ClientEncryptionOptions();
public int connectionsPerHost = 1;
public EncryptionOptions.ServerEncryptionOptions serverEncOptions = new EncryptionOptions.ServerEncryptionOptions();
public final Set<InetAddress> hosts = new HashSet<>();    // initial contact points
public final Set<InetAddress> ignores = new HashSet<>();  // nodes excluded from streaming
// The sstable directory is the only mandatory argument.
LoaderOptions(File directory)
{
    this.directory = directory;
}
/**
 * Parses command-line arguments into a LoaderOptions instance.
 * Exits the JVM (via printUsage/errorMsg) on help, missing/invalid
 * arguments, or unresolvable hosts. Values from an optional cassandra.yaml
 * (-f) are applied first and may then be overridden by command-line flags.
 */
public static LoaderOptions parseArgs(String cmdArgs[])
{
    CommandLineParser parser = new GnuParser();
    CmdLineOptions options = getCmdLineOptions();
    try
    {
        CommandLine cmd = parser.parse(options, cmdArgs, false);
        if (cmd.hasOption(HELP_OPTION))
        {
            printUsage(options);
            System.exit(0);
        }
        // Exactly one positional argument is expected: the sstable directory.
        String[] args = cmd.getArgs();
        if (args.length == 0)
        {
            System.err.println("Missing sstable directory argument");
            printUsage(options);
            System.exit(1);
        }
        if (args.length > 1)
        {
            System.err.println("Too many arguments");
            printUsage(options);
            System.exit(1);
        }
        String dirname = args[0];
        File dir = new File(dirname);
        if (!dir.exists())
            errorMsg("Unknown directory: " + dirname, options);
        if (!dir.isDirectory())
            errorMsg(dirname + " is not a directory", options);
        LoaderOptions opts = new LoaderOptions(dir);
        opts.verbose = cmd.hasOption(VERBOSE_OPTION);
        opts.noProgress = cmd.hasOption(NOPROGRESS_OPTION);
        if (cmd.hasOption(NATIVE_PORT_OPTION))
            opts.nativePort = Integer.parseInt(cmd.getOptionValue(NATIVE_PORT_OPTION));
        if (cmd.hasOption(USER_OPTION))
            opts.user = cmd.getOptionValue(USER_OPTION);
        if (cmd.hasOption(PASSWD_OPTION))
            opts.passwd = cmd.getOptionValue(PASSWD_OPTION);
        if (cmd.hasOption(AUTH_PROVIDER_OPTION))
            opts.authProviderName = cmd.getOptionValue(AUTH_PROVIDER_OPTION);
        // Initial contact hosts (-d) are mandatory; comma separated.
        if (cmd.hasOption(INITIAL_HOST_ADDRESS_OPTION))
        {
            String[] nodes = cmd.getOptionValue(INITIAL_HOST_ADDRESS_OPTION).split(",");
            try
            {
                for (String node : nodes)
                {
                    opts.hosts.add(InetAddress.getByName(node.trim()));
                }
            }
            catch (UnknownHostException e)
            {
                errorMsg("Unknown host: " + e.getMessage(), options);
            }
        }
        else
        {
            System.err.println("Initial hosts must be specified (-d)");
            printUsage(options);
            System.exit(1);
        }
        // Optional comma-separated list of nodes to exclude from streaming.
        if (cmd.hasOption(IGNORE_NODES_OPTION))
        {
            String[] nodes = cmd.getOptionValue(IGNORE_NODES_OPTION).split(",");
            try
            {
                for (String node : nodes)
                {
                    opts.ignores.add(InetAddress.getByName(node.trim()));
                }
            }
            catch (UnknownHostException e)
            {
                errorMsg("Unknown host: " + e.getMessage(), options);
            }
        }
        if (cmd.hasOption(CONNECTIONS_PER_HOST))
            opts.connectionsPerHost = Integer.parseInt(cmd.getOptionValue(CONNECTIONS_PER_HOST));
        // try to load config file first, so that values can be rewritten with other option values.
        // otherwise use default config.
        Config config;
        if (cmd.hasOption(CONFIG_PATH))
        {
            File configFile = new File(cmd.getOptionValue(CONFIG_PATH));
            if (!configFile.exists())
            {
                errorMsg("Config file not found", options);
            }
            config = new YamlConfigurationLoader().loadConfig(configFile.toURI().toURL());
        }
        else
        {
            config = new Config();
        }
        // Seed ports, throttles and encryption options from the yaml config...
        opts.storagePort = config.storage_port;
        opts.sslStoragePort = config.ssl_storage_port;
        opts.throttle = config.stream_throughput_outbound_megabits_per_sec;
        opts.interDcThrottle = config.inter_dc_stream_throughput_outbound_megabits_per_sec;
        opts.encOptions = config.client_encryption_options;
        opts.serverEncOptions = config.server_encryption_options;
        // ...then let explicit command-line flags override them.
        if (cmd.hasOption(THROTTLE_MBITS))
        {
            opts.throttle = Integer.parseInt(cmd.getOptionValue(THROTTLE_MBITS));
        }
        if (cmd.hasOption(INTER_DC_THROTTLE_MBITS))
        {
            opts.interDcThrottle = Integer.parseInt(cmd.getOptionValue(INTER_DC_THROTTLE_MBITS));
        }
        if (cmd.hasOption(SSL_TRUSTSTORE))
        {
            opts.encOptions.truststore = cmd.getOptionValue(SSL_TRUSTSTORE);
        }
        if (cmd.hasOption(SSL_TRUSTSTORE_PW))
        {
            opts.encOptions.truststore_password = cmd.getOptionValue(SSL_TRUSTSTORE_PW);
        }
        if (cmd.hasOption(SSL_KEYSTORE))
        {
            opts.encOptions.keystore = cmd.getOptionValue(SSL_KEYSTORE);
            // if a keystore was provided, lets assume we'll need to use it
            opts.encOptions.require_client_auth = true;
        }
        if (cmd.hasOption(SSL_KEYSTORE_PW))
        {
            opts.encOptions.keystore_password = cmd.getOptionValue(SSL_KEYSTORE_PW);
        }
        if (cmd.hasOption(SSL_PROTOCOL))
        {
            opts.encOptions.protocol = cmd.getOptionValue(SSL_PROTOCOL);
        }
        if (cmd.hasOption(SSL_ALGORITHM))
        {
            opts.encOptions.algorithm = cmd.getOptionValue(SSL_ALGORITHM);
        }
        if (cmd.hasOption(SSL_STORE_TYPE))
        {
            opts.encOptions.store_type = cmd.getOptionValue(SSL_STORE_TYPE);
        }
        if (cmd.hasOption(SSL_CIPHER_SUITES))
        {
            opts.encOptions.cipher_suites = cmd.getOptionValue(SSL_CIPHER_SUITES).split(",");
        }
        return opts;
    }
    catch (ParseException | ConfigurationException | MalformedURLException e)
    {
        // errorMsg calls System.exit(1); the return is never reached.
        errorMsg(e.getMessage(), options);
        return null;
    }
}
/**
 * Validates the parsed options and wires up the AuthProvider when
 * credentials and/or a custom provider class were supplied.
 * Exits the JVM (via errorMsg) on invalid combinations.
 *
 * @return this LoaderOptions instance, for chaining
 */
public LoaderOptions validateArguments()
{
    // Both username and password need to be provided
    if ((user != null) != (passwd != null))
        errorMsg("Username and password must both be provided", getCmdLineOptions());
    if (user != null)
    {
        // Support for 3rd party auth providers that support plain text credentials.
        // In this case the auth provider must provide a constructor of the form:
        //
        // public MyAuthProvider(String username, String password)
        if (authProviderName != null)
        {
            try
            {
                // Use parameterized Class<?>/Constructor<?> instead of raw types.
                Class<?> authProviderClass = Class.forName(authProviderName);
                Constructor<?> constructor = authProviderClass.getConstructor(String.class, String.class);
                authProvider = (AuthProvider) constructor.newInstance(user, passwd);
            }
            catch (ClassNotFoundException e)
            {
                errorMsg("Unknown auth provider: " + e.getMessage(), getCmdLineOptions());
            }
            catch (NoSuchMethodException e)
            {
                errorMsg("Auth provider does not support plain text credentials: " + e.getMessage(), getCmdLineOptions());
            }
            catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e)
            {
                errorMsg("Could not create auth provider with plain text credentials: " + e.getMessage(), getCmdLineOptions());
            }
        }
        else
        {
            // If a 3rd party auth provider wasn't provided use the driver plain text provider
            authProvider = new PlainTextAuthProvider(user, passwd);
        }
    }
    // Alternate support for 3rd party auth providers that don't use plain text credentials.
    // In this case the auth provider must provide a nullary constructor of the form:
    //
    // public MyAuthProvider()
    else if (authProviderName != null)
    {
        try
        {
            authProvider = (AuthProvider) Class.forName(authProviderName).newInstance();
        }
        catch (ClassNotFoundException | InstantiationException | IllegalAccessException e)
        {
            // ": " separator added for consistency with the message in the
            // plain-text-credentials branch above.
            errorMsg("Unknown auth provider: " + e.getMessage(), getCmdLineOptions());
        }
    }
    return this;
}
// Prints the error message and usage help, then terminates with exit code 1.
private static void errorMsg(String msg, CmdLineOptions options)
{
    System.err.println(msg);
    printUsage(options);
    System.exit(1);
}
// Declares all command-line options understood by sstableloader.
private static CmdLineOptions getCmdLineOptions()
{
    CmdLineOptions options = new CmdLineOptions();
    options.addOption("v",  VERBOSE_OPTION,      "verbose output");
    options.addOption("h",  HELP_OPTION,         "display this help message");
    options.addOption(null, NOPROGRESS_OPTION,   "don't display progress");
    options.addOption("i",  IGNORE_NODES_OPTION, "NODES", "don't stream to this (comma separated) list of nodes");
    options.addOption("d",  INITIAL_HOST_ADDRESS_OPTION, "initial hosts", "Required. try to connect to these hosts (comma separated) initially for ring information");
    options.addOption("p",  NATIVE_PORT_OPTION, "rpc port", "port used for native connection (default 9042)");
    options.addOption("t",  THROTTLE_MBITS, "throttle", "throttle speed in Mbits (default unlimited)");
    options.addOption("idct",  INTER_DC_THROTTLE_MBITS, "inter-dc-throttle", "inter-datacenter throttle speed in Mbits (default unlimited)");
    options.addOption("u",  USER_OPTION, "username", "username for cassandra authentication");
    options.addOption("pw", PASSWD_OPTION, "password", "password for cassandra authentication");
    options.addOption("ap", AUTH_PROVIDER_OPTION, "auth provider", "custom AuthProvider class name for cassandra authentication");
    options.addOption("cph", CONNECTIONS_PER_HOST, "connectionsPerHost", "number of concurrent connections-per-host.");
    // ssl connection-related options
    options.addOption("ts", SSL_TRUSTSTORE, "TRUSTSTORE", "Client SSL: full path to truststore");
    options.addOption("tspw", SSL_TRUSTSTORE_PW, "TRUSTSTORE-PASSWORD", "Client SSL: password of the truststore");
    options.addOption("ks", SSL_KEYSTORE, "KEYSTORE", "Client SSL: full path to keystore");
    options.addOption("kspw", SSL_KEYSTORE_PW, "KEYSTORE-PASSWORD", "Client SSL: password of the keystore");
    options.addOption("prtcl", SSL_PROTOCOL, "PROTOCOL", "Client SSL: connections protocol to use (default: TLS)");
    options.addOption("alg", SSL_ALGORITHM, "ALGORITHM", "Client SSL: algorithm (default: SunX509)");
    options.addOption("st", SSL_STORE_TYPE, "STORE-TYPE", "Client SSL: type of store");
    options.addOption("ciphers", SSL_CIPHER_SUITES, "CIPHER-SUITES", "Client SSL: comma-separated list of encryption suites to use");
    options.addOption("f", CONFIG_PATH, "path to config file", "cassandra.yaml file path for streaming throughput and client/server SSL.");
    return options;
}
/**
 * Prints the tool's usage/help text via commons-cli's HelpFormatter.
 *
 * @param options the supported command-line options
 */
public static void printUsage(Options options)
{
    String usage = String.format("%s [options] <dir_path>", TOOL_NAME);
    // Fix: trailing space added after "cluster." — the two literals are
    // concatenated, and the original rendered "...configured cluster.The parent...".
    String header = System.lineSeparator() +
                    "Bulk load the sstables found in the directory <dir_path> to the configured cluster. " +
                    "The parent directories of <dir_path> are used as the target keyspace/table name. " +
                    "So for instance, to load an sstable named Standard1-g-1-Data.db into Keyspace1/Standard1, " +
                    "you will need to have the files Standard1-g-1-Data.db and Standard1-g-1-Index.db into a directory /path/to/Keyspace1/Standard1/.";
    String footer = System.lineSeparator() +
                    "You can provide cassandra.yaml file with -f command line option to set up streaming throughput, client and server encryption options. " +
                    "Only stream_throughput_outbound_megabits_per_sec, inter_dc_stream_throughput_outbound_megabits_per_sec, server_encryption_options and client_encryption_options are read from yaml. " +
                    "You can override options read from cassandra.yaml with corresponding command line options.";
    new HelpFormatter().printHelp(usage, header, options, footer);
}
}
/**
 * Thin extension of commons-cli {@link Options} adding convenience
 * overloads for declaring options with and without an argument.
 */
public static class CmdLineOptions extends Options
{
    /**
     * Add option with argument and argument name
     * @param opt shortcut for option name
     * @param longOpt complete option name
     * @param argName argument name
     * @param description description of the option
     * @return updated Options object
     */
    public Options addOption(String opt, String longOpt, String argName, String description)
    {
        Option withArg = new Option(opt, longOpt, true, description);
        withArg.setArgName(argName);
        return addOption(withArg);
    }

    /**
     * Add option without argument
     * @param opt shortcut for option name
     * @param longOpt complete option name
     * @param description description of the option
     * @return updated Options object
     */
    public Options addOption(String opt, String longOpt, String description)
    {
        Option noArg = new Option(opt, longOpt, false, description);
        return addOption(noArg);
    }
}
}
| |
package com.twu.biblioteca.app;
import com.twu.biblioteca.app.Library;
import com.twu.biblioteca.domain.Book;
import com.twu.biblioteca.domain.Customer;
import com.twu.biblioteca.exceptions.CustomerRequiredException;
import com.twu.biblioteca.exceptions.LibraryItemNotAvailableException;
import com.twu.biblioteca.exceptions.LibraryItemNotCheckedOutException;
import com.twu.biblioteca.exceptions.LibraryItemNotFoundException;
import org.hamcrest.CoreMatchers;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;
/**
 * Unit tests for {@link Library}: constructor validation, listing available
 * items, checkout/return (by item and by title), title lookup, and item-type
 * metadata. Books and the customer are Mockito mocks.
 */
public class LibraryTests {
    private Library<Book> library;
    @Mock
    private Book greatExpectations;
    @Mock
    private Book pickwickPapers;
    @Mock
    private Book bleakHouse;   // never added to the library: used as an "unknown item"
    private Set<Book> books;
    @Mock
    private Customer customer;
    @Before
    public void setup() {
        initMocks(this);
        // Stub titles and real compareTo so the library can sort/find the mocks.
        when(greatExpectations.getTitle()).thenReturn("Great Expectations");
        when(greatExpectations.compareTo(any(Book.class))).thenCallRealMethod();
        when(pickwickPapers.getTitle()).thenReturn("The Pickwick Papers");
        when(pickwickPapers.compareTo(any(Book.class))).thenCallRealMethod();
        books = new HashSet<>();
        books.add(greatExpectations);
        books.add(pickwickPapers);
        library = new Library<>(books, Book.class);
    }
    @Test(expected = IllegalArgumentException.class)
    public void testLibraryConstructorDisallowsNullItems() {
        new Library<>(null, Book.class);
    }
    @Test(expected = IllegalArgumentException.class)
    public void testLibraryConstructorDisallowsNullItemsClass() {
        new Library<>(books, null);
    }
    @Test(expected = IllegalArgumentException.class)
    public void testLibraryConstructorDisallowsEmptyItems() {
        new Library(new HashSet<>(), Book.class);
    }
    @Test
    public void testGetListOfItems() {
        when(greatExpectations.isAvailable()).thenReturn(true);
        when(pickwickPapers.isAvailable()).thenReturn(true);
        assertThat(library.getItems(), is(Arrays.asList(greatExpectations, pickwickPapers)));
    }
    @Test
    public void testVerifyItemExists() throws LibraryItemNotFoundException {
        library.verifyItemExists(greatExpectations);
    }
    @Test(expected = LibraryItemNotFoundException.class)
    public void testVerifyItemExistsThrowsExceptionIfItDoesntExist() throws LibraryItemNotFoundException {
        library.verifyItemExists(mock(Book.class));
    }
    @Test
    public void testCheckoutItemThatExistsAndIsAvailable() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, CustomerRequiredException {
        library.checkoutItem(greatExpectations, customer);
        verify(greatExpectations, times(1)).checkOut(customer);
    }
    @Test(expected = LibraryItemNotAvailableException.class)
    public void testCheckoutItemThatExistsButIsUnavailable() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, CustomerRequiredException {
        // Simulate an already-checked-out book: checkOut throws.
        doThrow(new LibraryItemNotAvailableException()).when(greatExpectations).checkOut(customer);
        library.checkoutItem(greatExpectations, customer);
    }
    @Test(expected = LibraryItemNotFoundException.class)
    public void testCheckoutItemThatDoesNotExist() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, CustomerRequiredException {
        library.checkoutItem(bleakHouse, customer);
    }
    @Test(expected = LibraryItemNotFoundException.class)
    public void testCheckoutNullItem() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, CustomerRequiredException {
        library.checkoutItem(null, customer);
    }
    @Test
    public void testCheckoutItemByTitle() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, CustomerRequiredException {
        library.checkoutItemByTitle("Great Expectations", customer);
        verify(greatExpectations, times(1)).checkOut(customer);
    }
    @Test(expected = LibraryItemNotFoundException.class)
    public void testCheckOutItemByTitleThatDoesntExist() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, CustomerRequiredException {
        library.checkoutItemByTitle("Hard Times", customer);
    }
    @Test(expected = LibraryItemNotAvailableException.class)
    public void testCheckOutItemByTitleThatIsntAvailable() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, CustomerRequiredException {
        doThrow(new LibraryItemNotAvailableException()).when(greatExpectations).checkOut(customer);
        library.checkoutItemByTitle("Great Expectations", customer);
    }
    @Test
    public void testCheckedOutItemDoesNotShowInItemList() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, CustomerRequiredException {
        // Only available items should be listed.
        when(greatExpectations.isAvailable()).thenReturn(false);
        when(pickwickPapers.isAvailable()).thenReturn(true);
        assertThat(library.getItems(), is(Arrays.asList(pickwickPapers)));
    }
    @Test
    public void testReturnItemThatExistsAndIsCheckedOut() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, LibraryItemNotCheckedOutException, CustomerRequiredException {
        when(greatExpectations.isAvailable()).thenReturn(false);
        library.returnItem(greatExpectations);
        verify(greatExpectations, times(1)).checkIn();
    }
    @Test(expected = LibraryItemNotFoundException.class)
    public void testVerifyThatItemDoesntExist() throws LibraryItemNotFoundException, LibraryItemNotCheckedOutException {
        library.verifyItemExists(mock(Book.class));
    }
    @Test(expected = LibraryItemNotFoundException.class)
    public void testReturnItemThatDoesntExist() throws LibraryItemNotFoundException, LibraryItemNotCheckedOutException {
        library.returnItem(bleakHouse);
    }
    @Test(expected = LibraryItemNotFoundException.class)
    public void testReturnNullItemThrowsNotFoundException() throws LibraryItemNotFoundException, LibraryItemNotCheckedOutException {
        library.returnItem(null);
    }
    @Test
    public void testReturnItemByTitle() throws LibraryItemNotFoundException, LibraryItemNotAvailableException, LibraryItemNotCheckedOutException, CustomerRequiredException {
        library.returnItemByTitle("Great Expectations");
        verify(greatExpectations, times(1)).checkIn();
    }
    @Test(expected = LibraryItemNotFoundException.class)
    public void testReturnItemByTitleThatDoesntExist() throws LibraryItemNotFoundException, LibraryItemNotCheckedOutException {
        library.returnItemByTitle("Hard Times");
    }
    @Test(expected = LibraryItemNotCheckedOutException.class)
    public void testReturnItemByTitleThatHasntBeenCheckedOut() throws LibraryItemNotFoundException, LibraryItemNotCheckedOutException {
        doThrow(LibraryItemNotCheckedOutException.class).when(greatExpectations).checkIn();
        library.returnItemByTitle("Great Expectations");
    }
    @Test
    public void testFindItemByTitle() {
        assertThat(library.findItemByTitle("Great Expectations"), is(greatExpectations));
    }
    @Test
    public void testFindItemByNullTitle() {
        assertThat(library.findItemByTitle(null), is(nullValue()));
    }
    @Test
    public void testFindItemByTitleThatDoesntExist() {
        assertThat(library.findItemByTitle("Hard Times"), is(nullValue()));
    }
    @Test
    public void testGetClassOfItemsInLibrary() {
        assertThat(library.getItemsClass(), CoreMatchers.<Class<Book>>is(Book.class));
    }
    @Test
    public void testGetNameOfItemsInLibrary() {
        assertThat(library.getItemsName(), is("Book"));
        assertThat(library.getItemsNameLowercase(), is("book"));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import java.util.concurrent.BlockingQueue;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.WaitForTaskToComplete;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
/**
 * The seda component provides asynchronous call to another endpoint from any
 * CamelContext in the same JVM.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface SedaEndpointBuilderFactory {
    /**
     * Builder for endpoint consumers for the SEDA component.
     */
    public interface SedaEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        /**
         * Returns this builder cast to its advanced counterpart, exposing the
         * consumer options from the advanced groups.
         */
        default AdvancedSedaEndpointConsumerBuilder advanced() {
            return (AdvancedSedaEndpointConsumerBuilder) this;
        }
        /**
         * The maximum capacity of the SEDA queue (i.e., the number of messages
         * it can hold). Will by default use the defaultSize set on the SEDA
         * component.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: common
         */
        default SedaEndpointConsumerBuilder size(int size) {
            setProperty("size", size);
            return this;
        }
        /**
         * The maximum capacity of the SEDA queue (i.e., the number of messages
         * it can hold). Will by default use the defaultSize set on the SEDA
         * component.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: common
         */
        default SedaEndpointConsumerBuilder size(String size) {
            setProperty("size", size);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: consumer
         */
        default SedaEndpointConsumerBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            setProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: consumer
         */
        default SedaEndpointConsumerBuilder bridgeErrorHandler(
                String bridgeErrorHandler) {
            setProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Number of concurrent threads processing exchanges.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: consumer
         */
        default SedaEndpointConsumerBuilder concurrentConsumers(
                int concurrentConsumers) {
            setProperty("concurrentConsumers", concurrentConsumers);
            return this;
        }
        /**
         * Number of concurrent threads processing exchanges.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: consumer
         */
        default SedaEndpointConsumerBuilder concurrentConsumers(
                String concurrentConsumers) {
            setProperty("concurrentConsumers", concurrentConsumers);
            return this;
        }
    }
    /**
     * Advanced builder for endpoint consumers for the SEDA component.
     */
    public interface AdvancedSedaEndpointConsumerBuilder
            extends
                EndpointConsumerBuilder {
        /**
         * Returns this builder cast back to the basic consumer builder,
         * exposing only the common/consumer option groups.
         */
        default SedaEndpointConsumerBuilder basic() {
            return (SedaEndpointConsumerBuilder) this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
         * type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder exceptionHandler(
                ExceptionHandler exceptionHandler) {
            setProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * To let the consumer use a custom ExceptionHandler. Notice if the
         * option bridgeErrorHandler is enabled then this option is not in use.
         * By default the consumer will deal with exceptions, that will be
         * logged at WARN or ERROR level and ignored.
         *
         * The option will be converted to a
         * <code>org.apache.camel.spi.ExceptionHandler</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder exceptionHandler(
                String exceptionHandler) {
            setProperty("exceptionHandler", exceptionHandler);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option is a: <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder exchangePattern(
                ExchangePattern exchangePattern) {
            setProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * The option will be converted to a
         * <code>org.apache.camel.ExchangePattern</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder exchangePattern(
                String exchangePattern) {
            setProperty("exchangePattern", exchangePattern);
            return this;
        }
        /**
         * Whether to limit the number of concurrentConsumers to the maximum of
         * 500. By default, an exception will be thrown if an endpoint is
         * configured with a greater number. You can disable that check by
         * turning this option off.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder limitConcurrentConsumers(
                boolean limitConcurrentConsumers) {
            setProperty("limitConcurrentConsumers", limitConcurrentConsumers);
            return this;
        }
        /**
         * Whether to limit the number of concurrentConsumers to the maximum of
         * 500. By default, an exception will be thrown if an endpoint is
         * configured with a greater number. You can disable that check by
         * turning this option off.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder limitConcurrentConsumers(
                String limitConcurrentConsumers) {
            setProperty("limitConcurrentConsumers", limitConcurrentConsumers);
            return this;
        }
        /**
         * Specifies whether multiple consumers are allowed. If enabled, you can
         * use SEDA for Publish-Subscribe messaging. That is, you can send a
         * message to the SEDA queue and have each consumer receive a copy of
         * the message. When enabled, this option should be specified on every
         * consumer endpoint.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder multipleConsumers(
                boolean multipleConsumers) {
            setProperty("multipleConsumers", multipleConsumers);
            return this;
        }
        /**
         * Specifies whether multiple consumers are allowed. If enabled, you can
         * use SEDA for Publish-Subscribe messaging. That is, you can send a
         * message to the SEDA queue and have each consumer receive a copy of
         * the message. When enabled, this option should be specified on every
         * consumer endpoint.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder multipleConsumers(
                String multipleConsumers) {
            setProperty("multipleConsumers", multipleConsumers);
            return this;
        }
        /**
         * The timeout used when polling. When a timeout occurs, the consumer
         * can check whether it is allowed to continue running. Setting a lower
         * value allows the consumer to react more quickly upon shutdown.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder pollTimeout(int pollTimeout) {
            setProperty("pollTimeout", pollTimeout);
            return this;
        }
        /**
         * The timeout used when polling. When a timeout occurs, the consumer
         * can check whether it is allowed to continue running. Setting a lower
         * value allows the consumer to react more quickly upon shutdown.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder pollTimeout(
                String pollTimeout) {
            setProperty("pollTimeout", pollTimeout);
            return this;
        }
        /**
         * Whether to purge the task queue when stopping the consumer/route.
         * This allows to stop faster, as any pending messages on the queue is
         * discarded.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder purgeWhenStopping(
                boolean purgeWhenStopping) {
            setProperty("purgeWhenStopping", purgeWhenStopping);
            return this;
        }
        /**
         * Whether to purge the task queue when stopping the consumer/route.
         * This allows to stop faster, as any pending messages on the queue is
         * discarded.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: consumer (advanced)
         */
        default AdvancedSedaEndpointConsumerBuilder purgeWhenStopping(
                String purgeWhenStopping) {
            setProperty("purgeWhenStopping", purgeWhenStopping);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointConsumerBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointConsumerBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Define the queue instance which will be used by the endpoint. This
         * option is only for rare use-cases where you want to use a custom
         * queue instance.
         *
         * The option is a: <code>java.util.concurrent.BlockingQueue</code>
         * type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointConsumerBuilder queue(BlockingQueue queue) {
            setProperty("queue", queue);
            return this;
        }
        /**
         * Define the queue instance which will be used by the endpoint. This
         * option is only for rare use-cases where you want to use a custom
         * queue instance.
         *
         * The option will be converted to a
         * <code>java.util.concurrent.BlockingQueue</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointConsumerBuilder queue(String queue) {
            setProperty("queue", queue);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointConsumerBuilder synchronous(
                boolean synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointConsumerBuilder synchronous(
                String synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
    }
    /**
     * Builder for endpoint producers for the SEDA component.
     */
    public interface SedaEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        /**
         * Returns this builder cast to its advanced counterpart, exposing the
         * producer options from the advanced groups.
         */
        default AdvancedSedaEndpointProducerBuilder advanced() {
            return (AdvancedSedaEndpointProducerBuilder) this;
        }
        /**
         * The maximum capacity of the SEDA queue (i.e., the number of messages
         * it can hold). Will by default use the defaultSize set on the SEDA
         * component.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: common
         */
        default SedaEndpointProducerBuilder size(int size) {
            setProperty("size", size);
            return this;
        }
        /**
         * The maximum capacity of the SEDA queue (i.e., the number of messages
         * it can hold). Will by default use the defaultSize set on the SEDA
         * component.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: common
         */
        default SedaEndpointProducerBuilder size(String size) {
            setProperty("size", size);
            return this;
        }
        /**
         * Whether a thread that sends messages to a full SEDA queue will block
         * until the queue's capacity is no longer exhausted. By default, an
         * exception will be thrown stating that the queue is full. By enabling
         * this option, the calling thread will instead block and wait until the
         * message can be accepted.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder blockWhenFull(boolean blockWhenFull) {
            setProperty("blockWhenFull", blockWhenFull);
            return this;
        }
        /**
         * Whether a thread that sends messages to a full SEDA queue will block
         * until the queue's capacity is no longer exhausted. By default, an
         * exception will be thrown stating that the queue is full. By enabling
         * this option, the calling thread will instead block and wait until the
         * message can be accepted.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder blockWhenFull(String blockWhenFull) {
            setProperty("blockWhenFull", blockWhenFull);
            return this;
        }
        /**
         * Whether the producer should discard the message (do not add the
         * message to the queue), when sending to a queue with no active
         * consumers. Only one of the options discardIfNoConsumers and
         * failIfNoConsumers can be enabled at the same time.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder discardIfNoConsumers(
                boolean discardIfNoConsumers) {
            setProperty("discardIfNoConsumers", discardIfNoConsumers);
            return this;
        }
        /**
         * Whether the producer should discard the message (do not add the
         * message to the queue), when sending to a queue with no active
         * consumers. Only one of the options discardIfNoConsumers and
         * failIfNoConsumers can be enabled at the same time.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder discardIfNoConsumers(
                String discardIfNoConsumers) {
            setProperty("discardIfNoConsumers", discardIfNoConsumers);
            return this;
        }
        /**
         * Whether the producer should fail by throwing an exception, when
         * sending to a queue with no active consumers. Only one of the options
         * discardIfNoConsumers and failIfNoConsumers can be enabled at the same
         * time.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder failIfNoConsumers(
                boolean failIfNoConsumers) {
            setProperty("failIfNoConsumers", failIfNoConsumers);
            return this;
        }
        /**
         * Whether the producer should fail by throwing an exception, when
         * sending to a queue with no active consumers. Only one of the options
         * discardIfNoConsumers and failIfNoConsumers can be enabled at the same
         * time.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder failIfNoConsumers(
                String failIfNoConsumers) {
            setProperty("failIfNoConsumers", failIfNoConsumers);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            setProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder lazyStartProducer(
                String lazyStartProducer) {
            setProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * offerTimeout (in milliseconds) can be added to the block case when
         * queue is full. You can disable timeout by using 0 or a negative
         * value.
         *
         * The option is a: <code>long</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder offerTimeout(long offerTimeout) {
            setProperty("offerTimeout", offerTimeout);
            return this;
        }
        /**
         * offerTimeout (in milliseconds) can be added to the block case when
         * queue is full. You can disable timeout by using 0 or a negative
         * value.
         *
         * The option will be converted to a <code>long</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder offerTimeout(String offerTimeout) {
            setProperty("offerTimeout", offerTimeout);
            return this;
        }
        /**
         * Timeout (in milliseconds) before a SEDA producer will stop waiting
         * for an asynchronous task to complete. You can disable timeout by
         * using 0 or a negative value.
         *
         * The option is a: <code>long</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder timeout(long timeout) {
            setProperty("timeout", timeout);
            return this;
        }
        /**
         * Timeout (in milliseconds) before a SEDA producer will stop waiting
         * for an asynchronous task to complete. You can disable timeout by
         * using 0 or a negative value.
         *
         * The option will be converted to a <code>long</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder timeout(String timeout) {
            setProperty("timeout", timeout);
            return this;
        }
        /**
         * Option to specify whether the caller should wait for the async task
         * to complete or not before continuing. The following three options are
         * supported: Always, Never or IfReplyExpected. The first two values are
         * self-explanatory. The last value, IfReplyExpected, will only wait if
         * the message is Request Reply based. The default option is
         * IfReplyExpected.
         *
         * The option is a: <code>org.apache.camel.WaitForTaskToComplete</code>
         * type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder waitForTaskToComplete(
                WaitForTaskToComplete waitForTaskToComplete) {
            setProperty("waitForTaskToComplete", waitForTaskToComplete);
            return this;
        }
        /**
         * Option to specify whether the caller should wait for the async task
         * to complete or not before continuing. The following three options are
         * supported: Always, Never or IfReplyExpected. The first two values are
         * self-explanatory. The last value, IfReplyExpected, will only wait if
         * the message is Request Reply based. The default option is
         * IfReplyExpected.
         *
         * The option will be converted to a
         * <code>org.apache.camel.WaitForTaskToComplete</code> type.
         *
         * Group: producer
         */
        default SedaEndpointProducerBuilder waitForTaskToComplete(
                String waitForTaskToComplete) {
            setProperty("waitForTaskToComplete", waitForTaskToComplete);
            return this;
        }
    }
    /**
     * Advanced builder for endpoint producers for the SEDA component.
     */
    public interface AdvancedSedaEndpointProducerBuilder
            extends
                EndpointProducerBuilder {
        /**
         * Returns this builder cast back to the basic producer builder,
         * exposing only the common/producer option groups.
         */
        default SedaEndpointProducerBuilder basic() {
            return (SedaEndpointProducerBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointProducerBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointProducerBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Define the queue instance which will be used by the endpoint. This
         * option is only for rare use-cases where you want to use a custom
         * queue instance.
         *
         * The option is a: <code>java.util.concurrent.BlockingQueue</code>
         * type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointProducerBuilder queue(BlockingQueue queue) {
            setProperty("queue", queue);
            return this;
        }
        /**
         * Define the queue instance which will be used by the endpoint. This
         * option is only for rare use-cases where you want to use a custom
         * queue instance.
         *
         * The option will be converted to a
         * <code>java.util.concurrent.BlockingQueue</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointProducerBuilder queue(String queue) {
            setProperty("queue", queue);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointProducerBuilder synchronous(
                boolean synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointProducerBuilder synchronous(
                String synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
    }
    /**
     * Builder for endpoint for the SEDA component.
     */
    public interface SedaEndpointBuilder
            extends
                SedaEndpointConsumerBuilder, SedaEndpointProducerBuilder {
        /**
         * Returns this builder cast to its advanced counterpart, exposing the
         * combined consumer and producer advanced options.
         */
        default AdvancedSedaEndpointBuilder advanced() {
            return (AdvancedSedaEndpointBuilder) this;
        }
        /**
         * The maximum capacity of the SEDA queue (i.e., the number of messages
         * it can hold). Will by default use the defaultSize set on the SEDA
         * component.
         *
         * The option is a: <code>int</code> type.
         *
         * Group: common
         */
        default SedaEndpointBuilder size(int size) {
            setProperty("size", size);
            return this;
        }
        /**
         * The maximum capacity of the SEDA queue (i.e., the number of messages
         * it can hold). Will by default use the defaultSize set on the SEDA
         * component.
         *
         * The option will be converted to a <code>int</code> type.
         *
         * Group: common
         */
        default SedaEndpointBuilder size(String size) {
            setProperty("size", size);
            return this;
        }
    }
    /**
     * Advanced builder for endpoint for the SEDA component.
     */
    public interface AdvancedSedaEndpointBuilder
            extends
                AdvancedSedaEndpointConsumerBuilder, AdvancedSedaEndpointProducerBuilder {
        /**
         * Returns this builder cast back to the basic endpoint builder,
         * exposing only the common option groups.
         */
        default SedaEndpointBuilder basic() {
            return (SedaEndpointBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            setProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Define the queue instance which will be used by the endpoint. This
         * option is only for rare use-cases where you want to use a custom
         * queue instance.
         *
         * The option is a: <code>java.util.concurrent.BlockingQueue</code>
         * type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointBuilder queue(BlockingQueue queue) {
            setProperty("queue", queue);
            return this;
        }
        /**
         * Define the queue instance which will be used by the endpoint. This
         * option is only for rare use-cases where you want to use a custom
         * queue instance.
         *
         * The option will be converted to a
         * <code>java.util.concurrent.BlockingQueue</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointBuilder queue(String queue) {
            setProperty("queue", queue);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointBuilder synchronous(boolean synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Group: advanced
         */
        default AdvancedSedaEndpointBuilder synchronous(String synchronous) {
            setProperty("synchronous", synchronous);
            return this;
        }
    }
    /**
     * SEDA (camel-seda)
     * The seda component provides asynchronous call to another endpoint from
     * any CamelContext in the same JVM.
     *
     * Category: core,endpoint
     * Available as of version: 1.1
     * Maven coordinates: org.apache.camel:camel-seda
     *
     * Syntax: <code>seda:name</code>
     *
     * Path parameter: name (required)
     * Name of queue
     */
    default SedaEndpointBuilder seda(String path) {
        // Local class implementing both the basic and advanced builder views,
        // so advanced()/basic() casts on the returned instance always succeed.
        class SedaEndpointBuilderImpl extends AbstractEndpointBuilder implements SedaEndpointBuilder, AdvancedSedaEndpointBuilder {
            public SedaEndpointBuilderImpl(String path) {
                super("seda", path);
            }
        }
        return new SedaEndpointBuilderImpl(path);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeLongValue;
import static org.elasticsearch.index.mapper.MapperBuilders.longField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;
/**
*
*/
public class LongFieldMapper extends NumberFieldMapper {
// Mapping type name under which this mapper is registered ("long").
public static final String CONTENT_TYPE = "long";
/**
 * Default settings for long fields: a frozen copy of the shared numeric
 * field type, and no configured null_value.
 */
public static class Defaults extends NumberFieldMapper.Defaults {
    public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);
    static {
        // Freeze so the shared default instance cannot be mutated after class load.
        FIELD_TYPE.freeze();
    }
    public static final Long NULL_VALUE = null;
}
/**
 * Fluent builder for {@link LongFieldMapper}. Uses the 64-bit default
 * precision step and a private copy of the default field type.
 */
public static class Builder extends NumberFieldMapper.Builder<Builder, LongFieldMapper> {
    // Value substituted when a document supplies null; defaults to "no substitution".
    protected Long nullValue = Defaults.NULL_VALUE;
    public Builder(String name) {
        super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_64_BIT);
        builder = this;
    }
    // Sets the substitute value for null field values.
    public Builder nullValue(long nullValue) {
        this.nullValue = nullValue;
        return this;
    }
    @Override
    public LongFieldMapper build(BuilderContext context) {
        // Norms carry boost information, so a non-default boost forces norms on.
        fieldType.setOmitNorms(fieldType.omitNorms() && boost == 1.0f);
        LongFieldMapper fieldMapper = new LongFieldMapper(buildNames(context), fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue,
                ignoreMalformed(context), coerce(context), similarity, normsLoading,
                fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
        fieldMapper.includeInAll(includeInAll);
        return fieldMapper;
    }
}
/**
 * Parses the "long" type mapping definition into a {@link Builder}.
 * Generic number-field options are handled by parseNumberField; only the
 * type-specific "null_value" option is consumed here.
 */
public static class TypeParser implements Mapper.TypeParser {
    @Override
    public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
        LongFieldMapper.Builder builder = longField(name);
        parseNumberField(builder, name, node, parserContext);
        // Explicit iterator so recognized entries can be removed in place;
        // leftover entries are reported as unknown by the caller.
        for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
            Map.Entry<String, Object> entry = iterator.next();
            String propName = Strings.toUnderscoreCase(entry.getKey());
            Object propNode = entry.getValue();
            if (propName.equals("null_value")) {
                if (propNode == null) {
                    throw new MapperParsingException("Property [null_value] cannot be null.");
                }
                builder.nullValue(nodeLongValue(propNode));
                iterator.remove();
            }
        }
        return builder;
    }
}
// Substitute value applied when a document supplies null (null = no substitution).
private Long nullValue;
// String form of nullValue, precomputed in the constructor (null when nullValue is null).
private String nullValueAsString;
/**
 * Builds a long field mapper. The index-time analyzer is created with the
 * configured precision step, while the search-time analyzer is created
 * with Integer.MAX_VALUE as its precision step (exact-value terms only).
 * Caches the string form of nullValue for reuse during parsing.
 */
protected LongFieldMapper(Names names, int precisionStep, float boost, FieldType fieldType, Boolean docValues,
                          Long nullValue, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                          SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings,
                          Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
    super(names, precisionStep, boost, fieldType, docValues, ignoreMalformed, coerce,
            NumericLongAnalyzer.buildNamedAnalyzer(precisionStep), NumericLongAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE),
            similarity, normsLoading, fieldDataSettings, indexSettings, multiFields, copyTo);
    this.nullValue = nullValue;
    this.nullValueAsString = nullValue == null ? null : nullValue.toString();
}
// Shared frozen default field type for long fields.
@Override
public FieldType defaultFieldType() {
    return Defaults.FIELD_TYPE;
}
// Field data for this mapper is typed "long".
@Override
public FieldDataType defaultFieldDataType() {
    return new FieldDataType("long");
}
// Largest meaningful precision step for a 64-bit value.
@Override
protected int maxPrecisionStep() {
    return 64;
}
/**
 * Normalizes an arbitrary stored/source representation to a Long.
 * Null stays null; numbers are converted via longValue(); raw index bytes
 * are decoded via Numbers.bytesToLong(); any other object is parsed from
 * its string form (throwing NumberFormatException if not a valid long).
 */
@Override
public Long value(Object value) {
    if (value == null) {
        return null;
    } else if (value instanceof Number) {
        return ((Number) value).longValue();
    } else if (value instanceof BytesRef) {
        return Numbers.bytesToLong((BytesRef) value);
    } else {
        return Long.parseLong(value.toString());
    }
}
/**
 * Encodes a search value into the prefix-coded byte form stored in the
 * index, using shift 0 so the term matches the exact (full-precision) value.
 */
@Override
public BytesRef indexedValueForSearch(Object value) {
    BytesRefBuilder bytesRef = new BytesRefBuilder();
    NumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match
    return bytesRef.get();
}
/**
 * Numeric "fuzzy" matching: interprets fuzziness as a symmetric numeric
 * distance and returns an inclusive range query around the requested value.
 * prefixLength/maxExpansions/transpositions are irrelevant for numerics.
 */
@Override
public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
    final long center = Long.parseLong(value);
    final long delta = fuzziness.asLong();
    return NumericRangeQuery.newLongRange(names.indexName(), precisionStep,
            center - delta, center + delta, true, true);
}
/**
 * Builds a numeric range query over this field. Null bounds are passed
 * through as open ends; non-null bounds are parsed to longs first.
 */
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
    final Long lower = lowerTerm == null ? null : parseLongValue(lowerTerm);
    final Long upper = upperTerm == null ? null : parseLongValue(upperTerm);
    return NumericRangeQuery.newLongRange(names.indexName(), precisionStep, lower, upper, includeLower, includeUpper);
}
/**
 * Filter matching documents whose value was the configured null_value
 * substitute; null when no null_value is configured (nothing to match).
 */
@Override
public Query nullValueFilter() {
    return nullValue == null
            ? null
            : new ConstantScoreQuery(termQuery(nullValue, null));
}
// Signals that this mapper applies boost itself when creating fields (the
// parsed "boost"/"_boost" property in innerParseCreateField) — presumably so
// the superclass skips its own boost handling; confirm against the base class.
@Override
protected boolean customBoost() {
return true;
}
/**
 * Extracts the long value for the current document field — from an external
 * value, a plain scalar token, or the object form { "value": ..., "boost": ... } —
 * and adds the resulting Lucene field(s) (indexed/stored field, doc values).
 * Returns without adding anything when the value is null/empty and no
 * null_value substitute is configured.
 */
@Override
protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
long value;
float boost = this.boost;
if (context.externalValueSet()) {
// Value supplied programmatically rather than read from the document parser
Object externalValue = context.externalValue();
if (externalValue == null) {
if (nullValue == null) {
return;
}
value = nullValue;
} else if (externalValue instanceof String) {
String sExternalValue = (String) externalValue;
if (sExternalValue.length() == 0) {
// Empty string is treated like null: substitute null_value or skip
if (nullValue == null) {
return;
}
value = nullValue;
} else {
value = Long.parseLong(sExternalValue);
}
} else {
value = ((Number) externalValue).longValue();
}
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), Long.toString(value), boost);
}
} else {
XContentParser parser = context.parser();
if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
(parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
// JSON null or empty string: substitute the configured null_value, if any
if (nullValue == null) {
return;
}
value = nullValue;
if (nullValueAsString != null && (context.includeInAll(includeInAll, this))) {
context.allEntries().addText(names.fullName(), nullValueAsString, boost);
}
} else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
// Object form: accepts "value"/"_value" and a per-field "boost"/"_boost"
XContentParser.Token token;
String currentFieldName = null;
Long objValue = nullValue;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
objValue = parser.longValue(coerce.value());
}
} else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new IllegalArgumentException("unknown property [" + currentFieldName + "]");
}
}
}
if (objValue == null) {
// no value
return;
}
value = objValue;
} else {
// Plain scalar token; coerce setting controls lenient string/decimal parsing
value = parser.longValue(coerce.value());
if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(names.fullName(), parser.text(), boost);
}
}
}
if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) {
CustomLongNumericField field = new CustomLongNumericField(this, value, fieldType);
field.setBoost(boost);
fields.add(field);
}
if (hasDocValues()) {
addDocValue(context, fields, value);
}
}
// Mapping type name for this mapper, taken from the CONTENT_TYPE constant
// (declared outside this view).
@Override
protected String contentType() {
return CONTENT_TYPE;
}
/**
 * Merges mapping updates from another mapper. The common numeric settings are
 * merged by the superclass; this adopts the incoming null_value (and its
 * cached string form) only when the other mapper is the same concrete type
 * and this is a real merge rather than a simulation.
 */
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
    super.merge(mergeWith, mergeResult);
    if (this.getClass().equals(mergeWith.getClass()) && !mergeResult.simulate()) {
        LongFieldMapper other = (LongFieldMapper) mergeWith;
        this.nullValue = other.nullValue;
        this.nullValueAsString = other.nullValueAsString;
    }
}
/**
 * Serializes this mapper's long-specific settings (precision_step, null_value,
 * include_in_all) after the common numeric settings. Non-default values are
 * always written; defaults are written only when includeDefaults is set.
 */
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || precisionStep != Defaults.PRECISION_STEP_64_BIT) {
builder.field("precision_step", precisionStep);
}
if (includeDefaults || nullValue != null) {
builder.field("null_value", nullValue);
}
if (includeInAll != null) {
builder.field("include_in_all", includeInAll);
} else if (includeDefaults) {
builder.field("include_in_all", false);
}
}
/**
 * Computes field statistics for this field from its terms dictionary,
 * including the decoded min/max long values.
 */
@Override
public FieldStats stats(Terms terms, int maxDoc) throws IOException {
    final long min = NumericUtils.getMinLong(terms);
    final long max = NumericUtils.getMaxLong(terms);
    return new FieldStats.Long(maxDoc, terms.getDocCount(), terms.getSumDocFreq(),
            terms.getSumTotalTermFreq(), min, max);
}
/**
 * Lucene field carrying a single long value. Token streams are obtained from
 * the owning mapper's cache instead of being allocated per field.
 */
public static class CustomLongNumericField extends CustomNumericField {
// The long value this field indexes/stores.
private final long number;
// Owning mapper; supplies the pooled numeric token stream.
private final NumberFieldMapper mapper;
public CustomLongNumericField(NumberFieldMapper mapper, long number, FieldType fieldType) {
super(mapper, number, fieldType);
this.mapper = mapper;
this.number = number;
}
@Override
public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) throws IOException {
if (fieldType().indexOptions() != IndexOptions.NONE) {
// Reuse a cached numeric stream, seeded with this field's value
return mapper.popCachedStream().setLongValue(number);
}
// Not indexed: no token stream needed
return null;
}
@Override
public String numericAsString() {
return Long.toString(number);
}
}
}
| |
package mil.nga.mapcache.load;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Environment;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AlertDialog;
import androidx.core.content.FileProvider;
import androidx.fragment.app.FragmentActivity;
import androidx.lifecycle.ViewModelProviders;
import java.io.File;
import java.io.IOException;
import mil.nga.geopackage.GeoPackageConstants;
import mil.nga.geopackage.io.GeoPackageIOUtils;
import mil.nga.mapcache.BuildConfig;
import mil.nga.mapcache.GeoPackageUtils;
import mil.nga.mapcache.R;
import mil.nga.mapcache.utils.ViewAnimation;
import mil.nga.mapcache.viewmodel.GeoPackageViewModel;
/**
 * Handles sending a GeoPackage to external apps or saving that file to external disk.
 * Used from the GeoPackage detail view share button.
 */
public class ShareTask {

    /**
     * Intent activity request code when sharing a file
     */
    public static final int ACTIVITY_SHARE_FILE = 3343;

    /**
     * File provider id used to build shareable content Uris
     */
    private static final String AUTHORITY = BuildConfig.APPLICATION_ID + ".fileprovider";

    /**
     * Save a reference to the parent activity
     */
    private final Activity activity;

    /**
     * Need access to the viewModel to retrieve database files
     */
    private final GeoPackageViewModel geoPackageViewModel;

    /**
     * Name of the GeoPackage this task operates on
     */
    private String geoPackageName;

    /**
     * Constructor
     *
     * @param activity parent activity, also used as the ViewModel store owner
     */
    public ShareTask(FragmentActivity activity) {
        this.activity = activity;
        geoPackageViewModel = ViewModelProviders.of(activity).get(GeoPackageViewModel.class);
    }

    /**
     * Share database with external apps via intent
     *
     * @param database GeoPackage name
     */
    public void shareDatabaseOption(final String database) {
        try {
            // Get the database file
            File databaseFile = geoPackageViewModel.getDatabaseFile(database);
            // Create the share intent
            Intent shareIntent = new Intent();
            shareIntent.setAction(Intent.ACTION_SEND);
            shareIntent.setType("*/*");
            if (geoPackageViewModel.isExternal(database)) {
                // External database: its file is already shareable, build the Uri directly
                Uri databaseUri = FileProvider.getUriForFile(activity,
                        AUTHORITY,
                        databaseFile);
                shareIntent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
                launchShareIntent(shareIntent, databaseUri);
            } else {
                // Internal database: copy into the app cache first so the
                // FileProvider can grant read permission to the receiving app
                ShareCopyTask shareCopyTask = new ShareCopyTask(shareIntent);
                shareCopyTask.execute(databaseFile, database);
            }
        } catch (Exception e) {
            GeoPackageUtils.showMessage(activity, "Error sharing GeoPackage", e.getMessage());
        }
    }

    /**
     * Save the given database to the downloads directory
     *
     * @param database GeoPackage name
     */
    private void saveDatabaseOption(final String database) {
        try {
            // Get the database file
            File databaseFile = geoPackageViewModel.getDatabaseFile(database);
            // Create the share intent
            Intent shareIntent = new Intent();
            shareIntent.setAction(Intent.ACTION_SEND);
            // Launch the save to disk task
            SaveToDiskTask saveTask = new SaveToDiskTask(shareIntent);
            saveTask.execute(databaseFile, database);
        } catch (Exception e) {
            GeoPackageUtils.showMessage(activity, "Error saving to file", e.getMessage());
        }
    }

    /**
     * Shows a popup asking whether the user wants to save the GeoPackage to
     * disk or share it with external apps, then launches the chosen action.
     *
     * @param database GeoPackage name
     */
    public void askToSaveOrShare(String database) {
        // Create Alert window with basic input text layout
        LayoutInflater inflater = LayoutInflater.from(activity);
        View alertView = inflater.inflate(R.layout.share_file_popup, null);
        ViewAnimation.setScaleAnimatiom(alertView, 200);
        // title
        TextView titleText = (TextView) alertView.findViewById(R.id.alert_title);
        titleText.setText("Share GeoPackage");
        // Dialog offering the two options
        AlertDialog.Builder dialog = new AlertDialog.Builder(activity, R.style.AppCompatAlertDialogStyle)
                .setView(alertView);
        final AlertDialog alertDialog = dialog.create();
        // Click listener for "Share"
        alertView.findViewById(R.id.share_menu_share_card)
                .setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        shareDatabaseOption(database);
                        alertDialog.dismiss();
                    }
                });
        // Click listener for "Save"
        alertView.findViewById(R.id.share_menu_save_card)
                .setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        saveDatabaseOption(database);
                        alertDialog.dismiss();
                    }
                });
        alertDialog.show();
    }

    /**
     * Get the database cache directory used for temporary share copies
     *
     * @return cache directory for copied databases
     */
    private File getDatabaseCacheDirectory() {
        return new File(activity.getCacheDir(), "databases");
    }

    /**
     * Launch the provided share intent with the database Uri
     *
     * @param shareIntent share intent to launch
     * @param databaseUri content Uri of the database being shared
     */
    private void launchShareIntent(Intent shareIntent, Uri databaseUri) {
        // Add the Uri
        shareIntent.putExtra(Intent.EXTRA_STREAM, databaseUri);
        // Start the share activity for result to delete the cache when done
        activity.startActivityForResult(Intent.createChooser(shareIntent, "Share"), ACTIVITY_SHARE_FILE);
    }

    /**
     * Copy an internal database to a shareable location and share.
     * Returns a non-null String from doInBackground only on failure.
     */
    private class ShareCopyTask extends AsyncTask<Object, Void, String> {

        /**
         * Share intent
         */
        private final Intent shareIntent;

        /**
         * Share copy dialog
         */
        private ProgressDialog shareCopyDialog = null;

        /**
         * Cache file created
         */
        private File cacheFile = null;

        /**
         * Constructor
         *
         * @param shareIntent intent to launch once the copy completes
         */
        ShareCopyTask(Intent shareIntent) {
            this.shareIntent = shareIntent;
        }

        /**
         * Show an indeterminate, cancelable-by-button progress dialog
         */
        @Override
        protected void onPreExecute() {
            shareCopyDialog = new ProgressDialog(activity);
            shareCopyDialog
                    .setMessage("Preparing internal GeoPackage for sharing");
            shareCopyDialog.setCancelable(false);
            shareCopyDialog.setIndeterminate(true);
            shareCopyDialog.setButton(ProgressDialog.BUTTON_NEGATIVE,
                    "Cancel",
                    new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            cancel(true);
                        }
                    });
            shareCopyDialog.show();
        }

        /**
         * Copy the database into the cache directory.
         *
         * @param params [0] = database File, [1] = database name
         * @return error message on failure, null on success
         */
        @Override
        protected String doInBackground(Object... params) {
            File databaseFile = (File) params[0];
            String database = (String) params[1];
            // Copy the database to cache
            File cacheDirectory = getDatabaseCacheDirectory();
            cacheDirectory.mkdir();
            cacheFile = new File(cacheDirectory, database + "."
                    + GeoPackageConstants.EXTENSION);
            try {
                GeoPackageIOUtils.copyFile(databaseFile, cacheFile);
            } catch (IOException e) {
                return e.getMessage();
            }
            return null;
        }

        /**
         * Dismiss the dialog and remove any partial copy when cancelled
         */
        @Override
        protected void onCancelled(String result) {
            shareCopyDialog.dismiss();
            deleteCachedDatabaseFiles();
        }

        /**
         * Dismiss the dialog, then either report the error or launch the share
         */
        @Override
        protected void onPostExecute(String result) {
            shareCopyDialog.dismiss();
            if (result != null) {
                GeoPackageUtils.showMessage(activity,
                        "Share", result);
            } else {
                // Create the content Uri and add intent permissions
                Uri databaseUri = FileProvider.getUriForFile(activity,
                        AUTHORITY,
                        cacheFile);
                shareIntent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
                launchShareIntent(shareIntent, databaseUri);
            }
        }
    }

    /**
     * Saves a given file to the public Downloads directory.
     * Returns a non-null String from doInBackground only on failure.
     */
    private class SaveToDiskTask extends AsyncTask<Object, Void, String> {

        /**
         * save intent (kept for parity with ShareCopyTask; not launched here)
         */
        private final Intent saveIntent;

        /**
         * Destination file created in Downloads
         */
        private File cacheFile = null;

        /**
         * Save dialog
         */
        private ProgressDialog saveDialog = null;

        /**
         * Constructor
         *
         * @param saveIntent intent associated with this save
         */
        SaveToDiskTask(Intent saveIntent) {
            this.saveIntent = saveIntent;
        }

        /**
         * pre execute - show dialog
         */
        @Override
        protected void onPreExecute() {
            saveDialog = new ProgressDialog(activity);
            saveDialog
                    .setMessage("Saving GeoPackage to Downloads");
            saveDialog.setCancelable(false);
            saveDialog.setIndeterminate(true);
            saveDialog.setButton(ProgressDialog.BUTTON_NEGATIVE,
                    "Cancel",
                    new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            cancel(true);
                        }
                    });
            saveDialog.show();
        }

        /**
         * Save file to the downloads directory, avoiding name collisions.
         *
         * @param params [0] = database File, [1] = database name
         * @return error message on failure, null on success
         */
        @Override
        protected String doInBackground(Object... params) {
            File downloadDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS);
            File databaseFile = (File) params[0];
            String database = (String) params[1];
            // Build the destination file directly in Downloads (fix: this task
            // previously created the cache directory it never used)
            cacheFile = new File(downloadDir, database + "."
                    + GeoPackageConstants.EXTENSION);
            // If file already exists, add a number on the end to ensure we don't overwrite
            int fileNumber = 0;
            while (cacheFile.exists()) {
                fileNumber++;
                cacheFile = new File(downloadDir, database + fileNumber + "."
                        + GeoPackageConstants.EXTENSION);
            }
            try {
                GeoPackageIOUtils.copyFile(databaseFile, cacheFile);
            } catch (IOException e) {
                return e.getMessage();
            }
            return null;
        }

        /**
         * Dismiss the dialog when cancelled (mirrors ShareCopyTask; previously
         * the dialog leaked on cancel)
         */
        @Override
        protected void onCancelled(String result) {
            saveDialog.dismiss();
            deleteCachedDatabaseFiles();
        }

        /**
         * post execute - close dialog and report the outcome
         */
        @Override
        protected void onPostExecute(String result) {
            saveDialog.dismiss();
            if (result != null) {
                // Copy failed: show the error instead of claiming success
                GeoPackageUtils.showMessage(activity,
                        "Save", result);
            } else {
                Toast.makeText(activity, "GeoPackage saved to Downloads", Toast.LENGTH_SHORT).show();
            }
            deleteCachedDatabaseFiles();
        }
    }

    /**
     * Delete any cached database files and the cache directory itself
     */
    private void deleteCachedDatabaseFiles() {
        File databaseCache = getDatabaseCacheDirectory();
        if (databaseCache.exists()) {
            File[] cacheFiles = databaseCache.listFiles();
            if (cacheFiles != null) {
                for (File cacheFile : cacheFiles) {
                    cacheFile.delete();
                }
            }
            databaseCache.delete();
        }
    }

    public String getGeoPackageName() {
        return geoPackageName;
    }

    public void setGeoPackageName(String geoPackageName) {
        this.geoPackageName = geoPackageName;
    }
}
| |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.plugin.ij.annotator;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.intellij.lang.ASTNode;
import com.intellij.lang.annotation.Annotation;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.lang.annotation.Annotator;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiJavaToken;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiReference;
import com.intellij.psi.StubBasedPsiElement;
import com.intellij.psi.impl.java.stubs.JavaStubElementTypes;
import com.intellij.psi.impl.source.tree.LeafPsiElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import gw.lang.parser.IParsedElement;
import gw.lang.parser.expressions.IBlockLiteralExpression;
import gw.lang.parser.expressions.INewExpression;
import gw.lang.reflect.gs.IGosuClass;
import gw.plugin.ij.highlighter.GosuHighlighterColors;
import gw.plugin.ij.lang.GosuTokenTypes;
import gw.plugin.ij.lang.parser.GosuElementTypes;
import gw.plugin.ij.lang.psi.api.auxilary.IGosuModifierList;
import gw.plugin.ij.lang.psi.impl.GosuBaseElementImpl;
import gw.plugin.ij.lang.psi.impl.GosuFragmentFileImpl;
import gw.plugin.ij.util.ExceptionUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public class GosuSemanticAnnotator implements Annotator {
private static final Logger LOG = Logger.getInstance(GosuSemanticAnnotator.class);
@Override
public void annotate(@NotNull PsiElement element, @NotNull AnnotationHolder holder) {
// Skip anything contained in a Gosu fragment file
if (PsiTreeUtil.getParentOfType(element, GosuFragmentFileImpl.class) != null) {
return;
}
// Only leaf elements are annotated; composite elements are covered via their leaves
if (element.getChildren().length != 0) {
return;
}
try {
final TextAttributesKey key = visitPsiElement(element);
if (key != null && key != GosuHighlighterColors.DEFAULT_ATTRKEY) {
final Annotation annotation = holder.createInfoAnnotation(element, null);
annotation.setTextAttributes(key);
}
} catch (ProcessCanceledException e) {
// Cancellation must propagate so the platform can restart the highlighting pass
throw e;
} catch (Exception e) {
if (ExceptionUtil.isWrappedCanceled(e)) {
// A cancellation wrapped inside another exception: rethrow as cancellation
throw new ProcessCanceledException(e);
}
LOG.error(e);
} catch (AssertionError e) {
// Log assertion failures rather than letting them kill the annotator
LOG.error(e);
}
}
/**
 * Maps a leaf PSI element to the text attributes it should be highlighted
 * with, or null when the element needs no special coloring.
 */
@Nullable
public TextAttributesKey visitPsiElement(@NotNull PsiElement element) {
    if (element instanceof PsiJavaToken) {
        // Only bare identifier tokens are interesting here
        if (((PsiJavaToken) element).getTokenType() != GosuTokenTypes.TT_IDENTIFIER) {
            return null;
        }
        if (NATIVE_TYPES.contains(element.getText())) {
            return GosuHighlighterColors.KEYWORD;
        }
        // Otherwise color by the construct the identifier belongs to
        return element instanceof ASTNode ? deriveFromParent((ASTNode) element) : null;
    }
    if (element instanceof LeafPsiElement) {
        return COLOR_MAP.get(((LeafPsiElement) element).getElementType());
    }
    return null;
}
/**
 * Derives highlighting for an identifier from the AST construct it sits in,
 * walking up the tree until a mapped parent element type (or a reference-like
 * parent that can be resolved) is found.
 */
@Nullable
private TextAttributesKey deriveFromParent(@NotNull ASTNode child) {
ASTNode node = child.getTreeParent();
if (node == null || node == child) {
return null;
}
IElementType type = node.getElementType();
TextAttributesKey key;
if (type == GosuElementTypes.ELEM_TYPE_IdentifierExpression ||
type == GosuElementTypes.ELEM_TYPE_BeanMethodCallExpression ||
type == GosuElementTypes.ELEM_TYPE_MemberAccess ||
type == GosuElementTypes.ELEM_TYPE_TypeLiteral) {
// Reference-like parents: color based on what the reference resolves to
key = deriveFromReference(node);
} else {
key = COLOR_MAP.get(type);
if (key == null) {
ASTNode parent = node.getTreeParent();
if (parent != null) {
// todo may need to short circuit this
// NOTE(review): deriveFromParent(parent) inspects parent's parent, so
// parent's own element type is never consulted — verify this skip is intended
key = deriveFromParent(parent);
}
} else {
// Refine the mapped color for declaration variants (abstract/static/ctor)
PsiElement psi = node.getPsi();
if (type == GosuElementTypes.CLASS_DEFINITION) {
if (isAbstractClass(psi)) {
key = GosuHighlighterColors.ABSTRACT_CLASS_NAME_ATTRKEY;
}
} else if (type == GosuElementTypes.FIELD) {
if (psi instanceof IGosuModifierList) {
if (((IGosuModifierList) psi).hasModifierProperty("static")) {
key = GosuHighlighterColors.STATIC_FIELD_ATTRKEY;
}
}
} else if (type == GosuElementTypes.METHOD_DEFINITION) {
if (psi instanceof PsiMethod && ((PsiMethod) psi).isConstructor()) {
key = GosuHighlighterColors.CONSTRUCTOR_DECLARATION_ATTRKEY;
} else if (psi instanceof IGosuModifierList) {
if (((IGosuModifierList) psi).hasModifierProperty("static")) {
key = GosuHighlighterColors.STATIC_METHOD_ATTRKEY;
}
}
}
}
}
return key;
}
/**
 * Chooses highlighting for a reference-like node by resolving its target:
 * annotation types, constructor calls, abstract classes, interfaces, block
 * literals, and — via COLOR_FROM_REF_MAP — other declaration kinds. Falls
 * back to the surrounding construct when the reference cannot be resolved.
 */
@Nullable
private TextAttributesKey deriveFromReference(@NotNull ASTNode node) {
TextAttributesKey key = null;
PsiElement psi = node.getPsi();
if (psi instanceof PsiReference) {
PsiElement resolve = null;
try {
resolve = ((PsiReference) psi).resolve();
} catch (Exception ex) {
// ignore... probably a bug, but it'll be caught and logged by some other operation.
}
if (resolve instanceof PsiClass) {
// XXX better way to do this?
ASTNode parentNode = node.getTreeParent();
IElementType parentType = parentNode != null ? parentNode.getElementType() : null;
if (((PsiClass) resolve).isAnnotationType()) {
key = GosuHighlighterColors.ANNOTATION_NAME_ATTRKEY;
} else if (parentType == GosuElementTypes.ELEM_TYPE_NewExpression && !isAnonymousClassExpr(parentNode)) {
// A class name directly inside `new ...` (but not an anonymous class body)
key = GosuHighlighterColors.CONSTRUCTOR_CALL_ATTRKEY;
} else if (isAbstractClass(resolve)) {
key = GosuHighlighterColors.ABSTRACT_CLASS_NAME_ATTRKEY;
} else if (((PsiClass) resolve).isInterface()) {
key = GosuHighlighterColors.INTERFACE_NAME_ATTRKEY;
}
} else if (resolve == null && psi instanceof GosuBaseElementImpl) {
// block literals show up as psi class refs in the tree, but can't be resolved so handle them here
IParsedElement pe = ((GosuBaseElementImpl) psi).getParsedElement();
if (pe instanceof IBlockLiteralExpression) {
key = GosuHighlighterColors.PARAMETER_ATTRKEY;
}
}
if (key == null) {
if (resolve != null) {
final ASTNode resolvedNode = resolve.getNode();
if (resolvedNode != null) {
// IntelliJ doesn't differentiate variable references from declarations
// If we want to, use _colorRef map here to enable reference distinction for Gosu
key = COLOR_FROM_REF_MAP.get(resolvedNode.getElementType());
} else if (resolve instanceof StubBasedPsiElement) {
key = COLOR_FROM_REF_MAP.get(((StubBasedPsiElement) resolve).getElementType());
}
} else {
// Unresolved: fall back to coloring by the surrounding construct
key = deriveFromParent(node);
}
}
}
return key;
}
/**
 * True when the node's PSI corresponds to a `new` expression that creates an
 * anonymous class (so its type reference should not be styled as a plain
 * constructor call).
 */
private boolean isAnonymousClassExpr(@NotNull ASTNode node) {
    final PsiElement psi = node.getPsi();
    if (!(psi instanceof GosuBaseElementImpl)) {
        return false;
    }
    final IParsedElement pe = ((GosuBaseElementImpl) psi).getParsedElement();
    return pe instanceof INewExpression && ((INewExpression) pe).isAnonymousClass();
}
/**
 * True when the element's parsed Gosu class is abstract; false for non-Gosu
 * elements or when no parsed element/class is available.
 */
private boolean isAbstractClass(@NotNull PsiElement psi) {
    if (!(psi instanceof GosuBaseElementImpl)) {
        return false;
    }
    final IParsedElement pe = ((GosuBaseElementImpl) psi).getParsedElement();
    if (pe == null) {
        return false;
    }
    final IGosuClass gosuClass = pe.getGosuClass();
    return gosuClass != null && gosuClass.isAbstract();
}
// Element-type -> highlight-color lookup; populated once in the static initializer below.
private static final Map<IElementType, TextAttributesKey> COLOR_MAP = new HashMap<>();
static {
// elements that control search flow
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_StatementList, GosuHighlighterColors.DEFAULT_ATTRKEY); // short circuit
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_NamespaceStatement, GosuHighlighterColors.DEFAULT_ATTRKEY); // todo use package qualifier?
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_NewExpression, GosuHighlighterColors.DEFAULT_ATTRKEY);
// elements with real data
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_ClassDeclaration, GosuHighlighterColors.CLASS_NAME_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.CLASS_DEFINITION, GosuHighlighterColors.CLASS_NAME_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.INTERFACE_DEFINITION, GosuHighlighterColors.INTERFACE_NAME_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.ENUM_DEFINITION, GosuHighlighterColors.ENUM_NAME_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.FIELD, GosuHighlighterColors.INSTANCE_FIELD_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.ENUM_CONSTANT, GosuHighlighterColors.STATIC_FIELD_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_VarStatement, GosuHighlighterColors.LOCAL_VARIABLE_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_LocalVarDeclaration, GosuHighlighterColors.LOCAL_VARIABLE_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_ParameterDeclaration, GosuHighlighterColors.PARAMETER_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.METHOD_DEFINITION, GosuHighlighterColors.METHOD_DECLARATION_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_MethodCallExpression, GosuHighlighterColors.METHOD_CALL_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.ELEM_TYPE_TypeVariableDefinitionExpression, GosuHighlighterColors.TYPE_VARIABLE_ATTRKEY);
// !PW Argh, this is awful! Need keyword/operator/brace flag on GosuElementType
// keywords and symbols
COLOR_MAP.put(GosuElementTypes.TT_true, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_false, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_NaN, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_Infinity, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_and, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_or, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_not, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_null, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_length, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_exists, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_in, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_out, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_startswith, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_contains, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_where, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_find, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_var, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_delegate, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_represents, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_as, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_typeof, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_statictypeof, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_typeis, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_typeas, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_print, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_package, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_uses, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_if, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_else, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_except, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_unless, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_foreach, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_for, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_index, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_while, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_do, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_continue, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_break, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_return, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_construct, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_function, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_property, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_get, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_set, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_try, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_catch, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_finally, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_this, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_throw, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_assert, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_new, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_switch, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_case, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_default, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_eval, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_private, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_internal, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_protected, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_public, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_abstract, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_override, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_hide, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_final, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_static, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_extends, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_transient, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_implements, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_readonly, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_class, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_interface, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_annotation, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_structure, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_enum, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_super, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_outer, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_execution, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_request, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_session, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_application, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_void, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_block, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_enhancement, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_classpath, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_typeloader, GosuHighlighterColors.KEYWORD);
COLOR_MAP.put(GosuElementTypes.TT_using, GosuHighlighterColors.KEYWORD);
//
// Operators
//
COLOR_MAP.put(GosuElementTypes.TT_OP_assign, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_greater, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_less, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_not_logical, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_not_bitwise, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_question, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_colon, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_ternary, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_equals, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_less_equals, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_not_equals, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_not_equals_for_losers, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_logical_and, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_logical_or, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_increment, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_decrement, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_identity, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_not_identity, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_expansion, GosuHighlighterColors.OPERATOR);
// Arithmetic operators
COLOR_MAP.put(GosuElementTypes.TT_OP_plus, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_minus, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_multiply, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_divide, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_modulo, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_bitwise_and, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_bitwise_or, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_bitwise_xor, GosuHighlighterColors.OPERATOR);
// Null-safe arithmetic operators
COLOR_MAP.put(GosuElementTypes.TT_OP_nullsafe_plus, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_nullsafe_minus, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_nullsafe_multiply, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_nullsafe_divide, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_nullsafe_modulo, GosuHighlighterColors.OPERATOR);
// Unchecked overflow arithmetic operators for integers
COLOR_MAP.put(GosuElementTypes.TT_OP_unchecked_plus, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_unchecked_minus, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_unchecked_multiply, GosuHighlighterColors.OPERATOR);
// Compound operators
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_plus, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_minus, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_multiply, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_divide, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_modulo, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_and, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_logical_and, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assing_or, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assing_logical_or, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_xor, GosuHighlighterColors.OPERATOR);
// Block operators
COLOR_MAP.put(GosuElementTypes.TT_OP_escape, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_closure, GosuHighlighterColors.OPERATOR);
// Member-access operators
COLOR_MAP.put(GosuElementTypes.TT_OP_dot, GosuHighlighterColors.DOT);
COLOR_MAP.put(GosuElementTypes.TT_OP_nullsafe_dot, GosuHighlighterColors.DOT);
// Null-safe array access
COLOR_MAP.put(GosuElementTypes.TT_OP_nullsafe_array_access, GosuHighlighterColors.OPERATOR);
// Interval operators
COLOR_MAP.put(GosuElementTypes.TT_OP_interval, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_interval_left_open, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_interval_right_open, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_interval_open, GosuHighlighterColors.OPERATOR);
// Feature Literals
COLOR_MAP.put(GosuElementTypes.TT_OP_feature_access, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_shift_left, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_shift_right, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_shift_right_unsigned, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_shift_left, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_shift_right, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_assign_shift_right_unsigned, GosuHighlighterColors.OPERATOR);
// Delimiters
COLOR_MAP.put(GosuElementTypes.TT_OP_brace_left, GosuHighlighterColors.BRACES);
COLOR_MAP.put(GosuElementTypes.TT_OP_brace_right, GosuHighlighterColors.BRACES);
COLOR_MAP.put(GosuElementTypes.TT_OP_paren_left, GosuHighlighterColors.PARENTHS);
COLOR_MAP.put(GosuElementTypes.TT_OP_paren_right, GosuHighlighterColors.PARENTHS);
COLOR_MAP.put(GosuElementTypes.TT_OP_bracket_left, GosuHighlighterColors.BRACKETS);
COLOR_MAP.put(GosuElementTypes.TT_OP_bracket_right, GosuHighlighterColors.BRACKETS);
COLOR_MAP.put(GosuElementTypes.TT_OP_quote_double, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_quote_single, GosuHighlighterColors.OPERATOR);
// Separators
COLOR_MAP.put(GosuElementTypes.TT_OP_at, GosuHighlighterColors.ANNOTATION_NAME_ATTRKEY);
COLOR_MAP.put(GosuElementTypes.TT_OP_dollar, GosuHighlighterColors.OPERATOR);
COLOR_MAP.put(GosuElementTypes.TT_OP_comma, GosuHighlighterColors.COMMA);
COLOR_MAP.put(GosuElementTypes.TT_OP_semicolon, GosuHighlighterColors.SEMICOLON);
}
// Lookup via referenced elements vs direct. Segregated if we ever want distinct colors for references (ala Eclipse)
// Maps the element type of a *resolved declaration* to the color used for identifiers that
// reference it. Kept separate from COLOR_MAP so reference highlighting could later diverge
// from token-level highlighting. Both Gosu and Java stub element types are registered so
// references into Java code highlight consistently.
private static final Map<IElementType, TextAttributesKey> COLOR_FROM_REF_MAP = new HashMap<>();
static {
// types
COLOR_FROM_REF_MAP.put(GosuElementTypes.CLASS_DEFINITION, GosuHighlighterColors.CLASS_NAME_ATTRKEY);
COLOR_FROM_REF_MAP.put(JavaStubElementTypes.CLASS, GosuHighlighterColors.CLASS_NAME_ATTRKEY);
COLOR_FROM_REF_MAP.put(GosuElementTypes.INTERFACE_DEFINITION, GosuHighlighterColors.INTERFACE_NAME_ATTRKEY);
COLOR_FROM_REF_MAP.put(GosuElementTypes.ENUM_DEFINITION, GosuHighlighterColors.ENUM_NAME_ATTRKEY);
// type variables
COLOR_FROM_REF_MAP.put(GosuElementTypes.ELEM_TYPE_TypeVariableDefinitionExpression, GosuHighlighterColors.TYPE_VARIABLE_ATTRKEY);
// methods
COLOR_FROM_REF_MAP.put(GosuElementTypes.METHOD_DEFINITION, GosuHighlighterColors.METHOD_CALL_ATTRKEY);
COLOR_FROM_REF_MAP.put(JavaStubElementTypes.METHOD, GosuHighlighterColors.METHOD_CALL_ATTRKEY);
// parameters
COLOR_FROM_REF_MAP.put(GosuElementTypes.ELEM_TYPE_ParameterDeclaration, GosuHighlighterColors.PARAMETER_ATTRKEY);
COLOR_FROM_REF_MAP.put(JavaStubElementTypes.PARAMETER, GosuHighlighterColors.PARAMETER_ATTRKEY);
// local variables
COLOR_FROM_REF_MAP.put(GosuElementTypes.ELEM_TYPE_VarStatement, GosuHighlighterColors.LOCAL_VARIABLE_ATTRKEY);
COLOR_FROM_REF_MAP.put(GosuElementTypes.ELEM_TYPE_LocalVarDeclaration, GosuHighlighterColors.LOCAL_VARIABLE_ATTRKEY);
// fields
COLOR_FROM_REF_MAP.put(GosuElementTypes.FIELD, GosuHighlighterColors.INSTANCE_FIELD_ATTRKEY);
COLOR_FROM_REF_MAP.put(JavaStubElementTypes.FIELD, GosuHighlighterColors.INSTANCE_FIELD_ATTRKEY);
// static fields
COLOR_FROM_REF_MAP.put(GosuElementTypes.ENUM_CONSTANT, GosuHighlighterColors.STATIC_FIELD_ATTRKEY);
COLOR_FROM_REF_MAP.put(JavaStubElementTypes.ENUM_CONSTANT, GosuHighlighterColors.STATIC_FIELD_ATTRKEY);
// properties
}
// Colors applied to reference expressions themselves, keyed by the element type of the
// resolved declaration. Only parameters and local vars get distinct reference coloring today.
// Uses new HashMap<>() for consistency with COLOR_FROM_REF_MAP; Maps.newHashMap() is a
// pre-diamond-operator Guava convenience that is no longer needed.
private static final Map<IElementType, TextAttributesKey> COLOR_FOR_REF_MAP = new HashMap<>();
static {
COLOR_FOR_REF_MAP.put(GosuElementTypes.ELEM_TYPE_ParameterDeclaration, GosuHighlighterColors.PARAMETER_ATTRKEY);
COLOR_FOR_REF_MAP.put(GosuElementTypes.ELEM_TYPE_VarStatement, GosuHighlighterColors.LOCAL_VARIABLE_ATTRKEY);
}
// TODO: use Keyword class here (or Guava immutable set)
// NOTE(review): this already IS a Guava ImmutableSet — the TODO's second option is done;
// confirm whether migrating to a Keyword class is still desired, otherwise drop the TODO.
// Reserved/contextual words highlighted as keywords. Includes Gosu-specific words
// ("enhancement", "typeis", "statictypeof", ...) alongside the Java-like ones.
private static final Set<String> GOSU_KEYWORDS = ImmutableSet.of(
"abstract", "and", "application", "as", "block",
"break", "case", "catch", "class", "classpath", "construct", "contains", "continue",
"default", "delegate", "do", "else", "enhancement", "enum", "eval", "except", "execution",
"exists", "extends", "false", "final", "finally", "find", "for", "foreach", "function",
"get", "hide", "if", "implements", "in", "out", "index", "interface", "internal", "length", "new",
"not", "null", "or", "outer", "override", "package", "private", "property", "protected",
"public", "readonly", "represents", "request",
"return",
"session", "set", "startswith", "static", "statictypeof", "super", "switch", "this", "throw",
"transient", "true", "try", "typeas", "typeis", "typeof", "unless", "uses", "using", "var",
"void", "where", "while", "Infinity", "NaN");
// Primitive type names highlighted distinctly from ordinary keywords.
// NOTE(review): "strictfp" is a modifier, not a type — presumably included here just to get
// the same highlight color; confirm that is intentional.
private static final Set<String> NATIVE_TYPES = ImmutableSet.of(
"boolean", "byte", "char", "double", "float",
"int", "long", "short", "strictfp", "void");
// Literal constants highlighted as keywords ("true"/"false" also appear in GOSU_KEYWORDS).
private static final Set<String> NATIVE_CONSTANTS = ImmutableSet.of(
"true", "false", "null");
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.net;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import io.netty.channel.WriteBufferWaterMark;
import org.apache.cassandra.auth.IInternodeAuthenticator;
import org.apache.cassandra.config.Config;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.config.EncryptionOptions;
import org.apache.cassandra.config.EncryptionOptions.ServerEncryptionOptions;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.locator.IEndpointSnitch;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.utils.FBUtilities;
import static org.apache.cassandra.config.DatabaseDescriptor.getEndpointSnitch;
import static org.apache.cassandra.net.MessagingService.VERSION_40;
import static org.apache.cassandra.net.MessagingService.instance;
import static org.apache.cassandra.net.SocketFactory.encryptionLogStatement;
import static org.apache.cassandra.utils.FBUtilities.getBroadcastAddressAndPort;
/**
 * A collection of settings to be passed around for outbound connections.
 * <p>
 * Instances are immutable: every {@code withX} method returns a copy with a single field
 * replaced. Fields left {@code null} fall back to cluster-wide defaults via the corresponding
 * no-arg accessor (e.g. {@link #encryption()}), and {@link #withDefaults} materialises all of
 * them at once before a connection is established.
 */
@SuppressWarnings({ "WeakerAccess", "unused" })
public class OutboundConnectionSettings
{
    private static final String INTRADC_TCP_NODELAY_PROPERTY = Config.PROPERTY_PREFIX + "otc_intradc_tcp_nodelay";
    /**
     * Enabled/disable TCP_NODELAY for intradc connections. Defaults to enabled.
     */
    private static final boolean INTRADC_TCP_NODELAY = Boolean.parseBoolean(System.getProperty(INTRADC_TCP_NODELAY_PROPERTY, "true"));

    /** Wire framing used by an outbound connection; ids are the on-wire discriminators. */
    public enum Framing
    {
        // for < VERSION_40, implies no framing
        // for >= VERSION_40, uses simple unprotected frames with header crc but no payload protection
        UNPROTECTED(0),
        // for < VERSION_40, uses the jpountz framing format
        // for >= VERSION_40, uses our framing format with header crc24
        LZ4(1),
        // for < VERSION_40, implies UNPROTECTED
        // for >= VERSION_40, uses simple frames with separate header and payload crc
        CRC(2);

        /** Decodes a wire id back into a Framing; throws IllegalStateException on unknown ids. */
        public static Framing forId(int id)
        {
            switch (id)
            {
                case 0: return UNPROTECTED;
                case 1: return LZ4;
                case 2: return CRC;
            }
            throw new IllegalStateException();
        }

        final int id;
        Framing(int id)
        {
            this.id = id;
        }
    }

    // All reference-typed fields are nullable; null means "use the cluster default"
    // (resolved by the matching accessor method below).
    public final IInternodeAuthenticator authenticator;
    public final InetAddressAndPort to;
    public final InetAddressAndPort connectTo; // may be represented by a different IP address on this node's local network
    public final EncryptionOptions encryption;
    public final Framing framing;
    public final Integer socketSendBufferSizeInBytes;
    public final Integer applicationSendQueueCapacityInBytes;
    public final Integer applicationSendQueueReserveEndpointCapacityInBytes;
    public final ResourceLimits.Limit applicationSendQueueReserveGlobalCapacityInBytes;
    public final Boolean tcpNoDelay;
    public final int flushLowWaterMark, flushHighWaterMark;
    public final Integer tcpConnectTimeoutInMS;
    public final Integer tcpUserTimeoutInMS;
    public final AcceptVersions acceptVersions;
    public final InetAddressAndPort from;
    public final SocketFactory socketFactory;
    public final OutboundMessageCallbacks callbacks;
    public final OutboundDebugCallbacks debug;
    public final EndpointMessagingVersions endpointToVersion;

    public OutboundConnectionSettings(InetAddressAndPort to)
    {
        this(to, null);
    }

    public OutboundConnectionSettings(InetAddressAndPort to, InetAddressAndPort preferred)
    {
        // 1 << 15 / 1 << 16 are the default netty flush low/high water marks (32KiB/64KiB)
        this(null, to, preferred, null, null, null, null, null, null, null, 1 << 15, 1 << 16, null, null, null, null, null, null, null, null);
    }

    /**
     * Canonical constructor; validates the size/timeout arguments that have hard lower bounds.
     * All {@code withX} copy-methods funnel through here.
     */
    private OutboundConnectionSettings(IInternodeAuthenticator authenticator,
                                       InetAddressAndPort to,
                                       InetAddressAndPort connectTo,
                                       EncryptionOptions encryption,
                                       Framing framing,
                                       Integer socketSendBufferSizeInBytes,
                                       Integer applicationSendQueueCapacityInBytes,
                                       Integer applicationSendQueueReserveEndpointCapacityInBytes,
                                       ResourceLimits.Limit applicationSendQueueReserveGlobalCapacityInBytes,
                                       Boolean tcpNoDelay,
                                       int flushLowWaterMark,
                                       int flushHighWaterMark,
                                       Integer tcpConnectTimeoutInMS,
                                       Integer tcpUserTimeoutInMS,
                                       AcceptVersions acceptVersions,
                                       InetAddressAndPort from,
                                       SocketFactory socketFactory,
                                       OutboundMessageCallbacks callbacks,
                                       OutboundDebugCallbacks debug,
                                       EndpointMessagingVersions endpointToVersion)
    {
        Preconditions.checkArgument(socketSendBufferSizeInBytes == null || socketSendBufferSizeInBytes == 0 || socketSendBufferSizeInBytes >= 1 << 10, "illegal socket send buffer size: " + socketSendBufferSizeInBytes);
        Preconditions.checkArgument(applicationSendQueueCapacityInBytes == null || applicationSendQueueCapacityInBytes >= 1 << 10, "illegal application send queue capacity: " + applicationSendQueueCapacityInBytes);
        Preconditions.checkArgument(tcpUserTimeoutInMS == null || tcpUserTimeoutInMS >= 0, "tcp user timeout must be non negative: " + tcpUserTimeoutInMS);
        Preconditions.checkArgument(tcpConnectTimeoutInMS == null || tcpConnectTimeoutInMS > 0, "tcp connect timeout must be positive: " + tcpConnectTimeoutInMS);

        this.authenticator = authenticator;
        this.to = to;
        this.connectTo = connectTo;
        this.encryption = encryption;
        this.framing = framing;
        this.socketSendBufferSizeInBytes = socketSendBufferSizeInBytes;
        this.applicationSendQueueCapacityInBytes = applicationSendQueueCapacityInBytes;
        this.applicationSendQueueReserveEndpointCapacityInBytes = applicationSendQueueReserveEndpointCapacityInBytes;
        this.applicationSendQueueReserveGlobalCapacityInBytes = applicationSendQueueReserveGlobalCapacityInBytes;
        this.tcpNoDelay = tcpNoDelay;
        this.flushLowWaterMark = flushLowWaterMark;
        this.flushHighWaterMark = flushHighWaterMark;
        this.tcpConnectTimeoutInMS = tcpConnectTimeoutInMS;
        this.tcpUserTimeoutInMS = tcpUserTimeoutInMS;
        this.acceptVersions = acceptVersions;
        this.from = from;
        this.socketFactory = socketFactory;
        this.callbacks = callbacks;
        this.debug = debug;
        this.endpointToVersion = endpointToVersion;
    }

    /** Authenticates the remote endpoint. */
    public boolean authenticate()
    {
        // Fixed: go through the authenticator() accessor so an unset (null) authenticator falls
        // back to the cluster default instead of throwing NPE — consistent with every other
        // nullable-field accessor in this class.
        return authenticator().authenticate(to.address, to.port);
    }

    /** True when explicit encryption options have been set on this instance. */
    public boolean withEncryption()
    {
        return encryption != null;
    }

    @Override
    public String toString()
    {
        return String.format("peer: (%s, %s), framing: %s, encryption: %s",
                             to, connectTo, framing, encryptionLogStatement(encryption));
    }

    // ---- copy-with-one-field-changed methods; each delegates to the canonical constructor ----

    public OutboundConnectionSettings withAuthenticator(IInternodeAuthenticator authenticator)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    @SuppressWarnings("unused")
    public OutboundConnectionSettings toEndpoint(InetAddressAndPort endpoint)
    {
        return new OutboundConnectionSettings(authenticator, endpoint, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withConnectTo(InetAddressAndPort connectTo)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withEncryption(ServerEncryptionOptions encryption)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    @SuppressWarnings("unused")
    public OutboundConnectionSettings withFraming(Framing framing)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing, socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withSocketSendBufferSizeInBytes(int socketSendBufferSizeInBytes)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    @SuppressWarnings("unused")
    public OutboundConnectionSettings withApplicationSendQueueCapacityInBytes(int applicationSendQueueCapacityInBytes)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withApplicationReserveSendQueueCapacityInBytes(Integer applicationReserveSendQueueEndpointCapacityInBytes, ResourceLimits.Limit applicationReserveSendQueueGlobalCapacityInBytes)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationReserveSendQueueEndpointCapacityInBytes, applicationReserveSendQueueGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    @SuppressWarnings("unused")
    public OutboundConnectionSettings withTcpNoDelay(boolean tcpNoDelay)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    // NOTE(review): the nettyBufferBounds parameter is ignored and the current water marks are
    // kept as-is — presumably a stub; confirm whether flushLow/HighWaterMark should be taken
    // from the supplied WriteBufferWaterMark here.
    @SuppressWarnings("unused")
    public OutboundConnectionSettings withNettyBufferBounds(WriteBufferWaterMark nettyBufferBounds)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withTcpConnectTimeoutInMS(int tcpConnectTimeoutInMS)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withTcpUserTimeoutInMS(int tcpUserTimeoutInMS)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withAcceptVersions(AcceptVersions acceptVersions)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withFrom(InetAddressAndPort from)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withSocketFactory(SocketFactory socketFactory)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withCallbacks(OutboundMessageCallbacks callbacks)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    public OutboundConnectionSettings withDebugCallbacks(OutboundDebugCallbacks debug)
    {
        return new OutboundConnectionSettings(authenticator, to, connectTo, encryption, framing,
                                              socketSendBufferSizeInBytes, applicationSendQueueCapacityInBytes,
                                              applicationSendQueueReserveEndpointCapacityInBytes, applicationSendQueueReserveGlobalCapacityInBytes,
                                              tcpNoDelay, flushLowWaterMark, flushHighWaterMark, tcpConnectTimeoutInMS,
                                              tcpUserTimeoutInMS, acceptVersions, from, socketFactory, callbacks, debug, endpointToVersion);
    }

    /** Fills in just the reserve-queue limits from cluster defaults, leaving other fields untouched. */
    public OutboundConnectionSettings withDefaultReserveLimits()
    {
        Integer applicationReserveSendQueueEndpointCapacityInBytes = this.applicationSendQueueReserveEndpointCapacityInBytes;
        ResourceLimits.Limit applicationReserveSendQueueGlobalCapacityInBytes = this.applicationSendQueueReserveGlobalCapacityInBytes;

        if (applicationReserveSendQueueEndpointCapacityInBytes == null)
            applicationReserveSendQueueEndpointCapacityInBytes = DatabaseDescriptor.getInternodeApplicationSendQueueReserveEndpointCapacityInBytes();
        if (applicationReserveSendQueueGlobalCapacityInBytes == null)
            applicationReserveSendQueueGlobalCapacityInBytes = MessagingService.instance().outboundGlobalReserveLimit;

        return withApplicationReserveSendQueueCapacityInBytes(applicationReserveSendQueueEndpointCapacityInBytes, applicationReserveSendQueueGlobalCapacityInBytes);
    }

    // ---- accessors resolving null fields to cluster-wide defaults ----

    public IInternodeAuthenticator authenticator()
    {
        return authenticator != null ? authenticator : DatabaseDescriptor.getInternodeAuthenticator();
    }

    public EndpointMessagingVersions endpointToVersion()
    {
        if (endpointToVersion == null)
            return instance().versions;
        return endpointToVersion;
    }

    public InetAddressAndPort from()
    {
        return from != null ? from : FBUtilities.getBroadcastAddressAndPort();
    }

    public OutboundDebugCallbacks debug()
    {
        return debug != null ? debug : OutboundDebugCallbacks.NONE;
    }

    public EncryptionOptions encryption()
    {
        return encryption != null ? encryption : defaultEncryptionOptions(to);
    }

    public SocketFactory socketFactory()
    {
        return socketFactory != null ? socketFactory : instance().socketFactory;
    }

    public OutboundMessageCallbacks callbacks()
    {
        return callbacks != null ? callbacks : instance().callbacks;
    }

    public int socketSendBufferSizeInBytes()
    {
        return socketSendBufferSizeInBytes != null ? socketSendBufferSizeInBytes
                                                   : DatabaseDescriptor.getInternodeSocketSendBufferSizeInBytes();
    }

    public int applicationSendQueueCapacityInBytes()
    {
        return applicationSendQueueCapacityInBytes != null ? applicationSendQueueCapacityInBytes
                                                           : DatabaseDescriptor.getInternodeApplicationSendQueueCapacityInBytes();
    }

    public ResourceLimits.Limit applicationSendQueueReserveGlobalCapacityInBytes()
    {
        return applicationSendQueueReserveGlobalCapacityInBytes != null ? applicationSendQueueReserveGlobalCapacityInBytes
                                                                        : instance().outboundGlobalReserveLimit;
    }

    public int applicationSendQueueReserveEndpointCapacityInBytes()
    {
        // Fixed: previously fell back to getInternodeApplicationReceiveQueueReserveEndpointCapacityInBytes()
        // (the *receive*-side setting — apparent copy/paste error); withDefaultReserveLimits()
        // already used the send-side default, so the two paths now agree.
        return applicationSendQueueReserveEndpointCapacityInBytes != null ? applicationSendQueueReserveEndpointCapacityInBytes
                                                                          : DatabaseDescriptor.getInternodeApplicationSendQueueReserveEndpointCapacityInBytes();
    }

    public int tcpConnectTimeoutInMS()
    {
        return tcpConnectTimeoutInMS != null ? tcpConnectTimeoutInMS
                                             : DatabaseDescriptor.getInternodeTcpConnectTimeoutInMS();
    }

    public int tcpUserTimeoutInMS()
    {
        return tcpUserTimeoutInMS != null ? tcpUserTimeoutInMS
                                          : DatabaseDescriptor.getInternodeTcpUserTimeoutInMS();
    }

    /** Resolves TCP_NODELAY: explicit setting wins, else intra-DC default, else inter-DC config. */
    public boolean tcpNoDelay()
    {
        if (tcpNoDelay != null)
            return tcpNoDelay;

        if (isInLocalDC(getEndpointSnitch(), getBroadcastAddressAndPort(), to))
            return INTRADC_TCP_NODELAY;

        return DatabaseDescriptor.getInterDCTcpNoDelay();
    }

    public AcceptVersions acceptVersions(ConnectionCategory category)
    {
        return acceptVersions != null ? acceptVersions
                                      : category.isStreaming()
                                        ? MessagingService.accept_streaming
                                        : MessagingService.accept_messaging;
    }

    public OutboundConnectionSettings withLegacyPortIfNecessary(int messagingVersion)
    {
        return withConnectTo(maybeWithSecurePort(connectTo(), messagingVersion, withEncryption()));
    }

    public InetAddressAndPort connectTo()
    {
        InetAddressAndPort connectTo = this.connectTo;
        if (connectTo == null)
            connectTo = SystemKeyspace.getPreferredIP(to);
        return connectTo;
    }

    /** Resolves framing: explicit wins; streaming is unframed; otherwise LZ4 iff compression applies. */
    public Framing framing(ConnectionCategory category)
    {
        if (framing != null)
            return framing;

        if (category.isStreaming())
            return Framing.UNPROTECTED;

        return shouldCompressConnection(getEndpointSnitch(), getBroadcastAddressAndPort(), to)
               ? Framing.LZ4 : Framing.CRC;
    }

    // note that connectTo is updated even if specified, in the case of pre40 messaging and using encryption (to update port)
    public OutboundConnectionSettings withDefaults(ConnectionCategory category)
    {
        if (to == null)
            throw new IllegalArgumentException();

        return new OutboundConnectionSettings(authenticator(), to, connectTo(),
                                              encryption(), framing(category),
                                              socketSendBufferSizeInBytes(), applicationSendQueueCapacityInBytes(),
                                              applicationSendQueueReserveEndpointCapacityInBytes(),
                                              applicationSendQueueReserveGlobalCapacityInBytes(),
                                              tcpNoDelay(), flushLowWaterMark, flushHighWaterMark,
                                              tcpConnectTimeoutInMS(), tcpUserTimeoutInMS(), acceptVersions(category),
                                              from(), socketFactory(), callbacks(), debug(), endpointToVersion());
    }

    private static boolean isInLocalDC(IEndpointSnitch snitch, InetAddressAndPort localHost, InetAddressAndPort remoteHost)
    {
        String remoteDC = snitch.getDatacenter(remoteHost);
        String localDC = snitch.getDatacenter(localHost);
        return remoteDC != null && remoteDC.equals(localDC);
    }

    @VisibleForTesting
    static EncryptionOptions defaultEncryptionOptions(InetAddressAndPort endpoint)
    {
        // NB: "Encyption" typo is in the upstream DatabaseDescriptor accessor name
        ServerEncryptionOptions options = DatabaseDescriptor.getInternodeMessagingEncyptionOptions();
        return options.shouldEncrypt(endpoint) ? options : null;
    }

    @VisibleForTesting
    static boolean shouldCompressConnection(IEndpointSnitch snitch, InetAddressAndPort localHost, InetAddressAndPort remoteHost)
    {
        return (DatabaseDescriptor.internodeCompression() == Config.InternodeCompression.all)
               || ((DatabaseDescriptor.internodeCompression() == Config.InternodeCompression.dc) && !isInLocalDC(snitch, localHost, remoteHost));
    }

    private static InetAddressAndPort maybeWithSecurePort(InetAddressAndPort address, int messagingVersion, boolean isEncrypted)
    {
        if (!isEncrypted || messagingVersion >= VERSION_40)
            return address;

        // if we don't know the version of the peer, assume it is 4.0 (or higher) as the only time is would be lower
        // (as in a 3.x version) is during a cluster upgrade (from 3.x to 4.0). In that case the outbound connection will
        // unfortunately fail - however the peer should connect to this node (at some point), and once we learn it's version, it'll be
        // in versions map. thus, when we attempt to reconnect to that node, we'll have the version and we can get the correct port.
        // we will be able to remove this logic at 5.0.
        // Also as of 4.0 we will propagate the "regular" port (which will support both SSL and non-SSL) via gossip so
        // for SSL and version 4.0 always connect to the gossiped port because if SSL is enabled it should ALWAYS
        // listen for SSL on the "regular" port.
        return address.withPort(DatabaseDescriptor.getSSLStoragePort());
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql;
import com.facebook.presto.sql.planner.DeterminismEvaluator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolsExtractor;
import com.facebook.presto.sql.tree.ComparisonExpression;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.ExpressionRewriter;
import com.facebook.presto.sql.tree.ExpressionTreeRewriter;
import com.facebook.presto.sql.tree.Identifier;
import com.facebook.presto.sql.tree.IsNullPredicate;
import com.facebook.presto.sql.tree.LambdaExpression;
import com.facebook.presto.sql.tree.LogicalBinaryExpression;
import com.facebook.presto.sql.tree.NotExpression;
import com.facebook.presto.sql.tree.SymbolReference;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Queue;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import static com.facebook.presto.sql.tree.BooleanLiteral.FALSE_LITERAL;
import static com.facebook.presto.sql.tree.BooleanLiteral.TRUE_LITERAL;
import static com.facebook.presto.sql.tree.ComparisonExpressionType.IS_DISTINCT_FROM;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
/**
 * Static helpers for building, decomposing and normalizing {@link Expression}
 * trees: conjunct/disjunct extraction, balanced AND/OR construction, literal
 * simplification, duplicate removal and NOT-comparison normalization.
 */
public final class ExpressionUtils
{
    private ExpressionUtils() {}

    /** Flattens a (possibly nested) AND tree into its list of conjuncts. */
    public static List<Expression> extractConjuncts(Expression expression)
    {
        return extractPredicates(LogicalBinaryExpression.Type.AND, expression);
    }

    /** Flattens a (possibly nested) OR tree into its list of disjuncts. */
    public static List<Expression> extractDisjuncts(Expression expression)
    {
        return extractPredicates(LogicalBinaryExpression.Type.OR, expression);
    }

    /** Flattens {@code expression} using its own logical operator as the split type. */
    public static List<Expression> extractPredicates(LogicalBinaryExpression expression)
    {
        return extractPredicates(expression.getType(), expression);
    }

    /**
     * Recursively collects the operands joined by {@code type}; any
     * sub-expression using a different operator is returned whole.
     */
    public static List<Expression> extractPredicates(LogicalBinaryExpression.Type type, Expression expression)
    {
        if (expression instanceof LogicalBinaryExpression && ((LogicalBinaryExpression) expression).getType() == type) {
            LogicalBinaryExpression logicalBinaryExpression = (LogicalBinaryExpression) expression;
            return ImmutableList.<Expression>builder()
                    .addAll(extractPredicates(type, logicalBinaryExpression.getLeft()))
                    .addAll(extractPredicates(type, logicalBinaryExpression.getRight()))
                    .build();
        }
        return ImmutableList.of(expression);
    }

    public static Expression and(Expression... expressions)
    {
        return and(Arrays.asList(expressions));
    }

    /** Builds a balanced AND tree over {@code expressions} (must be non-empty). */
    public static Expression and(Collection<Expression> expressions)
    {
        return binaryExpression(LogicalBinaryExpression.Type.AND, expressions);
    }

    public static Expression or(Expression... expressions)
    {
        return or(Arrays.asList(expressions));
    }

    /** Builds a balanced OR tree over {@code expressions} (must be non-empty). */
    public static Expression or(Collection<Expression> expressions)
    {
        return binaryExpression(LogicalBinaryExpression.Type.OR, expressions);
    }

    /**
     * Combines {@code expressions} into a balanced binary tree of {@code type}
     * nodes, preserving the left-to-right order of the input.
     *
     * @throws IllegalArgumentException if {@code expressions} is empty
     */
    public static Expression binaryExpression(LogicalBinaryExpression.Type type, Collection<Expression> expressions)
    {
        requireNonNull(type, "type is null");
        requireNonNull(expressions, "expressions is null");
        Preconditions.checkArgument(!expressions.isEmpty(), "expressions is empty");
        // Build balanced tree for efficient recursive processing that
        // preserves the evaluation order of the input expressions.
        //
        // The tree is built bottom up by combining pairs of elements into
        // binary expressions of the requested type.
        //
        // Example:
        //
        // Initial state:
        // a b c d e
        //
        // First iteration:
        //
        // /\ /\ e
        // a b c d
        //
        // Second iteration:
        //
        // / \ e
        // /\ /\
        // a b c d
        //
        //
        // Last iteration:
        //
        // / \
        // / \ e
        // /\ /\
        // a b c d
        Queue<Expression> queue = new ArrayDeque<>(expressions);
        while (queue.size() > 1) {
            Queue<Expression> buffer = new ArrayDeque<>();
            // combine pairs of elements
            while (queue.size() >= 2) {
                buffer.add(new LogicalBinaryExpression(type, queue.remove(), queue.remove()));
            }
            // if there's an odd number of elements, just append the last one
            if (!queue.isEmpty()) {
                buffer.add(queue.remove());
            }
            // continue processing the pairs that were just built
            queue = buffer;
        }
        return queue.remove();
    }

    public static Expression combinePredicates(LogicalBinaryExpression.Type type, Expression... expressions)
    {
        return combinePredicates(type, Arrays.asList(expressions));
    }

    /** Dispatches to {@link #combineConjuncts} or {@link #combineDisjuncts} based on {@code type}. */
    public static Expression combinePredicates(LogicalBinaryExpression.Type type, Collection<Expression> expressions)
    {
        if (type == LogicalBinaryExpression.Type.AND) {
            return combineConjuncts(expressions);
        }
        return combineDisjuncts(expressions);
    }

    public static Expression combineConjuncts(Expression... expressions)
    {
        return combineConjuncts(Arrays.asList(expressions));
    }

    /** ANDs the expressions together with literal/duplicate simplification; empty input yields TRUE. */
    public static Expression combineConjuncts(Collection<Expression> expressions)
    {
        return combineConjunctsWithDefault(expressions, TRUE_LITERAL);
    }

    /**
     * ANDs the expressions together after flattening nested conjuncts, dropping
     * TRUE literals and deterministic duplicates. Returns FALSE if any remaining
     * conjunct is the FALSE literal, and {@code emptyDefault} if nothing remains.
     */
    public static Expression combineConjunctsWithDefault(Collection<Expression> expressions, Expression emptyDefault)
    {
        requireNonNull(expressions, "expressions is null");
        List<Expression> conjuncts = expressions.stream()
                .flatMap(e -> ExpressionUtils.extractConjuncts(e).stream())
                .filter(e -> !e.equals(TRUE_LITERAL))
                .collect(toList());
        conjuncts = removeDuplicates(conjuncts);
        if (conjuncts.contains(FALSE_LITERAL)) {
            return FALSE_LITERAL;
        }
        return conjuncts.isEmpty() ? emptyDefault : and(conjuncts);
    }

    public static Expression combineDisjuncts(Expression... expressions)
    {
        return combineDisjuncts(Arrays.asList(expressions));
    }

    /** ORs the expressions together with literal/duplicate simplification; empty input yields FALSE. */
    public static Expression combineDisjuncts(Collection<Expression> expressions)
    {
        return combineDisjunctsWithDefault(expressions, FALSE_LITERAL);
    }

    /**
     * ORs the expressions together after flattening nested disjuncts, dropping
     * FALSE literals and deterministic duplicates. Returns TRUE if any remaining
     * disjunct is the TRUE literal, and {@code emptyDefault} if nothing remains.
     */
    public static Expression combineDisjunctsWithDefault(Collection<Expression> expressions, Expression emptyDefault)
    {
        requireNonNull(expressions, "expressions is null");
        List<Expression> disjuncts = expressions.stream()
                .flatMap(e -> ExpressionUtils.extractDisjuncts(e).stream())
                .filter(e -> !e.equals(FALSE_LITERAL))
                .collect(toList());
        disjuncts = removeDuplicates(disjuncts);
        if (disjuncts.contains(TRUE_LITERAL)) {
            return TRUE_LITERAL;
        }
        return disjuncts.isEmpty() ? emptyDefault : or(disjuncts);
    }

    /** Keeps only the deterministic conjuncts of {@code expression}. */
    public static Expression filterDeterministicConjuncts(Expression expression)
    {
        return filterConjuncts(expression, DeterminismEvaluator::isDeterministic);
    }

    /** Keeps only the non-deterministic conjuncts of {@code expression}. */
    public static Expression filterNonDeterministicConjuncts(Expression expression)
    {
        return filterConjuncts(expression, not(DeterminismEvaluator::isDeterministic));
    }

    /** Keeps only the conjuncts of {@code expression} that satisfy {@code predicate}. */
    public static Expression filterConjuncts(Expression expression, Predicate<Expression> predicate)
    {
        List<Expression> conjuncts = extractConjuncts(expression).stream()
                .filter(predicate)
                .collect(toList());
        return combineConjuncts(conjuncts);
    }

    /** Returns true if {@code expression} references any of the given symbols. */
    public static boolean referencesAny(Expression expression, Collection<Symbol> variables)
    {
        Set<Symbol> references = SymbolsExtractor.extractUnique(expression);
        return variables.stream().anyMatch(references::contains);
    }

    /**
     * Returns a function that rewrites an expression into a disjunction of the
     * expression itself and, for each symbol scope, the conjunction of IS NULL
     * predicates over the referenced symbols matching that scope. Scopes that
     * match no referenced symbol are skipped.
     */
    @SafeVarargs
    public static Function<Expression, Expression> expressionOrNullSymbols(final Predicate<Symbol>... nullSymbolScopes)
    {
        return expression -> {
            ImmutableList.Builder<Expression> resultDisjunct = ImmutableList.builder();
            resultDisjunct.add(expression);
            for (Predicate<Symbol> nullSymbolScope : nullSymbolScopes) {
                List<Symbol> symbols = SymbolsExtractor.extractUnique(expression).stream()
                        .filter(nullSymbolScope)
                        .collect(toImmutableList());
                if (symbols.isEmpty()) {
                    continue;
                }
                ImmutableList.Builder<Expression> nullConjuncts = ImmutableList.builder();
                for (Symbol symbol : symbols) {
                    nullConjuncts.add(new IsNullPredicate(symbol.toSymbolReference()));
                }
                resultDisjunct.add(and(nullConjuncts.build()));
            }
            return or(resultDisjunct.build());
        };
    }

    /**
     * Removes duplicate deterministic expressions. Preserves the relative order
     * of the expressions in the list.
     */
    private static List<Expression> removeDuplicates(List<Expression> expressions)
    {
        Set<Expression> seen = new HashSet<>();
        ImmutableList.Builder<Expression> result = ImmutableList.builder();
        for (Expression expression : expressions) {
            // Non-deterministic expressions may produce a different value on each
            // evaluation, so every occurrence is kept; deterministic duplicates
            // are dropped (Set.add() returns false for an already-seen element).
            if (!DeterminismEvaluator.isDeterministic(expression) || seen.add(expression)) {
                result.add(expression);
            }
        }
        return result.build();
    }

    /**
     * Pushes a NOT over a comparison into the negated comparison operator
     * (except IS DISTINCT FROM, which has no negated form here) and collapses
     * double negation. Any other expression is returned unchanged.
     */
    public static Expression normalize(Expression expression)
    {
        if (expression instanceof NotExpression) {
            NotExpression not = (NotExpression) expression;
            if (not.getValue() instanceof ComparisonExpression && ((ComparisonExpression) not.getValue()).getType() != IS_DISTINCT_FROM) {
                ComparisonExpression comparison = (ComparisonExpression) not.getValue();
                return new ComparisonExpression(comparison.getType().negate(), comparison.getLeft(), comparison.getRight());
            }
            if (not.getValue() instanceof NotExpression) {
                return normalize(((NotExpression) not.getValue()).getValue());
            }
        }
        return expression;
    }

    /**
     * Rewrites every {@link Identifier} in {@code expression} into a
     * {@link SymbolReference} of the same name; lambda bodies are rewritten
     * recursively while their argument lists are preserved.
     */
    public static Expression rewriteIdentifiersToSymbolReferences(Expression expression)
    {
        return ExpressionTreeRewriter.rewriteWith(new ExpressionRewriter<Void>()
        {
            @Override
            public Expression rewriteIdentifier(Identifier node, Void context, ExpressionTreeRewriter<Void> treeRewriter)
            {
                return new SymbolReference(node.getValue());
            }

            @Override
            public Expression rewriteLambdaExpression(LambdaExpression node, Void context, ExpressionTreeRewriter<Void> treeRewriter)
            {
                return new LambdaExpression(node.getArguments(), treeRewriter.rewrite(node.getBody(), context));
            }
        }, expression);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.scripting;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.impl.PigContext;
import org.apache.pig.tools.pigstats.PigStats;
/**
 * Base class for various scripting implementations. Provides language
 * auto-detection (by shebang line or file extension), script-stream loading
 * from the file system or classpath, engine instantiation by keyword or class
 * name, and bookkeeping of {@link PigStats} produced by script runs.
 */
public abstract class ScriptEngine {

    /**
     * Scripting languages supported by Pig, with the shebang interpreter names
     * and file extensions used to auto-detect them.
     */
    public static enum SupportedScriptLang {

        // possibly jruby in the future
        jruby(new String[]{"ruby", "jruby"}, new String[]{"rb"}, "org.apache.pig.scripting.jruby.JrubyScriptEngine"),
        jython(new String[]{"python", "jython"}, new String[]{"py"}, "org.apache.pig.scripting.jython.JythonScriptEngine"),
        javascript(new String[]{}, new String[]{"js"}, "org.apache.pig.scripting.js.JsScriptEngine");

        /** Immutable set of the enum constant names, used for keyword lookup. */
        private static Set<String> supportedScriptLangs;
        static {
            supportedScriptLangs = new HashSet<String>();
            for (SupportedScriptLang value : SupportedScriptLang.values()) {
                supportedScriptLangs.add(value.name());
            }
            supportedScriptLangs = Collections.unmodifiableSet(supportedScriptLangs);
        }

        /**
         * @param supportedScriptLang a language keyword (e.g. "jython")
         * @return true if the keyword names a supported script language
         */
        public static boolean contains(String supportedScriptLang) {
            return supportedScriptLangs.contains(supportedScriptLang);
        }

        /** Interpreter names matched against a "#!" first line. */
        private String[] shebangs;
        /** File extensions (without the dot) recognized for this language. */
        private String[] extensions;
        /** Class implementing the engine. As a string as dependencies are possibly not on the class path*/
        private String engineClassName;

        private SupportedScriptLang(String[] shebangs, String[] extensions, String engineClassName) {
            this.shebangs = shebangs;
            this.extensions = extensions;
            this.engineClassName = engineClassName;
        }

        /**
         * Decides whether this language claims the given script, first by
         * shebang line, then by file extension.
         * If other discovery mechanisms come up they can also override accepts()
         * @param file the path of the file
         * @param firstLine The first line of the file (possibly containing #!...)
         * @return true if this language accepts the script
         */
        public boolean accepts(String file, String firstLine) {
            if( firstLine == null )
                return false;
            for (String shebang : shebangs) {
                Pattern p = Pattern.compile("^#!.*/" + shebang + "\\s*$");
                if (p.matcher(firstLine).matches()) {
                    return true;
                }
            }
            for (String ext : extensions) {
                if (file.endsWith("."+ext)) {
                    return true;
                }
            }
            return false;
        }

        /** @return the fully qualified class name implementing this engine */
        public String getEngineClassName() {
            return engineClassName;
        }
    }

    /** Matches any "#!..." first line. */
    private static final Pattern shebangPattern = Pattern.compile("^#!.+");

    /**
     * @param firstLine first line of a script (may be null)
     * @return true if the line declares a shebang
     */
    private static boolean declaresShebang(String firstLine) {
        if( firstLine == null )
            return false;
        return shebangPattern.matcher(firstLine).matches();
    }

    /**
     * open a stream load a script locally or in the classpath
     * @param scriptPath the path of the script
     * @return a stream (it is the responsibility of the caller to close it)
     * @throws IllegalStateException if we could not open a stream
     */
    public static InputStream getScriptAsStream(String scriptPath) {
        InputStream is = null;
        File file = new File(scriptPath);
        if (file.exists()) {
            try {
                is = new FileInputStream(file);
            } catch (FileNotFoundException e) {
                // exists() raced with deletion, or the path is unreadable
                throw new IllegalStateException("could not find existing file "+scriptPath, e);
            }
        } else {
            // Try system, current and context classloader.
            is = ScriptEngine.class.getResourceAsStream(scriptPath);
            if (is == null) {
                is = getResourceUsingClassLoader(scriptPath, ScriptEngine.class.getClassLoader());
            }
            if (is == null) {
                is = getResourceUsingClassLoader(scriptPath, Thread.currentThread().getContextClassLoader());
            }
            // Retry a relative path as an absolute classpath resource.
            if (is == null && !file.isAbsolute()) {
                String path = "/" + scriptPath;
                is = ScriptEngine.class.getResourceAsStream(path);
                if (is == null) {
                    is = getResourceUsingClassLoader(path, ScriptEngine.class.getClassLoader());
                }
                if (is == null) {
                    is = getResourceUsingClassLoader(path, Thread.currentThread().getContextClassLoader());
                }
            }
        }
        // TODO: discuss if we want to add logic here to load a script from HDFS
        if (is == null) {
            throw new IllegalStateException(
                    "Could not initialize interpreter (from file system or classpath) with " + scriptPath);
        }
        return is;
    }

    /**
     * @param fullFilename classpath resource name
     * @param loader class loader to search (may be null)
     * @return the resource stream, or null if the loader is null or the resource is absent
     */
    private static InputStream getResourceUsingClassLoader(String fullFilename, ClassLoader loader) {
        if (loader != null) {
            return loader.getResourceAsStream(fullFilename);
        }
        return null;
    }

    /** Separator between a registration namespace and a function name. */
    public static final String NAMESPACE_SEPARATOR = ".";

    /**
     * @param file the file to inspect
     * @return the Supported Script Lang if this is a supported script language,
     *         null if the file matches no known language and has no shebang
     * @throws IOException if there was an error reading the file or if the file defines explicitly an unknown #!
     */
    public static SupportedScriptLang getSupportedScriptLang(String file) throws IOException {
        BufferedReader br = new BufferedReader(new FileReader(file));
        String firstLine;
        try {
            firstLine = br.readLine();
        } finally {
            br.close();
        }
        for (SupportedScriptLang supportedScriptLang : SupportedScriptLang.values()) {
            if (supportedScriptLang.accepts(file, firstLine)) {
                return supportedScriptLang;
            }
        }
        // An explicit shebang for a language we do not know is an error,
        // not a silent null.
        if (declaresShebang(firstLine)) {
            throw new IOException("Unsupported script type is specified: " + firstLine);
        }
        return null;
    }

    /** Stats per pipeline name (or script id for unnamed pipelines). */
    private Map<String, List<PigStats>> statsMap = new HashMap<String, List<PigStats>>();

    /**
     * Registers scripting language functions as Pig functions with given namespace
     *
     * @param path path of the script
     * @param namespace namespace for the functions
     * @param pigContext pigcontext to register functions to pig in the given namespace
     * @throws IOException
     */
    public abstract void registerFunctions(String path, String namespace,
            PigContext pigContext) throws IOException;

    /**
     * Actually runs the script file. This method will be implemented by
     * individual script engines.
     *
     * @param context {@link ScriptPigContext} to run the script file
     * @param scriptFile the file
     * @throws IOException
     */
    protected abstract Map<String, List<PigStats>> main(
            PigContext context, String scriptFile) throws IOException;

    /**
     * Gets ScriptEngine classname or keyword for the scripting language
     */
    protected abstract String getScriptingLang();

    /**
     * Returns a map from local variable names to their values
     * @throws IOException
     */
    protected abstract Map<String, Object> getParamsFromVariables()
            throws IOException;

    /**
     * Figures out the jar location from the class
     * @param clazz class in the jar file
     * @return the jar file location, null if the class was not loaded from a jar
     * @throws FileNotFoundException if the class resource cannot be located or
     *         was not loaded from a jar
     */
    public static String getJarPath(Class<?> clazz)
            throws FileNotFoundException {
        URL resource = clazz.getClassLoader().getResource(
                clazz.getCanonicalName().replace(".", "/") + ".class");
        // getResource() returns null when the class file cannot be located;
        // report that via the documented exception instead of an NPE.
        if (resource != null && resource.getProtocol().equals("jar")) {
            return resource.getPath().substring(
                    resource.getPath().indexOf(':') + 1,
                    resource.getPath().indexOf('!'));
        }
        throw new FileNotFoundException("Jar for " + clazz.getName()
                + " class is not found");
    }

    /**
     * Gets instance of the scriptEngine for the given scripting language
     *
     * @param scriptingLang ScriptEngine classname or keyword for the scriptingLang
     * @return scriptengine for the given scripting language
     * @throws IOException
     */
    public static ScriptEngine getInstance(String scriptingLang)
            throws IOException {
        String scriptingEngine = scriptingLang;
        try {
            // A known keyword is resolved to its engine class; anything else is
            // treated as a fully qualified engine class name.
            if (SupportedScriptLang.contains(scriptingLang)) {
                SupportedScriptLang supportedScriptLang = SupportedScriptLang.valueOf(scriptingLang);
                scriptingEngine = supportedScriptLang.getEngineClassName();
            }
            return (ScriptEngine) Class.forName(scriptingEngine).newInstance();
        } catch (Exception e) {
            throw new IOException("Could not load ScriptEngine: "
                    + scriptingEngine + " for "+scriptingLang+" (Supported langs: "+SupportedScriptLang.supportedScriptLangs+") : " + e, e);
        }
    }

    /**
     * Runs a script file.
     * @param pigContext {@link PigContext} to run the script file
     * @param scriptFile the file
     * @return a collection of {@link PigStats} objects. One for each runtime
     *         instance of {@link Pig} in the script. For named pipelines, the
     *         map key is the name of the pipeline. For unnamed pipeline, the map key
     *         is the script id associated with the instance.
     * @throws ExecException
     * @throws IOException
     */
    public Map<String, List<PigStats>> run(PigContext pigContext, String scriptFile)
            throws ExecException, IOException {
        ScriptPigContext.set(pigContext, this);
        return main(pigContext, scriptFile);
    }

    /**
     * Gets the collection of {@link PigStats} after the script is run.
     */
    protected Map<String, List<PigStats>> getPigStatsMap() {
        return statsMap;
    }

    /** Appends {@code stats} to the list kept for {@code key}, creating the list if needed. */
    void setPigStats(String key, PigStats stats) {
        List<PigStats> lst = statsMap.get(key);
        if (lst == null) {
            lst = new ArrayList<PigStats>();
            statsMap.put(key, lst);
        }
        lst.add(stats);
    }

    /** Replaces the whole stats list kept for {@code key}. */
    void setPigStats(String key, List<PigStats> stats) {
        statsMap.put(key, stats);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal;
import org.apache.ignite.*;
import org.apache.ignite.cluster.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.events.*;
import org.apache.ignite.internal.processors.task.*;
import org.apache.ignite.internal.util.lang.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.messaging.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
import org.apache.ignite.testframework.*;
import org.apache.ignite.testframework.junits.common.*;
import java.io.*;
import java.util.*;
import java.util.concurrent.*;
import static org.apache.ignite.events.EventType.*;
/**
 * Grid node metrics self test: verifies job, task, I/O and cluster-wide
 * metrics reported by {@code ClusterNode#metrics()}.
 */
@GridCommonTest(group = "Kernal Self")
public class ClusterNodeMetricsSelfTest extends GridCommonAbstractTest {
    /** Shared IP finder so all grids started by this test discover each other. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Test message size. */
    private static final int MSG_SIZE = 1024;

    /** Number of messages. */
    private static final int MSG_CNT = 3;

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        startGrid();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        TcpDiscoverySpi spi = new TcpDiscoverySpi();

        spi.setIpFinder(IP_FINDER);

        cfg.setDiscoverySpi(spi);
        cfg.setCacheConfiguration();

        // Update metrics as frequently as possible so the test does not block long.
        cfg.setMetricsUpdateFrequency(0);

        return cfg;
    }

    /**
     * Executes one user task and checks that job metrics reflect exactly one execution.
     *
     * @throws Exception If failed.
     */
    public void testSingleTaskMetrics() throws Exception {
        Ignite ignite = grid();

        ignite.compute().execute(new GridTestTask(), "testArg");

        // Let metrics update twice.
        final CountDownLatch latch = new CountDownLatch(2);

        ignite.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                assert evt.type() == EVT_NODE_METRICS_UPDATED;

                latch.countDown();

                return true;
            }
        }, EVT_NODE_METRICS_UPDATED);

        // Wait for metrics update.
        latch.await();

        ClusterMetrics metrics = ignite.cluster().localNode().metrics();

        info("Node metrics: " + metrics);

        assert metrics.getAverageActiveJobs() > 0;
        assert metrics.getAverageCancelledJobs() == 0;
        assert metrics.getAverageJobExecuteTime() >= 0;
        assert metrics.getAverageJobWaitTime() >= 0;
        assert metrics.getAverageRejectedJobs() == 0;
        assert metrics.getAverageWaitingJobs() == 0;
        assert metrics.getCurrentActiveJobs() == 0;
        assert metrics.getCurrentCancelledJobs() == 0;
        assert metrics.getCurrentJobExecuteTime() == 0;
        assert metrics.getCurrentJobWaitTime() == 0;
        assert metrics.getCurrentWaitingJobs() == 0;
        assert metrics.getMaximumActiveJobs() == 1;
        assert metrics.getMaximumCancelledJobs() == 0;
        assert metrics.getMaximumJobExecuteTime() >= 0;
        assert metrics.getMaximumJobWaitTime() >= 0;
        assert metrics.getMaximumRejectedJobs() == 0;
        assert metrics.getMaximumWaitingJobs() == 0;
        assert metrics.getTotalCancelledJobs() == 0;
        assert metrics.getTotalExecutedJobs() == 1;
        assert metrics.getTotalRejectedJobs() == 0;
        assert metrics.getTotalExecutedTasks() == 1;

        assertTrue("MaximumJobExecuteTime=" + metrics.getMaximumJobExecuteTime() +
            " is less than AverageJobExecuteTime=" + metrics.getAverageJobExecuteTime(),
            metrics.getMaximumJobExecuteTime() >= metrics.getAverageJobExecuteTime());
    }

    /**
     * Executes an internal ({@link GridInternal}) task and checks that job
     * metrics remain untouched.
     *
     * @throws Exception If failed.
     */
    public void testInternalTaskMetrics() throws Exception {
        Ignite ignite = grid();

        // Visor task is internal and should not affect metrics.
        ignite.compute().withName("visor-test-task").execute(new TestInternalTask(), "testArg");

        // Let metrics update twice.
        final CountDownLatch latch = new CountDownLatch(2);

        ignite.events().localListen(new IgnitePredicate<Event>() {
            @Override public boolean apply(Event evt) {
                assert evt.type() == EVT_NODE_METRICS_UPDATED;

                latch.countDown();

                return true;
            }
        }, EVT_NODE_METRICS_UPDATED);

        // Wait for metrics update.
        latch.await();

        ClusterMetrics metrics = ignite.cluster().localNode().metrics();

        info("Node metrics: " + metrics);

        assert metrics.getAverageActiveJobs() == 0;
        assert metrics.getAverageCancelledJobs() == 0;
        assert metrics.getAverageJobExecuteTime() == 0;
        assert metrics.getAverageJobWaitTime() == 0;
        assert metrics.getAverageRejectedJobs() == 0;
        assert metrics.getAverageWaitingJobs() == 0;
        assert metrics.getCurrentActiveJobs() == 0;
        assert metrics.getCurrentCancelledJobs() == 0;
        assert metrics.getCurrentJobExecuteTime() == 0;
        assert metrics.getCurrentJobWaitTime() == 0;
        assert metrics.getCurrentWaitingJobs() == 0;
        assert metrics.getMaximumActiveJobs() == 0;
        assert metrics.getMaximumCancelledJobs() == 0;
        assert metrics.getMaximumJobExecuteTime() == 0;
        assert metrics.getMaximumJobWaitTime() == 0;
        assert metrics.getMaximumRejectedJobs() == 0;
        assert metrics.getMaximumWaitingJobs() == 0;
        assert metrics.getTotalCancelledJobs() == 0;
        assert metrics.getTotalExecutedJobs() == 0;
        assert metrics.getTotalRejectedJobs() == 0;
        assert metrics.getTotalExecutedTasks() == 0;

        assertTrue("MaximumJobExecuteTime=" + metrics.getMaximumJobExecuteTime() +
            " is less than AverageJobExecuteTime=" + metrics.getAverageJobExecuteTime(),
            metrics.getMaximumJobExecuteTime() >= metrics.getAverageJobExecuteTime());
    }

    /**
     * Sends messages between two nodes and checks sent/received message and
     * byte counters on both sides.
     *
     * @throws Exception If failed.
     */
    public void testIoMetrics() throws Exception {
        Ignite ignite0 = grid();
        Ignite ignite1 = startGrid(1);

        Object msg = new TestMessage();

        int size = ignite0.configuration().getMarshaller().marshal(msg).length;

        assert size > MSG_SIZE;

        final CountDownLatch latch = new CountDownLatch(MSG_CNT);

        ignite0.message().localListen(null, new MessagingListenActor<TestMessage>() {
            @Override protected void receive(UUID nodeId, TestMessage rcvMsg) throws Throwable {
                latch.countDown();
            }
        });

        ignite1.message().localListen(null, new MessagingListenActor<TestMessage>() {
            @Override protected void receive(UUID nodeId, TestMessage rcvMsg) throws Throwable {
                // Echo the message back so node 0 counts received traffic too.
                respond(rcvMsg);
            }
        });

        for (int i = 0; i < MSG_CNT; i++)
            message(ignite0.cluster().forRemotes()).send(null, msg);

        latch.await();

        ClusterMetrics metrics = ignite0.cluster().localNode().metrics();

        info("Node 0 metrics: " + metrics);

        // Time sync messages are being sent.
        assert metrics.getSentMessagesCount() >= MSG_CNT;
        assert metrics.getSentBytesCount() > size * MSG_CNT;
        assert metrics.getReceivedMessagesCount() >= MSG_CNT;
        assert metrics.getReceivedBytesCount() > size * MSG_CNT;

        metrics = ignite1.cluster().localNode().metrics();

        info("Node 1 metrics: " + metrics);

        // Time sync messages are being sent.
        assert metrics.getSentMessagesCount() >= MSG_CNT;
        assert metrics.getSentBytesCount() > size * MSG_CNT;
        assert metrics.getReceivedMessagesCount() >= MSG_CNT;
        assert metrics.getReceivedBytesCount() > size * MSG_CNT;
    }

    /**
     * Checks aggregated metrics for a node group versus a single node.
     *
     * @throws Exception If failed.
     */
    public void testClusterNodeMetrics() throws Exception {
        final Ignite ignite0 = grid();
        final Ignite ignite1 = startGrid(1);

        // The returned flag must be checked: ignoring it lets the test proceed
        // on an incomplete topology and fail later with a confusing assertion.
        assertTrue("Failed to wait for 2-node topology on both nodes.",
            GridTestUtils.waitForCondition(new GridAbsPredicate() {
                @Override
                public boolean apply() {
                    return ignite0.cluster().nodes().size() == 2 && ignite1.cluster().nodes().size() == 2;
                }
            }, 3000L));

        ClusterMetrics metrics0 = ignite0.cluster().localNode().metrics();

        ClusterMetrics nodesMetrics =
            ignite0.cluster().forNode(ignite0.cluster().localNode(), ignite1.cluster().localNode()).metrics();

        assertEquals(metrics0.getTotalCpus(), nodesMetrics.getTotalCpus());
        assertEquals(1, metrics0.getTotalNodes());
        assertEquals(2, nodesMetrics.getTotalNodes());

        assert metrics0.getHeapMemoryUsed() > 0;
        assert metrics0.getHeapMemoryTotal() > 0;
        assert metrics0.getNonHeapMemoryMaximum() > 0;
    }

    /**
     * Test message.
     */
    @SuppressWarnings("UnusedDeclaration")
    private static class TestMessage implements Serializable {
        /** Payload sized so the marshalled form exceeds MSG_SIZE. */
        private final byte[] arr = new byte[MSG_SIZE];
    }

    /**
     * Test internal task.
     */
    @GridInternal
    private static class TestInternalTask extends GridTestTask {
        // No-op.
    }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver12;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
import java.util.Arrays;
class OFPacketInVer12 implements OFPacketIn {
private static final Logger logger = LoggerFactory.getLogger(OFPacketInVer12.class);
// version: 1.2
final static byte WIRE_VERSION = 3;
final static int MINIMUM_LENGTH = 26;
private final static long DEFAULT_XID = 0x0L;
private final static OFBufferId DEFAULT_BUFFER_ID = OFBufferId.NO_BUFFER;
private final static int DEFAULT_TOTAL_LEN = 0x0;
private final static TableId DEFAULT_TABLE_ID = TableId.ALL;
private final static Match DEFAULT_MATCH = OFFactoryVer12.MATCH_WILDCARD_ALL;
private final static byte[] DEFAULT_DATA = new byte[0];
// OF message fields
private final long xid;
private final OFBufferId bufferId;
private final int totalLen;
private final OFPacketInReason reason;
private final TableId tableId;
private final Match match;
private final byte[] data;
//
// package private constructor - used by readers, builders, and factory
OFPacketInVer12(long xid, OFBufferId bufferId, int totalLen, OFPacketInReason reason, TableId tableId, Match match, byte[] data) {
if(bufferId == null) {
throw new NullPointerException("OFPacketInVer12: property bufferId cannot be null");
}
if(reason == null) {
throw new NullPointerException("OFPacketInVer12: property reason cannot be null");
}
if(tableId == null) {
throw new NullPointerException("OFPacketInVer12: property tableId cannot be null");
}
if(match == null) {
throw new NullPointerException("OFPacketInVer12: property match cannot be null");
}
if(data == null) {
throw new NullPointerException("OFPacketInVer12: property data cannot be null");
}
this.xid = xid;
this.bufferId = bufferId;
this.totalLen = totalLen;
this.reason = reason;
this.tableId = tableId;
this.match = match;
this.data = data;
}
// Accessors for OF message fields
@Override
public OFVersion getVersion() {
return OFVersion.OF_12;
}
@Override
public OFType getType() {
return OFType.PACKET_IN;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFBufferId getBufferId() {
return bufferId;
}
@Override
public int getTotalLen() {
return totalLen;
}
@Override
public OFPacketInReason getReason() {
return reason;
}
@Override
public TableId getTableId() {
return tableId;
}
@Override
public U64 getCookie()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property cookie not supported in version 1.2");
}
@Override
public Match getMatch() {
return match;
}
@Override
public byte[] getData() {
return data;
}
@Override
public OFPort getInPort()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPort not supported in version 1.2");
}
@Override
public OFPort getInPhyPort()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPhyPort not supported in version 1.2");
}
public OFPacketIn.Builder createBuilder() {
return new BuilderWithParent(this);
}
// Builder seeded from an existing message ("parentMessage"): in build(),
// every property that was not explicitly set falls back to the parent's
// value. Each property therefore carries a value field plus a *Set flag.
static class BuilderWithParent implements OFPacketIn.Builder {
final OFPacketInVer12 parentMessage;
// OF message fields
private boolean xidSet;
private long xid;
private boolean bufferIdSet;
private OFBufferId bufferId;
private boolean totalLenSet;
private int totalLen;
private boolean reasonSet;
private OFPacketInReason reason;
private boolean tableIdSet;
private TableId tableId;
private boolean matchSet;
private Match match;
private boolean dataSet;
private byte[] data;
BuilderWithParent(OFPacketInVer12 parentMessage) {
this.parentMessage = parentMessage;
}
@Override
public OFVersion getVersion() {
return OFVersion.OF_12;
}
@Override
public OFType getType() {
return OFType.PACKET_IN;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFPacketIn.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public OFBufferId getBufferId() {
return bufferId;
}
@Override
public OFPacketIn.Builder setBufferId(OFBufferId bufferId) {
this.bufferId = bufferId;
this.bufferIdSet = true;
return this;
}
@Override
public int getTotalLen() {
return totalLen;
}
@Override
public OFPacketIn.Builder setTotalLen(int totalLen) {
this.totalLen = totalLen;
this.totalLenSet = true;
return this;
}
@Override
public OFPacketInReason getReason() {
return reason;
}
@Override
public OFPacketIn.Builder setReason(OFPacketInReason reason) {
this.reason = reason;
this.reasonSet = true;
return this;
}
@Override
public TableId getTableId() {
return tableId;
}
@Override
public OFPacketIn.Builder setTableId(TableId tableId) {
this.tableId = tableId;
this.tableIdSet = true;
return this;
}
// cookie/inPort/inPhyPort are not part of the 1.2 PACKET_IN; both the
// getters and setters below always throw.
@Override
public U64 getCookie()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property cookie not supported in version 1.2");
}
@Override
public OFPacketIn.Builder setCookie(U64 cookie) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property cookie not supported in version 1.2");
}
@Override
public Match getMatch() {
return match;
}
@Override
public OFPacketIn.Builder setMatch(Match match) {
this.match = match;
this.matchSet = true;
return this;
}
@Override
public byte[] getData() {
return data;
}
@Override
public OFPacketIn.Builder setData(byte[] data) {
this.data = data;
this.dataSet = true;
return this;
}
@Override
public OFPort getInPort()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPort not supported in version 1.2");
}
@Override
public OFPacketIn.Builder setInPort(OFPort inPort) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPort not supported in version 1.2");
}
@Override
public OFPort getInPhyPort()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPhyPort not supported in version 1.2");
}
@Override
public OFPacketIn.Builder setInPhyPort(OFPort inPhyPort) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPhyPort not supported in version 1.2");
}
// Resolves each property (explicit value or parent fallback), enforces
// the non-null invariants, and constructs the immutable message.
@Override
public OFPacketIn build() {
long xid = this.xidSet ? this.xid : parentMessage.xid;
OFBufferId bufferId = this.bufferIdSet ? this.bufferId : parentMessage.bufferId;
if(bufferId == null)
throw new NullPointerException("Property bufferId must not be null");
int totalLen = this.totalLenSet ? this.totalLen : parentMessage.totalLen;
OFPacketInReason reason = this.reasonSet ? this.reason : parentMessage.reason;
if(reason == null)
throw new NullPointerException("Property reason must not be null");
TableId tableId = this.tableIdSet ? this.tableId : parentMessage.tableId;
if(tableId == null)
throw new NullPointerException("Property tableId must not be null");
Match match = this.matchSet ? this.match : parentMessage.match;
if(match == null)
throw new NullPointerException("Property match must not be null");
byte[] data = this.dataSet ? this.data : parentMessage.data;
if(data == null)
throw new NullPointerException("Property data must not be null");
//
return new OFPacketInVer12(
xid,
bufferId,
totalLen,
reason,
tableId,
match,
data
);
}
}
// Builder without a parent message: in build(), unset properties fall back
// to the class-level DEFAULT_* constants; reason has no default and must be
// set explicitly (enforced with IllegalStateException).
static class Builder implements OFPacketIn.Builder {
// OF message fields
private boolean xidSet;
private long xid;
private boolean bufferIdSet;
private OFBufferId bufferId;
private boolean totalLenSet;
private int totalLen;
private boolean reasonSet;
private OFPacketInReason reason;
private boolean tableIdSet;
private TableId tableId;
private boolean matchSet;
private Match match;
private boolean dataSet;
private byte[] data;
@Override
public OFVersion getVersion() {
return OFVersion.OF_12;
}
@Override
public OFType getType() {
return OFType.PACKET_IN;
}
@Override
public long getXid() {
return xid;
}
@Override
public OFPacketIn.Builder setXid(long xid) {
this.xid = xid;
this.xidSet = true;
return this;
}
@Override
public OFBufferId getBufferId() {
return bufferId;
}
@Override
public OFPacketIn.Builder setBufferId(OFBufferId bufferId) {
this.bufferId = bufferId;
this.bufferIdSet = true;
return this;
}
@Override
public int getTotalLen() {
return totalLen;
}
@Override
public OFPacketIn.Builder setTotalLen(int totalLen) {
this.totalLen = totalLen;
this.totalLenSet = true;
return this;
}
@Override
public OFPacketInReason getReason() {
return reason;
}
@Override
public OFPacketIn.Builder setReason(OFPacketInReason reason) {
this.reason = reason;
this.reasonSet = true;
return this;
}
@Override
public TableId getTableId() {
return tableId;
}
@Override
public OFPacketIn.Builder setTableId(TableId tableId) {
this.tableId = tableId;
this.tableIdSet = true;
return this;
}
// cookie/inPort/inPhyPort are not part of the 1.2 PACKET_IN; accessors
// and setters below always throw.
@Override
public U64 getCookie()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property cookie not supported in version 1.2");
}
@Override
public OFPacketIn.Builder setCookie(U64 cookie) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property cookie not supported in version 1.2");
}
@Override
public Match getMatch() {
return match;
}
@Override
public OFPacketIn.Builder setMatch(Match match) {
this.match = match;
this.matchSet = true;
return this;
}
@Override
public byte[] getData() {
return data;
}
@Override
public OFPacketIn.Builder setData(byte[] data) {
this.data = data;
this.dataSet = true;
return this;
}
@Override
public OFPort getInPort()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPort not supported in version 1.2");
}
@Override
public OFPacketIn.Builder setInPort(OFPort inPort) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPort not supported in version 1.2");
}
@Override
public OFPort getInPhyPort()throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPhyPort not supported in version 1.2");
}
@Override
public OFPacketIn.Builder setInPhyPort(OFPort inPhyPort) throws UnsupportedOperationException {
throw new UnsupportedOperationException("Property inPhyPort not supported in version 1.2");
}
//
// Resolves each property (explicit value or DEFAULT_* fallback), enforces
// invariants, and constructs the immutable message.
@Override
public OFPacketIn build() {
long xid = this.xidSet ? this.xid : DEFAULT_XID;
OFBufferId bufferId = this.bufferIdSet ? this.bufferId : DEFAULT_BUFFER_ID;
if(bufferId == null)
throw new NullPointerException("Property bufferId must not be null");
int totalLen = this.totalLenSet ? this.totalLen : DEFAULT_TOTAL_LEN;
if(!this.reasonSet)
throw new IllegalStateException("Property reason doesn't have default value -- must be set");
// NOTE(review): unlike the other properties, reason is read directly from
// the field below (no local copy); this works because reasonSet guarantees
// a setter call -- confirm against the generator template.
if(reason == null)
throw new NullPointerException("Property reason must not be null");
TableId tableId = this.tableIdSet ? this.tableId : DEFAULT_TABLE_ID;
if(tableId == null)
throw new NullPointerException("Property tableId must not be null");
Match match = this.matchSet ? this.match : DEFAULT_MATCH;
if(match == null)
throw new NullPointerException("Property match must not be null");
byte[] data = this.dataSet ? this.data : DEFAULT_DATA;
if(data == null)
throw new NullPointerException("Property data must not be null");
return new OFPacketInVer12(
xid,
bufferId,
totalLen,
reason,
tableId,
match,
data
);
}
}
// Stateless singleton deserializer instance.
final static Reader READER = new Reader();
// Decodes an OFPacketInVer12 from its wire format. Returns null (after
// resetting the reader index to the message start) when the buffer does not
// yet contain the whole message, so the caller can retry with more bytes.
static class Reader implements OFMessageReader<OFPacketIn> {
@Override
public OFPacketIn readFrom(ByteBuf bb) throws OFParseError {
int start = bb.readerIndex();
// fixed value property version == 3
byte version = bb.readByte();
if(version != (byte) 0x3)
throw new OFParseError("Wrong version: Expected=OFVersion.OF_12(3), got="+version);
// fixed value property type == 10
byte type = bb.readByte();
if(type != (byte) 0xa)
throw new OFParseError("Wrong type: Expected=OFType.PACKET_IN(10), got="+type);
int length = U16.f(bb.readShort());
if(length < MINIMUM_LENGTH)
throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length);
if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
// Buffer does not have all data yet
bb.readerIndex(start);
return null;
}
if(logger.isTraceEnabled())
logger.trace("readFrom - length={}", length);
long xid = U32.f(bb.readInt());
OFBufferId bufferId = OFBufferId.of(bb.readInt());
int totalLen = U16.f(bb.readShort());
OFPacketInReason reason = OFPacketInReasonSerializerVer12.readFrom(bb);
TableId tableId = TableId.readByte(bb);
Match match = ChannelUtilsVer12.readOFMatch(bb);
// pad: 2 bytes
bb.skipBytes(2);
// remaining bytes of the message (up to the declared length) are payload
byte[] data = ChannelUtils.readBytes(bb, length - (bb.readerIndex() - start));
OFPacketInVer12 packetInVer12 = new OFPacketInVer12(
xid,
bufferId,
totalLen,
reason,
tableId,
match,
data
);
if(logger.isTraceEnabled())
logger.trace("readFrom - read={}", packetInVer12);
return packetInVer12;
}
}
// Feeds this message's content into the given PrimitiveSink (hashing support).
public void putTo(PrimitiveSink sink) {
FUNNEL.funnel(this, sink);
}
final static OFPacketInVer12Funnel FUNNEL = new OFPacketInVer12Funnel();
// Funnel that mirrors the wire serialization order, except that the length
// field and the 2-byte pad are not fed into the sink.
static class OFPacketInVer12Funnel implements Funnel<OFPacketInVer12> {
private static final long serialVersionUID = 1L;
@Override
public void funnel(OFPacketInVer12 message, PrimitiveSink sink) {
// fixed value property version = 3
sink.putByte((byte) 0x3);
// fixed value property type = 10
sink.putByte((byte) 0xa);
// FIXME: skip funnel of length
sink.putLong(message.xid);
message.bufferId.putTo(sink);
sink.putInt(message.totalLen);
OFPacketInReasonSerializerVer12.putTo(message.reason, sink);
message.tableId.putTo(sink);
message.match.putTo(sink);
// skip pad (2 bytes)
sink.putBytes(message.data);
}
}
// Serializes this message to the buffer via the stateless WRITER singleton.
public void writeTo(ByteBuf bb) {
WRITER.write(bb, this);
}
final static Writer WRITER = new Writer();
// Encodes an OFPacketInVer12 to the wire format. The 16-bit length field is
// written as 0 first and back-patched once the variable-size tail is done.
static class Writer implements OFMessageWriter<OFPacketInVer12> {
@Override
public void write(ByteBuf bb, OFPacketInVer12 message) {
int startIndex = bb.writerIndex();
// fixed value property version = 3
bb.writeByte((byte) 0x3);
// fixed value property type = 10
bb.writeByte((byte) 0xa);
// length is length of variable message, will be updated at the end
int lengthIndex = bb.writerIndex();
bb.writeShort(U16.t(0));
bb.writeInt(U32.t(message.xid));
bb.writeInt(message.bufferId.getInt());
bb.writeShort(U16.t(message.totalLen));
OFPacketInReasonSerializerVer12.writeTo(bb, message.reason);
message.tableId.writeByte(bb);
message.match.writeTo(bb);
// pad: 2 bytes
bb.writeZero(2);
bb.writeBytes(message.data);
// update length field
int length = bb.writerIndex() - startIndex;
bb.setShort(lengthIndex, length);
}
}
@Override
public String toString() {
    // Renders every message field; data is expanded element-wise.
    StringBuilder sb = new StringBuilder("OFPacketInVer12(");
    sb.append("xid=").append(xid)
      .append(", ").append("bufferId=").append(bufferId)
      .append(", ").append("totalLen=").append(totalLen)
      .append(", ").append("reason=").append(reason)
      .append(", ").append("tableId=").append(tableId)
      .append(", ").append("match=").append(match)
      .append(", ").append("data=").append(Arrays.toString(data))
      .append(")");
    return sb.toString();
}
@Override
public boolean equals(Object obj) {
    // Standard value equality over all message fields.
    if (this == obj) {
        return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
        return false;
    }
    OFPacketInVer12 that = (OFPacketInVer12) obj;
    if (xid != that.xid || totalLen != that.totalLen) {
        return false;
    }
    if (bufferId == null ? that.bufferId != null : !bufferId.equals(that.bufferId)) {
        return false;
    }
    if (reason == null ? that.reason != null : !reason.equals(that.reason)) {
        return false;
    }
    if (tableId == null ? that.tableId != null : !tableId.equals(that.tableId)) {
        return false;
    }
    if (match == null ? that.match != null : !match.equals(that.match)) {
        return false;
    }
    return Arrays.equals(data, that.data);
}
@Override
public int hashCode() {
    // Combines all fields with the conventional 31-multiplier scheme,
    // consistent with equals().
    // Fix: the xid term previously read "result = prime * (int)(...)",
    // discarding the running result (leaving "int result = 1" dead) and
    // breaking the pattern used by every other field; it now folds into
    // the accumulator like the rest.
    final int prime = 31;
    int result = 1;
    result = prime * result + (int) (xid ^ (xid >>> 32));
    result = prime * result + ((bufferId == null) ? 0 : bufferId.hashCode());
    result = prime * result + totalLen;
    result = prime * result + ((reason == null) ? 0 : reason.hashCode());
    result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
    result = prime * result + ((match == null) ? 0 : match.hashCode());
    result = prime * result + Arrays.hashCode(data);
    return result;
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.response;
import java.util.Date;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.BaseResponse;
import org.apache.cloudstack.api.EntityReference;
import com.cloud.offering.DiskOffering;
import com.cloud.serializer.Param;
import com.google.gson.annotations.SerializedName;
@EntityReference(value = DiskOffering.class)
public class DiskOfferingResponse extends BaseResponse {
@SerializedName(ApiConstants.ID)
@Param(description = "unique ID of the disk offering")
private String id;
@SerializedName(ApiConstants.DOMAIN_ID)
@Param(description = "the domain ID(s) this disk offering belongs to. Ignore this information as it is not currently applicable.")
private String domainId;
@SerializedName(ApiConstants.DOMAIN)
@Param(description = "the domain name(s) this disk offering belongs to. Ignore this information as it is not currently applicable.")
private String domain;
@SerializedName(ApiConstants.ZONE_ID)
@Param(description = "the zone ID(s) this disk offering belongs to. Ignore this information as it is not currently applicable.", since = "4.13.0")
private String zoneId;
@SerializedName(ApiConstants.ZONE)
@Param(description = "the zone name(s) this disk offering belongs to. Ignore this information as it is not currently applicable.", since = "4.13.0")
private String zone;
@SerializedName(ApiConstants.NAME)
@Param(description = "the name of the disk offering")
private String name;
@SerializedName(ApiConstants.DISPLAY_TEXT)
@Param(description = "an alternate display text of the disk offering.")
private String displayText;
@SerializedName(ApiConstants.DISK_SIZE)
@Param(description = "the size of the disk offering in GB")
private Long diskSize;
@SerializedName(ApiConstants.CREATED)
@Param(description = "the date this disk offering was created")
private Date created;
@SerializedName("iscustomized")
@Param(description = "true if disk offering uses custom size, false otherwise")
private Boolean customized;
@SerializedName("iscustomizediops")
@Param(description = "true if disk offering uses custom iops, false otherwise")
private Boolean customizedIops;
@SerializedName(ApiConstants.MIN_IOPS)
@Param(description = "the min iops of the disk offering")
private Long minIops;
@SerializedName(ApiConstants.MAX_IOPS)
@Param(description = "the max iops of the disk offering")
private Long maxIops;
@SerializedName(ApiConstants.HYPERVISOR_SNAPSHOT_RESERVE)
@Param(description = "Hypervisor snapshot reserve space as a percent of a volume (for managed storage using Xen or VMware)", since = "4.4")
private Integer hypervisorSnapshotReserve;
@SerializedName(ApiConstants.TAGS)
@Param(description = "the tags for the disk offering")
private String tags;
@SerializedName("storagetype")
@Param(description = "the storage type for this disk offering")
private String storageType;
@SerializedName("provisioningtype") @Param(description="provisioning type used to create volumes. Valid values are thin, sparse, fat.", since = "4.4.0")
private String provisioningType;
@SerializedName("diskBytesReadRate")
@Param(description = "bytes read rate of the disk offering")
private Long bytesReadRate;
@SerializedName("diskBytesReadRateMax")
@Param(description = "burst bytes read rate of the disk offering")
private Long bytesReadRateMax;
@SerializedName("diskBytesReadRateMaxLength")
@Param(description = "length (in seconds) of the burst")
private Long bytesReadRateMaxLength;
@SerializedName("diskBytesWriteRate")
@Param(description = "bytes write rate of the disk offering")
private Long bytesWriteRate;
@SerializedName("diskBytesWriteRateMax")
@Param(description = "burst bytes write rate of the disk offering")
private Long bytesWriteRateMax;
@SerializedName("diskBytesWriteRateMaxLength")
@Param(description = "length (in seconds) of the burst")
private Long bytesWriteRateMaxLength;
@SerializedName("diskIopsReadRate")
@Param(description = "io requests read rate of the disk offering")
private Long iopsReadRate;
@SerializedName("diskIopsReadRateMax")
@Param(description = "burst io requests read rate of the disk offering")
private Long iopsReadRateMax;
@SerializedName("diskIopsReadRateMaxLength")
@Param(description = "length (in second) of the burst")
private Long iopsReadRateMaxLength;
@SerializedName("diskIopsWriteRate")
@Param(description = "io requests write rate of the disk offering")
private Long iopsWriteRate;
@SerializedName("diskIopsWriteRateMax")
@Param(description = "burst io requests write rate of the disk offering")
private Long iopsWriteRateMax;
@SerializedName("diskIopsWriteRateMaxLength")
@Param(description = "length (in seconds) of the burst")
private Long iopsWriteRateMaxLength;
@SerializedName("cacheMode")
@Param(description = "the cache mode to use for this disk offering. none, writeback or writethrough", since = "4.4")
private String cacheMode;
@SerializedName("displayoffering")
@Param(description = "whether to display the offering to the end user or not.")
private Boolean displayOffering;
@SerializedName("vspherestoragepolicy")
@Param(description = "the vsphere storage policy tagged to the disk offering in case of VMware", since = "4.15")
private String vsphereStoragePolicy;
public Boolean getDisplayOffering() {
return displayOffering;
}
public void setDisplayOffering(Boolean displayOffering) {
this.displayOffering = displayOffering;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getDomainId() {
return domainId;
}
public void setDomainId(String domainId) {
this.domainId = domainId;
}
public String getDomain() {
return domain;
}
public void setDomain(String domain) {
this.domain = domain;
}
public String getZoneId() {
return zoneId;
}
public void setZoneId(String zoneId) {
this.zoneId = zoneId;
}
public String getZone() {
return zone;
}
public void setZone(String zone) {
this.zone = zone;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDisplayText() {
return displayText;
}
public void setDisplayText(String displayText) {
this.displayText = displayText;
}
public Long getDiskSize() {
return diskSize;
}
public void setDiskSize(Long diskSize) {
this.diskSize = diskSize;
}
public Date getCreated() {
return created;
}
public void setCreated(Date created) {
this.created = created;
}
public String getTags() {
return tags;
}
public void setTags(String tags) {
this.tags = tags;
}
public Boolean isCustomized() {
return customized;
}
public void setCustomized(Boolean customized) {
this.customized = customized;
}
public Boolean isCustomizedIops() {
return customizedIops;
}
public void setCustomizedIops(Boolean customizedIops) {
this.customizedIops = customizedIops;
}
public Long getMinIops() {
return minIops;
}
public void setMinIops(Long minIops) {
this.minIops = minIops;
}
public Long getMaxIops() {
return maxIops;
}
public void setMaxIops(Long maxIops) {
this.maxIops = maxIops;
}
public Integer getHypervisorSnapshotReserve() {
return hypervisorSnapshotReserve;
}
public void setHypervisorSnapshotReserve(Integer hypervisorSnapshotReserve) {
this.hypervisorSnapshotReserve = hypervisorSnapshotReserve;
}
public String getCacheMode() {
return cacheMode;
}
public void setCacheMode(String cacheMode) {
this.cacheMode = cacheMode;
}
public String getStorageType() {
return storageType;
}
public void setStorageType(String storageType) {
this.storageType = storageType;
}
public String getProvisioningType(){
return provisioningType;
}
public void setProvisioningType(String provisioningType){
this.provisioningType = provisioningType;
}
public void setBytesReadRate(Long bytesReadRate) {
this.bytesReadRate = bytesReadRate;
}
public void setBytesReadRateMax(Long bytesReadRateMax) {
this.bytesReadRateMax = bytesReadRateMax;
}
public void setBytesReadRateMaxLength(Long bytesReadRateMaxLength) {
this.bytesReadRateMaxLength = bytesReadRateMaxLength;
}
public void setBytesWriteRate(Long bytesWriteRate) {
this.bytesWriteRate = bytesWriteRate;
}
public void setBytesWriteRateMax(Long bytesWriteRateMax) {
this.bytesWriteRateMax = bytesWriteRateMax;
}
public void setBytesWriteRateMaxLength(Long bytesWriteRateMaxLength) {
this.bytesWriteRateMaxLength = bytesWriteRateMaxLength;
}
public void setIopsReadRate(Long iopsReadRate) {
this.iopsReadRate = iopsReadRate;
}
public void setIopsReadRateMax(Long iopsReadRateMax) {
this.iopsReadRateMax = iopsReadRateMax;
}
public void setIopsReadRateMaxLength(Long iopsReadRateMaxLength) {
this.iopsReadRateMaxLength = iopsReadRateMaxLength;
}
public void setIopsWriteRate(Long iopsWriteRate) {
this.iopsWriteRate = iopsWriteRate;
}
public void setIopsWriteRateMax(Long iopsWriteRateMax) {
this.iopsWriteRateMax = iopsWriteRateMax;
}
public void setIopsWriteRateMaxLength(Long iopsWriteRateMaxLength) {
this.iopsWriteRateMaxLength = iopsWriteRateMaxLength;
}
public String getVsphereStoragePolicy() {
return vsphereStoragePolicy;
}
public void setVsphereStoragePolicy(String vsphereStoragePolicy) {
this.vsphereStoragePolicy = vsphereStoragePolicy;
}
}
| |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.web.component.wf.processes.itemApproval;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;
import com.evolveum.midpoint.web.component.wf.DecisionsPanel;
import com.evolveum.midpoint.web.component.wf.WorkItemsPanel;
import com.evolveum.midpoint.web.page.admin.workflow.dto.DecisionDto;
import com.evolveum.midpoint.web.page.admin.workflow.dto.ProcessInstanceDto;
import com.evolveum.midpoint.web.page.admin.workflow.dto.WorkItemDto;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ApprovalLevelType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ApprovalSchemaType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.DecisionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ExpressionType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectReferenceType;
import com.evolveum.midpoint.xml.ns.model.workflow.process_instance_state_3.ItemApprovalProcessState;
import com.evolveum.midpoint.xml.ns.model.workflow.process_instance_state_3.ItemApprovalRequestType;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.Validate;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.AbstractReadOnlyModel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.model.StringResourceModel;
import java.util.ArrayList;
import java.util.List;
/**
 * Panel showing the state of an item-approval workflow process instance:
 * the item being approved (with an outcome-dependent, color-coded caption
 * once the process is answered), the approval schema rendered as HTML,
 * the decisions recorded so far, and -- while the process is running --
 * the currently open work items.
 *
 * Fix: in the approval-schema renderer, each level's {@code <li>} and its
 * approvers' {@code <ul>} were closed only once after the level loop,
 * producing malformed HTML for schemas with more than one level; the
 * closing tags are now emitted inside the loop.
 *
 * @author mederly
 */
public class ItemApprovalPanel extends Panel {

    private static final Trace LOGGER = TraceManager.getTrace(ItemApprovalPanel.class);

    private static final String ID_ITEM_TO_BE_APPROVED_LABEL = "itemToBeApprovedLabel";
    private static final String ID_ITEM_TO_BE_APPROVED = "itemToBeApproved";
    //private static final String ID_RESULT = "result";
    private static final String ID_APPROVAL_SCHEMA = "approvalSchema";
    private static final String ID_DECISIONS_DONE_LABEL = "decisionsDoneLabel";
    private static final String ID_DECISIONS_DONE = "decisionsDone";
    private static final String ID_CURRENT_WORK_ITEMS = "currentWorkItems";
    private static final String ID_CURRENT_WORK_ITEMS_LABEL = "currentWorkItemsLabel";

    private IModel<ProcessInstanceDto> model;

    public ItemApprovalPanel(String id, IModel<ProcessInstanceDto> model) {
        super(id);
        Validate.notNull(model);
        this.model = model;
        initLayout();
    }

    private void initLayout() {
        // Caption above the item: the resource key depends on whether the
        // process has been answered and on its boolean outcome.
        Label itemToBeApprovedLabel = new Label(ID_ITEM_TO_BE_APPROVED_LABEL, new StringResourceModel("${}", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                if (!model.getObject().isAnswered()) {
                    return "ItemApprovalPanel.itemToBeApproved";
                } else {
                    Boolean result = model.getObject().getAnswerAsBoolean();
                    if (result == null) {
                        return "ItemApprovalPanel.itemThatWasCompleted"; // actually, this should not happen, if the process is ItemApproval
                    } else if (result) {
                        return "ItemApprovalPanel.itemThatWasApproved";
                    } else {
                        return "ItemApprovalPanel.itemThatWasRejected";
                    }
                }
            }
        }));
        // Outcome color: green = approved, red = rejected.
        itemToBeApprovedLabel.add(new AttributeModifier("color", new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                if (!model.getObject().isAnswered()) {
                    return "black"; // should not be visible, anyway
                } else {
                    Boolean result = model.getObject().getAnswerAsBoolean();
                    if (result == null) {
                        return "black"; // actually, this should not happen, if the process is ItemApproval
                    } else if (result) {
                        return "green";
                    } else {
                        return "red";
                    }
                }
            }
        }));
        add(itemToBeApprovedLabel);
        // Human-readable identification of the item being approved.
        Label itemToBeApproved = new Label(ID_ITEM_TO_BE_APPROVED, new AbstractReadOnlyModel<String>() {
            @Override
            public String getObject() {
                ItemApprovalProcessState instanceState = (ItemApprovalProcessState) model.getObject().getInstanceState().getProcessSpecificState();
                ItemApprovalRequestType approvalRequestType = instanceState.getApprovalRequest();
                if (approvalRequestType == null) {
                    return "?";
                } else {
                    Object item = approvalRequestType.getItemToApprove();
                    if (item instanceof AssignmentType) {
                        AssignmentType assignmentType = (AssignmentType) item;
                        if (assignmentType.getTarget() != null) {
                            return assignmentType.getTarget().toString();
                        } else if (assignmentType.getTargetRef() != null) {
                            return assignmentType.getTargetRef().getOid() + " (" + assignmentType.getTargetRef().getType() + ")";
                        } else {
                            return "?";
                        }
                    } else {
                        return item != null ? item.toString() : "(none)";
                    }
                }
            }
        });
        add(itemToBeApproved);
        // todo i18n
        // Renders the approval schema as HTML. Escaping of model strings is
        // disabled below, so every user-supplied value must be escaped here.
        Label approvalSchema = new Label(ID_APPROVAL_SCHEMA, new AbstractReadOnlyModel() {
            @Override
            public Object getObject() {
                StringBuilder retval = new StringBuilder();
                ItemApprovalProcessState instanceState = (ItemApprovalProcessState) model.getObject().getInstanceState().getProcessSpecificState();
                ItemApprovalRequestType approvalRequestType = instanceState.getApprovalRequest();
                if (approvalRequestType == null) {
                    return "?";
                } else {
                    ApprovalSchemaType approvalSchema = approvalRequestType.getApprovalSchema();
                    if (approvalSchema != null) {
                        if (approvalSchema.getName() != null) {
                            retval.append("<b>");
                            retval.append(StringEscapeUtils.escapeHtml(approvalSchema.getName()));
                            retval.append("</b>");
                        }
                        if (approvalSchema.getDescription() != null) {
                            retval.append(" (");
                            retval.append(StringEscapeUtils.escapeHtml(approvalSchema.getDescription()));
                            retval.append(")");
                        }
                        if (approvalSchema.getName() != null || approvalSchema.getDescription() != null) {
                            retval.append("<br/>");
                        }
                        retval.append("Levels:<p/><ol>");
                        for (ApprovalLevelType level : approvalSchema.getLevel()) {
                            retval.append("<li>");
                            if (level.getName() != null) {
                                retval.append(StringEscapeUtils.escapeHtml(level.getName()));
                            } else {
                                retval.append("unnamed level");
                            }
                            if (level.getDescription() != null) {
                                retval.append(" (");
                                retval.append(StringEscapeUtils.escapeHtml(level.getDescription()));
                                retval.append(")");
                            }
                            if (level.getEvaluationStrategy() != null) {
                                retval.append(" [" + level.getEvaluationStrategy() + "]");
                            }
                            if (level.getAutomaticallyApproved() != null) {
                                String desc = level.getAutomaticallyApproved().getDescription();
                                if (desc != null) {
                                    retval.append(" (auto-approval condition: " + StringEscapeUtils.escapeHtml(desc) + ")");
                                } else {
                                    retval.append(" (auto-approval condition present)");
                                }
                            }
                            retval.append("<br/>Approvers:<ul>");
                            for (ObjectReferenceType approverRef : level.getApproverRef()) {
                                retval.append("<li>");
                                retval.append(approverRef.getOid());
                                if (approverRef.getType() != null) {
                                    retval.append(" (" + approverRef.getType().getLocalPart() + ")");
                                }
                                if (approverRef.getDescription() != null) {
                                    retval.append(" - " + approverRef.getDescription());
                                }
                                retval.append("</li>");
                            }
                            for (ExpressionType expression : level.getApproverExpression()) {
                                retval.append("<li>Expression: ");
                                // todo display the expression
                                if (expression.getDescription() != null) {
                                    retval.append(StringEscapeUtils.escapeHtml(expression.getDescription()));
                                } else {
                                    retval.append("(...)");
                                }
                                retval.append("</li>");
                            }
                            retval.append("</ul>");      // ends this level's list of approvers
                            retval.append("</li>");      // ends this level's list item
                        }
                        retval.append("</ol>"); // ends the list of levels
                    }
                }
                return retval.toString();
            }
        });
        approvalSchema.setEscapeModelStrings(false);
        add(approvalSchema);
        add(new Label(ID_DECISIONS_DONE_LABEL, new StringResourceModel("ItemApprovalPanel.decisionsDoneWhenFinishedIs_${finished}", model)));
        // Decisions recorded so far, converted from process state to DTOs.
        add(new DecisionsPanel(ID_DECISIONS_DONE, new AbstractReadOnlyModel<List<DecisionDto>>() {
            @Override
            public List<DecisionDto> getObject() {
                List<DecisionDto> retval = new ArrayList<>();
                ProcessInstanceDto processInstanceDto = model.getObject();
                processInstanceDto.reviveIfNeeded(ItemApprovalPanel.this);
                ItemApprovalProcessState instanceState = (ItemApprovalProcessState) processInstanceDto.getInstanceState().getProcessSpecificState();
                List<DecisionType> allDecisions = instanceState.getDecisions();
                if (allDecisions != null) {
                    for (DecisionType decision : allDecisions) {
                        retval.add(new DecisionDto(decision));
                    }
                }
                return retval;
            }
        }));
        // Open work items are only shown while the process is still running.
        VisibleEnableBehaviour visibleIfRunning = new VisibleEnableBehaviour() {
            @Override
            public boolean isVisible() {
                return !model.getObject().isFinished();
            }
        };
        Label workItemsPanelLabel = new Label(ID_CURRENT_WORK_ITEMS_LABEL, new ResourceModel("ItemApprovalPanel.currentWorkItems"));
        workItemsPanelLabel.add(visibleIfRunning);
        add(workItemsPanelLabel);
        WorkItemsPanel workItemsPanel = new WorkItemsPanel(ID_CURRENT_WORK_ITEMS, new PropertyModel<List<WorkItemDto>>(model, "workItems"));
        workItemsPanel.add(visibleIfRunning);
        add(workItemsPanel);
    }
}
| |
package com.jorgeyp.brewtutor;
import android.app.Activity;
import android.app.ActionBar;
import android.app.Fragment;
import android.app.FragmentManager;
import android.content.Intent;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;
/**
* Fragment used for managing interactions for and presentation of a navigation drawer.
* See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
* design guidelines</a> for a complete explanation of the behaviors implemented here.
*/
public class NavigationDrawerFragment extends Fragment {

    /**
     * Bundle key under which the currently selected drawer position is saved
     * across configuration changes / process recreation.
     */
    private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";

    /**
     * Per the design guidelines, you should show the drawer on launch until the user manually
     * expands it. This shared preference tracks this.
     */
    private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";

    /**
     * A pointer to the current callbacks instance (the Activity). Null while detached.
     */
    private NavigationDrawerCallbacks mCallbacks;

    /**
     * Helper component that ties the action bar to the navigation drawer.
     */
    private ActionBarDrawerToggle mDrawerToggle;

    private DrawerLayout mDrawerLayout;
    private ListView mDrawerListView;
    private View mFragmentContainerView;

    private int mCurrentSelectedPosition = 0;
    private boolean mFromSavedInstanceState;
    private boolean mUserLearnedDrawer;

    public NavigationDrawerFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Read in the flag indicating whether or not the user has demonstrated awareness of the
        // drawer. See PREF_USER_LEARNED_DRAWER for details.
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);

        if (savedInstanceState != null) {
            mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
            mFromSavedInstanceState = true;
        }

        // Select either the default item (0) or the last selected item. The drawer views are
        // not created yet at this point, so selectItem() must tolerate null view references.
        selectItem(mCurrentSelectedPosition);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        mDrawerListView = (ListView) inflater.inflate(
                R.layout.fragment_navigation_drawer, container, false);
        mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                selectItem(position);
            }
        });
        mDrawerListView.setAdapter(new ArrayAdapter<String>(
                getActivity(),
                // getActionBar().getThemedContext(), TODO investigate themed context here
                android.R.layout.simple_list_item_activated_1,
                android.R.id.text1,
                new String[]{
                        getString(R.string.title_section_learn),
                        getString(R.string.title_section_brew),
                        getString(R.string.title_section_log),
                        getString(R.string.title_section_settings),
                        getString(R.string.title_section_help)
                }));
        mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
        mDrawerListView.setFitsSystemWindows(true);
        return mDrawerListView;
    }

    /**
     * @return true when the drawer layout exists and the drawer pane is currently open.
     */
    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;

        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // set up the drawer's list view with items and click listener

        ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);

        // ActionBarDrawerToggle ties together the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(),                    /* host Activity */
                mDrawerLayout,                    /* DrawerLayout object */
                R.drawable.ic_drawer,             /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open,  /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close  /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                if (!isAdded()) {
                    return;
                }
                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                if (!isAdded()) {
                    return;
                }
                if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent auto-showing
                    // the navigation drawer automatically in the future.
                    mUserLearnedDrawer = true;
                    SharedPreferences sp = PreferenceManager
                            .getDefaultSharedPreferences(getActivity());
                    sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
                }
                getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }
        };

        // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
        // per the navigation drawer design guidelines.
        if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
            mDrawerLayout.openDrawer(mFragmentContainerView);
        }

        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });

        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    /**
     * Handles a drawer item click (or the initial programmatic selection): records the new
     * position, updates the checked state, swaps in the matching content fragment, closes the
     * drawer and notifies the host activity.
     *
     * <p>This may run from {@link #onCreate} before the views exist and before {@link #setUp}
     * has assigned {@code mDrawerLayout}, so every view reference is null-checked. (The previous
     * implementation dereferenced {@code mDrawerLayout} unconditionally and never updated
     * {@code mCurrentSelectedPosition} or fired the callback.)
     */
    private void selectItem(int position) {
        mCurrentSelectedPosition = position;
        if (mDrawerListView != null) {
            mDrawerListView.setItemChecked(position, true);
        }
        if (position == 1) {
            // "Brew" section: replace whatever is in the content container with the styles list.
            Fragment fragment = new BrewStylesFragment();
            FragmentManager fragmentManager = getFragmentManager();
            fragmentManager.beginTransaction()
                    .replace(R.id.container, fragment)
                    .commit();
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawers();
        }
        if (mCallbacks != null) {
            mCallbacks.onNavigationDrawerItemSelected(position);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            mCallbacks = (NavigationDrawerCallbacks) activity;
        } catch (ClassCastException e) {
            // Keep the original exception as the cause so the stack trace shows the bad cast.
            ClassCastException wrapped =
                    new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
            wrapped.initCause(e);
            throw wrapped;
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mCallbacks = null;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration to the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // If the drawer is open, show the global app actions in the action bar. See also
        // showGlobalContextActionBar, which controls the top-left area of the action bar.
        if (mDrawerLayout != null && isDrawerOpen()) {
            inflater.inflate(R.menu.global, menu);
            showGlobalContextActionBar();
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        if (item.getItemId() == R.id.action_example) {
            Toast.makeText(getActivity(), "Example action.", Toast.LENGTH_SHORT).show();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the global app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    private ActionBar getActionBar() {
        return getActivity().getActionBar();
    }

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public interface NavigationDrawerCallbacks {
        /**
         * Called when an item in the navigation drawer is selected.
         */
        void onNavigationDrawerItemSelected(int position);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.stram;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import com.google.common.collect.Sets;
import com.datatorrent.api.Context.PortContext;
import com.datatorrent.api.DAG.Locality;
import com.datatorrent.api.InputOperator;
import com.datatorrent.api.Operator;
import com.datatorrent.api.Operator.ProcessingMode;
import com.datatorrent.api.StorageAgent;
import com.datatorrent.api.StreamCodec;
import com.datatorrent.api.annotation.Stateless;
import com.datatorrent.stram.api.Checkpoint;
import com.datatorrent.stram.api.OperatorDeployInfo;
import com.datatorrent.stram.api.OperatorDeployInfo.InputDeployInfo;
import com.datatorrent.stram.api.OperatorDeployInfo.OperatorType;
import com.datatorrent.stram.api.OperatorDeployInfo.OutputDeployInfo;
import com.datatorrent.stram.api.OperatorDeployInfo.UnifierDeployInfo;
import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.StramToNodeRequest;
import com.datatorrent.stram.api.StreamingContainerUmbilicalProtocol.StreamingContainerContext;
import com.datatorrent.stram.engine.OperatorContext;
import com.datatorrent.stram.plan.logical.LogicalPlan;
import com.datatorrent.stram.plan.logical.LogicalPlan.InputPortMeta;
import com.datatorrent.stram.plan.logical.LogicalPlan.StreamMeta;
import com.datatorrent.stram.plan.physical.PTContainer;
import com.datatorrent.stram.plan.physical.PTOperator;
import com.datatorrent.stram.plan.physical.PTOperator.State;
import com.datatorrent.stram.plan.physical.PhysicalPlan;
import com.datatorrent.stram.util.ConfigUtils;
import com.datatorrent.stram.webapp.ContainerInfo;
/**
*
* Representation of child container (execution layer) in the master<p>
* Created when resource for container was allocated.
* Destroyed after resource is deallocated (container released, killed etc.)
* <br>
*
* @since 0.3.2
*/
public class StreamingContainerAgent
{
  private static final Logger LOG = LoggerFactory.getLogger(StreamingContainerAgent.class);

  /**
   * Request to launch a new child container for the given physical container.
   */
  public static class ContainerStartRequest
  {
    final PTContainer container;

    ContainerStartRequest(PTContainer container)
    {
      this.container = container;
    }
  }

  public StreamingContainerAgent(PTContainer container, StreamingContainerContext initCtx, StreamingContainerManager
      dnmgr)
  {
    this.container = container;
    this.initCtx = initCtx;
    // Free memory starts out equal to the full allocation; updated later from heartbeats.
    this.memoryMBFree = this.container.getAllocatedMemoryMB();
    this.dnmgr = dnmgr;
  }

  // Flags set by the master and read when composing the next heartbeat response.
  boolean shutdownRequested = false;
  boolean stackTraceRequested = false;

  // Operators scheduled for (un)deploy in this container; undeploy tracked by operator id.
  Set<PTOperator> deployOpers = Sets.newHashSet();
  Set<Integer> undeployOpers = Sets.newHashSet();
  int deployCnt = 0;

  // Heartbeat bookkeeping (epoch millis).
  long lastHeartbeatMillis = 0;
  long createdMillis = System.currentTimeMillis();

  final PTContainer container;
  final StreamingContainerContext initCtx;
  String jvmName;
  int memoryMBFree;
  long gcCollectionCount;
  long gcCollectionTime;

  final StreamingContainerManager dnmgr;

  // Requests queued by the master to be delivered to operators via heartbeat responses.
  private final ConcurrentLinkedQueue<StramToNodeRequest> operatorRequests = new ConcurrentLinkedQueue<>();

  public StreamingContainerContext getInitContext()
  {
    return initCtx;
  }

  public PTContainer getContainer()
  {
    return container;
  }

  /**
   * @return true when any operator assigned to this container is still waiting to be deployed.
   */
  public boolean hasPendingWork()
  {
    for (PTOperator oper : container.getOperators()) {
      if (oper.getState() == PTOperator.State.PENDING_DEPLOY) {
        return true;
      }
    }
    return false;
  }

  /**
   * Queues a request destined for an operator running in this container.
   */
  public void addOperatorRequest(StramToNodeRequest r)
  {
    LOG.info("Adding operator request {} {}", container.getExternalId(), r);
    this.operatorRequests.add(r);
  }

  @SuppressWarnings("ReturnOfCollectionOrArrayField")
  protected ConcurrentLinkedQueue<StramToNodeRequest> getOperatorRequests()
  {
    return this.operatorRequests;
  }

  /**
   * Create deploy info for StramChild.
   *
   * Two-pass assembly: first all outputs (publishers) of the PENDING_DEPLOY operators are
   * collected, then inputs (subscribers) are wired up, since an input's locality decision
   * depends on whether its publisher lives in the same container.
   *
   * @param operators
   * @return StreamingContainerContext
   */
  public List<OperatorDeployInfo> getDeployInfoList(Collection<PTOperator> operators)
  {
    if (container.bufferServerAddress == null) {
      throw new AssertionError("No buffer server address assigned");
    }

    // LinkedHashMap preserves deploy order of the operators.
    Map<OperatorDeployInfo, PTOperator> nodes = new LinkedHashMap<>();
    HashSet<PTOperator.PTOutput> publishers = new HashSet<>();
    PhysicalPlan physicalPlan = dnmgr.getPhysicalPlan();

    // Pass 1: build deploy info and output (publisher) descriptors.
    for (PTOperator oper : operators) {
      if (oper.getState() != State.PENDING_DEPLOY) {
        LOG.debug("Skipping deploy for operator {} state {}", oper, oper.getState());
        continue;
      }
      OperatorDeployInfo ndi = createOperatorDeployInfo(oper);

      nodes.put(ndi, oper);
      ndi.inputs = new ArrayList<>(oper.getInputs().size());
      ndi.outputs = new ArrayList<>(oper.getOutputs().size());

      for (PTOperator.PTOutput out : oper.getOutputs()) {
        final StreamMeta streamMeta = out.logicalStream;
        // buffer server or inline publisher
        OutputDeployInfo portInfo = new OutputDeployInfo();
        portInfo.declaredStreamId = streamMeta.getName();
        portInfo.portName = out.portName;
        try {
          // clone so the put() below does not mutate the logical plan's attribute map
          portInfo.contextAttributes = streamMeta.getSource().getAttributes().clone();
        } catch (CloneNotSupportedException ex) {
          throw new RuntimeException("Cannot clone attributes", ex);
        }
        // Output is considered "unified" when any sink of this port is a unifier.
        boolean outputUnified = false;
        for (PTOperator.PTInput input : out.sinks) {
          if (input.target.isUnifier()) {
            outputUnified = true;
            break;
          }
        }
        portInfo.contextAttributes.put(PortContext.IS_OUTPUT_UNIFIED, outputUnified);

        if (ndi.type == OperatorDeployInfo.OperatorType.UNIFIER) {
          // input attributes of the downstream operator
          // NOTE(review): this replaces the cloned map (discarding IS_OUTPUT_UNIFIED set just
          // above) with the sink's live, un-cloned attribute map; only the first sink is used.
          // Looks intentional for unifiers but worth confirming.
          for (InputPortMeta sink : streamMeta.getSinks()) {
            portInfo.contextAttributes = sink.getAttributes();
            break;
          }
        }

        if (!out.isDownStreamInline()) {
          // Not container-local: publish through the buffer server of this operator's container.
          portInfo.bufferServerHost = oper.getContainer().bufferServerAddress.getHostName();
          portInfo.bufferServerPort = oper.getContainer().bufferServerAddress.getPort();
          portInfo.bufferServerToken = oper.getContainer().getBufferServerToken();
          // Build the stream codec configuration of all sinks connected to this port
          for (PTOperator.PTInput input : out.sinks) {
            // Create mappings for all non-inline operators
            if (input.target.getContainer() != out.source.getContainer()) {
              InputPortMeta inputPortMeta = getIdentifyingInputPortMeta(input);
              StreamCodec<?> streamCodecInfo = getStreamCodec(inputPortMeta);
              Integer id = physicalPlan.getStreamCodecIdentifier(streamCodecInfo);
              if (!portInfo.streamCodecs.containsKey(id)) {
                portInfo.streamCodecs.put(id, streamCodecInfo);
              }
            }
          }
        }

        ndi.outputs.add(portInfo);
        publishers.add(out);
      }
    }

    // after we know all publishers within container, determine subscribers

    // Pass 2: build input (subscriber) descriptors, deciding locality per input.
    for (Map.Entry<OperatorDeployInfo, PTOperator> operEntry : nodes.entrySet()) {
      OperatorDeployInfo ndi = operEntry.getKey();
      PTOperator oper = operEntry.getValue();
      for (PTOperator.PTInput in : oper.getInputs()) {
        final StreamMeta streamMeta = in.logicalStream;
        if (streamMeta.getSource() == null) {
          throw new AssertionError("source is null: " + in);
        }
        PTOperator.PTOutput sourceOutput = in.source;

        InputDeployInfo inputInfo = new InputDeployInfo();
        inputInfo.declaredStreamId = streamMeta.getName();
        inputInfo.portName = in.portName;
        InputPortMeta inputPortMeta = getInputPortMeta(oper.getOperatorMeta(), streamMeta);

        if (inputPortMeta != null) {
          // NOTE(review): shares the live attribute map (no clone), unlike the output path.
          inputInfo.contextAttributes = inputPortMeta.getAttributes();
        }
        if (inputInfo.contextAttributes == null && ndi.type == OperatorDeployInfo.OperatorType.UNIFIER) {
          // Unifier inputs fall back to the attributes of the upstream output port.
          inputInfo.contextAttributes = in.source.logicalStream.getSource().getAttributes();
        }

        inputInfo.sourceNodeId = sourceOutput.source.getId();
        inputInfo.sourcePortName = sourceOutput.portName;
        if (in.partitions != null && in.partitions.mask != 0) {
          inputInfo.partitionMask = in.partitions.mask;
          inputInfo.partitionKeys = in.partitions.partitions;
        }

        if (sourceOutput.source.getContainer() == oper.getContainer()) {
          // both operators in same container
          if (!publishers.contains(sourceOutput)) {
            throw new AssertionError("Source not deployed for container local stream " + sourceOutput + " " + in);
          }
          if (streamMeta.getLocality() == Locality.THREAD_LOCAL) {
            inputInfo.locality = Locality.THREAD_LOCAL;
            // THREAD_LOCAL input turns the whole operator into an OIO (one-in-one) deployment
            ndi.type = OperatorType.OIO;
          } else {
            inputInfo.locality = Locality.CONTAINER_LOCAL;
          }
        } else {
          // buffer server input
          PTContainer container = sourceOutput.source.getContainer();
          InetSocketAddress addr = container.bufferServerAddress;
          if (addr == null) {
            throw new AssertionError("upstream address not assigned: " + sourceOutput);
          }
          inputInfo.bufferServerHost = addr.getHostName();
          inputInfo.bufferServerPort = addr.getPort();
          inputInfo.bufferServerToken = container.getBufferServerToken();
        }

        // On the input side there is a unlikely scenario of partitions even for inline stream that is being
        // handled. Always specifying a stream codec configuration in case that scenario happens.
        InputPortMeta idInputPortMeta = getIdentifyingInputPortMeta(in);
        StreamCodec<?> streamCodecInfo = getStreamCodec(idInputPortMeta);
        Integer id = physicalPlan.getStreamCodecIdentifier(streamCodecInfo);
        inputInfo.streamCodecs.put(id, streamCodecInfo);
        ndi.inputs.add(inputInfo);
      }
    }

    return new ArrayList<>(nodes.keySet());
  }

  /**
   * Finds the input port of the given operator that is connected to the given stream,
   * or null when the operator has no input on that stream.
   */
  public static InputPortMeta getInputPortMeta(LogicalPlan.OperatorMeta operatorMeta, StreamMeta streamMeta)
  {
    InputPortMeta inputPortMeta = null;
    Map<InputPortMeta, StreamMeta> inputStreams = operatorMeta.getInputStreams();
    for (Map.Entry<InputPortMeta, StreamMeta> entry : inputStreams.entrySet()) {
      // identity comparison: the exact same StreamMeta instance is expected
      if (entry.getValue() == streamMeta) {
        inputPortMeta = entry.getKey();
        break;
      }
    }
    return inputPortMeta;
  }

  /**
   * Resolves the logical input port that identifies this physical input. For a unifier
   * target the identifying port is looked up on the downstream non-unifier operator.
   */
  public static InputPortMeta getIdentifyingInputPortMeta(PTOperator.PTInput input)
  {
    InputPortMeta inputPortMeta;
    PTOperator inputTarget = input.target;
    StreamMeta streamMeta = input.logicalStream;
    if (!inputTarget.isUnifier()) {
      inputPortMeta = getInputPortMeta(inputTarget.getOperatorMeta(), streamMeta);
    } else {
      PTOperator destTarget = getIdentifyingOperator(inputTarget);
      inputPortMeta = getInputPortMeta(destTarget.getOperatorMeta(), streamMeta);
    }
    return inputPortMeta;
  }

  /**
   * Walks downstream past chained unifiers to the first non-unifier operator.
   * May return null if a unifier has no outputs/sinks (dead-ends the walk).
   */
  public static PTOperator getIdentifyingOperator(PTOperator operator)
  {
    while ((operator != null) && operator.isUnifier()) {
      PTOperator idOperator = null;
      List<PTOperator.PTOutput> outputs = operator.getOutputs();
      // Since it is a unifier, getting the downstream operator it is connected to which is on the first port
      if (outputs.size() > 0) {
        List<PTOperator.PTInput> sinks = outputs.get(0).sinks;
        if (sinks.size() > 0) {
          PTOperator.PTInput sink = sinks.get(0);
          idOperator = sink.target;
        }
      }
      operator = idOperator;
    }
    return operator;
  }

  /**
   * Returns the stream codec for the port: the STREAM_CODEC attribute if set, otherwise the
   * codec supplied by the port object, which is then cached back into the attributes so the
   * same codec instance (and hence the same identifier) is reused on subsequent calls.
   */
  public static StreamCodec<?> getStreamCodec(InputPortMeta inputPortMeta)
  {
    if (inputPortMeta != null) {
      StreamCodec<?> codec = inputPortMeta.getValue(PortContext.STREAM_CODEC);
      if (codec == null) {
        // it cannot be this object that gets returned. Depending on this value is dangerous
        codec = inputPortMeta.getPortObject().getStreamCodec();
        if (codec != null) {
          // don't create codec multiple times - it will assign a new identifier
          inputPortMeta.getAttributes().put(PortContext.STREAM_CODEC, codec);
        }
      }
      return codec;
    }
    return null;
  }

  /**
   * Create deploy info for operator.
   * <p>
   * Determines the operator type (UNIFIER / INPUT / GENERIC), resolves the recovery
   * checkpoint (for AT_MOST_ONCE / EXACTLY_ONCE the most recent checkpoint written by the
   * storage agent wins), and clones the operator attributes into the deploy info.
   *
   * @return {@link com.datatorrent.stram.api.OperatorDeployInfo}
   */
  private OperatorDeployInfo createOperatorDeployInfo(PTOperator oper)
  {
    OperatorDeployInfo ndi;

    if (oper.isUnifier()) {
      UnifierDeployInfo udi = new UnifierDeployInfo(); /* the constructor auto sets the type */
      try {
        udi.operatorAttributes = oper.getUnifiedOperatorMeta().getAttributes().clone();
      } catch (CloneNotSupportedException ex) {
        throw new RuntimeException("Cannot clone unifier attributes", ex);
      }
      ndi = udi;
    } else {
      ndi = new OperatorDeployInfo();
      Operator operator = oper.getOperatorMeta().getOperator();
      if (operator instanceof InputOperator) {
        ndi.type = OperatorType.INPUT;

        if (!oper.getInputs().isEmpty()) {
          //If there are no input ports then it has to be an input operator. But if there are input ports then
          //we check if any input port is connected which would make it a Generic operator.
          for (PTOperator.PTInput ptInput : oper.getInputs()) {
            if (ptInput.logicalStream != null && ptInput.logicalStream.getSource() != null) {
              ndi.type = OperatorType.GENERIC;
              break;
            }
          }
        }
      } else {
        ndi.type = OperatorType.GENERIC;
      }
    }

    Checkpoint checkpoint = oper.getRecoveryCheckpoint();
    ProcessingMode pm = oper.getOperatorMeta().getValue(OperatorContext.PROCESSING_MODE);

    if (pm == ProcessingMode.AT_MOST_ONCE || pm == ProcessingMode.EXACTLY_ONCE) {
      // TODO: following should be handled in the container at deploy time
      // for exactly once container should also purge previous checkpoint
      // whenever new checkpoint is written.
      StorageAgent agent = oper.getOperatorMeta().getAttributes().get(OperatorContext.STORAGE_AGENT);
      if (agent == null) {
        agent = initCtx.getValue(OperatorContext.STORAGE_AGENT);
      }
      // pick checkpoint most recently written
      try {
        long[] windowIds = agent.getWindowIds(oper.getId());
        long checkpointId = Stateless.WINDOW_ID;
        for (long windowId : windowIds) {
          if (windowId > checkpointId) {
            checkpointId = windowId;
          }
        }
        if (checkpoint == null || checkpoint.windowId != checkpointId) {
          checkpoint = new Checkpoint(checkpointId, 0, 0);
        }
      } catch (Exception e) {
        throw new RuntimeException("Failed to determine checkpoint window id " + oper, e);
      }
    }

    LOG.debug("{} recovery checkpoint {}", oper, checkpoint);
    ndi.checkpoint = checkpoint;
    ndi.name = oper.getOperatorMeta().getName();
    ndi.id = oper.getId();
    try {
      // clone map before modifying it
      ndi.contextAttributes = oper.getOperatorMeta().getAttributes().clone();
    } catch (CloneNotSupportedException ex) {
      throw new RuntimeException("Cannot clone operator attributes", ex);
    }
    if (oper.isOperatorStateLess()) {
      ndi.contextAttributes.put(OperatorContext.STATELESS, true);
    }
    return ndi;
  }

  /**
   * Builds a web-app snapshot of this container's state: identity, operators, memory,
   * GC counters, timing, and (when the node HTTP address is known) container log URLs.
   */
  public ContainerInfo getContainerInfo()
  {
    ContainerInfo ci = new ContainerInfo();
    ci.id = container.getExternalId();
    ci.host = container.host;
    ci.state = container.getState().name();
    ci.jvmName = this.jvmName;
    ci.numOperators = container.getOperators().size();
    ci.operators = new TreeMap<>();
    for (PTOperator ptOperator : container.getOperators()) {
      ci.operators.put(ptOperator.getId(), ptOperator.getName());
    }
    ci.memoryMBAllocated = container.getAllocatedMemoryMB();
    ci.lastHeartbeat = lastHeartbeatMillis;
    ci.memoryMBFree = this.memoryMBFree;
    ci.gcCollectionCount = this.gcCollectionCount;
    ci.gcCollectionTime = this.gcCollectionTime;
    ci.startedTime = container.getStartedTime();
    ci.finishedTime = container.getFinishedTime();
    if (this.container.nodeHttpAddress != null) {
      YarnConfiguration conf = new YarnConfiguration();
      ci.containerLogsUrl = ConfigUtils
          .getSchemePrefix(conf) + this.container.nodeHttpAddress + "/node/containerlogs/" + ci.id + "/" + System
          .getenv(ApplicationConstants.Environment.USER.toString());
      ci.rawContainerLogsUrl = ConfigUtils
          .getRawContainerLogsUrl(conf, container.nodeHttpAddress, container.getPlan().getLogicalPlan().getAttributes()
          .get(LogicalPlan.APPLICATION_ID), ci.id);
    }

    return ci;
  }

  /**
   * Requests a fresh stack trace from the child (picked up on the next heartbeat) and
   * returns the most recently reported one, which may be null or stale.
   */
  public String getStackTrace()
  {
    stackTraceRequested = true;
    return containerStackTrace;
  }

  // Last stack trace reported by the child container; written by the heartbeat handler thread.
  public volatile String containerStackTrace = null;
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.