gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.resourceresolver.impl.observation;
import static org.apache.sling.api.resource.observation.ResourceChangeListener.CHANGES;
import static org.apache.sling.api.resource.observation.ResourceChangeListener.PATHS;
import static org.apache.sling.commons.osgi.PropertiesUtil.toStringArray;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import org.apache.sling.api.resource.ResourceUtil;
import org.apache.sling.api.resource.observation.ExternalResourceChangeListener;
import org.apache.sling.api.resource.observation.ResourceChange.ChangeType;
import org.apache.sling.api.resource.observation.ResourceChangeListener;
import org.apache.sling.api.resource.path.Path;
import org.apache.sling.api.resource.path.PathSet;
import org.apache.sling.commons.osgi.PropertiesUtil;
import org.osgi.framework.ServiceReference;
/**
* Information about a resource change listener.
*/
public class ResourceChangeListenerInfo implements Comparable<ResourceChangeListenerInfo> {
private static final Set<ChangeType> DEFAULT_CHANGE_RESOURCE_TYPES = EnumSet.of(ChangeType.ADDED, ChangeType.REMOVED, ChangeType.CHANGED);
private static final Set<ChangeType> DEFAULT_CHANGE_PROVIDER_TYPES = EnumSet.of(ChangeType.PROVIDER_ADDED, ChangeType.PROVIDER_REMOVED);
private final PathSet paths;
private final Set<ChangeType> resourceChangeTypes;
private final Set<ChangeType> providerChangeTypes;
private final Set<String> propertyNamesHint;
private final boolean valid;
private volatile boolean external = false;
private volatile ResourceChangeListener listener;
public ResourceChangeListenerInfo(final ServiceReference<ResourceChangeListener> ref, final String[] searchPaths) {
boolean configValid = true;
final Set<String> pathsSet = new HashSet<String>();
final String paths[] = toStringArray(ref.getProperty(PATHS), null);
if ( paths != null ) {
for(final String p : paths) {
boolean isGlobPattern = false;
String normalisedPath = ResourceUtil.normalize(p);
if (p.startsWith(Path.GLOB_PREFIX)) {
isGlobPattern = true;
normalisedPath = ResourceUtil.normalize(p.substring(Path.GLOB_PREFIX.length()));
}
if (!".".equals(p) && normalisedPath.isEmpty()) {
configValid = false;
} else if ( normalisedPath.startsWith("/") && !isGlobPattern ) {
pathsSet.add(normalisedPath);
} else if (normalisedPath.startsWith("/") && isGlobPattern) {
pathsSet.add(Path.GLOB_PREFIX + normalisedPath);
} else {
for(final String sp : searchPaths) {
if ( p.equals(".") ) {
pathsSet.add(sp);
} else {
if (isGlobPattern) {
pathsSet.add(Path.GLOB_PREFIX + ResourceUtil.normalize(sp + normalisedPath));
} else {
pathsSet.add(ResourceUtil.normalize(sp + normalisedPath));
}
}
}
}
}
}
if ( pathsSet.isEmpty() ) {
configValid = false;
} else {
// check for sub paths
final Iterator<String> iter = pathsSet.iterator();
while ( iter.hasNext() ) {
final String path = iter.next();
boolean remove = false;
for(final String p : pathsSet) {
if ( p.length() > path.length() && path.startsWith(p + "/") ) {
remove = true;
break;
}
}
if ( remove ) {
iter.remove();
}
}
}
this.paths = PathSet.fromStringCollection(pathsSet);
if (ref.getProperty(CHANGES) != null ) {
final Set<ChangeType> rts = new HashSet<ChangeType>();
final Set<ChangeType> pts = new HashSet<ChangeType>();
for (final String changeName : toStringArray(ref.getProperty(CHANGES))) {
try {
final ChangeType ct = ChangeType.valueOf(changeName);
if ( ct.ordinal() < ChangeType.PROVIDER_ADDED.ordinal()) {
rts.add(ct);
} else {
pts.add(ct);
}
} catch ( final IllegalArgumentException iae) {
configValid = false;
}
}
if ( rts.isEmpty() ) {
this.resourceChangeTypes = Collections.emptySet();
} else if ( rts.size() == 3 ) {
this.resourceChangeTypes = DEFAULT_CHANGE_RESOURCE_TYPES;
} else {
this.resourceChangeTypes = Collections.unmodifiableSet(rts);
}
if ( pts.isEmpty() ) {
this.providerChangeTypes = Collections.emptySet();
} else if ( pts.size() == 2 ) {
this.providerChangeTypes = DEFAULT_CHANGE_PROVIDER_TYPES;
} else {
this.providerChangeTypes = Collections.unmodifiableSet(pts);
}
} else {
// default is added, changed, removed for resources and
// added and removed for providers
this.resourceChangeTypes = DEFAULT_CHANGE_RESOURCE_TYPES;
this.providerChangeTypes = DEFAULT_CHANGE_PROVIDER_TYPES;
}
if ( ref.getProperty(ResourceChangeListener.PROPERTY_NAMES_HINT) != null ) {
this.propertyNamesHint = new HashSet<String>();
for(final String val : PropertiesUtil.toStringArray(ref.getProperty(ResourceChangeListener.PROPERTY_NAMES_HINT)) ) {
this.propertyNamesHint.add(val);
}
} else {
this.propertyNamesHint = null;
}
this.valid = configValid;
}
public boolean isValid() {
return this.valid;
}
public Set<ChangeType> getResourceChangeTypes() {
return this.resourceChangeTypes;
}
public Set<ChangeType> getProviderChangeTypes() {
return this.providerChangeTypes;
}
public PathSet getPaths() {
return this.paths;
}
/**
* Return a set of property name hints
* @return The set of names or {@code null}.
*/
public Set<String> getPropertyNamesHint() {
return this.propertyNamesHint;
}
public boolean isExternal() {
return this.external;
}
public ResourceChangeListener getListener() {
return listener;
}
public void setListener(final ResourceChangeListener listener) {
this.listener = listener;
this.external = listener instanceof ExternalResourceChangeListener;
}
private int compareSet(final Set<String> t, final Set<String> o) {
if ( t == null && o == null ) {
return 0;
}
if ( t == null ) {
return -1;
}
if ( o == null ) {
return 1;
}
final Set<String> tPaths = new TreeSet<>(t);
final Set<String> oPaths = new TreeSet<>(o);
int result = tPaths.size() - oPaths.size();
if ( result == 0 ) {
final Iterator<String> tPathsIter = tPaths.iterator();
final Iterator<String> oPathsIter = oPaths.iterator();
while ( result == 0 && tPathsIter.hasNext() ) {
result = tPathsIter.next().compareTo(oPathsIter.next());
}
}
return result;
}
private int compareChangeTypes(final Set<ChangeType> t, final Set<ChangeType> o) {
int result = t.size() - o.size();
if ( result == 0 ) {
final Iterator<ChangeType> tIter = t.iterator();
final Iterator<ChangeType> oIter = o.iterator();
while ( result == 0 && tIter.hasNext() ) {
result = tIter.next().compareTo(oIter.next());
}
}
return result;
}
@Override
public int compareTo(final ResourceChangeListenerInfo o) {
// paths first
int result = compareSet(this.paths.toStringSet(), o.paths.toStringSet());
if ( result == 0 ) {
// hints
result = compareSet(this.propertyNamesHint, o.propertyNamesHint);
if ( result == 0 ) {
// external next
result = Boolean.valueOf(this.external).compareTo(o.external);
if ( result == 0 ) {
result = compareChangeTypes(this.resourceChangeTypes, o.resourceChangeTypes);
if ( result == 0 ) {
result = compareChangeTypes(this.providerChangeTypes, o.providerChangeTypes);
}
}
}
}
return result;
}
@Override
public String toString() {
return "ResourceChangeListenerInfo [paths=" + paths + ", resourceChangeTypes=" + resourceChangeTypes
+ ", providerChangeTypes=" + providerChangeTypes + ", propertyNamesHint=" + propertyNamesHint
+ ", valid=" + valid + ", external=" + external + ", listener=" + listener + "]";
}
}
| |
/*
*
* Copyright 2016 Robert Winkler, Lucas Lech, Mahmoud Romeh
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package io.github.resilience4j.bulkhead;
import io.github.resilience4j.core.ClassUtils;
import io.github.resilience4j.core.ContextPropagator;
import io.github.resilience4j.core.lang.Nullable;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import static java.util.Arrays.stream;
import static java.util.stream.Collectors.toList;
/**
* A {@link ThreadPoolBulkheadConfig} configures a {@link Bulkhead}
*/
/**
 * A {@link ThreadPoolBulkheadConfig} configures a {@link Bulkhead}
 */
public class ThreadPoolBulkheadConfig {

    public static final int DEFAULT_QUEUE_CAPACITY = 100;
    public static final Duration DEFAULT_KEEP_ALIVE_DURATION = Duration.ofMillis(20);
    // leave one processor free for non-bulkhead work, but never go below 1
    public static final int DEFAULT_CORE_THREAD_POOL_SIZE =
        Runtime.getRuntime().availableProcessors() > 1 ? Runtime.getRuntime().availableProcessors()
            - 1 : 1;
    public static final int DEFAULT_MAX_THREAD_POOL_SIZE = Runtime.getRuntime()
        .availableProcessors();
    public static final boolean DEFAULT_WRITABLE_STACK_TRACE_ENABLED = true;

    private int maxThreadPoolSize = DEFAULT_MAX_THREAD_POOL_SIZE;
    private int coreThreadPoolSize = DEFAULT_CORE_THREAD_POOL_SIZE;
    private int queueCapacity = DEFAULT_QUEUE_CAPACITY;
    private Duration keepAliveDuration = DEFAULT_KEEP_ALIVE_DURATION;
    private boolean writableStackTraceEnabled = DEFAULT_WRITABLE_STACK_TRACE_ENABLED;
    private List<ContextPropagator> contextPropagators = new ArrayList<>();
    private RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.AbortPolicy();

    private ThreadPoolBulkheadConfig() {
    }

    /**
     * Returns a builder to create a custom ThreadPoolBulkheadConfig.
     *
     * @return a {@link Builder}
     */
    public static Builder custom() {
        return new Builder();
    }

    /**
     * Returns a builder to create a custom ThreadPoolBulkheadConfig,
     * initialized with the values of the given configuration. The given
     * configuration is copied and never mutated by the builder.
     *
     * @param threadPoolBulkheadConfig the configuration to start from
     * @return a {@link Builder}
     */
    public static Builder from(ThreadPoolBulkheadConfig threadPoolBulkheadConfig) {
        return new Builder(threadPoolBulkheadConfig);
    }

    /**
     * Creates a default Bulkhead configuration.
     *
     * @return a default Bulkhead configuration.
     */
    public static ThreadPoolBulkheadConfig ofDefaults() {
        return new Builder().build();
    }

    public Duration getKeepAliveDuration() {
        return keepAliveDuration;
    }

    public int getQueueCapacity() {
        return queueCapacity;
    }

    public int getMaxThreadPoolSize() {
        return maxThreadPoolSize;
    }

    public int getCoreThreadPoolSize() {
        return coreThreadPoolSize;
    }

    public boolean isWritableStackTraceEnabled() {
        return writableStackTraceEnabled;
    }

    public List<ContextPropagator> getContextPropagator() {
        return contextPropagators;
    }

    public RejectedExecutionHandler getRejectedExecutionHandler() {
        return rejectedExecutionHandler;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("ThreadPoolBulkheadConfig{");
        sb.append("maxThreadPoolSize=").append(maxThreadPoolSize);
        sb.append(", coreThreadPoolSize=").append(coreThreadPoolSize);
        sb.append(", queueCapacity=").append(queueCapacity);
        sb.append(", keepAliveDuration=").append(keepAliveDuration);
        sb.append(", writableStackTraceEnabled=").append(writableStackTraceEnabled);
        sb.append(", contextPropagators=").append(contextPropagators);
        sb.append(", rejectExecutionHandle=").append(rejectedExecutionHandler.getClass().getSimpleName());
        sb.append('}');
        return sb.toString();
    }

    public static class Builder {

        private Class<? extends ContextPropagator>[] contextPropagatorClasses = new Class[0];
        private List<? extends ContextPropagator> contextPropagators = new ArrayList<>();
        private ThreadPoolBulkheadConfig config;

        /**
         * Creates a builder pre-populated from an existing configuration.
         * The source configuration is copied field by field; previously the
         * builder aliased the given instance, so {@code from(cfg)} silently
         * mutated {@code cfg} and {@code build()} returned the same object.
         */
        public Builder(ThreadPoolBulkheadConfig bulkheadConfig) {
            config = new ThreadPoolBulkheadConfig();
            config.maxThreadPoolSize = bulkheadConfig.maxThreadPoolSize;
            config.coreThreadPoolSize = bulkheadConfig.coreThreadPoolSize;
            config.queueCapacity = bulkheadConfig.queueCapacity;
            config.keepAliveDuration = bulkheadConfig.keepAliveDuration;
            config.writableStackTraceEnabled = bulkheadConfig.writableStackTraceEnabled;
            config.contextPropagators = new ArrayList<>(bulkheadConfig.contextPropagators);
            config.rejectedExecutionHandler = bulkheadConfig.rejectedExecutionHandler;
        }

        public Builder() {
            config = new ThreadPoolBulkheadConfig();
        }

        /**
         * Configures the max thread pool size.
         *
         * @param maxThreadPoolSize max thread pool size
         * @return the BulkheadConfig.Builder
         * @throws IllegalArgumentException if the value is less than 1
         */
        public Builder maxThreadPoolSize(int maxThreadPoolSize) {
            if (maxThreadPoolSize < 1) {
                throw new IllegalArgumentException(
                    "maxThreadPoolSize must be a positive integer value >= 1");
            }
            config.maxThreadPoolSize = maxThreadPoolSize;
            return this;
        }

        /**
         * Configures the core thread pool size.
         *
         * @param coreThreadPoolSize core thread pool size
         * @return the BulkheadConfig.Builder
         * @throws IllegalArgumentException if the value is less than 1
         */
        public Builder coreThreadPoolSize(int coreThreadPoolSize) {
            if (coreThreadPoolSize < 1) {
                throw new IllegalArgumentException(
                    "coreThreadPoolSize must be a positive integer value >= 1");
            }
            config.coreThreadPoolSize = coreThreadPoolSize;
            return this;
        }

        /**
         * Configures the context propagator classes; each class is instantiated
         * through its default constructor when {@link #build()} is called.
         *
         * @return the BulkheadConfig.Builder
         */
        public final Builder contextPropagator(
            @Nullable Class<? extends ContextPropagator>... contextPropagatorClasses) {
            this.contextPropagatorClasses = contextPropagatorClasses != null
                ? contextPropagatorClasses
                : new Class[0];
            return this;
        }

        /**
         * Configures ready-made context propagator instances.
         *
         * @return the BulkheadConfig.Builder
         */
        public final Builder contextPropagator(ContextPropagator... contextPropagators) {
            this.contextPropagators = contextPropagators != null ?
                Arrays.stream(contextPropagators).collect(toList()) :
                new ArrayList<>();
            return this;
        }

        /**
         * Configures the capacity of the queue.
         * Use {@link SynchronousQueue} when {@code queueCapacity == 0} or {@link ArrayBlockingQueue} when {@code queueCapacity > 0}.
         *
         * @param queueCapacity max concurrent calls
         * @return the BulkheadConfig.Builder
         * @throws IllegalArgumentException if the value is negative
         */
        public Builder queueCapacity(int queueCapacity) {
            if (queueCapacity < 0) {
                throw new IllegalArgumentException(
                    "queueCapacity must be a positive integer value >= 0");
            }
            config.queueCapacity = queueCapacity;
            return this;
        }

        /**
         * When the number of threads is greater than the core, this is the maximum time duration
         * that excess idle threads will wait for new tasks before terminating.
         *
         * @param keepAliveDuration maximum wait duration for bulkhead thread pool idle thread
         * @return the BulkheadConfig.Builder
         * @throws IllegalArgumentException if the duration is negative
         */
        public Builder keepAliveDuration(Duration keepAliveDuration) {
            if (keepAliveDuration.toMillis() < 0) {
                throw new IllegalArgumentException(
                    "keepAliveDuration must be a positive integer value >= 0");
            }
            config.keepAliveDuration = keepAliveDuration;
            return this;
        }

        /**
         * Enables writable stack traces. When set to false, {@link Exception#getStackTrace()}
         * returns a zero length array. This may be used to reduce log spam when the circuit breaker
         * is open as the cause of the exceptions is already known (the circuit breaker is
         * short-circuiting calls).
         *
         * @param writableStackTraceEnabled flag to control if stack trace is writable
         * @return the BulkheadConfig.Builder
         */
        public Builder writableStackTraceEnabled(boolean writableStackTraceEnabled) {
            config.writableStackTraceEnabled = writableStackTraceEnabled;
            return this;
        }

        /**
         * Replaces the default {@link java.util.concurrent.ThreadPoolExecutor.AbortPolicy}
         * with the {@link RejectedExecutionHandler} provided.
         *
         * @param rejectedExecutionHandler handler to use for rejection of execution.
         * @return the BulkheadConfig.Builder
         */
        public Builder rejectedExecutionHandler(RejectedExecutionHandler rejectedExecutionHandler) {
            Objects.requireNonNull(rejectedExecutionHandler);
            config.rejectedExecutionHandler = rejectedExecutionHandler;
            return this;
        }

        /**
         * Builds a BulkheadConfig
         *
         * @return the BulkheadConfig
         * @throws IllegalArgumentException if maxThreadPoolSize is smaller than coreThreadPoolSize
         */
        public ThreadPoolBulkheadConfig build() {
            if (config.maxThreadPoolSize < config.coreThreadPoolSize) {
                throw new IllegalArgumentException(
                    "maxThreadPoolSize must be a greater than or equals to coreThreadPoolSize");
            }
            if (contextPropagatorClasses.length > 0) {
                config.contextPropagators.addAll(stream(contextPropagatorClasses)
                    .map(ClassUtils::instantiateClassDefConstructor)
                    .collect(toList()));
            }
            // propagator instances configured via the bean variant are appended
            // in addition to any propagators instantiated from classes above
            if (!contextPropagators.isEmpty()) {
                config.contextPropagators.addAll(this.contextPropagators);
            }
            return config;
        }
    }
}
| |
/*
* Copyright (C) 2014 Thomas Broyer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package bullet.impl;
import static javax.lang.model.element.Modifier.*;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Set;
import javax.annotation.Generated;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.Element;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.util.ElementFilter;
import javax.tools.Diagnostic;
import com.google.auto.common.BasicAnnotationProcessor;
import com.google.auto.common.MoreElements;
import com.google.auto.common.MoreTypes;
import com.google.auto.common.Visibility;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeSpec;
import com.squareup.javapoet.TypeVariableName;
import bullet.impl.ComponentMethodDescriptor.ComponentMethodKind;
import dagger.Component;
import dagger.Subcomponent;
/**
 * Annotation-processing step that, for every Dagger {@code @Component} or
 * {@code @Subcomponent}, generates a companion {@code Bullet<Name>} class
 * implementing the {@code bullet.ObjectGraph} interface. The generated class
 * wraps a component instance and dispatches {@code get(Class)} to the
 * component's provision methods and {@code inject(T)} to its members-injection
 * methods.
 */
class ComponentProcessingStep implements BasicAnnotationProcessor.ProcessingStep {
  private final ProcessingEnvironment processingEnv;

  ComponentProcessingStep(ProcessingEnvironment processingEnv) {
    this.processingEnv = processingEnv;
  }

  /** This step reacts to both components and subcomponents. */
  @Override
  public Set<? extends Class<? extends Annotation>> annotations() {
    return ImmutableSet.of(Component.class, Subcomponent.class);
  }

  @Override
  public void process(SetMultimap<Class<? extends Annotation>, Element> elementsByAnnotation) {
    // One generated ObjectGraph per annotated component/subcomponent type.
    Set<Element> componentElements = Sets.union(
        elementsByAnnotation.get(Component.class),
        elementsByAnnotation.get(Subcomponent.class));
    for (Element element : componentElements) {
      TypeElement componentElement = MoreElements.asType(element);
      generateObjectGraph(componentElement);
    }
  }

  /**
   * Generates the {@code Bullet<Name>} source file for the given component
   * type and writes it through the processing environment's Filer.
   * Compilation errors are reported via the Messager instead of being thrown.
   */
  private void generateObjectGraph(TypeElement element) {
    DeclaredType component = MoreTypes.asDeclared(element.asType());
    ArrayList<ComponentMethodDescriptor> provisionMethods = new ArrayList<>();
    MembersInjectionMethodsBuilder membersInjectionMethods = new MembersInjectionMethodsBuilder(processingEnv.getTypeUtils());
    PackageElement packageElement = processingEnv.getElementUtils().getPackageOf(element);
    TypeElement objectElement = processingEnv.getElementUtils().getTypeElement(Object.class.getCanonicalName());
    // Classify every visible component method as either a provision method
    // or a members-injection method; skip java.lang.Object members and
    // anything not callable from the generated class's package.
    for (ExecutableElement method : ElementFilter.methodsIn(processingEnv.getElementUtils().getAllMembers(element))) {
      if (method.getEnclosingElement().equals(objectElement)) {
        continue;
      }
      if (!isVisibleFrom(method, packageElement)) {
        continue;
      }
      Optional<ComponentMethodDescriptor> optMethodDescriptor =
          ComponentMethodDescriptor.forComponentMethod(processingEnv.getTypeUtils(), component, method);
      if (!optMethodDescriptor.isPresent()) {
        continue;
      }
      ComponentMethodDescriptor methodDescriptor = optMethodDescriptor.get();
      // The provided/injected type itself must also be visible.
      if (!isVisibleFrom(processingEnv.getTypeUtils().asElement(methodDescriptor.type()), packageElement)) {
        continue;
      }
      switch (methodDescriptor.kind()) {
        case SIMPLE_PROVISION:
        case PROVIDER_OR_LAZY:
          provisionMethods.add(methodDescriptor);
          break;
        case SIMPLE_MEMBERS_INJECTION:
        case MEMBERS_INJECTOR:
          membersInjectionMethods.add(methodDescriptor);
          break;
        default:
          throw new AssertionError();
      }
    }
    // Class skeleton: public final Bullet<Name> implements ObjectGraph,
    // holding the wrapped component in a final field set by the constructor.
    // Nested component names are joined with "_" to keep the name unique.
    final ClassName elementName = ClassName.get(element);
    final TypeSpec.Builder classBuilder = TypeSpec.classBuilder("Bullet" + Joiner.on("_").join(elementName.simpleNames()))
        .addOriginatingElement(element)
        .addAnnotation(AnnotationSpec.builder(Generated.class)
            .addMember("value", "$S", ComponentProcessor.class.getCanonicalName())
            .build())
        .addModifiers(PUBLIC, FINAL)
        .addSuperinterface(ClassName.get("bullet", "ObjectGraph"))
        .addField(elementName, "component", PRIVATE, FINAL)
        .addMethod(MethodSpec.constructorBuilder()
            .addModifiers(PUBLIC)
            .addParameter(elementName, "component", FINAL)
            .addCode("this.component = component;\n")
            .build());
    final TypeVariableName t = TypeVariableName.get("T");
    // <T> T get(Class<T> type): a chain of identity checks on the requested
    // class, one per provision method. PROVIDER_OR_LAZY methods need an
    // extra .get() to unwrap the Provider/Lazy.
    final MethodSpec.Builder getBuilder = MethodSpec.methodBuilder("get")
        .addAnnotation(Override.class)
        .addModifiers(PUBLIC)
        .addTypeVariable(t)
        .returns(t)
        .addParameter(ParameterizedTypeName.get(ClassName.get(Class.class), t), "type", FINAL);
    for (ComponentMethodDescriptor method : provisionMethods) {
      getBuilder.addCode(
          "if (type == $T.class) {\n$>" +
              "return type.cast(this.component.$N()$L);\n" +
              "$<}\n",
          method.type(), method.name(), method.kind() == ComponentMethodKind.PROVIDER_OR_LAZY ? ".get()" : "");
    }
    // TODO: exception message
    getBuilder.addCode("throw new $T();\n", IllegalArgumentException.class);
    classBuilder.addMethod(getBuilder.build());
    // <T> T inject(T instance): a chain of instanceof checks, one per
    // members-injection method. MEMBERS_INJECTOR methods return a
    // MembersInjector, hence the "().injectMembers" call form.
    final MethodSpec.Builder injectWriter = MethodSpec.methodBuilder("inject")
        .addAnnotation(Override.class)
        .addModifiers(PUBLIC)
        .addTypeVariable(t)
        .returns(t)
        .addParameter(t, "instance", FINAL);
    for (ComponentMethodDescriptor method : membersInjectionMethods.build()) {
      injectWriter.addCode(
          "if (instance instanceof $T) {\n$>" +
              "this.component.$N$L(($T) instance);\n" +
              "return instance;\n" +
              "$<}\n",
          method.type(), method.name(), method.kind() == ComponentMethodKind.MEMBERS_INJECTOR ? "().injectMembers" : "", method.type());
    }
    // TODO: exception message
    injectWriter.addCode("throw new $T();\n", IllegalArgumentException.class);
    classBuilder.addMethod(injectWriter.build());
    try {
      JavaFile.builder(elementName.packageName(), classBuilder.build())
          .build()
          .writeTo(processingEnv.getFiler());
    } catch (IOException ioe) {
      // Report generation failures as compiler errors with the stack trace
      // embedded in the message.
      StringWriter sw = new StringWriter();
      PrintWriter pw = new PrintWriter(sw);
      pw.println("Error generating source file for type " + classBuilder.build().name);
      ioe.printStackTrace(pw);
      pw.close();
      processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, sw.toString());
    }
  }

  /**
   * Whether {@code target} can be referenced from code generated in package
   * {@code from}. PROTECTED and package-private members are only visible from
   * the same package.
   */
  private boolean isVisibleFrom(Element target, PackageElement from) {
    switch (Visibility.effectiveVisibilityOfElement(target)) {
      case PUBLIC:
        return true;
      case PROTECTED:
      case DEFAULT:
        return MoreElements.getPackage(target).equals(from);
      case PRIVATE:
        return false;
      default:
        throw new AssertionError();
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.plugin.inputformat.json;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.Sets;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.pinot.spi.data.readers.AbstractRecordExtractorTest;
import org.apache.pinot.spi.data.readers.RecordReader;
import org.apache.pinot.spi.utils.JsonUtils;
/**
* Tests the {@link JSONRecordExtractor}
*/
/**
 * Tests the {@link JSONRecordExtractor}
 */
public class JSONRecordExtractorTest extends AbstractRecordExtractorTest {

    private final File _dataFile = new File(_tempDir, "events.json");

    private static final String NULL_FIELD = "myNull";
    private static final String INT_FIELD = "myInt";
    private static final String LONG_FIELD = "myLong";
    private static final String DOUBLE_FIELD = "myDouble";
    private static final String STRING_FIELD = "myString";
    private static final String INT_ARRAY_FIELD = "myIntArray";
    private static final String DOUBLE_ARRAY_FIELD = "myDoubleArray";
    private static final String STRING_ARRAY_FIELD = "myStringArray";
    private static final String COMPLEX_ARRAY_1_FIELD = "myComplexArray1";
    private static final String COMPLEX_ARRAY_2_FIELD = "myComplexArray2";
    private static final String MAP_1_FIELD = "myMap1";
    private static final String MAP_2_FIELD = "myMap2";

    @Override
    protected List<Map<String, Object>> getInputRecords() {
        return Arrays.asList(createRecord1(), createRecord2());
    }

    @Override
    protected Set<String> getSourceFields() {
        return Sets.newHashSet(NULL_FIELD, INT_FIELD, LONG_FIELD, DOUBLE_FIELD, STRING_FIELD, INT_ARRAY_FIELD,
            DOUBLE_ARRAY_FIELD, STRING_ARRAY_FIELD, COMPLEX_ARRAY_1_FIELD, COMPLEX_ARRAY_2_FIELD, MAP_1_FIELD,
            MAP_2_FIELD);
    }

    /**
     * Create a JSONRecordReader
     */
    @Override
    protected RecordReader createRecordReader(Set<String> fieldsToRead)
        throws IOException {
        JSONRecordReader recordReader = new JSONRecordReader();
        recordReader.init(_dataFile, fieldsToRead, null);
        return recordReader;
    }

    /**
     * Create a JSON input file using the input records.
     * Records are written back-to-back as JSON objects; the reader is
     * expected to handle concatenated objects without separators.
     */
    @Override
    protected void createInputFile()
        throws IOException {
        try (FileWriter fileWriter = new FileWriter(_dataFile)) {
            for (Map<String, Object> inputRecord : _inputRecords) {
                ObjectNode jsonRecord = JsonUtils.newObjectNode();
                for (String key : inputRecord.keySet()) {
                    jsonRecord.set(key, JsonUtils.objectToJsonNode(inputRecord.get(key)));
                }
                fileWriter.write(jsonRecord.toString());
            }
        }
    }

    /**
     * First input record: covers null, primitive, array, array-of-map and
     * nested-map values for every declared field.
     */
    private Map<String, Object> createRecord1() {
        Map<String, Object> record = new HashMap<>();
        record.put(NULL_FIELD, null);
        record.put(INT_FIELD, 10);
        record.put(LONG_FIELD, 1588469340000L);
        record.put(DOUBLE_FIELD, 10.2);
        record.put(STRING_FIELD, "foo");
        record.put(INT_ARRAY_FIELD, Arrays.asList(10, 20, 30));
        record.put(DOUBLE_ARRAY_FIELD, Arrays.asList(10.2, 12.1, 1.1));
        record.put(STRING_ARRAY_FIELD, Arrays.asList("foo", "bar"));
        // myComplexArray1: array of flat maps
        Map<String, Object> map1 = new HashMap<>();
        map1.put("one", 1);
        map1.put("two", "too");
        Map<String, Object> map2 = new HashMap<>();
        map2.put("one", 11);
        map2.put("two", "roo");
        record.put(COMPLEX_ARRAY_1_FIELD, Arrays.asList(map1, map2));
        // myComplexArray2: array of maps with nested maps and lists
        Map<String, Object> map3 = new HashMap<>();
        map3.put("one", 1);
        Map<String, Object> map31 = new HashMap<>();
        map31.put("sub1", 1.1);
        map31.put("sub2", 1.2);
        map3.put("two", map31);
        map3.put("three", Arrays.asList("a", "b"));
        Map<String, Object> map4 = new HashMap<>();
        map4.put("one", 11);
        Map<String, Object> map41 = new HashMap<>();
        map41.put("sub1", 11.1);
        map41.put("sub2", 11.2);
        map4.put("two", map41);
        map4.put("three", Arrays.asList("aa", "bb"));
        record.put(COMPLEX_ARRAY_2_FIELD, Arrays.asList(map3, map4));
        // myMap1: flat map
        Map<String, Object> map5 = new HashMap<>();
        map5.put("k1", "foo");
        map5.put("k2", "bar");
        record.put(MAP_1_FIELD, map5);
        // myMap2: map with nested map and list values
        Map<String, Object> map6 = new HashMap<>();
        Map<String, Object> map61 = new HashMap<>();
        map61.put("sub1", 10);
        map61.put("sub2", 1.0);
        map6.put("k3", map61);
        map6.put("k4", "baz");
        map6.put("k5", Arrays.asList(1, 2, 3));
        record.put(MAP_2_FIELD, map6);
        return record;
    }

    /**
     * Second input record: same shape as the first, different values.
     */
    private Map<String, Object> createRecord2() {
        Map<String, Object> record = new HashMap<>();
        record.put(NULL_FIELD, null);
        record.put(INT_FIELD, 20);
        record.put(LONG_FIELD, 998732130000L);
        record.put(DOUBLE_FIELD, 11.2);
        record.put(STRING_FIELD, "hello");
        record.put(INT_ARRAY_FIELD, Arrays.asList(100, 200, 300));
        record.put(DOUBLE_ARRAY_FIELD, Arrays.asList(20.1, 30.2, 40.3));
        record.put(STRING_ARRAY_FIELD, Arrays.asList("hello", "world!"));
        Map<String, Object> map1 = new HashMap<>();
        map1.put("two", 2);
        map1.put("three", "tree");
        Map<String, Object> map2 = new HashMap<>();
        map2.put("two", 22);
        map2.put("three", "free");
        record.put(COMPLEX_ARRAY_1_FIELD, Arrays.asList(map1, map2));
        // NOTE(review): a dead store map3.put("two", 2) was removed here - it
        // was immediately overwritten by map3.put("two", map31) below.
        // Unlike createRecord1(), map3 carries no scalar entry ("one"); confirm
        // whether the original intent was map3.put("one", 2).
        Map<String, Object> map3 = new HashMap<>();
        Map<String, Object> map31 = new HashMap<>();
        map31.put("sub1", 1.2);
        map31.put("sub2", 2.3);
        map3.put("two", map31);
        map3.put("three", Arrays.asList("b", "c"));
        Map<String, Object> map4 = new HashMap<>();
        map4.put("one", 12);
        Map<String, Object> map41 = new HashMap<>();
        map41.put("sub1", 12.2);
        map41.put("sub2", 10.1);
        map4.put("two", map41);
        map4.put("three", Arrays.asList("cc", "cc"));
        record.put(COMPLEX_ARRAY_2_FIELD, Arrays.asList(map3, map4));
        Map<String, Object> map5 = new HashMap<>();
        map5.put("k1", "hello");
        map5.put("k2", "world");
        record.put(MAP_1_FIELD, map5);
        Map<String, Object> map6 = new HashMap<>();
        Map<String, Object> map61 = new HashMap<>();
        map61.put("sub1", 20);
        map61.put("sub2", 2.0);
        map6.put("k3", map61);
        map6.put("k4", "abc");
        map6.put("k5", Arrays.asList(3, 2, 1));
        record.put(MAP_2_FIELD, map6);
        return record;
    }
}
| |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settings;
import android.app.Activity;
import android.app.AlarmManager;
import android.app.DatePickerDialog;
import android.app.Dialog;
import android.app.TimePickerDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.SharedPreferences.OnSharedPreferenceChangeListener;
import android.os.Bundle;
import android.os.SystemClock;
import android.preference.CheckBoxPreference;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceScreen;
import android.provider.Settings;
import android.provider.Settings.SettingNotFoundException;
import android.text.format.DateFormat;
import android.widget.DatePicker;
import android.widget.TimePicker;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
/**
 * Preference fragment that lets the user configure the device's date, time,
 * time zone, automatic time / time-zone sync, 12/24-hour display mode and
 * date format.
 *
 * <p>Listens for shared-preference changes to push the new values into the
 * system settings, and for time/timezone broadcasts to keep the displayed
 * summaries current.
 */
public class DateTimeSettings extends SettingsPreferenceFragment
        implements OnSharedPreferenceChangeListener,
        TimePickerDialog.OnTimeSetListener, DatePickerDialog.OnDateSetListener {

    private static final String HOURS_12 = "12";
    private static final String HOURS_24 = "24";

    // Used for showing the current date format, which looks like "12/31/2010",
    // "2010/12/13", etc. The date value itself is a dummy (independent of the
    // actual date).
    private Calendar mDummyDate;

    // Preference keys; presumably these match res/xml/date_time_prefs.xml —
    // confirm against that resource.
    private static final String KEY_DATE_FORMAT = "date_format";
    private static final String KEY_AUTO_TIME = "auto_time";
    private static final String KEY_AUTO_TIME_ZONE = "auto_zone";

    private static final int DIALOG_DATEPICKER = 0;
    private static final int DIALOG_TIMEPICKER = 1;

    // have we been launched from the setup wizard?
    protected static final String EXTRA_IS_FIRST_RUN = "firstRun";

    private CheckBoxPreference mAutoTimePref;
    private Preference mTimePref;
    private Preference mTime24Pref;
    private CheckBoxPreference mAutoTimeZonePref;
    private Preference mTimeZone;
    private Preference mDatePref;
    private ListPreference mDateFormat;

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        addPreferencesFromResource(R.xml.date_time_prefs);
        initUI();
    }

    /**
     * Looks up every preference, seeds its checked/enabled state from the
     * current system settings, and populates the date-format picker entries.
     */
    private void initUI() {
        boolean autoTimeEnabled = getAutoState(Settings.Global.AUTO_TIME);
        boolean autoTimeZoneEnabled = getAutoState(Settings.Global.AUTO_TIME_ZONE);

        Intent intent = getActivity().getIntent();
        boolean isFirstRun = intent.getBooleanExtra(EXTRA_IS_FIRST_RUN, false);

        mDummyDate = Calendar.getInstance();

        mAutoTimePref = (CheckBoxPreference) findPreference(KEY_AUTO_TIME);
        mAutoTimePref.setChecked(autoTimeEnabled);
        mAutoTimeZonePref = (CheckBoxPreference) findPreference(KEY_AUTO_TIME_ZONE);

        // Override auto-timezone if it's a wifi-only device or if we're still in setup wizard.
        // TODO: Remove the wifiOnly test when auto-timezone is implemented based on wifi-location.
        if (Utils.isWifiOnly(getActivity()) || isFirstRun) {
            getPreferenceScreen().removePreference(mAutoTimeZonePref);
            autoTimeZoneEnabled = false;
        }
        mAutoTimeZonePref.setChecked(autoTimeZoneEnabled);

        mTimePref = findPreference("time");
        mTime24Pref = findPreference("24 hour");
        mTimeZone = findPreference("timezone");
        mDatePref = findPreference("date");
        mDateFormat = (ListPreference) findPreference(KEY_DATE_FORMAT);
        if (isFirstRun) {
            getPreferenceScreen().removePreference(mTime24Pref);
            getPreferenceScreen().removePreference(mDateFormat);
        }

        String[] dateFormats = getResources().getStringArray(R.array.date_format_values);
        String[] formattedDates = new String[dateFormats.length];
        String currentFormat = getDateFormat();
        // Initialize if DATE_FORMAT is not set in the system settings
        // This can happen after a factory reset (or data wipe)
        if (currentFormat == null) {
            currentFormat = "";
        }

        // Prevents duplicated values on date format selector.
        // (Fixed: was `mDummyDate.DECEMBER`, a static constant accessed through
        // an instance reference; the value (11) is unchanged.)
        mDummyDate.set(mDummyDate.get(Calendar.YEAR), Calendar.DECEMBER, 31, 13, 0, 0);
        for (int i = 0; i < formattedDates.length; i++) {
            String formatted =
                    DateFormat.getDateFormatForSetting(getActivity(), dateFormats[i])
                            .format(mDummyDate.getTime());
            if (dateFormats[i].length() == 0) {
                // The empty value means "use the locale default"; label it as such.
                formattedDates[i] = getResources().
                        getString(R.string.normal_date_format, formatted);
            } else {
                formattedDates[i] = formatted;
            }
        }

        mDateFormat.setEntries(formattedDates);
        mDateFormat.setEntryValues(R.array.date_format_values);
        mDateFormat.setValue(currentFormat);

        // Manual date/time pickers are only usable while the corresponding
        // automatic setting is off.
        mTimePref.setEnabled(!autoTimeEnabled);
        mDatePref.setEnabled(!autoTimeEnabled);
        mTimeZone.setEnabled(!autoTimeZoneEnabled);
    }

    @Override
    public void onResume() {
        super.onResume();

        getPreferenceScreen().getSharedPreferences()
                .registerOnSharedPreferenceChangeListener(this);

        ((CheckBoxPreference) mTime24Pref).setChecked(is24Hour());

        // Register for time ticks and other reasons for time change
        IntentFilter filter = new IntentFilter();
        filter.addAction(Intent.ACTION_TIME_TICK);
        filter.addAction(Intent.ACTION_TIME_CHANGED);
        filter.addAction(Intent.ACTION_TIMEZONE_CHANGED);
        getActivity().registerReceiver(mIntentReceiver, filter, null, null);

        updateTimeAndDateDisplay(getActivity());
    }

    @Override
    public void onPause() {
        super.onPause();
        getActivity().unregisterReceiver(mIntentReceiver);
        getPreferenceScreen().getSharedPreferences()
                .unregisterOnSharedPreferenceChangeListener(this);
    }

    /** Refreshes the summary text of every date/time related preference. */
    public void updateTimeAndDateDisplay(Context context) {
        java.text.DateFormat shortDateFormat = DateFormat.getDateFormat(context);
        final Calendar now = Calendar.getInstance();
        mDummyDate.setTimeZone(now.getTimeZone());
        // We use December 31st because it's unambiguous when demonstrating the date format.
        // We use 13:00 so we can demonstrate the 12/24 hour options.
        // (Fixed: was the magic number 11; Calendar.DECEMBER is the same value.)
        mDummyDate.set(now.get(Calendar.YEAR), Calendar.DECEMBER, 31, 13, 0, 0);
        Date dummyDate = mDummyDate.getTime();
        mTimePref.setSummary(DateFormat.getTimeFormat(getActivity()).format(now.getTime()));
        mTimeZone.setSummary(getTimeZoneText(now.getTimeZone()));
        mDatePref.setSummary(shortDateFormat.format(now.getTime()));
        mDateFormat.setSummary(shortDateFormat.format(dummyDate));
        mTime24Pref.setSummary(DateFormat.getTimeFormat(getActivity()).format(dummyDate));
    }

    @Override
    public void onDateSet(DatePicker view, int year, int month, int day) {
        final Activity activity = getActivity();
        if (activity != null) {
            setDate(activity, year, month, day);
            updateTimeAndDateDisplay(activity);
        }
    }

    @Override
    public void onTimeSet(TimePicker view, int hourOfDay, int minute) {
        final Activity activity = getActivity();
        if (activity != null) {
            setTime(activity, hourOfDay, minute);
            updateTimeAndDateDisplay(activity);
        }

        // We don't need to call timeUpdated() here because the TIME_CHANGED
        // broadcast is sent by the AlarmManager as a side effect of setting the
        // SystemClock time.
    }

    @Override
    public void onSharedPreferenceChanged(SharedPreferences preferences, String key) {
        if (key.equals(KEY_DATE_FORMAT)) {
            String format = preferences.getString(key,
                    getResources().getString(R.string.default_date_format));
            Settings.System.putString(getContentResolver(),
                    Settings.System.DATE_FORMAT, format);
            updateTimeAndDateDisplay(getActivity());
        } else if (key.equals(KEY_AUTO_TIME)) {
            boolean autoEnabled = preferences.getBoolean(key, true);
            Settings.Global.putInt(getContentResolver(), Settings.Global.AUTO_TIME,
                    autoEnabled ? 1 : 0);
            mTimePref.setEnabled(!autoEnabled);
            mDatePref.setEnabled(!autoEnabled);
        } else if (key.equals(KEY_AUTO_TIME_ZONE)) {
            boolean autoZoneEnabled = preferences.getBoolean(key, true);
            Settings.Global.putInt(
                    getContentResolver(), Settings.Global.AUTO_TIME_ZONE, autoZoneEnabled ? 1 : 0);
            mTimeZone.setEnabled(!autoZoneEnabled);
        }
    }

    @Override
    public Dialog onCreateDialog(int id) {
        Dialog d;

        switch (id) {
        case DIALOG_DATEPICKER: {
            final Calendar calendar = Calendar.getInstance();
            d = new DatePickerDialog(
                    getActivity(),
                    this,
                    calendar.get(Calendar.YEAR),
                    calendar.get(Calendar.MONTH),
                    calendar.get(Calendar.DAY_OF_MONTH));
            // The system clock can't represent dates outside this range.
            DatePickerDialog datePicker = (DatePickerDialog) d;
            Calendar t = Calendar.getInstance();
            t.clear();
            t.set(1970, Calendar.JANUARY, 1);
            datePicker.getDatePicker().setMinDate(t.getTimeInMillis());
            t.clear();
            t.set(2037, Calendar.DECEMBER, 31);
            datePicker.getDatePicker().setMaxDate(t.getTimeInMillis());
            break;
        }
        case DIALOG_TIMEPICKER: {
            final Calendar calendar = Calendar.getInstance();
            d = new TimePickerDialog(
                    getActivity(),
                    this,
                    calendar.get(Calendar.HOUR_OF_DAY),
                    calendar.get(Calendar.MINUTE),
                    DateFormat.is24HourFormat(getActivity()));
            break;
        }
        default:
            d = null;
            break;
        }

        return d;
    }

    @Override
    public boolean onPreferenceTreeClick(PreferenceScreen preferenceScreen, Preference preference) {
        if (preference == mDatePref) {
            showDialog(DIALOG_DATEPICKER);
        } else if (preference == mTimePref) {
            // The 24-hour mode may have changed, so recreate the dialog
            removeDialog(DIALOG_TIMEPICKER);
            showDialog(DIALOG_TIMEPICKER);
        } else if (preference == mTime24Pref) {
            set24Hour(((CheckBoxPreference) mTime24Pref).isChecked());
            updateTimeAndDateDisplay(getActivity());
            timeUpdated();
        }
        return super.onPreferenceTreeClick(preferenceScreen, preference);
    }

    @Override
    public void onActivityResult(int requestCode, int resultCode,
            Intent data) {
        updateTimeAndDateDisplay(getActivity());
    }

    /** Broadcasts ACTION_TIME_CHANGED so other apps can react to the new time. */
    private void timeUpdated() {
        Intent timeChanged = new Intent(Intent.ACTION_TIME_CHANGED);
        getActivity().sendBroadcast(timeChanged);
    }

    /* Get & Set values from the system settings */

    private boolean is24Hour() {
        return DateFormat.is24HourFormat(getActivity());
    }

    private void set24Hour(boolean is24Hour) {
        Settings.System.putString(getContentResolver(),
                Settings.System.TIME_12_24,
                is24Hour ? HOURS_24 : HOURS_12);
    }

    private String getDateFormat() {
        return Settings.System.getString(getContentResolver(),
                Settings.System.DATE_FORMAT);
    }

    /** Returns true iff the named global auto setting is present and enabled. */
    private boolean getAutoState(String name) {
        try {
            return Settings.Global.getInt(getContentResolver(), name) > 0;
        } catch (SettingNotFoundException snfe) {
            return false;
        }
    }

    /* package */ static void setDate(Context context, int year, int month, int day) {
        Calendar c = Calendar.getInstance();

        c.set(Calendar.YEAR, year);
        c.set(Calendar.MONTH, month);
        c.set(Calendar.DAY_OF_MONTH, day);
        long when = c.getTimeInMillis();

        // The system clock stores seconds in a signed 32-bit value; refuse
        // dates that would overflow it.
        if (when / 1000 < Integer.MAX_VALUE) {
            ((AlarmManager) context.getSystemService(Context.ALARM_SERVICE)).setTime(when);
        }
    }

    /* package */ static void setTime(Context context, int hourOfDay, int minute) {
        Calendar c = Calendar.getInstance();

        c.set(Calendar.HOUR_OF_DAY, hourOfDay);
        c.set(Calendar.MINUTE, minute);
        c.set(Calendar.SECOND, 0);
        c.set(Calendar.MILLISECOND, 0);
        long when = c.getTimeInMillis();

        if (when / 1000 < Integer.MAX_VALUE) {
            ((AlarmManager) context.getSystemService(Context.ALARM_SERVICE)).setTime(when);
        }
    }

    /* Helper routines to format timezone */

    /* package */ static String getTimeZoneText(TimeZone tz) {
        // Similar to new SimpleDateFormat("'GMT'Z, zzzz").format(new Date()), but
        // we want "GMT-03:00" rather than "GMT-0300".
        Date now = new Date();
        return formatOffset(new StringBuilder(), tz, now).
                append(", ").
                append(tz.getDisplayName(tz.inDaylightTime(now), TimeZone.LONG)).toString();
    }

    /** Appends the zone offset of {@code tz} at {@code d} as "GMT+HH:MM". */
    private static StringBuilder formatOffset(StringBuilder sb, TimeZone tz, Date d) {
        int off = tz.getOffset(d.getTime()) / 1000 / 60;

        sb.append("GMT");

        if (off < 0) {
            sb.append('-');
            off = -off;
        } else {
            sb.append('+');
        }

        int hours = off / 60;
        int minutes = off % 60;

        sb.append((char) ('0' + hours / 10));
        sb.append((char) ('0' + hours % 10));

        sb.append(':');

        sb.append((char) ('0' + minutes / 10));
        sb.append((char) ('0' + minutes % 10));

        return sb;
    }

    // Refreshes the summaries whenever the system time or time zone changes.
    private BroadcastReceiver mIntentReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            final Activity activity = getActivity();
            if (activity != null) {
                updateTimeAndDateDisplay(activity);
            }
        }
    };
}
| |
/** Notice of modification as required by the LGPL
* This file was modified by Gemstone Systems Inc. on
* $Date$
**/
//$Id: TcpRingNode.java,v 1.4 2005/08/08 12:45:41 belaban Exp $
package com.gemstone.org.jgroups.protocols.ring;
import com.gemstone.org.jgroups.util.GemFireTracer;
import com.gemstone.org.jgroups.util.ExternalStrings;
import com.gemstone.org.jgroups.Address;
import com.gemstone.org.jgroups.SuspectedException;
import com.gemstone.org.jgroups.TimeoutException;
import com.gemstone.org.jgroups.blocks.GroupRequest;
import com.gemstone.org.jgroups.stack.IpAddress;
import com.gemstone.org.jgroups.stack.RpcProtocol;
import com.gemstone.org.jgroups.util.Util;
import java.io.*;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Vector;
public class TcpRingNode implements RingNode
{
final ServerSocket tokenReceiver;
Socket previous,next;
final Address thisNode;
Address nextNode;
ObjectInputStream ios;
ObjectOutputStream oos;
final RpcProtocol rpcProtocol;
static/*GemStoneAddition*/ final boolean failedOnTokenLostException = false;
final Object socketMutex = new Object();
protected final GemFireTracer log=GemFireTracer.getLog(this.getClass());
public TcpRingNode(RpcProtocol owner, Address memberAddress)
{
tokenReceiver = Util.createServerSocket(12000);
rpcProtocol = owner;
thisNode = memberAddress;
nextNode = null;
}
public IpAddress getTokenReceiverAddress()
{
return new IpAddress(tokenReceiver.getLocalPort());
}
public Object receiveToken(int timeout) throws TokenLostException
{
RingToken token = null;
Address wasNextNode = nextNode;
try
{
if (previous == null)
{
previous = tokenReceiver.accept();
ios = new ObjectInputStream((previous.getInputStream()));
}
previous.setSoTimeout(timeout);
token = new RingToken();
token.readExternal(ios);
}
catch (InterruptedIOException io)
{
//read was blocked for more than a timeout, assume token lost
throw new TokenLostException(io.getMessage(), io, wasNextNode, TokenLostException.WHILE_RECEIVING);
}
catch (ClassNotFoundException cantHappen)
{
}
catch (IOException ioe)
{
closeSocket(previous);
previous = null;
if (ios != null)
{
try
{
ios.close();
}
catch (IOException ignored)
{
}
}
token = (RingToken) receiveToken(timeout);
}
return token;
}
public Object receiveToken() throws TokenLostException
{
return receiveToken(0);
}
public void passToken(Object token) throws TokenLostException
{
synchronized (socketMutex)
{
try
{
((Externalizable)token).writeExternal(oos);
oos.flush();
oos.reset();
}
catch (IOException e)
{
e.printStackTrace();
//something went wrong with the next neighbour while it was receiving
//token, assume token lost
throw new TokenLostException(e.getMessage(), e, nextNode, TokenLostException.WHILE_SENDING);
}
}
}
public void tokenArrived(Object token)
{
//not needed , callback for udp ring
}
public void reconfigureAll(Vector newMembers)
{
}
public void reconfigure(Vector newMembers)
{
if (isNextNeighbourChanged(newMembers))
{
IpAddress tokenReceiverAddress = null; // GemStoneModification
synchronized (socketMutex)
{
nextNode = getNextNode(newMembers);
if(log.isInfoEnabled()) log.info(ExternalStrings.TcpRingNode_NEXT_NODE__0, nextNode);
try
{
tokenReceiverAddress = (IpAddress) rpcProtocol.callRemoteMethod(nextNode, "getTokenReceiverAddress", GroupRequest.GET_FIRST, 0);
}
catch (TimeoutException tim)
{
if(log.isErrorEnabled()) log.error(ExternalStrings.TcpRingNode_TIMEOUTED_WHILE_DOING_RPC_CALL_GETTOKENRECEIVERADDRESS_0, tim);
tim.printStackTrace();
}
catch (SuspectedException sus)
{
if(log.isErrorEnabled()) log.error(ExternalStrings.TcpRingNode_SUSPECTED_NODE_WHILE_DOING_RPC_CALL_GETTOKENRECEIVERADDRESS_0, sus);
sus.printStackTrace();
}
try
{
closeSocket(next);
next = new Socket(tokenReceiverAddress.getIpAddress(), tokenReceiverAddress.getPort());
next.setTcpNoDelay(true);
oos = new ObjectOutputStream(next.getOutputStream());
}
catch (IOException ioe)
{
if(log.isErrorEnabled()) log.error(ExternalStrings.TcpRingNode_COULD_NOT_CONNECT_TO_NEXT_NODE__0, ioe);
ioe.printStackTrace();
}
}
}
}
private void closeSocket(Socket socket)
{
if (socket == null) return;
try
{
socket.close();
}
catch (IOException ioe)
{
ioe.printStackTrace();
}
}
private boolean isNextNeighbourChanged(Vector newMembers)
{
Address oldNeighbour = nextNode;
Address newNeighbour = getNextNode(newMembers);
return !(newNeighbour.equals(oldNeighbour));
}
private Address getNextNode(Vector otherNodes)
{
int myIndex = otherNodes.indexOf(thisNode);
return (myIndex == otherNodes.size() - 1)?
(Address) otherNodes.firstElement():
(Address) otherNodes.elementAt(myIndex + 1);
}
}
| |
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.reflect;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.testing.MapTestSuiteBuilder;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import com.google.common.collect.testing.testers.MapPutTester;
import com.google.common.reflect.ImmutableTypeToInstanceMapTest.TestTypeToInstanceMapGenerator;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.Map.Entry;
/**
* Unit test of {@link MutableTypeToInstanceMap}.
*
* @author Ben Yu
*/
public class MutableTypeToInstanceMapTest extends TestCase {

  /**
   * Builds the suite: the hand-written tests below plus the generated
   * collection-conformance tests for the map view.
   */
  public static Test suite() {
    TestSuite suite = new TestSuite();
    suite.addTestSuite(MutableTypeToInstanceMapTest.class);

    // Suppress this one because the tester framework doesn't understand that
    // *some* remappings will be allowed and others not.
    Method remapTest = null;
    try {
      remapTest = MapPutTester.class.getMethod(
          "testPut_replaceNullValueWithNonNullSupported");
    } catch (NoSuchMethodException e) {
      // Fix: preserve the cause instead of throwing a bare AssertionError.
      throw new AssertionError(e);
    }

    suite.addTest(MapTestSuiteBuilder
        .using(new TestTypeToInstanceMapGenerator() {
          // Other tests will verify what real, warning-free usage looks like
          // but here we have to do some serious fudging
          @Override
          @SuppressWarnings("unchecked")
          public Map<TypeToken, Object> create(Object... elements) {
            MutableTypeToInstanceMap<Object> map
                = new MutableTypeToInstanceMap<Object>();
            for (Object object : elements) {
              Entry<TypeToken, Object> entry = (Entry<TypeToken, Object>) object;
              map.putInstance(entry.getKey(), entry.getValue());
            }
            return (Map) map;
          }
        })
        .named("MutableTypeToInstanceMap")
        .withFeatures(
            MapFeature.SUPPORTS_REMOVE,
            MapFeature.RESTRICTS_KEYS,
            MapFeature.ALLOWS_NULL_VALUES,
            CollectionFeature.SUPPORTS_ITERATOR_REMOVE,
            CollectionSize.ANY,
            MapFeature.ALLOWS_ANY_NULL_QUERIES)
        .suppressing(remapTest)
        .createTestSuite());

    return suite;
  }

  private TypeToInstanceMap<Object> map;

  @Override protected void setUp() throws Exception {
    map = new MutableTypeToInstanceMap<Object>();
  }

  // Raw put() must be rejected; only the type-checked putInstance() works.
  public void testPutThrows() {
    try {
      map.put(TypeToken.of(Integer.class), Integer.valueOf(5));
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testPutAllThrows() {
    try {
      map.putAll(ImmutableMap.of(TypeToken.of(Integer.class), Integer.valueOf(5)));
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testEntrySetMutationThrows() {
    map.putInstance(String.class, "test");
    assertEquals(TypeToken.of(String.class), map.entrySet().iterator().next().getKey());
    assertEquals("test", map.entrySet().iterator().next().getValue());
    try {
      map.entrySet().iterator().next().setValue(1);
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testEntrySetToArrayMutationThrows() {
    map.putInstance(String.class, "test");
    @SuppressWarnings("unchecked") // Should get a CCE later if cast is wrong
    Entry<Object, Object> entry = (Entry<Object, Object>) map.entrySet().toArray()[0];
    assertEquals(TypeToken.of(String.class), entry.getKey());
    assertEquals("test", entry.getValue());
    try {
      entry.setValue(1);
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testEntrySetToTypedArrayMutationThrows() {
    map.putInstance(String.class, "test");
    @SuppressWarnings("unchecked") // Should get a CCE later if cast is wrong
    Entry<Object, Object> entry = map.entrySet().toArray(new Entry[0])[0];
    assertEquals(TypeToken.of(String.class), entry.getKey());
    assertEquals("test", entry.getValue());
    try {
      entry.setValue(1);
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testPutAndGetInstance() {
    assertNull(map.putInstance(Integer.class, Integer.valueOf(5)));

    Integer oldValue = map.putInstance(Integer.class, Integer.valueOf(7));
    assertEquals(5, (int) oldValue);

    Integer newValue = map.getInstance(Integer.class);
    assertEquals(7, (int) newValue);
    assertEquals(7, (int) map.getInstance(TypeToken.of(Integer.class)));

    // Won't compile: map.putInstance(Double.class, new Long(42));
  }

  public void testNull() {
    try {
      map.putInstance((TypeToken) null, Integer.valueOf(1));
      fail();
    } catch (NullPointerException expected) {
    }
    map.putInstance(Integer.class, null);
    assertNull(map.get(Integer.class));
    assertNull(map.getInstance(Integer.class));

    map.putInstance(Long.class, null);
    assertNull(map.get(Long.class));
    assertNull(map.getInstance(Long.class));
  }

  // int.class and Integer.class are distinct keys.
  public void testPrimitiveAndWrapper() {
    assertNull(map.getInstance(int.class));
    assertNull(map.getInstance(Integer.class));

    assertNull(map.putInstance(int.class, 0));
    assertNull(map.putInstance(Integer.class, 1));
    assertEquals(2, map.size());

    assertEquals(0, (int) map.getInstance(int.class));
    assertEquals(1, (int) map.getInstance(Integer.class));

    assertEquals(0, (int) map.putInstance(int.class, null));
    assertEquals(1, (int) map.putInstance(Integer.class, null));

    assertNull(map.getInstance(int.class));
    assertNull(map.getInstance(Integer.class));
    assertEquals(2, map.size());
  }

  public void testParameterizedType() {
    TypeToken<ImmutableList<Integer>> type = new TypeToken<ImmutableList<Integer>>() {};
    map.putInstance(type, ImmutableList.of(1));
    assertEquals(1, map.size());
    assertEquals(ImmutableList.of(1), map.getInstance(type));
  }

  public void testGenericArrayType() {
    @SuppressWarnings("unchecked") // Trying to test generic array
    ImmutableList<Integer>[] array = new ImmutableList[] {ImmutableList.of(1)};
    TypeToken<ImmutableList<Integer>[]> type = new TypeToken<ImmutableList<Integer>[]>() {};
    map.putInstance(type, array);
    assertEquals(1, map.size());
    assertThat(map.getInstance(type)).asList().containsExactly(array[0]);
  }

  public void testWildcardType() {
    TypeToken<ImmutableList<?>> type = new TypeToken<ImmutableList<?>>() {};
    map.putInstance(type, ImmutableList.of(1));
    assertEquals(1, map.size());
    assertEquals(ImmutableList.of(1), map.getInstance(type));
  }

  // Keys containing type variables are meaningless at runtime and rejected.
  public void testGetInstance_withTypeVariable() {
    try {
      map.getInstance(this.<Number>anyIterableType());
      fail();
    } catch (IllegalArgumentException expected) {}
  }

  public void testPutInstance_withTypeVariable() {
    try {
      map.putInstance(this.<Integer>anyIterableType(), ImmutableList.of(1));
      fail();
    } catch (IllegalArgumentException expected) {}
  }

  private <T> TypeToken<Iterable<T>> anyIterableType() {
    return new TypeToken<Iterable<T>>() {};
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.jdbc;
import com.facebook.presto.server.testing.TestingPrestoServer;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logging;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Set;
import static com.facebook.presto.server.testing.TestingPrestoServer.TEST_CATALOG;
import static java.lang.String.format;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public class TestDriver
{
private static final DateTimeZone ASIA_ORAL_ZONE = DateTimeZone.forID("Asia/Oral");
private static final GregorianCalendar ASIA_ORAL_CALENDAR = new GregorianCalendar(ASIA_ORAL_ZONE.toTimeZone());
private TestingPrestoServer server;
@BeforeClass
public void setup()
        throws Exception
{
    // Start one in-process Presto server shared by the whole test class.
    Logging.initialize();
    server = new TestingPrestoServer();
}
@AfterClass
public void teardown()
{
    // Best-effort shutdown; closeQuietly swallows close failures.
    closeQuietly(server);
}
@Test
public void testDriverManager()
        throws Exception
{
    // Smoke test: open a connection through DriverManager, run a trivial
    // two-column query, and check metadata plus both access styles
    // (by index and by label).
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement();
            ResultSet resultSet = statement.executeQuery("SELECT 123 x, 'foo' y")) {
        ResultSetMetaData metaData = resultSet.getMetaData();
        assertEquals(metaData.getColumnCount(), 2);

        assertEquals(metaData.getColumnLabel(1), "x");
        assertEquals(metaData.getColumnType(1), Types.BIGINT);

        assertEquals(metaData.getColumnLabel(2), "y");
        assertEquals(metaData.getColumnType(2), Types.LONGNVARCHAR);

        assertTrue(resultSet.next());
        assertEquals(resultSet.getLong(1), 123);
        assertEquals(resultSet.getLong("x"), 123);
        assertEquals(resultSet.getString(2), "foo");
        assertEquals(resultSet.getString("y"), "foo");
        assertFalse(resultSet.next());
    }
}
@Test
public void testTypes()
        throws Exception
{
    // Exercises JDBC retrieval of every temporal/interval type, each read
    // three ways (typed getter, typed getter with Calendar, getObject) and
    // addressed both by index and by label.
    try (Connection connection = createConnection()) {
        try (Statement statement = connection.createStatement()) {
            try (ResultSet rs = statement.executeQuery("SELECT " +
                    "  TIME '3:04:05' as a" +
                    ", TIME '6:07:08 +06:17' as b" +
                    ", TIME '9:10:11 Europe/Berlin' as c" +
                    ", TIMESTAMP '2001-02-03 3:04:05' as d" +
                    ", TIMESTAMP '2004-05-06 6:07:08 +06:17' as e" +
                    ", TIMESTAMP '2007-08-09 9:10:11 Europe/Berlin' as f" +
                    ", DATE '2013-03-22' as g" +
                    ", INTERVAL '123-11' YEAR TO MONTH as h" +
                    ", INTERVAL '11 22:33:44.555' DAY TO SECOND as i" +
                    "")) {
                assertTrue(rs.next());

                // Column "a": TIME without zone (epoch day in the session zone).
                assertEquals(rs.getTime(1), new Time(new DateTime(1970, 1, 1, 3, 4, 5).getMillis()));
                assertEquals(rs.getTime(1, ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 3, 4, 5, ASIA_ORAL_ZONE).getMillis()));
                assertEquals(rs.getObject(1), new Time(new DateTime(1970, 1, 1, 3, 4, 5).getMillis()));
                assertEquals(rs.getTime("a"), new Time(new DateTime(1970, 1, 1, 3, 4, 5).getMillis()));
                assertEquals(rs.getTime("a", ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 3, 4, 5, ASIA_ORAL_ZONE).getMillis()));
                assertEquals(rs.getObject("a"), new Time(new DateTime(1970, 1, 1, 3, 4, 5).getMillis()));

                // Column "b": TIME with a fixed offset zone (+06:17).
                assertEquals(rs.getTime(2), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getTime(2, ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getObject(2), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getTime("b"), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getTime("b", ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getObject("b"), new Time(new DateTime(1970, 1, 1, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));

                // Column "c": TIME with a named region zone.
                assertEquals(rs.getTime(3), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getTime(3, ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getObject(3), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getTime("c"), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getTime("c", ASIA_ORAL_CALENDAR), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getObject("c"), new Time(new DateTime(1970, 1, 1, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));

                // Column "d": TIMESTAMP without zone.
                assertEquals(rs.getTimestamp(4), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5).getMillis()));
                assertEquals(rs.getTimestamp(4, ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5, ASIA_ORAL_ZONE).getMillis()));
                assertEquals(rs.getObject(4), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5).getMillis()));
                assertEquals(rs.getTimestamp("d"), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5).getMillis()));
                assertEquals(rs.getTimestamp("d", ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5, ASIA_ORAL_ZONE).getMillis()));
                assertEquals(rs.getObject("d"), new Timestamp(new DateTime(2001, 2, 3, 3, 4, 5).getMillis()));

                // Column "e": TIMESTAMP with a fixed offset zone.
                assertEquals(rs.getTimestamp(5), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getTimestamp(5, ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getObject(5), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getTimestamp("e"), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getTimestamp("e", ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));
                assertEquals(rs.getObject("e"), new Timestamp(new DateTime(2004, 5, 6, 6, 7, 8, DateTimeZone.forOffsetHoursMinutes(6, 17)).getMillis()));

                // Column "f": TIMESTAMP with a named region zone.
                assertEquals(rs.getTimestamp(6), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getTimestamp(6, ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getObject(6), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getTimestamp("f"), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getTimestamp("f", ASIA_ORAL_CALENDAR), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));
                assertEquals(rs.getObject("f"), new Timestamp(new DateTime(2007, 8, 9, 9, 10, 11, DateTimeZone.forID("Europe/Berlin")).getMillis()));

                // Column "g": DATE.
                assertEquals(rs.getDate(7), new Date(new DateTime(2013, 3, 22, 0, 0).getMillis()));
                assertEquals(rs.getDate(7, ASIA_ORAL_CALENDAR), new Date(new DateTime(2013, 3, 22, 0, 0, ASIA_ORAL_ZONE).getMillis()));
                assertEquals(rs.getObject(7), new Date(new DateTime(2013, 3, 22, 0, 0).getMillis()));
                assertEquals(rs.getDate("g"), new Date(new DateTime(2013, 3, 22, 0, 0).getMillis()));
                assertEquals(rs.getDate("g", ASIA_ORAL_CALENDAR), new Date(new DateTime(2013, 3, 22, 0, 0, ASIA_ORAL_ZONE).getMillis()));
                assertEquals(rs.getObject("g"), new Date(new DateTime(2013, 3, 22, 0, 0).getMillis()));

                // Columns "h"/"i": intervals come back as Presto-specific objects.
                assertEquals(rs.getObject(8), new PrestoIntervalYearMonth(123, 11));
                assertEquals(rs.getObject("h"), new PrestoIntervalYearMonth(123, 11));
                assertEquals(rs.getObject(9), new PrestoIntervalDayTime(11, 22, 33, 44, 555));
                assertEquals(rs.getObject("i"), new PrestoIntervalDayTime(11, 22, 33, 44, 555));

                assertFalse(rs.next());
            }
        }
    }
}
@Test
public void testGetCatalogs()
        throws Exception
{
    // Exactly one catalog (the test catalog) should be reported, in a
    // single-column result set named per the JDBC spec.
    try (Connection connection = createConnection();
            ResultSet catalogs = connection.getMetaData().getCatalogs()) {
        assertRowCount(catalogs, 1);

        ResultSetMetaData metaData = catalogs.getMetaData();
        assertEquals(metaData.getColumnCount(), 1);
        assertEquals(metaData.getColumnLabel(1), "TABLE_CAT");
        assertEquals(metaData.getColumnType(1), Types.LONGNVARCHAR);
    }
}
@Test
public void testGetSchemas()
        throws Exception
{
    try (Connection connection = createConnection()) {
        // the no-argument overload returns every schema
        try (ResultSet rs = connection.getMetaData().getSchemas()) {
            assertGetSchemasResult(rs, 2);
        }
        // {catalog filter, schema LIKE pattern, expected row count}
        Object[][] cases = {
                {null, null, 2},
                {TEST_CATALOG, null, 2},
                {"", null, 0},              // all schemas in presto have a catalog name
                {TEST_CATALOG, "sys", 1},
                {null, "sys", 1},
                {null, "s_s", 1},           // '_' matches exactly one character
                {null, "%s%", 2},           // '%' matches any sequence
                {"unknown", null, 0},
                {"unknown", "sys", 0},
                {null, "unknown", 0},
                {TEST_CATALOG, "unknown", 0},
                {"unknown", "unknown", 0},
        };
        for (Object[] testCase : cases) {
            try (ResultSet rs = connection.getMetaData().getSchemas((String) testCase[0], (String) testCase[1])) {
                assertGetSchemasResult(rs, (int) testCase[2]);
            }
        }
    }
}
/**
 * Asserts the row count and the standard two-column shape
 * (TABLE_SCHEM, TABLE_CATALOG, both LONGNVARCHAR) of a getSchemas() result.
 */
private static void assertGetSchemasResult(ResultSet rs, int expectedRows)
        throws SQLException
{
    assertRowCount(rs, expectedRows);
    ResultSetMetaData metadata = rs.getMetaData();
    String[] labels = {"TABLE_SCHEM", "TABLE_CATALOG"};
    assertEquals(metadata.getColumnCount(), labels.length);
    for (int column = 1; column <= labels.length; column++) {
        assertEquals(metadata.getColumnLabel(column), labels[column - 1]);
        assertEquals(metadata.getColumnType(column), Types.LONGNVARCHAR);
    }
}
@Test
public void testGetTables()
        throws Exception
{
    // Exercises DatabaseMetaData.getTables() with every combination of
    // catalog / schema-pattern / table-pattern / type filters. Each scenario
    // asserts presence or absence of three representative tables:
    // information_schema.tables, information_schema.schemata, and sys.node.
    // no filters: every table in every catalog is returned
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(null, null, null, null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
            assertTrue(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // filtering on the only catalog is equivalent to no filter
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, null, null, null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
            assertTrue(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // empty catalog matches tables "without a catalog" (per JDBC), of which there are none
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables("", null, null, null)) {
            assertTableMetadata(rs);
            // all tables in presto have a catalog name
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertEquals(rows.size(), 0);
        }
    }
    // schema filter restricts results to that schema
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", null, null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // empty schema pattern matches tables "without a schema", of which there are none
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "", null, null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertEquals(rows.size(), 0);
        }
    }
    // exact table-name filter
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // type filter matching the only table type presto reports
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", new String[] {"BASE TABLE"})) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // schema filter without a catalog filter
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(null, "information_schema", null, null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // table-name filter alone applies across all schemas
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(null, null, "tables", null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // type filter alone does not exclude anything (all tables are BASE TABLE)
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(null, null, null, new String[] {"BASE TABLE"})) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertTrue(rows.contains(getTablesRow("information_schema", "schemata")));
            assertTrue(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // '%' wildcard in the schema pattern
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "inf%", "tables", null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // '%' wildcard in the table pattern
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tab%", null)) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // unknown catalog yields no rows
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables("unknown", "information_schema", "tables", new String[] {"BASE TABLE"})) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertFalse(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // todo why does Presto require that the schema name be lower case
    // unknown schema yields no rows
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "unknown", "tables", new String[] {"BASE TABLE"})) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertFalse(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // unknown table name yields no rows
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "unknown", new String[] {"BASE TABLE"})) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertFalse(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // unknown table type excludes everything
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", new String[] {"unknown"})) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertFalse(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // a matching type among unknown ones is sufficient
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", new String[] {"unknown", "BASE TABLE"})) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
    // an empty type array means "no type filter"
    try (Connection connection = createConnection()) {
        try (ResultSet rs = connection.getMetaData().getTables(TEST_CATALOG, "information_schema", "tables", new String[] {})) {
            assertTableMetadata(rs);
            Set<List<Object>> rows = ImmutableSet.copyOf(readRows(rs));
            assertTrue(rows.contains(getTablesRow("information_schema", "tables")));
            assertFalse(rows.contains(getTablesRow("information_schema", "schemata")));
            assertFalse(rows.contains(getTablesRow("sys", "node")));
        }
    }
}
/**
 * Builds the expected getTables() row for a table in the test catalog:
 * only the catalog/schema/name/type columns carry values, the trailing
 * six descriptive columns are always empty strings.
 */
private static List<Object> getTablesRow(String schema, String table)
{
    return ImmutableList.<Object>of(
            TEST_CATALOG, schema, table, "BASE TABLE",
            "", "", "", "", "", "");
}
/**
 * Asserts the standard ten-column shape of a getTables() result;
 * every column is reported as LONGNVARCHAR.
 */
private static void assertTableMetadata(ResultSet rs)
        throws SQLException
{
    ResultSetMetaData metadata = rs.getMetaData();
    String[] labels = {
            "TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "TABLE_TYPE", "REMARKS",
            "TYPE_CAT", "TYPE_SCHEM", "TYPE_NAME", "SELF_REFERENCING_COL_NAME", "REF_GENERATION",
    };
    assertEquals(metadata.getColumnCount(), labels.length);
    for (int column = 1; column <= labels.length; column++) {
        assertEquals(metadata.getColumnLabel(column), labels[column - 1]);
        assertEquals(metadata.getColumnType(column), Types.LONGNVARCHAR);
    }
}
@Test
public void testGetTableTypes()
        throws Exception
{
    // Presto reports exactly one table type, "BASE TABLE", in the single
    // standard TABLE_TYPE column.
    try (Connection connection = createConnection();
            ResultSet tableTypes = connection.getMetaData().getTableTypes()) {
        List<List<Object>> data = readRows(tableTypes);
        assertEquals(data.size(), 1);
        assertEquals(data.get(0).get(0), "BASE TABLE");
        ResultSetMetaData metadata = tableTypes.getMetaData();
        assertEquals(metadata.getColumnCount(), 1);
        assertEquals(metadata.getColumnLabel(1), "TABLE_TYPE");
        assertEquals(metadata.getColumnType(1), Types.LONGNVARCHAR);
    }
}
@Test
public void testGetColumns()
        throws Exception
{
    // Each filter combination (catalog, schema pattern, table pattern,
    // column pattern) must produce a result with the standard column layout.
    String[][] filters = {
            {null, null, "tables", "column_name"},
            {TEST_CATALOG, null, "tables", "column_name"},
            {null, "information_schema", "tables", "column_name"},
            {TEST_CATALOG, "information_schema", "tables", "column_name"},
            {TEST_CATALOG, "inf%", "tables", "column_name"},
            {TEST_CATALOG, "information_schema", "tab%", "column_name"},
            {TEST_CATALOG, "information_schema", "tables", "col%"},
    };
    for (String[] filter : filters) {
        try (Connection connection = createConnection()) {
            try (ResultSet rs = connection.getMetaData().getColumns(filter[0], filter[1], filter[2], filter[3])) {
                assertColumnMetadata(rs);
            }
        }
    }
}
/**
 * Asserts the standard 24-column shape of a getColumns() result.
 * Labels and types are checked positionally via the parallel arrays below.
 */
private static void assertColumnMetadata(ResultSet rs)
        throws SQLException
{
    String[] labels = {
            "TABLE_CAT", "TABLE_SCHEM", "TABLE_NAME", "COLUMN_NAME",
            "DATA_TYPE", "TYPE_NAME", "COLUMN_SIZE", "BUFFER_LENGTH",
            "DECIMAL_DIGITS", "NUM_PREC_RADIX", "NULLABLE", "REMARKS",
            "COLUMN_DEF", "SQL_DATA_TYPE", "SQL_DATETIME_SUB", "CHAR_OCTET_LENGTH",
            "ORDINAL_POSITION", "IS_NULLABLE", "SCOPE_CATALOG", "SCOPE_SCHEMA",
            "SCOPE_TABLE", "SOURCE_DATA_TYPE", "IS_AUTOINCREMENT", "IS_GENERATEDCOLUMN",
    };
    int[] types = {
            Types.LONGNVARCHAR, Types.LONGNVARCHAR, Types.LONGNVARCHAR, Types.LONGNVARCHAR,
            Types.BIGINT, Types.LONGNVARCHAR, Types.BIGINT, Types.BIGINT,
            Types.BIGINT, Types.BIGINT, Types.BIGINT, Types.LONGNVARCHAR,
            Types.LONGNVARCHAR, Types.BIGINT, Types.BIGINT, Types.BIGINT,
            Types.BIGINT, Types.LONGNVARCHAR, Types.LONGNVARCHAR, Types.LONGNVARCHAR,
            Types.LONGNVARCHAR, Types.BIGINT, Types.LONGNVARCHAR, Types.LONGNVARCHAR,
    };
    ResultSetMetaData metadata = rs.getMetaData();
    assertEquals(metadata.getColumnCount(), labels.length);
    for (int column = 1; column <= labels.length; column++) {
        assertEquals(metadata.getColumnLabel(column), labels[column - 1]);
        assertEquals(metadata.getColumnType(column), types[column - 1]);
    }
}
@Test
public void testExecute()
        throws Exception
{
    // execute() on a query returns true and exposes the rows via getResultSet();
    // columns are readable both by index and by label.
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement()) {
        assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
        ResultSet results = statement.getResultSet();
        assertTrue(results.next());
        assertEquals(results.getLong(1), 123);
        assertEquals(results.getLong("x"), 123);
        assertEquals(results.getString(2), "foo");
        assertEquals(results.getString("y"), "foo");
        assertFalse(results.next());
    }
}
@Test
public void testGetUpdateCount()
        throws Exception
{
    // A query (as opposed to an update) must report an update count of -1.
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement()) {
        assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
        assertEquals(statement.getUpdateCount(), -1);
    }
}
@Test
public void testResultSetClose()
        throws Exception
{
    // close() must transition isClosed() from false to true.
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement()) {
        assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
        ResultSet results = statement.getResultSet();
        assertFalse(results.isClosed());
        results.close();
        assertTrue(results.isClosed());
    }
}
@Test
public void testGetResultSet()
        throws Exception
{
    // Verifies the lifecycle of getResultSet()/getMoreResults():
    // getMoreResults() closes the current result set, re-executing yields a
    // fresh open one, and CLOSE_CURRENT_RESULT reports no further results.
    try (Connection connection = createConnection()) {
        try (Statement statement = connection.createStatement()) {
            assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
            ResultSet result = statement.getResultSet();
            assertNotNull(result);
            assertFalse(result.isClosed());
            // advancing to the (nonexistent) next result closes the current one
            statement.getMoreResults();
            assertTrue(result.isClosed());
            assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
            result = statement.getResultSet();
            assertNotNull(result);
            assertFalse(result.isClosed());
            assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
            // explicit close mode: returns false because there is no next result
            assertFalse(statement.getMoreResults(Statement.CLOSE_CURRENT_RESULT));
        }
    }
}
@Test(expectedExceptions = SQLFeatureNotSupportedException.class, expectedExceptionsMessageRegExp = "Multiple open results not supported")
public void testGetMoreResultsException()
        throws Exception
{
    // KEEP_CURRENT_RESULT would require multiple open results, which the
    // driver does not support.
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement()) {
        assertTrue(statement.execute("SELECT 123 x, 'foo' y"));
        statement.getMoreResults(Statement.KEEP_CURRENT_RESULT);
    }
}
@Test
public void testConnectionStringWithCatalogAndSchema()
        throws Exception
{
    String prefix = format("jdbc:presto://%s", server.getAddress());
    // {URL path suffix, expected catalog, expected schema};
    // missing path segments default to TEST_CATALOG
    String[][] cases = {
            {"/a/b/", "a", "b"},
            {"/a/b", "a", "b"},
            {"/a/", "a", TEST_CATALOG},
            {"/a", "a", TEST_CATALOG},
            {"/", TEST_CATALOG, TEST_CATALOG},
            {"", TEST_CATALOG, TEST_CATALOG},
    };
    for (String[] testCase : cases) {
        Connection connection = DriverManager.getConnection(prefix + testCase[0], "test", null);
        assertEquals(connection.getCatalog(), testCase[1]);
        assertEquals(connection.getSchema(), testCase[2]);
    }
}
@Test
public void testConnectionWithCatalogAndSchema()
        throws Exception
{
    // With both catalog and schema on the connection, the unqualified name
    // "tables" resolves to information_schema.tables.
    try (Connection connection = createConnection(TEST_CATALOG, "information_schema")) {
        try (Statement statement = connection.createStatement()) {
            String sql = "" +
                    "SELECT table_catalog, table_schema " +
                    "FROM tables " +
                    "WHERE table_schema = 'sys' AND table_name = 'node'";
            try (ResultSet results = statement.executeQuery(sql)) {
                ResultSetMetaData metadata = results.getMetaData();
                assertEquals(metadata.getColumnCount(), 2);
                assertEquals(metadata.getColumnLabel(1), "table_catalog");
                assertEquals(metadata.getColumnLabel(2), "table_schema");
                assertTrue(results.next());
                assertEquals(results.getString("table_catalog"), TEST_CATALOG);
            }
        }
    }
}
@Test
public void testConnectionWithCatalog()
        throws Exception
{
    // With only a catalog on the connection, tables must be qualified with
    // their schema ("information_schema.tables").
    try (Connection connection = createConnection(TEST_CATALOG)) {
        try (Statement statement = connection.createStatement()) {
            String sql = "" +
                    "SELECT table_catalog, table_schema " +
                    "FROM information_schema.tables " +
                    "WHERE table_schema = 'sys' AND table_name = 'node'";
            try (ResultSet results = statement.executeQuery(sql)) {
                ResultSetMetaData metadata = results.getMetaData();
                assertEquals(metadata.getColumnCount(), 2);
                assertEquals(metadata.getColumnLabel(1), "table_catalog");
                assertEquals(metadata.getColumnLabel(2), "table_schema");
                assertTrue(results.next());
                assertEquals(results.getString("table_catalog"), TEST_CATALOG);
            }
        }
    }
}
@Test
public void testConnectionResourceHandling()
        throws Exception
{
    // Open many connections concurrently-alive to verify the driver does not
    // exhaust resources; each one must still execute a trivial query.
    List<Connection> openConnections = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        Connection connection = createConnection();
        openConnections.add(connection);
        try (Statement statement = connection.createStatement();
                ResultSet results = statement.executeQuery("SELECT 123")) {
            assertTrue(results.next());
        }
    }
    for (Connection connection : openConnections) {
        connection.close();
    }
}
@Test(expectedExceptions = SQLException.class, expectedExceptionsMessageRegExp = ".* does not exist")
public void testBadQuery()
        throws Exception
{
    // Querying a nonexistent table must raise SQLException from executeQuery().
    try (Connection connection = createConnection();
            Statement statement = connection.createStatement();
            ResultSet ignored = statement.executeQuery("SELECT * FROM bad_table")) {
        fail("expected exception");
    }
}
@Test(expectedExceptions = SQLException.class, expectedExceptionsMessageRegExp = "Username property \\(user\\) must be set")
public void testUserIsRequired()
        throws Exception
{
    // Connecting without a user property must be rejected up front.
    String url = "jdbc:presto://test.invalid/";
    try (Connection ignored = DriverManager.getConnection(url)) {
        fail("expected exception");
    }
}
@Test(expectedExceptions = SQLException.class, expectedExceptionsMessageRegExp = "Invalid path segments in URL: .*")
public void testBadUrlExtraPathSegments()
        throws Exception
{
    // More than catalog/schema in the URL path is rejected.
    try (Connection ignored = DriverManager.getConnection(
            format("jdbc:presto://%s/hive/default/bad_string", server.getAddress()), "test", null)) {
        fail("expected exception");
    }
}
@Test(expectedExceptions = SQLException.class, expectedExceptionsMessageRegExp = "Catalog name is empty: .*")
public void testBadUrlMissingCatalog()
        throws Exception
{
    // A schema without a catalog ("//default") is rejected.
    try (Connection ignored = DriverManager.getConnection(
            format("jdbc:presto://%s//default", server.getAddress()), "test", null)) {
        fail("expected exception");
    }
}
@Test(expectedExceptions = SQLException.class, expectedExceptionsMessageRegExp = "Catalog name is empty: .*")
public void testBadUrlEndsInSlashes()
        throws Exception
{
    // A URL ending in "//" leaves the catalog segment empty and is rejected.
    try (Connection ignored = DriverManager.getConnection(
            format("jdbc:presto://%s//", server.getAddress()), "test", null)) {
        fail("expected exception");
    }
}
@Test(expectedExceptions = SQLException.class, expectedExceptionsMessageRegExp = "Schema name is empty: .*")
public void testBadUrlMissingSchema()
        throws Exception
{
    // A catalog followed by an empty schema segment ("/a//") is rejected.
    try (Connection ignored = DriverManager.getConnection(
            format("jdbc:presto://%s/a//", server.getAddress()), "test", null)) {
        fail("expected exception");
    }
}
/** Opens a connection to the test server with no catalog or schema. */
private Connection createConnection()
        throws SQLException
{
    return DriverManager.getConnection(
            format("jdbc:presto://%s", server.getAddress()), "test", null);
}
/** Opens a connection to the test server with the given default catalog. */
private Connection createConnection(String catalog)
        throws SQLException
{
    return DriverManager.getConnection(
            format("jdbc:presto://%s/%s", server.getAddress(), catalog), "test", null);
}
/** Opens a connection to the test server with the given default catalog and schema. */
private Connection createConnection(String catalog, String schema)
        throws SQLException
{
    return DriverManager.getConnection(
            format("jdbc:presto://%s/%s/%s", server.getAddress(), catalog, schema), "test", null);
}
/** Drains the result set and asserts the number of rows it produced. */
private static void assertRowCount(ResultSet rs, int expected)
        throws SQLException
{
    assertEquals(readRows(rs).size(), expected);
}
/**
 * Drains a result set into an immutable list of rows, each row being the
 * values of all columns (1-based JDBC indexing) as returned by getObject().
 */
private static List<List<Object>> readRows(ResultSet rs)
        throws SQLException
{
    int columnCount = rs.getMetaData().getColumnCount();
    ImmutableList.Builder<List<Object>> rows = ImmutableList.builder();
    while (rs.next()) {
        ImmutableList.Builder<Object> row = ImmutableList.builder();
        for (int column = 1; column <= columnCount; column++) {
            row.add(rs.getObject(column));
        }
        rows.add(row.build());
    }
    return rows.build();
}
/**
 * Best-effort close for cleanup paths: any failure from close() is
 * swallowed so it cannot mask the original error. Unlike a blanket
 * catch, an {@link InterruptedException} re-asserts the thread's
 * interrupt status instead of silently discarding it.
 */
static void closeQuietly(AutoCloseable closeable)
{
    try {
        closeable.close();
    }
    catch (InterruptedException e) {
        // do not swallow interruption: restore the flag for the caller
        Thread.currentThread().interrupt();
    }
    catch (Exception ignored) {
        // intentionally ignored: close failures are irrelevant during cleanup
    }
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.cluster;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexShardAlreadyExistsException;
import org.elasticsearch.index.IndexShardMissingException;
import org.elasticsearch.index.aliases.IndexAliasesService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettingsService;
import org.elasticsearch.index.shard.*;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoveryFailedException;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.indices.recovery.RecoveryStatus;
import org.elasticsearch.indices.recovery.RecoveryTarget;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import static org.elasticsearch.ExceptionsHelper.detailedMessage;
/**
*
*/
public class IndicesClusterStateService extends AbstractLifecycleComponent<IndicesClusterStateService> implements ClusterStateListener {
// Injected collaborators used to apply cluster-state changes locally.
private final IndicesService indicesService;
private final ClusterService clusterService;
private final ThreadPool threadPool;
private final RecoveryTarget recoveryTarget;
private final ShardStateAction shardStateAction;
private final NodeIndexDeletedAction nodeIndexDeletedAction;
private final NodeMappingRefreshAction nodeMappingRefreshAction;
// a map of mappings type we have seen per index due to cluster state
// we need this so we won't remove types automatically created as part of the indexing process
// key is (index name, mapping type); presumably the Boolean value is unused and the
// map serves as a concurrent set -- confirm at the call sites
private final ConcurrentMap<Tuple<String, String>, Boolean> seenMappings = ConcurrentCollections.newConcurrentMap();
// a list of shards that failed during recovery
// we keep track of these shards in order to prevent repeated recovery of these shards on each cluster state update
private final ConcurrentMap<ShardId, FailedShard> failedShards = ConcurrentCollections.newConcurrentMap();
/**
 * Records the shard version and the wall-clock time at which a shard
 * failure was observed, so later cluster-state rounds can decide whether
 * to retry recovery for it.
 */
static class FailedShard {
    // shard version captured at failure time
    public final long version;
    // wall-clock time (epoch millis) when the failure was recorded
    public final long timestamp;
    FailedShard(long version) {
        this.version = version;
        this.timestamp = System.currentTimeMillis();
    }
}
// serializes all cluster-state application work inside clusterChanged()
private final Object mutex = new Object();
// handler reacting to engine failures (FailedEngineHandler is defined later in this file)
private final FailedEngineHandler failedEngineHandler = new FailedEngineHandler();
// controls whether mapping-refresh notifications are sent to the master; see constructor
private final boolean sendRefreshMapping;
/**
 * Wires the injected services and reads the
 * {@code indices.cluster.send_refresh_mapping} setting (default {@code true}),
 * which gates sending mapping-refresh requests to the master.
 */
@Inject
public IndicesClusterStateService(Settings settings, IndicesService indicesService, ClusterService clusterService,
                                  ThreadPool threadPool, RecoveryTarget recoveryTarget,
                                  ShardStateAction shardStateAction,
                                  NodeIndexDeletedAction nodeIndexDeletedAction,
                                  NodeMappingRefreshAction nodeMappingRefreshAction) {
    super(settings);
    this.indicesService = indicesService;
    this.clusterService = clusterService;
    this.threadPool = threadPool;
    this.recoveryTarget = recoveryTarget;
    this.shardStateAction = shardStateAction;
    this.nodeIndexDeletedAction = nodeIndexDeletedAction;
    this.nodeMappingRefreshAction = nodeMappingRefreshAction;
    this.sendRefreshMapping = this.settings.getAsBoolean("indices.cluster.send_refresh_mapping", true);
}
@Override
protected void doStart() {
    // addFirst: register ahead of existing listeners so local shard/index
    // state is applied before other components observe the new cluster state
    clusterService.addFirst(this);
}
@Override
protected void doStop() {
    // stop receiving cluster-state updates
    clusterService.remove(this);
}
@Override
protected void doClose() {
    // nothing to release; listener removal happens in doStop()
}
/**
 * Applies a cluster-state change to the local node: removes deleted
 * indices/shards, creates new ones, and updates mappings, aliases and
 * settings. All work happens under {@code mutex}, and the apply* calls
 * below are order-sensitive (e.g. deletions before creations).
 */
@Override
public void clusterChanged(final ClusterChangedEvent event) {
    // bail out while the node is not yet allowed to mutate local indices
    if (!indicesService.changesAllowed()) {
        return;
    }
    if (!lifecycle.started()) {
        return;
    }
    synchronized (mutex) {
        // we need to clean the shards and indices we have on this node, since we
        // are going to recover them again once state persistence is disabled (no master / not recovered)
        // TODO: this feels a bit hacky here, a block disables state persistence, and then we clean the allocated shards, maybe another flag in blocks?
        if (event.state().blocks().disableStatePersistence()) {
            for (IndexService indexService : indicesService) {
                String index = indexService.index().getName();
                for (Integer shardId : indexService.shardIds()) {
                    logger.debug("[{}][{}] removing shard (disabled block persistence)", index, shardId);
                    try {
                        indexService.removeShard(shardId, "removing shard (disabled block persistence)");
                    } catch (Throwable e) {
                        logger.warn("[{}] failed to remove shard (disabled block persistence)", e, index);
                    }
                }
                removeIndex(index, "cleaning index (disabled block persistence)");
            }
            // nothing else to apply when state persistence is disabled
            return;
        }
        // order matters: purge stale failure records and deleted indices first,
        // then create, then update metadata, then remove what is no longer assigned
        cleanFailedShards(event);
        applyDeletedIndices(event);
        applyNewIndices(event);
        applyMappings(event);
        applyAliases(event);
        applyNewOrUpdatedShards(event);
        applyDeletedShards(event);
        applyCleanedIndices(event);
        applySettings(event);
    }
}
/**
 * Removes local shards of closed indices and then drops any local index
 * service that no longer holds shards. Runs under {@code mutex} (called
 * from {@link #clusterChanged}).
 */
private void applyCleanedIndices(final ClusterChangedEvent event) {
    // handle closed indices, since they are not allocated on a node once they are closed
    // so applyDeletedIndices might not take them into account
    for (IndexService indexService : indicesService) {
        String index = indexService.index().getName();
        IndexMetaData indexMetaData = event.state().metaData().index(index);
        if (indexMetaData != null && indexMetaData.state() == IndexMetaData.State.CLOSE) {
            for (Integer shardId : indexService.shardIds()) {
                logger.debug("[{}][{}] removing shard (index is closed)", index, shardId);
                try {
                    indexService.removeShard(shardId, "removing shard (index is closed)");
                } catch (Throwable e) {
                    // best effort: keep cleaning the remaining shards
                    logger.warn("[{}] failed to remove shard (index is closed)", e, index);
                }
            }
        }
    }
    // second pass: any index service left without shards can be dropped entirely
    for (IndexService indexService : indicesService) {
        String index = indexService.index().getName();
        if (indexService.shardIds().isEmpty()) {
            if (logger.isDebugEnabled()) {
                logger.debug("[{}] cleaning index (no shards allocated)", index);
            }
            // clean the index
            removeIndex(index, "removing index (no shards allocated)");
        }
    }
}
/**
 * Deletes local indices that are no longer part of the cluster metadata,
 * plus any local index whose UUID no longer matches the cluster state
 * (i.e. a same-named index was deleted and re-created while we missed it).
 * Each deletion is acknowledged to the master (best effort).
 */
private void applyDeletedIndices(final ClusterChangedEvent event) {
    final ClusterState previousState = event.previousState();
    final String localNodeId = event.state().nodes().localNodeId();
    assert localNodeId != null;
    // UUID mismatch: drop the stale local copy so it will be recreated fresh
    for (IndexService indexService : indicesService) {
        IndexMetaData indexMetaData = event.state().metaData().index(indexService.index().name());
        if (indexMetaData != null) {
            if (!indexMetaData.isSameUUID(indexService.indexUUID())) {
                logger.debug("[{}] mismatch on index UUIDs between cluster state and local state, cleaning the index so it will be recreated", indexMetaData.index());
                deleteIndex(indexMetaData.index(), "mismatch on index UUIDs between cluster state and local state, cleaning the index so it will be recreated");
            }
        }
    }
    // indices removed from the metadata in this cluster-state update
    for (String index : event.indicesDeleted()) {
        if (logger.isDebugEnabled()) {
            logger.debug("[{}] cleaning index, no longer part of the metadata", index);
        }
        final Settings indexSettings;
        final IndexService idxService = indicesService.indexService(index);
        if (idxService != null) {
            // index is open locally: delete it directly
            indexSettings = idxService.getIndexSettings();
            deleteIndex(index, "index no longer part of the metadata");
        } else {
            // closed index: no IndexService exists, so take the settings from
            // the previous cluster state and delete its on-disk content
            final IndexMetaData metaData = previousState.metaData().index(index);
            assert metaData != null;
            indexSettings = metaData.settings();
            indicesService.deleteClosedIndex("closed index no longer part of the metadata", metaData, event.state());
        }
        try {
            // ack the deletion to the master (failure here is only logged)
            nodeIndexDeletedAction.nodeIndexDeleted(event.state(), index, indexSettings, localNodeId);
        } catch (Throwable e) {
            logger.debug("failed to send to master index {} deleted event", e, index);
        }
    }
}
/**
 * Removes local shards that the current routing table no longer assigns to
 * this node.
 */
private void applyDeletedShards(final ClusterChangedEvent event) {
    final RoutingNodes.RoutingNodeIterator localRoutingNode =
            event.state().readOnlyRoutingNodes().routingNodeIter(event.state().nodes().localNodeId());
    if (localRoutingNode == null) {
        return;
    }
    // reused across indices to avoid allocating a set per index
    final IntHashSet assignedShardIds = new IntHashSet();
    for (IndexService service : indicesService) {
        final String indexName = service.index().name();
        final IndexMetaData metaData = event.state().metaData().index(indexName);
        if (metaData == null) {
            continue;
        }
        // collect the shard ids the routing table still assigns here
        assignedShardIds.clear();
        for (ShardRouting routing : localRoutingNode) {
            if (routing.index().equals(indexName)) {
                assignedShardIds.add(routing.id());
            }
        }
        // drop every local shard that is no longer assigned to this node
        for (Integer localShardId : service.shardIds()) {
            if (assignedShardIds.contains(localShardId)) {
                continue;
            }
            if (metaData.state() == IndexMetaData.State.CLOSE) {
                if (logger.isDebugEnabled()) {
                    logger.debug("[{}][{}] removing shard (index is closed)", indexName, localShardId);
                }
                service.removeShard(localShardId, "removing shard (index is closed)");
            } else {
                // we can just remove the shard without cleaning it locally,
                // since IndicesStore cleans it once all shards are allocated
                if (logger.isDebugEnabled()) {
                    logger.debug("[{}][{}] removing shard (not allocated)", indexName, localShardId);
                }
                service.removeShard(localShardId, "removing shard (not allocated)");
            }
        }
    }
}
/**
 * Creates every locally-missing index for which the routing table assigns
 * at least one shard to this node.
 */
private void applyNewIndices(final ClusterChangedEvent event) {
    // indices are only created for shards actually allocated to this node
    final RoutingNodes.RoutingNodeIterator localRoutingNode =
            event.state().readOnlyRoutingNodes().routingNodeIter(event.state().nodes().localNodeId());
    if (localRoutingNode == null) {
        return;
    }
    for (ShardRouting routing : localRoutingNode) {
        if (indicesService.hasIndex(routing.index())) {
            continue;
        }
        final IndexMetaData metaData = event.state().metaData().index(routing.index());
        if (logger.isDebugEnabled()) {
            logger.debug("[{}] creating index", metaData.index());
        }
        try {
            indicesService.createIndex(metaData.index(), metaData.settings(), event.state().nodes().localNode().id());
        } catch (Throwable t) {
            // creation failed: report the shard as failed to the master
            sendFailShard(routing, metaData.getUUID(), "failed to create index", t);
        }
    }
}
/**
 * Pushes refreshed index-level settings to every locally-present index
 * whose metadata changed in this cluster-state update.
 */
private void applySettings(ClusterChangedEvent event) {
    if (!event.metaDataChanged()) {
        return;
    }
    for (IndexMetaData metaData : event.state().metaData()) {
        // we only update existing indices here; creation happens elsewhere
        if (!indicesService.hasIndex(metaData.index())) {
            continue;
        }
        // unchanged index metadata means nothing to refresh
        if (!event.indexMetaDataChanged(metaData)) {
            continue;
        }
        final IndexService service = indicesService.indexService(metaData.index());
        if (service == null) {
            // already deleted on us, ignore it
            continue;
        }
        final IndexSettingsService settingsService = service.injector().getInstance(IndexSettingsService.class);
        settingsService.refreshSettings(metaData.settings());
    }
}
/**
 * Creates or updates local mappings from the cluster metadata for every
 * index present on this node. If applying any mapping fails, all shards of
 * that index are failed: the mapping was already processed on the master
 * (or the node that introduced it), so a failure here typically indicates
 * bad node-level configuration.
 */
private void applyMappings(ClusterChangedEvent event) {
    // go over and update mappings
    for (IndexMetaData indexMetaData : event.state().metaData()) {
        if (!indicesService.hasIndex(indexMetaData.index())) {
            // we only create / update here
            continue;
        }
        List<String> typesToRefresh = Lists.newArrayList();
        String index = indexMetaData.index();
        IndexService indexService = indicesService.indexService(index);
        if (indexService == null) {
            // got deleted on us, ignore (closing the node)
            return;
        }
        try {
            MapperService mapperService = indexService.mapperService();
            // first, go over and update the _default_ mapping (if exists)
            if (indexMetaData.mappings().containsKey(MapperService.DEFAULT_MAPPING)) {
                boolean requireRefresh = processMapping(index, mapperService, MapperService.DEFAULT_MAPPING, indexMetaData.mapping(MapperService.DEFAULT_MAPPING).source());
                if (requireRefresh) {
                    typesToRefresh.add(MapperService.DEFAULT_MAPPING);
                }
            }
            // go over and add the relevant mappings (or update them)
            for (ObjectCursor<MappingMetaData> cursor : indexMetaData.mappings().values()) {
                MappingMetaData mappingMd = cursor.value;
                String mappingType = mappingMd.type();
                CompressedXContent mappingSource = mappingMd.source();
                if (mappingType.equals(MapperService.DEFAULT_MAPPING)) { // we processed _default_ first
                    continue;
                }
                boolean requireRefresh = processMapping(index, mapperService, mappingType, mappingSource);
                if (requireRefresh) {
                    typesToRefresh.add(mappingType);
                }
            }
            // ask the master to refresh its mappings for any type whose locally
            // merged form differs from the cluster-state source
            if (!typesToRefresh.isEmpty() && sendRefreshMapping) {
                nodeMappingRefreshAction.nodeMappingRefresh(event.state(),
                        new NodeMappingRefreshAction.NodeMappingRefreshRequest(index, indexMetaData.uuid(),
                                typesToRefresh.toArray(new String[typesToRefresh.size()]), event.state().nodes().localNodeId())
                );
            }
        } catch (Throwable t) {
            // if we failed the mappings anywhere, we need to fail the shards for this index, note, we safeguard
            // by creating the processing the mappings on the master, or on the node the mapping was introduced on,
            // so this failure typically means wrong node level configuration or something similar
            for (IndexShard indexShard : indexService) {
                ShardRouting shardRouting = indexShard.routingEntry();
                failAndRemoveShard(shardRouting, indexService, true, "failed to update mappings", t);
            }
        }
    }
}
/**
 * Applies a single mapping from the cluster metadata to the local
 * {@code MapperService}. Returns {@code true} when the master should be
 * asked to refresh its mapping, i.e. when the locally merged mapping does
 * not equal the source found in the cluster state.
 *
 * @throws Throwable when merging the mapping fails; the caller then fails
 *                   the shards of this index
 */
private boolean processMapping(String index, MapperService mapperService, String mappingType, CompressedXContent mappingSource) throws Throwable {
    // track this (index, type) pair so clearSeenMappings can clean it up
    // once the index goes away
    if (!seenMappings.containsKey(new Tuple<>(index, mappingType))) {
        seenMappings.put(new Tuple<>(index, mappingType), true);
    }
    // refresh mapping can happen for 2 reasons. The first is less urgent, and happens when the mapping on this
    // node is ahead of what there is in the cluster state (yet an update-mapping has been sent to it already,
    // it just hasn't been processed yet and published). Eventually, the mappings will converge, and the refresh
    // mapping sent is more of a safe keeping (assuming the update mapping failed to reach the master, ...)
    // the second case is where the parsing/merging of the mapping from the metadata doesn't result in the same
    // mapping, in this case, we send to the master to refresh its own version of the mappings (to conform with the
    // merge version of it, which it does when refreshing the mappings), and warn log it.
    boolean requiresRefresh = false;
    try {
        if (!mapperService.hasMapping(mappingType)) {
            // new type: log the full source only when it is small, or at TRACE
            if (logger.isDebugEnabled() && mappingSource.compressed().length < 512) {
                logger.debug("[{}] adding mapping [{}], source [{}]", index, mappingType, mappingSource.string());
            } else if (logger.isTraceEnabled()) {
                logger.trace("[{}] adding mapping [{}], source [{}]", index, mappingType, mappingSource.string());
            } else {
                logger.debug("[{}] adding mapping [{}] (source suppressed due to length, use TRACE level if needed)", index, mappingType);
            }
            // we don't apply default, since it has been applied when the mappings were parsed initially
            mapperService.merge(mappingType, mappingSource, false, true);
            if (!mapperService.documentMapper(mappingType).mappingSource().equals(mappingSource)) {
                // merged form differs from the cluster-state source: refresh needed
                logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index, mappingType, mappingSource, mapperService.documentMapper(mappingType).mappingSource());
                requiresRefresh = true;
            }
        } else {
            DocumentMapper existingMapper = mapperService.documentMapper(mappingType);
            if (!mappingSource.equals(existingMapper.mappingSource())) {
                // mapping changed, update it
                if (logger.isDebugEnabled() && mappingSource.compressed().length < 512) {
                    logger.debug("[{}] updating mapping [{}], source [{}]", index, mappingType, mappingSource.string());
                } else if (logger.isTraceEnabled()) {
                    logger.trace("[{}] updating mapping [{}], source [{}]", index, mappingType, mappingSource.string());
                } else {
                    logger.debug("[{}] updating mapping [{}] (source suppressed due to length, use TRACE level if needed)", index, mappingType);
                }
                // we don't apply default, since it has been applied when the mappings were parsed initially
                mapperService.merge(mappingType, mappingSource, false, true);
                if (!mapperService.documentMapper(mappingType).mappingSource().equals(mappingSource)) {
                    requiresRefresh = true;
                    logger.debug("[{}] parsed mapping [{}], and got different sources\noriginal:\n{}\nparsed:\n{}", index, mappingType, mappingSource, mapperService.documentMapper(mappingType).mappingSource());
                }
            }
        }
    } catch (Throwable e) {
        logger.warn("[{}] failed to add mapping [{}], source [{}]", e, index, mappingType, mappingSource);
        throw e;
    }
    return requiresRefresh;
}
/**
 * Returns {@code true} when either the alias metadata or the routing table
 * differs between the previous and current cluster state.
 */
private boolean aliasesChanged(ClusterChangedEvent event) {
    final ClusterState current = event.state();
    final ClusterState previous = event.previousState();
    if (!current.metaData().aliases().equals(previous.metaData().aliases())) {
        return true;
    }
    return !current.routingTable().equals(previous.routingTable());
}
/**
 * Re-applies alias metadata to every locally-present index whenever the
 * aliases (or the routing table) changed in this cluster-state update.
 */
private void applyAliases(ClusterChangedEvent event) {
    // nothing to do when neither aliases nor routing changed
    if (!aliasesChanged(event)) {
        return;
    }
    // go over and update aliases
    for (IndexMetaData metaData : event.state().metaData()) {
        final IndexService service = indicesService.indexService(metaData.index());
        if (service == null) {
            // we only create / update here
            continue;
        }
        final IndexAliasesService aliasesService = service.aliasesService();
        aliasesService.setAliases(metaData.getAliases());
    }
}
/**
 * Reconciles locally-present shards with the routing table: re-sends shard
 * failures for shards the master wrongly considers started, removes shards
 * whose allocation changed under us (different instance, changed recovery
 * source, primary promotion), updates routing entries, and finally starts
 * recovery for shards the master marked as initializing.
 */
private void applyNewOrUpdatedShards(final ClusterChangedEvent event) {
    if (!indicesService.changesAllowed()) {
        return;
    }
    RoutingTable routingTable = event.state().routingTable();
    RoutingNodes.RoutingNodeIterator routingNode = event.state().readOnlyRoutingNodes().routingNodeIter(event.state().nodes().localNodeId());
    if (routingNode == null) {
        // this node is not in the routing table: forget any failed-shard state
        failedShards.clear();
        return;
    }
    DiscoveryNodes nodes = event.state().nodes();
    for (final ShardRouting shardRouting : routingNode) {
        final IndexService indexService = indicesService.indexService(shardRouting.index());
        if (indexService == null) {
            // got deleted on us, ignore
            continue;
        }
        final IndexMetaData indexMetaData = event.state().metaData().index(shardRouting.index());
        if (indexMetaData == null) {
            // the index got deleted on the metadata, we will clean it later in the apply deleted method call
            continue;
        }
        final int shardId = shardRouting.id();
        if (!indexService.hasShard(shardId) && shardRouting.started()) {
            if (failedShards.containsKey(shardRouting.shardId())) {
                // we failed this shard before; remind the master about it
                if (nodes.masterNode() != null) {
                    shardStateAction.resendShardFailed(shardRouting, indexMetaData.getUUID(),
                            "master " + nodes.masterNode() + " marked shard as started, but shard has previous failed. resending shard failure.",
                            nodes.masterNode()
                    );
                }
            } else {
                // the master thinks we are started, but we don't have this shard at all, mark it as failed
                sendFailShard(shardRouting, indexMetaData.getUUID(), "master [" + nodes.masterNode() + "] marked shard as started, but shard has not been created, mark shard as failed", null);
            }
            continue;
        }
        IndexShard indexShard = indexService.shard(shardId);
        if (indexShard != null) {
            ShardRouting currentRoutingEntry = indexShard.routingEntry();
            // if the current and global routing are initializing, but are still not the same, its a different "shard" being allocated
            // for example: a shard that recovers from one node and now needs to recover to another node,
            // or a replica allocated and then allocating a primary because the primary failed on another node
            boolean shardHasBeenRemoved = false;
            if (currentRoutingEntry.initializing() && shardRouting.initializing() && !currentRoutingEntry.equals(shardRouting)) {
                logger.debug("[{}][{}] removing shard (different instance of it allocated on this node, current [{}], global [{}])", shardRouting.index(), shardRouting.id(), currentRoutingEntry, shardRouting);
                // closing the shard will also cancel any ongoing recovery.
                indexService.removeShard(shardRouting.id(), "removing shard (different instance of it allocated on this node)");
                shardHasBeenRemoved = true;
            } else if (isPeerRecovery(shardRouting)) {
                final DiscoveryNode sourceNode = findSourceNodeForPeerRecovery(routingTable, nodes, shardRouting);
                // check if there is an existing recovery going, and if so, and the source node is not the same, cancel the recovery to restart it
                final Predicate<RecoveryStatus> shouldCancel = new Predicate<RecoveryStatus>() {
                    @Override
                    public boolean apply(@Nullable RecoveryStatus status) {
                        return status.sourceNode().equals(sourceNode) == false;
                    }
                };
                if (recoveryTarget.cancelRecoveriesForShard(indexShard.shardId(), "recovery source node changed", shouldCancel)) {
                    logger.debug("[{}][{}] removing shard (recovery source changed), current [{}], global [{}])", shardRouting.index(), shardRouting.id(), currentRoutingEntry, shardRouting);
                    // closing the shard will also cancel any ongoing recovery.
                    indexService.removeShard(shardRouting.id(), "removing shard (recovery source node changed)");
                    shardHasBeenRemoved = true;
                }
            }
            if (shardHasBeenRemoved == false && (shardRouting.equals(indexShard.routingEntry()) == false || shardRouting.version() > indexShard.routingEntry().version())) {
                if (shardRouting.primary() && indexShard.routingEntry().primary() == false && shardRouting.initializing() && indexShard.allowsPrimaryPromotion() == false) {
                    logger.debug("{} reinitialize shard on primary promotion", indexShard.shardId());
                    indexService.removeShard(shardId, "promoted to primary");
                } else {
                    // if we happen to remove the shardRouting by id above we don't need to jump in here!
                    indexShard.updateRoutingEntry(shardRouting, event.state().blocks().disableStatePersistence() == false);
                }
            }
        }
        if (shardRouting.initializing()) {
            applyInitializingShard(event.state(), indexMetaData, shardRouting);
        }
    }
}
/**
 * Prunes {@code failedShards} entries that no longer correspond to a shard
 * assigned to this node, or whose failure record is stale (different shard
 * version, or older than one hour).
 */
private void cleanFailedShards(final ClusterChangedEvent event) {
    RoutingTable routingTable = event.state().routingTable();
    RoutingNodes.RoutingNodeIterator routingNode = event.state().readOnlyRoutingNodes().routingNodeIter(event.state().nodes().localNodeId());
    if (routingNode == null) {
        // this node is not in the routing table at all: nothing to track
        failedShards.clear();
        return;
    }
    DiscoveryNodes nodes = event.state().nodes();
    long now = System.currentTimeMillis();
    String localNodeId = nodes.localNodeId();
    Iterator<Map.Entry<ShardId, FailedShard>> iterator = failedShards.entrySet().iterator();
    shards:
    while (iterator.hasNext()) {
        Map.Entry<ShardId, FailedShard> entry = iterator.next();
        FailedShard failedShard = entry.getValue();
        IndexRoutingTable indexRoutingTable = routingTable.index(entry.getKey().getIndex());
        if (indexRoutingTable != null) {
            IndexShardRoutingTable shardRoutingTable = indexRoutingTable.shard(entry.getKey().id());
            if (shardRoutingTable != null) {
                for (ShardRouting shardRouting : shardRoutingTable.assignedShards()) {
                    if (localNodeId.equals(shardRouting.currentNodeId())) {
                        // we have a timeout here just to make sure we don't have dangled failed shards for some reason
                        // its just another safely layer
                        if (shardRouting.version() == failedShard.version && ((now - failedShard.timestamp) < TimeValue.timeValueMinutes(60).millis())) {
                            // It's the same failed shard - keep it if it hasn't timed out
                            continue shards;
                        } else {
                            // Different version or expired, remove it
                            break;
                        }
                    }
                }
            }
        }
        // reached when the shard is no longer assigned here or its record was
        // stale: drop the entry from the failed-shards map
        iterator.remove();
    }
}
/**
 * Handles a shard the master marked as initializing: re-sends started or
 * failed notifications when the local state disagrees, creates the shard
 * locally if missing, and then starts either peer recovery (replica, or a
 * relocating primary) or local-store recovery.
 */
private void applyInitializingShard(final ClusterState state, final IndexMetaData indexMetaData, final ShardRouting shardRouting) {
    final IndexService indexService = indicesService.indexService(shardRouting.index());
    if (indexService == null) {
        // got deleted on us, ignore
        return;
    }
    final RoutingTable routingTable = state.routingTable();
    final DiscoveryNodes nodes = state.getNodes();
    final int shardId = shardRouting.id();
    if (indexService.hasShard(shardId)) {
        IndexShard indexShard = indexService.shardSafe(shardId);
        if (indexShard.state() == IndexShardState.STARTED || indexShard.state() == IndexShardState.POST_RECOVERY) {
            // the master thinks we are initializing, but we are already started or on POST_RECOVERY and waiting
            // for master to confirm a shard started message (either master failover, or a cluster event before
            // we managed to tell the master we started), mark us as started
            if (logger.isTraceEnabled()) {
                logger.trace("{} master marked shard as initializing, but shard has state [{}], resending shard started to {}",
                        indexShard.shardId(), indexShard.state(), nodes.masterNode());
            }
            if (nodes.masterNode() != null) {
                shardStateAction.shardStarted(shardRouting, indexMetaData.getUUID(),
                        "master " + nodes.masterNode() + " marked shard as initializing, but shard state is [" + indexShard.state() + "], mark shard as started",
                        nodes.masterNode());
            }
            return;
        } else {
            if (indexShard.ignoreRecoveryAttempt()) {
                // recovery already in progress for this existing shard
                logger.trace("ignoring recovery instruction for an existing shard {} (shard state: [{}])", indexShard.shardId(), indexShard.state());
                return;
            }
        }
    }
    // if we're in peer recovery, try to find out the source node now so in case it fails, we will not create the index shard
    DiscoveryNode sourceNode = null;
    if (isPeerRecovery(shardRouting)) {
        sourceNode = findSourceNodeForPeerRecovery(routingTable, nodes, shardRouting);
        if (sourceNode == null) {
            logger.trace("ignoring initializing shard {} - no source node can be found.", shardRouting.shardId());
            return;
        }
    }
    // if there is no shard, create it
    if (!indexService.hasShard(shardId)) {
        if (failedShards.containsKey(shardRouting.shardId())) {
            // we already failed this shard; remind the master instead of recreating
            if (nodes.masterNode() != null) {
                shardStateAction.resendShardFailed(shardRouting, indexMetaData.getUUID(),
                        "master " + nodes.masterNode() + " marked shard as initializing, but shard is marked as failed, resend shard failure",
                        nodes.masterNode());
            }
            return;
        }
        try {
            if (logger.isDebugEnabled()) {
                logger.debug("[{}][{}] creating shard", shardRouting.index(), shardId);
            }
            IndexShard indexShard = indexService.createShard(shardId, shardRouting.primary());
            indexShard.updateRoutingEntry(shardRouting, state.blocks().disableStatePersistence() == false);
            indexShard.addFailedEngineListener(failedEngineHandler);
        } catch (IndexShardAlreadyExistsException e) {
            // ignore this, the method call can happen several times
        } catch (Throwable e) {
            failAndRemoveShard(shardRouting, indexService, true, "failed to create shard", e);
            return;
        }
    }
    final IndexShard indexShard = indexService.shardSafe(shardId);
    if (indexShard.ignoreRecoveryAttempt()) {
        // we are already recovering (we can get to this state since the cluster event can happen several
        // times while we recover)
        logger.trace("ignoring recovery instruction for shard {} (shard state: [{}])", indexShard.shardId(), indexShard.state());
        return;
    }
    if (isPeerRecovery(shardRouting)) {
        try {
            assert sourceNode != null : "peer recovery started but sourceNode is null";
            // we don't mark this one as relocated at the end.
            // For primaries: requests in any case are routed to both when its relocating and that way we handle
            // the edge case where its mark as relocated, and we might need to roll it back...
            // For replicas: we are recovering a backup from a primary
            RecoveryState.Type type = shardRouting.primary() ? RecoveryState.Type.RELOCATION : RecoveryState.Type.REPLICA;
            recoveryTarget.startRecovery(indexShard, type, sourceNode, new PeerRecoveryListener(shardRouting, indexService, indexMetaData));
        } catch (Throwable e) {
            indexShard.failShard("corrupted preexisting index", e);
            handleRecoveryFailure(indexService, shardRouting, true, e);
        }
    } else {
        // not a peer recovery: recover the shard from its local store
        final IndexShardRoutingTable indexShardRouting = routingTable.index(shardRouting.index()).shard(shardRouting.id());
        indexService.shard(shardId).recoverFromStore(indexShardRouting, new StoreRecoveryService.RecoveryListener() {
            @Override
            public void onRecoveryDone() {
                shardStateAction.shardStarted(shardRouting, indexMetaData.getUUID(), "after recovery from store");
            }
            @Override
            public void onIgnoreRecovery(String reason) {
            }
            @Override
            public void onRecoveryFailed(IndexShardRecoveryException e) {
                handleRecoveryFailure(indexService, shardRouting, true, e);
            }
        });
    }
}
/**
 * Finds the source node for a peer recovery; returns {@code null} if none
 * can be found yet. This method expects the shard routing to *require*
 * peer recovery — use {@link #isPeerRecovery(org.elasticsearch.cluster.routing.ShardRouting)}
 * first to check whether it is needed.
 */
private DiscoveryNode findSourceNodeForPeerRecovery(RoutingTable routingTable, DiscoveryNodes nodes, ShardRouting shardRouting) {
    if (shardRouting.primary()) {
        // a primary only peer-recovers while relocating; anything else is a caller bug
        if (shardRouting.relocatingNodeId() == null) {
            throw new IllegalStateException("trying to find source node for peer recovery when routing state means no peer recovery: " + shardRouting);
        }
        final DiscoveryNode relocationSource = nodes.get(shardRouting.relocatingNodeId());
        if (relocationSource == null) {
            logger.trace("can't find relocation source node for shard {} because it is assigned to an unknown node [{}].", shardRouting.shardId(), shardRouting.relocatingNodeId());
        }
        return relocationSource;
    }
    // replica: recover from the active primary of the same shard
    final IndexShardRoutingTable shardTable = routingTable.index(shardRouting.index()).shard(shardRouting.id());
    for (ShardRouting candidate : shardTable) {
        // only recover from a started primary; if none exists, retry next round
        if (!candidate.primary() || !candidate.active()) {
            continue;
        }
        final DiscoveryNode primaryNode = nodes.get(candidate.currentNodeId());
        if (primaryNode == null) {
            logger.trace("can't find replica source node because primary shard {} is assigned to an unknown node.", candidate);
        }
        return primaryNode;
    }
    logger.trace("can't find replica source node for {} because a primary shard can not be found.", shardRouting.shardId());
    return null;
}
/**
 * A shard recovers from a peer when it is a replica (its data comes from
 * the primary) or a relocating primary (its data comes from the old copy).
 */
private boolean isPeerRecovery(ShardRouting shardRouting) {
    if (shardRouting.primary()) {
        return shardRouting.relocatingNodeId() != null;
    }
    return true;
}
/**
 * Recovery callback for peer recoveries started by
 * {@code applyInitializingShard}: reports shard-started to the master on
 * success and fails/removes the shard on failure.
 */
private class PeerRecoveryListener implements RecoveryTarget.RecoveryListener {
    private final ShardRouting shardRouting;
    private final IndexService indexService;
    private final IndexMetaData indexMetaData;
    private PeerRecoveryListener(ShardRouting shardRouting, IndexService indexService, IndexMetaData indexMetaData) {
        this.shardRouting = shardRouting;
        this.indexService = indexService;
        this.indexMetaData = indexMetaData;
    }
    @Override
    public void onRecoveryDone(RecoveryState state) {
        shardStateAction.shardStarted(shardRouting, indexMetaData.getUUID(), "after recovery (replica) from node [" + state.getSourceNode() + "]");
    }
    @Override
    public void onRecoveryFailure(RecoveryState state, RecoveryFailedException e, boolean sendShardFailure) {
        handleRecoveryFailure(indexService, shardRouting, sendShardFailure, e);
    }
}
/**
 * Fails and removes the given shard under the cluster-state mutex so the
 * failure is serialized with concurrent cluster-state application.
 */
private void handleRecoveryFailure(IndexService indexService, ShardRouting shardRouting, boolean sendShardFailure, Throwable failure) {
    synchronized (mutex) {
        failAndRemoveShard(shardRouting, indexService, sendShardFailure, "failed recovery", failure);
    }
}
/**
 * Removes the index from this node (best effort; failures are only logged)
 * and forgets the mappings seen for it.
 */
private void removeIndex(String index, String reason) {
    try {
        indicesService.removeIndex(index, reason);
    } catch (Throwable t) {
        logger.warn("failed to clean index ({})", t, reason);
    }
    // drop the per-index mapping bookkeeping as well
    clearSeenMappings(index);
}
/**
 * Drops all entries of {@code seenMappings} belonging to the given index.
 *
 * Removal goes through the key-set iterator's {@code remove()} rather than
 * {@code seenMappings.remove(...)} inside the loop: the latter throws
 * ConcurrentModificationException if the map is a plain (non-concurrent)
 * map, while iterator removal is safe for both plain and concurrent maps.
 * (NOTE(review): the declared type of {@code seenMappings} is not visible
 * in this chunk — if it is guaranteed concurrent, this is purely defensive.)
 */
private void clearSeenMappings(String index) {
    // clear seen mappings as well
    Iterator<Tuple<String, String>> it = seenMappings.keySet().iterator();
    while (it.hasNext()) {
        if (it.next().v1().equals(index)) {
            it.remove();
        }
    }
}
/**
 * Deletes the index content from this node (best effort; failures are only
 * logged) and forgets the mappings seen for it.
 */
private void deleteIndex(String index, String reason) {
    try {
        indicesService.deleteIndex(index, reason);
    } catch (Throwable t) {
        logger.warn("failed to delete index ({})", t, reason);
    }
    // clear seen mappings as well
    clearSeenMappings(index);
}
/**
 * Removes the shard locally (when present) and optionally notifies the
 * master that it failed. Removal errors never prevent the failure
 * notification from being sent.
 */
private void failAndRemoveShard(ShardRouting shardRouting, IndexService indexService, boolean sendShardFailure, String message, @Nullable Throwable failure) {
    if (indexService.hasShard(shardRouting.getId())) {
        try {
            indexService.removeShard(shardRouting.getId(), message);
        } catch (IndexShardMissingException e) {
            // the node got closed on us, ignore it
        } catch (Throwable e1) {
            logger.warn("[{}][{}] failed to remove shard after failure ([{}])", e1, shardRouting.getIndex(), shardRouting.getId(), message);
        }
    }
    if (sendShardFailure) {
        sendFailShard(shardRouting, indexService.indexUUID(), message, failure);
    }
}
/**
 * Records the shard in {@code failedShards} and notifies the master of the
 * failure. Never throws: problems while notifying are only logged.
 */
private void sendFailShard(ShardRouting shardRouting, String indexUUID, String message, @Nullable Throwable failure) {
    try {
        logger.warn("[{}] marking and sending shard failed due to [{}]", failure, shardRouting.shardId(), message);
        // remember the failure so cleanFailedShards / resend logic can use it
        failedShards.put(shardRouting.shardId(), new FailedShard(shardRouting.version()));
        shardStateAction.shardFailed(shardRouting, indexUUID, "shard failure [" + message + "]" + (failure == null ? "" : "[" + detailedMessage(failure) + "]"));
    } catch (Throwable e1) {
        logger.warn("[{}][{}] failed to mark shard as failed (because of [{}])", e1, shardRouting.getIndex(), shardRouting.getId(), message);
    }
}
/**
 * Reacts to engine failures by failing and removing the affected shard on
 * a generic-pool thread, under the cluster-state mutex.
 */
private class FailedEngineHandler implements Engine.FailedEngineListener {
    @Override
    public void onFailedEngine(final ShardId shardId, final String reason, final @Nullable Throwable failure) {
        // resolve the current routing entry of the failed shard, if it still
        // exists on this node
        ShardRouting shardRouting = null;
        final IndexService indexService = indicesService.indexService(shardId.index().name());
        if (indexService != null) {
            IndexShard indexShard = indexService.shard(shardId.id());
            if (indexShard != null) {
                shardRouting = indexShard.routingEntry();
            }
        }
        if (shardRouting == null) {
            logger.warn("[{}][{}] engine failed, but can't find index shard. failure reason: [{}]", failure,
                    shardId.index().name(), shardId.id(), reason);
            return;
        }
        final ShardRouting fShardRouting = shardRouting;
        // fail the shard off the notifying thread; take the mutex to serialize
        // with cluster-state application
        threadPool.generic().execute(new Runnable() {
            @Override
            public void run() {
                synchronized (mutex) {
                    failAndRemoveShard(fShardRouting, indexService, true, "engine failure, reason [" + reason + "]", failure);
                }
            }
        });
    }
}
}
| |
package com.github.florent37.materialviewpager;
import android.content.Context;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import com.github.ksoichiro.android.observablescrollview.ObservableScrollView;
import com.github.ksoichiro.android.observablescrollview.ObservableScrollViewCallbacks;
import com.github.ksoichiro.android.observablescrollview.ObservableWebView;
import com.github.ksoichiro.android.observablescrollview.ScrollState;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.AnimatorListenerAdapter;
import com.nineoldandroids.animation.ArgbEvaluator;
import com.nineoldandroids.animation.ObjectAnimator;
import com.nineoldandroids.animation.ValueAnimator;
import com.nineoldandroids.view.ViewHelper;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import static com.github.florent37.materialviewpager.Utils.canScroll;
import static com.github.florent37.materialviewpager.Utils.colorWithAlpha;
import static com.github.florent37.materialviewpager.Utils.dpToPx;
import static com.github.florent37.materialviewpager.Utils.getTheVisibileView;
import static com.github.florent37.materialviewpager.Utils.minMax;
import static com.github.florent37.materialviewpager.Utils.scrollTo;
import static com.github.florent37.materialviewpager.Utils.setBackgroundColor;
import static com.github.florent37.materialviewpager.Utils.setElevation;
import static com.github.florent37.materialviewpager.Utils.setScale;
/**
* Created by florentchampigny on 24/04/15.
*
* Listens to the scrollables inside a MaterialViewPager.
* When notified of a scroll, dispatches the current scroll offset to the other scrollables.
*
* Note: we didn't want to translate the MaterialViewPager itself or intercept its scroll,
* so a ViewPager is used whose scrollables contain a transparent placeholder on top.
*
* On scroll, animates the MaterialViewPager header (toolbar, logo, color, ...)
*/
public class MaterialViewPagerAnimator {
private static final String TAG = MaterialViewPagerAnimator.class.getSimpleName();
// shared debug switch enabling verbose scroll logging
public static Boolean ENABLE_LOG = false;
private Context context;
// contains MaterialViewPager subviews references
private MaterialViewPagerHeader mHeader;
// duration of translate header enter animation
private static final int ENTER_TOOLBAR_ANIMATION_DURATION = 600;
// reference to the current MaterialViewPager
protected MaterialViewPager materialViewPager;
// final toolbar layout elevation (if attr viewpager_enableToolbarElevation = true)
public final float elevation;
// max scroll which will be dispatched for all scrollables
public final float scrollMax;
// scrollMax converted to dp (cached once to avoid converting on every use)
public final float scrollMaxDp;
protected float lastYOffset = -1; // the current yOffset
protected float lastPercent = 0; // the current percent
// contains the attributes given to MaterialViewPager from layout
protected MaterialViewPagerSettings settings;
// list of all registered scrollers
protected List<View> scrollViewList = new ArrayList<>();
// saves the yOffset of each registered scrollable
protected HashMap<Object, Integer> yOffsets = new HashMap<>();
// the last headerYOffset during scroll
private float headerYOffset = Float.MAX_VALUE;
// the temporary headerAnimator (non-null while animating, else null)
private Object headerAnimator;
boolean followScrollToolbarIsVisible = false;
float firstScrollValue = Float.MIN_VALUE;
boolean justToolbarAnimated = false;
// initial distance between the pager tab strip & the toolbar
float initialDistance = -1;
/**
 * Binds this animator to a MaterialViewPager and caches the header views,
 * settings and scroll bounds it animates against.
 */
public MaterialViewPagerAnimator(MaterialViewPager materialViewPager) {
    this.settings = materialViewPager.settings;
    this.materialViewPager = materialViewPager;
    this.mHeader = materialViewPager.materialViewPagerHeader;
    this.context = mHeader.getContext();
    // initialise the scrollMax to headerHeight, so until the first cell touch the top of the screen
    this.scrollMax = this.settings.headerHeight;
    // cache the dp conversion once
    this.scrollMaxDp = Utils.dpToPx(this.scrollMax, context);
    //heightMaxScrollToolbar = context.getResources().getDimension(R.dimen.material_viewpager_padding_top);
    elevation = dpToPx(4, context);
}
/**
 * Propagates a scroll offset to every registered scrollable except the one
 * that originated the event.
 *
 * @param source  the scrollable that produced the offset (skipped on dispatch)
 * @param yOffset the vertical offset to apply to the other scrollables
 */
protected void dispatchScrollOffset(Object source, float yOffset) {
    if (scrollViewList == null) {
        return;
    }
    for (Object registered : scrollViewList) {
        // Never re-scroll the view that originated the event.
        if (registered == null || registered == source) {
            continue;
        }
        setScrollOffset(registered, yOffset);
    }
}
/**
 * Applies a vertical offset to one scrollable and records it so the offset can
 * be re-applied later (e.g. when a page becomes visible again).
 *
 * @param scroll  the scrollable to move (ignored when null)
 * @param yOffset the new offset; negative values are ignored
 */
private void setScrollOffset(Object scroll, float yOffset) {
    if (scroll == null || yOffset < 0) {
        return;
    }
    scrollTo(scroll, yOffset);
    // Remember the current yOffset of this scrollable in the yOffsets map.
    yOffsets.put(scroll, (int) yOffset);
}
/**
 * Called when a scroller (RecyclerView/ListView, ScrollView, WebView) is scrolled by the user.
 * Dispatches the offset to the other scrollables, applies the parallax on the header
 * background, recolors the toolbar/tab strip according to the scroll percentage and
 * moves the logo/toolbar.
 *
 * @param source  the scroller that produced the event (may be null, e.g. from restoreScroll)
 * @param yOffset the scroller's current yOffset
 * @return true if the offset was consumed, false if it was unchanged or not yet computable
 */
public boolean onMaterialScrolled(Object source, float yOffset) {
    if (initialDistance == -1 || initialDistance == 0) {
        // Lazily measure the initial gap between the tab strip and the toolbar.
        initialDistance = mHeader.mPagerSlidingTabStrip.getTop() - mHeader.toolbar.getBottom();
    }
    // Only react if yOffset actually changed.
    if (yOffset == lastYOffset)
        return false;
    float scrollTop = -yOffset;
    {
        // Parallax scroll of the background ImageView (the KenBurnsView).
        if (mHeader.headerBackground != null) {
            if (this.settings.parallaxHeaderFactor != 0)
                ViewHelper.setTranslationY(mHeader.headerBackground, scrollTop / this.settings.parallaxHeaderFactor);
            // Never let the background slide below its origin.
            if (ViewHelper.getY(mHeader.headerBackground) >= 0)
                ViewHelper.setY(mHeader.headerBackground, 0);
        }
    }
    if (ENABLE_LOG)
        Log.d("yOffset", "" + yOffset);
    // Dispatch the new offset (clamped to [0, scrollMaxDp]) to all registered scrollables.
    dispatchScrollOffset(source, minMax(0, yOffset, scrollMaxDp));
    float percent = yOffset / scrollMax;
    // Recompute the percent from the actual distance between pager & toolbar,
    // which is robust to differing scrollable heights.
    float newDistance = ViewHelper.getY(mHeader.mPagerSlidingTabStrip) - mHeader.toolbar.getBottom();
    percent = 1 - newDistance / initialDistance;
    if (Float.isNaN(percent)) // fix for orientation change (initialDistance may be 0)
        return false;
    percent = minMax(0, percent, 1);
    {
        if (!settings.toolbarTransparent) {
            // BUGFIX: the toolbar color must track the scroll percentage; passing 0.0f
            // here left the toolbar permanently uncolored (upstream uses `percent`).
            setColorPercent(percent);
        } else {
            if (justToolbarAnimated) {
                if (toolbarJoinsTabs()) {
                    setColorPercent(1.0f);
                } else if (lastPercent != percent) {
                    // BUGFIX: fade the color back out instead of snapping it with
                    // setColorPercent(0.0f) (upstream animates over 200 ms).
                    animateColorPercent(0, 200);
                }
            }
        }
        if (mHeader.mPagerSlidingTabStrip != null) { // move the viewpager indicator
            if (ENABLE_LOG)
                Log.d(TAG, "" + scrollTop);
            if (scrollTop <= 0) {
                ViewHelper.setTranslationY(mHeader.mPagerSlidingTabStrip, scrollTop);
                ViewHelper.setTranslationY(mHeader.toolbarLayoutBackground, scrollTop);
                // Keep the tab strip pinned just under the toolbar once they meet.
                if (ViewHelper.getY(mHeader.mPagerSlidingTabStrip) < mHeader.getToolbar().getBottom()) {
                    float ty = mHeader.getToolbar().getBottom() - mHeader.mPagerSlidingTabStrip.getTop();
                    ViewHelper.setTranslationY(mHeader.mPagerSlidingTabStrip, ty);
                    ViewHelper.setTranslationY(mHeader.toolbarLayoutBackground, ty);
                }
            }
        }
        if (mHeader.mLogo != null) { // move the header logo to the toolbar
            if (this.settings.hideLogoWithFade) {
                ViewHelper.setAlpha(mHeader.mLogo, 1 - percent);
                ViewHelper.setTranslationY(mHeader.mLogo, (mHeader.finalTitleY - mHeader.originalTitleY) * percent);
            } else {
                // Translate and scale the logo towards its final toolbar position.
                ViewHelper.setTranslationY(mHeader.mLogo, (mHeader.finalTitleY - mHeader.originalTitleY) * percent);
                ViewHelper.setTranslationX(mHeader.mLogo, (mHeader.finalTitleX - mHeader.originalTitleX) * percent);
                float scale = (1 - percent) * (1 - mHeader.finalScale) + mHeader.finalScale;
                setScale(scale, mHeader.mLogo);
            }
        }
        if (this.settings.hideToolbarAndTitle && mHeader.toolbarLayout != null) {
            boolean scrollUp = lastYOffset < yOffset;
            if (scrollUp) {
                scrollUp(yOffset);
            } else {
                scrollDown(yOffset);
            }
        }
    }
    // Cancel any in-flight enter animation once we are no longer fully collapsed.
    if (headerAnimator != null && percent < 1) {
        if (headerAnimator instanceof ObjectAnimator)
            ((ObjectAnimator) headerAnimator).cancel();
        else if (headerAnimator instanceof android.animation.ObjectAnimator)
            ((android.animation.ObjectAnimator) headerAnimator).cancel();
        headerAnimator = null;
    }
    lastYOffset = yOffset;
    return true;
}
/** Handles an upward scroll by letting the toolbar layout follow the offset. */
private void scrollUp(float yOffset) {
    if (ENABLE_LOG) {
        Log.d(TAG, "scrollUp");
    }
    followScrollToolbarLayout(yOffset);
}
/**
 * Handles a downward scroll: far below the toolbar the layout animates back in,
 * otherwise it simply follows the scroll (unless an enter animation is running).
 */
private void scrollDown(float yOffset) {
    if (ENABLE_LOG) {
        Log.d(TAG, "scrollDown");
    }
    final boolean farBelowToolbar = yOffset > mHeader.toolbarLayout.getHeight() * 1.5f;
    if (farBelowToolbar) {
        animateEnterToolbarLayout(yOffset);
    } else if (headerAnimator != null) {
        // An enter animation is in flight: just mark the toolbar as visible.
        followScrollToolbarIsVisible = true;
    } else {
        headerYOffset = Float.MAX_VALUE;
        followScrollToolbarLayout(yOffset);
    }
}
/**
 * Changes the color of the status background, toolbar, toolbar layout and pager
 * title strip with an ARGB transition animation.
 *
 * @param color    the final color
 * @param duration the transition animation duration, in ms
 */
public void setColor(int color, int duration) {
    final ValueAnimator transition =
            ObjectAnimator.ofInt(mHeader.headerBackground, "backgroundColor", settings.color, color);
    transition.setEvaluator(new ArgbEvaluator());
    transition.setDuration(duration);
    transition.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
        @Override
        public void onAnimationUpdate(ValueAnimator animation) {
            final int newColor = (Integer) animation.getAnimatedValue();
            // Apply the interpolated color, faded by the current scroll percent.
            final int withAlpha = colorWithAlpha(newColor, lastPercent);
            mHeader.headerBackground.setBackgroundColor(withAlpha);
            mHeader.statusBackground.setBackgroundColor(withAlpha);
            mHeader.toolbar.setBackgroundColor(withAlpha);
            mHeader.toolbarLayoutBackground.setBackgroundColor(withAlpha);
            mHeader.mPagerSlidingTabStrip.setBackgroundColor(withAlpha);
            // Record the new color as MaterialViewPager's current color.
            settings.color = newColor;
        }
    });
    transition.start();
}
/**
 * Animates the color alpha from the current percent to {@code percent}
 * over {@code duration} ms, driving setColorPercent on each frame.
 */
public void animateColorPercent(float percent, int duration) {
    final ValueAnimator animator = ValueAnimator.ofFloat(lastPercent, percent);
    animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
        @Override
        public void onAnimationUpdate(ValueAnimator animation) {
            setColorPercent((float) animation.getAnimatedValue());
        }
    });
    animator.setDuration(duration);
    animator.start();
}
/**
 * Applies the header color with the given alpha percent to the status background,
 * and to the toolbar / tab strip only once fully collapsed (percent >= 1).
 * Also toggles the toolbar elevation when enabled and the toolbar meets the tabs.
 *
 * @param percent alpha fraction in [0, 1]
 */
public void setColorPercent(float percent) {
    // The status background always fades with the scroll.
    setBackgroundColor(
            colorWithAlpha(this.settings.color, percent),
            mHeader.statusBackground
    );
    // Toolbar, its background layer and the tab strip only become opaque when
    // the header is fully collapsed; otherwise they stay transparent.
    final float toolbarAlpha = (percent >= 1) ? percent : 0;
    setBackgroundColor(
            colorWithAlpha(this.settings.color, toolbarAlpha),
            mHeader.toolbar,
            mHeader.toolbarLayoutBackground,
            mHeader.mPagerSlidingTabStrip
    );
    if (this.settings.enableToolbarElevation && toolbarJoinsTabs()) {
        setElevation(
                (percent == 1) ? elevation : 0,
                mHeader.toolbar,
                mHeader.toolbarLayoutBackground,
                mHeader.mPagerSlidingTabStrip,
                mHeader.mLogo
        );
    }
    lastPercent = percent; // save for animateColorPercent / setColor
}
/** @return true when the bottom of the toolbar touches the (translated) top of the tab strip. */
private boolean toolbarJoinsTabs() {
    final float tabsTop =
            mHeader.mPagerSlidingTabStrip.getTop() + ViewHelper.getTranslationY(mHeader.mPagerSlidingTabStrip);
    return mHeader.toolbar.getBottom() == tabsTop;
}
/**
 * Moves the toolbar layout (containing toolbar & tabs) so it follows the
 * current scroll once the toolbar has joined the tabs; otherwise resets it.
 */
private void followScrollToolbarLayout(float yOffset) {
    // Toolbar not laid out yet — nothing to move.
    if (mHeader.toolbar.getBottom() == 0) {
        return;
    }
    if (!toolbarJoinsTabs()) {
        ViewHelper.setTranslationY(mHeader.toolbarLayout, 0);
        justToolbarAnimated = false;
    } else {
        // Anchor the follow on the first offset seen after the join.
        if (firstScrollValue == Float.MIN_VALUE) {
            firstScrollValue = yOffset;
        }
        final float translationY = firstScrollValue - yOffset;
        if (ENABLE_LOG) {
            Log.d(TAG, "translationY " + translationY);
        }
        ViewHelper.setTranslationY(mHeader.toolbarLayout, translationY);
    }
    followScrollToolbarIsVisible = (ViewHelper.getY(mHeader.toolbarLayout) >= 0);
}
/**
 * Animates the toolbar layout back onto screen (translationY -> 0).
 * Uses the framework ObjectAnimator on API > GINGERBREAD_MR1, otherwise the
 * NineOldAndroids ObjectAnimator; the running animator is kept in
 * {@code headerAnimator} so it can be cancelled by other code paths.
 *
 * @param yOffset the scroll offset at which the animation was requested
 */
private void animateEnterToolbarLayout(float yOffset) {
    // If the toolbar is hidden while an old animation is still running, cancel it
    // so a fresh enter animation can start from the current position.
    if (!followScrollToolbarIsVisible && headerAnimator != null) {
        if (headerAnimator instanceof ObjectAnimator)
            ((ObjectAnimator) headerAnimator).cancel();
        else if (headerAnimator instanceof android.animation.ObjectAnimator)
            ((android.animation.ObjectAnimator) headerAnimator).cancel();
        headerAnimator = null;
    }
    if (headerAnimator == null) {
        if (android.os.Build.VERSION.SDK_INT > Build.VERSION_CODES.GINGERBREAD_MR1) {
            // Framework animator path (API 11+).
            headerAnimator = android.animation.ObjectAnimator.ofFloat(mHeader.toolbarLayout, "translationY", 0).setDuration(ENTER_TOOLBAR_ANIMATION_DURATION);
            ((android.animation.ObjectAnimator) headerAnimator).addListener(new android.animation.AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(android.animation.Animator animation) {
                    super.onAnimationEnd(animation);
                    // The toolbar is back on screen: reset the follow state.
                    followScrollToolbarIsVisible = true;
                    firstScrollValue = Float.MIN_VALUE;
                    justToolbarAnimated = true;
                }
            });
            ((android.animation.ObjectAnimator) headerAnimator).start();
        } else {
            // Legacy NineOldAndroids path for old API levels.
            headerAnimator = ObjectAnimator.ofFloat(mHeader.toolbarLayout, "translationY", 0).setDuration(ENTER_TOOLBAR_ANIMATION_DURATION);
            ((ObjectAnimator) headerAnimator).addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    super.onAnimationEnd(animation);
                    followScrollToolbarIsVisible = true;
                    firstScrollValue = Float.MIN_VALUE;
                    justToolbarAnimated = true;
                }
            });
            ((ObjectAnimator) headerAnimator).start();
        }
        // Remember where the animation was triggered.
        headerYOffset = yOffset;
    }
}
/** @return the configured header height, in dp as provided by the layout attributes. */
public int getHeaderHeight() {
    return settings.headerHeight;
}
/**
 * @return true if this offset differs from the last dispatched one
 *         (always true before the first scroll, when lastYOffset is -1).
 */
protected boolean isNewYOffset(int yOffset) {
    return lastYOffset == -1 || yOffset != lastYOffset;
}
//region register scrollables
/**
 * Register a RecyclerView to the current MaterialViewPagerAnimator.
 * Listens to RecyclerView.OnScrollListener, so pass your own listener in
 * {@code onScrollListener} if you already use one (e.g. for load-more).
 *
 * @param recyclerView the scrollable (ignored when null)
 * @param onScrollListener optional client listener, forwarded before offsets are processed
 */
public void registerRecyclerView(final RecyclerView recyclerView, final RecyclerView.OnScrollListener onScrollListener) {
    if (recyclerView != null) {
        scrollViewList.add(recyclerView); // add to the scrollable list
        // Save the initial yOffset of this RecyclerView into the map.
        yOffsets.put(recyclerView, recyclerView.getScrollY());
        // RecyclerView only reports deltas, so accumulate dy into our own offset.
        recyclerView.addOnScrollListener(new RecyclerView.OnScrollListener() {
            // Suppresses the first offset==0 event so it is not broadcast to siblings.
            boolean firstZeroPassed;
            @Override
            public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
                super.onScrollStateChanged(recyclerView, newState);
                if (onScrollListener != null)
                    onScrollListener.onScrollStateChanged(recyclerView, newState);
            }
            @Override
            public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
                super.onScrolled(recyclerView, dx, dy);
                if (onScrollListener != null)
                    onScrollListener.onScrolled(recyclerView, dx, dy);
                // Accumulate the delta into the stored absolute offset.
                int yOffset = yOffsets.get(recyclerView);
                yOffset += dy;
                yOffsets.put(recyclerView, yOffset); // save the new offset
                // The first time we reach 0, don't share it with the other scrollables.
                if (yOffset == 0 && !firstZeroPassed) {
                    firstZeroPassed = true;
                    return;
                }
                // Only dispatch if yOffset changed.
                if (isNewYOffset(yOffset))
                    onMaterialScrolled(recyclerView, yOffset);
            }
        });
        // After layout, align this RecyclerView with the last known global offset.
        recyclerView.post(new Runnable() {
            @Override
            public void run() {
                setScrollOffset(recyclerView, lastYOffset);
            }
        });
    }
}
/**
 * Register an ObservableScrollView to the current MaterialViewPagerAnimator.
 * Listens to ObservableScrollViewCallbacks, so pass your own callbacks in
 * {@code observableScrollViewCallbacks} if you already use them (e.g. for load-more).
 *
 * @param scrollView the scrollable (ignored when null)
 * @param observableScrollViewCallbacks optional client callbacks, forwarded first
 */
public void registerScrollView(final ObservableScrollView scrollView, final ObservableScrollViewCallbacks observableScrollViewCallbacks) {
    if (scrollView != null) {
        scrollViewList.add(scrollView); // add to the scrollable list
        // Let the grandparent ViewGroup intercept touches so header drags work.
        if (scrollView.getParent() != null && scrollView.getParent().getParent() != null && scrollView.getParent().getParent() instanceof ViewGroup)
            scrollView.setTouchInterceptionViewGroup((ViewGroup) scrollView.getParent().getParent());
        scrollView.setScrollViewCallbacks(new ObservableScrollViewCallbacks() {
            // Suppresses the first offset==0 event so it is not broadcast to siblings.
            boolean firstZeroPassed;
            @Override
            public void onScrollChanged(int yOffset, boolean b, boolean b2) {
                if (observableScrollViewCallbacks != null)
                    observableScrollViewCallbacks.onScrollChanged(yOffset, b, b2);
                // The first time we reach 0, don't share it with the other scrollables.
                if (yOffset == 0 && !firstZeroPassed) {
                    firstZeroPassed = true;
                    return;
                }
                // Only dispatch if yOffset changed.
                if (isNewYOffset(yOffset))
                    onMaterialScrolled(scrollView, yOffset);
            }
            @Override
            public void onDownMotionEvent() {
                if (observableScrollViewCallbacks != null)
                    observableScrollViewCallbacks.onDownMotionEvent();
            }
            @Override
            public void onUpOrCancelMotionEvent(ScrollState scrollState) {
                if (observableScrollViewCallbacks != null)
                    observableScrollViewCallbacks.onUpOrCancelMotionEvent(scrollState);
            }
        });
        // After layout, align this ScrollView with the last known global offset.
        scrollView.post(new Runnable() {
            @Override
            public void run() {
                setScrollOffset(scrollView, lastYOffset);
            }
        });
    }
}
/**
 * Register an ObservableWebView to the current MaterialViewPagerAnimator.
 * Listens to ObservableScrollViewCallbacks, so pass your own callbacks in
 * {@code observableScrollViewCallbacks} if you already use them (e.g. for load-more).
 *
 * @param webView the scrollable (ignored when null)
 * @param observableScrollViewCallbacks optional client callbacks, forwarded first
 */
public void registerWebView(final ObservableWebView webView, final ObservableScrollViewCallbacks observableScrollViewCallbacks) {
    if (webView != null) {
        // First scrollable registered: seed the animator with the WebView's offset.
        if (scrollViewList.isEmpty())
            onMaterialScrolled(webView, webView.getCurrentScrollY());
        scrollViewList.add(webView); // add to the scrollable list
        webView.setScrollViewCallbacks(new ObservableScrollViewCallbacks() {
            @Override
            public void onScrollChanged(int yOffset, boolean b, boolean b2) {
                if (observableScrollViewCallbacks != null)
                    observableScrollViewCallbacks.onScrollChanged(yOffset, b, b2);
                if (isNewYOffset(yOffset))
                    onMaterialScrolled(webView, yOffset);
            }
            @Override
            public void onDownMotionEvent() {
                if (observableScrollViewCallbacks != null)
                    observableScrollViewCallbacks.onDownMotionEvent();
            }
            @Override
            public void onUpOrCancelMotionEvent(ScrollState scrollState) {
                if (observableScrollViewCallbacks != null)
                    observableScrollViewCallbacks.onUpOrCancelMotionEvent(scrollState);
            }
        });
        // NOTE(review): the other register* methods pass lastYOffset (positive) and do it
        // in a post(); the negation here looks suspicious — confirm against upstream.
        this.setScrollOffset(webView, -lastYOffset);
    }
}
//endregion
/**
 * Re-applies a saved scroll offset after a configuration change. Retries every
 * 100 ms on the main looper until the scrollables are restored and the offset
 * is actually consumed by {@link #onMaterialScrolled}.
 *
 * @param scroll   the offset to restore
 * @param settings the pager settings captured with the saved state
 */
public void restoreScroll(final float scroll, final MaterialViewPagerSettings settings) {
    new Handler(Looper.getMainLooper()).postDelayed(new Runnable() {
        @Override
        public void run() {
            final boolean applied = onMaterialScrolled(null, scroll);
            if (!applied) {
                // Views not ready yet — try again shortly.
                restoreScroll(scroll, settings);
            }
        }
    }, 100);
}
/**
 * Called when the ViewPager page changes: re-syncs the toolbar with the last
 * offset and, if the newly visible page cannot scroll, resets everything to top.
 */
public void onViewPagerPageChanged() {
    scrollDown(lastYOffset);
    final View visibleView = getTheVisibileView(scrollViewList);
    if (!canScroll(visibleView)) {
        // The new page is too short to scroll: pin the toolbar and reset offsets.
        followScrollToolbarLayout(0);
        onMaterialScrolled(visibleView, 0);
    }
    if (justToolbarAnimated && toolbarJoinsTabs()) {
        setColorPercent(1.0f);
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.bk;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.bookkeeper.client.LedgerHandle;
import org.apache.bookkeeper.common.concurrent.FutureEventListener;
import org.apache.bookkeeper.common.concurrent.FutureUtils;
import org.apache.bookkeeper.util.ZkUtils;
import org.apache.bookkeeper.versioning.LongVersion;
import org.apache.bookkeeper.versioning.Versioned;
import org.apache.distributedlog.BookKeeperClient;
import org.apache.distributedlog.DistributedLogConfiguration;
import org.apache.distributedlog.ZooKeeperClient;
import org.apache.distributedlog.exceptions.DLInterruptedException;
import org.apache.distributedlog.util.Transaction;
import org.apache.distributedlog.util.Utils;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link LedgerAllocator} that maintains a pool of {@link SimpleLedgerAllocator}s
 * under a shared ZooKeeper path.
 *
 * <p>Each child znode of {@code poolPath} backs one simple allocator. Allocators move
 * between three states guarded by the pool's monitor: {@code pendingList} (idle, ready
 * to allocate), {@code allocatingList} (an allocation has been issued), and
 * {@code obtainMap}/{@code reverseObtainMap} (a ledger has been handed to a client
 * pending transaction commit/abort). Allocators that hit errors are parked in
 * {@code rescueMap} while being re-created from their znode data.
 *
 * <p>Thread-safety: all list/map mutations are synchronized on {@code this};
 * ZooKeeper callbacks run on ZK callback threads.
 */
public class LedgerAllocatorPool implements LedgerAllocator {
    private static final Logger logger = LoggerFactory.getLogger(LedgerAllocatorPool.class);

    private final DistributedLogConfiguration conf;
    private final QuorumConfigProvider quorumConfigProvider;
    private final BookKeeperClient bkc;
    private final ZooKeeperClient zkc;
    private final ScheduledExecutorService scheduledExecutorService;
    // ZooKeeper path under which the allocator znodes live.
    private final String poolPath;
    // Minimum number of allocator znodes kept in the pool.
    private final int corePoolSize;

    // Idle allocators ready to serve allocate().
    private final LinkedList<SimpleLedgerAllocator> pendingList =
            new LinkedList<SimpleLedgerAllocator>();
    // Allocators with an outstanding allocation request.
    private final LinkedList<SimpleLedgerAllocator> allocatingList =
            new LinkedList<SimpleLedgerAllocator>();
    // Allocators currently being rescued, keyed by their allocate path.
    private final Map<String, SimpleLedgerAllocator> rescueMap =
            new HashMap<String, SimpleLedgerAllocator>();
    // Ledger handle -> allocator that produced it (and the reverse), for obtained ledgers.
    private final Map<LedgerHandle, SimpleLedgerAllocator> obtainMap =
            new HashMap<LedgerHandle, SimpleLedgerAllocator>();
    private final Map<SimpleLedgerAllocator, LedgerHandle> reverseObtainMap =
            new HashMap<SimpleLedgerAllocator, LedgerHandle>();

    /**
     * Creates the pool and synchronously initializes it: ensures {@code poolPath}
     * exists, tops the pool up to {@code corePoolSize} znodes, and builds one
     * simple allocator per znode.
     *
     * @throws IOException on ZooKeeper failures during initialization
     */
    public LedgerAllocatorPool(String poolPath, int corePoolSize,
                               DistributedLogConfiguration conf,
                               ZooKeeperClient zkc,
                               BookKeeperClient bkc,
                               ScheduledExecutorService scheduledExecutorService) throws IOException {
        this.poolPath = poolPath;
        this.corePoolSize = corePoolSize;
        this.conf = conf;
        this.quorumConfigProvider =
                new ImmutableQuorumConfigProvider(conf.getQuorumConfig());
        this.zkc = zkc;
        this.bkc = bkc;
        this.scheduledExecutorService = scheduledExecutorService;
        initializePool();
    }

    @Override
    public void start() throws IOException {
        // NOTE(review): pendingList is iterated without holding the pool lock here;
        // presumably start() runs before the pool is shared — confirm with callers.
        for (LedgerAllocator allocator : pendingList) {
            // issue allocating requests during initialize
            allocator.allocate();
        }
    }

    @VisibleForTesting
    synchronized int pendingListSize() {
        return pendingList.size();
    }

    @VisibleForTesting
    synchronized int allocatingListSize() {
        return allocatingList.size();
    }

    @VisibleForTesting
    public synchronized int obtainMapSize() {
        return obtainMap.size();
    }

    @VisibleForTesting
    synchronized int rescueSize() {
        return rescueMap.size();
    }

    @VisibleForTesting
    synchronized SimpleLedgerAllocator getLedgerAllocator(LedgerHandle lh) {
        return obtainMap.get(lh);
    }

    /**
     * Ensures the pool path exists, creates missing allocator znodes up to
     * {@code corePoolSize}, and initializes an allocator per znode.
     */
    private void initializePool() throws IOException {
        try {
            List<String> allocators;
            try {
                allocators = zkc.get().getChildren(poolPath, false);
            } catch (KeeperException.NoNodeException e) {
                // First use of this pool: create the full path, then re-list.
                logger.info("Allocator Pool {} doesn't exist. Creating it.", poolPath);
                ZkUtils.createFullPathOptimistic(zkc.get(), poolPath, new byte[0], zkc.getDefaultACL(),
                        CreateMode.PERSISTENT);
                allocators = zkc.get().getChildren(poolPath, false);
            }
            if (null == allocators) {
                allocators = new ArrayList<String>();
            }
            if (allocators.size() < corePoolSize) {
                // Top the pool up to its core size, then re-read the children.
                createAllocators(corePoolSize - allocators.size());
                allocators = zkc.get().getChildren(poolPath, false);
            }
            initializeAllocators(allocators);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            throw new DLInterruptedException("Interrupted when ensuring " + poolPath + " created : ", ie);
        } catch (KeeperException ke) {
            throw new IOException("Encountered zookeeper exception when initializing pool " + poolPath + " : ", ke);
        }
    }

    /**
     * Asynchronously creates {@code numAllocators} sequential znodes under the pool
     * path and blocks until all succeed or any one fails.
     *
     * @throws IOException if at least one creation failed
     */
    private void createAllocators(int numAllocators) throws InterruptedException, IOException {
        final AtomicInteger numPendings = new AtomicInteger(numAllocators);
        final AtomicInteger numFailures = new AtomicInteger(0);
        final CountDownLatch latch = new CountDownLatch(1);
        AsyncCallback.StringCallback createCallback = new AsyncCallback.StringCallback() {
            @Override
            public void processResult(int rc, String path, Object ctx, String name) {
                if (KeeperException.Code.OK.intValue() != rc) {
                    // Any failure releases the latch immediately; remaining
                    // callbacks are irrelevant once numFailures > 0.
                    numFailures.incrementAndGet();
                    latch.countDown();
                    return;
                }
                if (numPendings.decrementAndGet() == 0 && numFailures.get() == 0) {
                    latch.countDown();
                }
            }
        };
        for (int i = 0; i < numAllocators; i++) {
            // PERSISTENT_SEQUENTIAL gives each allocator a unique "A<seq>" name.
            zkc.get().create(poolPath + "/A", new byte[0],
                    zkc.getDefaultACL(),
                    CreateMode.PERSISTENT_SEQUENTIAL,
                    createCallback, null);
        }
        latch.await();
        if (numFailures.get() > 0) {
            throw new IOException("Failed to create " + numAllocators + " allocators.");
        }
    }

    /**
     * Initialize simple allocators with given list of allocator names <i>allocators</i>.
     * It initializes a simple allocator with its simple allocator path.
     */
    private void initializeAllocators(List<String> allocators) throws IOException, InterruptedException {
        final AtomicInteger numPendings = new AtomicInteger(allocators.size());
        final AtomicInteger numFailures = new AtomicInteger(0);
        // Zero-count latch when there is nothing to initialize, so await() returns at once.
        final CountDownLatch latch = new CountDownLatch(numPendings.get() > 0 ? 1 : 0);
        AsyncCallback.DataCallback dataCallback = new AsyncCallback.DataCallback() {
            @Override
            public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) {
                if (KeeperException.Code.OK.intValue() != rc) {
                    numFailures.incrementAndGet();
                    latch.countDown();
                    return;
                }
                // Rebuild the allocator from its znode data + version.
                Versioned<byte[]> allocatorData =
                        new Versioned<byte[]>(data, new LongVersion(stat.getVersion()));
                SimpleLedgerAllocator allocator =
                        new SimpleLedgerAllocator(path, allocatorData, quorumConfigProvider, zkc, bkc);
                allocator.start();
                pendingList.add(allocator);
                if (numPendings.decrementAndGet() == 0 && numFailures.get() == 0) {
                    latch.countDown();
                }
            }
        };
        for (String name : allocators) {
            String path = poolPath + "/" + name;
            zkc.get().getData(path, false, dataCallback, null);
        }
        latch.await();
        if (numFailures.get() > 0) {
            throw new IOException("Failed to initialize allocators : " + allocators);
        }
    }

    /**
     * Schedules a retry of {@link #rescueAllocator} after the configured ZK retry
     * backoff; a rejected execution (executor shut down) is logged and dropped.
     */
    private void scheduleAllocatorRescue(final SimpleLedgerAllocator ledgerAllocator) {
        try {
            scheduledExecutorService.schedule(new Runnable() {
                @Override
                public void run() {
                    try {
                        rescueAllocator(ledgerAllocator);
                    } catch (DLInterruptedException dle) {
                        Thread.currentThread().interrupt();
                    }
                }
            }, conf.getZKRetryBackoffStartMillis(), TimeUnit.MILLISECONDS);
        } catch (RejectedExecutionException ree) {
            logger.warn("Failed to schedule rescuing ledger allocator {} : ", ledgerAllocator.allocatePath, ree);
        }
    }

    /**
     * Rescue a ledger allocator from an ERROR state.
     * @param ledgerAllocator
     *          ledger allocator to rescue
     */
    private void rescueAllocator(final SimpleLedgerAllocator ledgerAllocator) throws DLInterruptedException {
        SimpleLedgerAllocator oldAllocator;
        synchronized (this) {
            // rescueMap doubles as a "rescue in progress" marker per allocate path.
            oldAllocator = rescueMap.put(ledgerAllocator.allocatePath, ledgerAllocator);
        }
        if (oldAllocator != null) {
            // Another rescue for the same path is already running.
            logger.info("ledger allocator {} is being rescued.", ledgerAllocator.allocatePath);
            return;
        }
        try {
            zkc.get().getData(ledgerAllocator.allocatePath, false, new AsyncCallback.DataCallback() {
                @Override
                public void processResult(int rc, String path, Object ctx, byte[] data, Stat stat) {
                    boolean retry = false;
                    SimpleLedgerAllocator newAllocator = null;
                    if (KeeperException.Code.OK.intValue() == rc) {
                        // Rebuild a fresh allocator from the znode's current data.
                        Versioned<byte[]> allocatorData =
                                new Versioned<byte[]>(data, new LongVersion(stat.getVersion()));
                        logger.info("Rescuing ledger allocator {}.", path);
                        newAllocator = new SimpleLedgerAllocator(path, allocatorData, quorumConfigProvider, zkc, bkc);
                        newAllocator.start();
                        logger.info("Rescued ledger allocator {}.", path);
                    } else if (KeeperException.Code.NONODE.intValue() == rc) {
                        logger.info("Ledger allocator {} doesn't exist, skip rescuing it.", path);
                    } else {
                        // Transient ZK error: clear the marker and retry later.
                        retry = true;
                    }
                    synchronized (LedgerAllocatorPool.this) {
                        rescueMap.remove(ledgerAllocator.allocatePath);
                        if (null != newAllocator) {
                            pendingList.addLast(newAllocator);
                        }
                    }
                    if (retry) {
                        scheduleAllocatorRescue(ledgerAllocator);
                    }
                }
            }, null);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            logger.warn("Interrupted on rescuing ledger allocator {} : ", ledgerAllocator.allocatePath, ie);
            synchronized (LedgerAllocatorPool.this) {
                rescueMap.remove(ledgerAllocator.allocatePath);
            }
            throw new DLInterruptedException("Interrupted on rescuing ledger allocator "
                    + ledgerAllocator.allocatePath, ie);
        } catch (IOException ioe) {
            logger.warn("Failed to rescue ledger allocator {}, retry rescuing it later : ",
                    ledgerAllocator.allocatePath, ioe);
            synchronized (LedgerAllocatorPool.this) {
                rescueMap.remove(ledgerAllocator.allocatePath);
            }
            scheduleAllocatorRescue(ledgerAllocator);
        }
    }

    @Override
    public void allocate() throws IOException {
        SimpleLedgerAllocator allocator;
        synchronized (this) {
            if (pendingList.isEmpty()) {
                // if no ledger allocator available, we should fail it immediately,
                // which the request will be redirected to other proxies
                throw new IOException("No ledger allocator available under " + poolPath + ".");
            } else {
                allocator = pendingList.removeFirst();
            }
        }
        boolean success = false;
        try {
            allocator.allocate();
            synchronized (this) {
                allocatingList.addLast(allocator);
            }
            success = true;
        } finally {
            // If allocate() threw, try to rescue the allocator instead of losing it.
            if (!success) {
                rescueAllocator(allocator);
            }
        }
    }

    /**
     * Hands an allocating allocator's ledger to the caller inside {@code txn}.
     * On commit the allocator is returned to the pending pool; on abort it is rescued.
     */
    @Override
    public CompletableFuture<LedgerHandle> tryObtain(final Transaction<Object> txn,
                                                     final Transaction.OpListener<LedgerHandle> listener) {
        final SimpleLedgerAllocator allocator;
        synchronized (this) {
            if (allocatingList.isEmpty()) {
                return FutureUtils.exception(new IOException("No ledger allocator available under " + poolPath + "."));
            } else {
                allocator = allocatingList.removeFirst();
            }
        }
        final CompletableFuture<LedgerHandle> tryObtainPromise = new CompletableFuture<LedgerHandle>();
        final FutureEventListener<LedgerHandle> tryObtainListener = new FutureEventListener<LedgerHandle>() {
            @Override
            public void onSuccess(LedgerHandle lh) {
                synchronized (LedgerAllocatorPool.this) {
                    // Track the obtained ledger in both directions until the txn resolves.
                    obtainMap.put(lh, allocator);
                    reverseObtainMap.put(allocator, lh);
                    tryObtainPromise.complete(lh);
                }
            }

            @Override
            public void onFailure(Throwable cause) {
                try {
                    rescueAllocator(allocator);
                } catch (IOException ioe) {
                    logger.info("Failed to rescue allocator {}", allocator.allocatePath, ioe);
                }
                tryObtainPromise.completeExceptionally(cause);
            }
        };
        allocator.tryObtain(txn, new Transaction.OpListener<LedgerHandle>() {
            @Override
            public void onCommit(LedgerHandle lh) {
                confirmObtain(allocator);
                listener.onCommit(lh);
            }

            @Override
            public void onAbort(Throwable t) {
                abortObtain(allocator);
                listener.onAbort(t);
            }
        }).whenComplete(tryObtainListener);
        return tryObtainPromise;
    }

    /** Commit path: forget the obtained ledger and recycle the allocator. */
    void confirmObtain(SimpleLedgerAllocator allocator) {
        // NOTE(review): the map cleanup and the re-queue are two separate
        // synchronized blocks, leaving a brief window where the allocator is in
        // neither structure — looks benign, but confirm no invariant relies on it.
        synchronized (this) {
            LedgerHandle lh = reverseObtainMap.remove(allocator);
            if (null != lh) {
                obtainMap.remove(lh);
            }
        }
        synchronized (this) {
            pendingList.addLast(allocator);
        }
    }

    /** Abort path: forget the obtained ledger and rescue the allocator. */
    void abortObtain(SimpleLedgerAllocator allocator) {
        synchronized (this) {
            LedgerHandle lh = reverseObtainMap.remove(allocator);
            if (null != lh) {
                obtainMap.remove(lh);
            }
        }
        // if a ledger allocator is aborted, it is better to rescue it. since the ledger allocator might
        // already encounter BadVersion exception.
        try {
            rescueAllocator(allocator);
        } catch (DLInterruptedException e) {
            logger.warn("Interrupted on rescuing ledger allocator pool {} : ", poolPath, e);
            Thread.currentThread().interrupt();
        }
    }

    @Override
    public CompletableFuture<Void> asyncClose() {
        List<LedgerAllocator> allocatorsToClose;
        synchronized (this) {
            // Snapshot every allocator in any state so close() covers them all.
            allocatorsToClose = Lists.newArrayListWithExpectedSize(
                    pendingList.size() + allocatingList.size() + obtainMap.size());
            allocatorsToClose.addAll(pendingList);
            allocatorsToClose.addAll(allocatingList);
            allocatorsToClose.addAll(obtainMap.values());
        }
        return FutureUtils.processList(
                allocatorsToClose,
                allocator -> allocator.asyncClose(),
                scheduledExecutorService
        ).thenApply(values -> null);
    }

    @Override
    public CompletableFuture<Void> delete() {
        List<LedgerAllocator> allocatorsToDelete;
        synchronized (this) {
            allocatorsToDelete = Lists.newArrayListWithExpectedSize(
                    pendingList.size() + allocatingList.size() + obtainMap.size());
            allocatorsToDelete.addAll(pendingList);
            allocatorsToDelete.addAll(allocatingList);
            allocatorsToDelete.addAll(obtainMap.values());
        }
        // Delete every allocator, then remove the pool path itself (any version).
        return FutureUtils.processList(
                allocatorsToDelete,
                allocator -> allocator.delete(),
                scheduledExecutorService
        ).thenCompose(values -> Utils.zkDelete(zkc, poolPath, new LongVersion(-1)));
    }
}
| |
package net.openhft.chronicle.queue.impl.single;
import net.openhft.chronicle.core.Jvm;
import net.openhft.chronicle.core.io.Closeable;
import net.openhft.chronicle.core.io.IOTools;
import net.openhft.chronicle.core.onoes.LogLevel;
import net.openhft.chronicle.core.threads.InterruptedRuntimeException;
import net.openhft.chronicle.queue.QueueTestCommon;
import net.openhft.chronicle.queue.impl.TableStore;
import net.openhft.chronicle.queue.impl.table.Metadata;
import net.openhft.chronicle.queue.impl.table.SingleTableBuilder;
import net.openhft.chronicle.testframework.process.ProcessRunner;
import net.openhft.chronicle.threads.Pauser;
import net.openhft.chronicle.threads.Threads;
import net.openhft.chronicle.wire.UnrecoverableTimeoutException;
import org.jetbrains.annotations.NotNull;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Collection;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.junit.Assert.*;
public class TSQueueLockTest extends QueueTestCommon {
    // Name under which test locks are created (used by the createTestLock helpers).
    private static final String TEST_LOCK_NAME = "testLock";
    // Default lock timeout in ms — NOTE(review): not referenced in the visible
    // portion of this class; presumably used by createTestLock further down.
    private static final long TIMEOUT_MS = 100;
    // Backing table store shared by the tests; created in setUp, closed in tearDown.
    private TableStore<Metadata.NoMeta> tableStore;
/** Creates a fresh temp directory and a binary table store backing the lock under test. */
@Before
public void setUp() {
    final Path baseDir = IOTools.createTempDirectory(getClass().getSimpleName());
    baseDir.toFile().mkdirs();
    tableStore = SingleTableBuilder
            .binary(baseDir.resolve("test_store.cq4t"), Metadata.NoMeta.INSTANCE)
            .build();
}
/** Releases the table store created in setUp; quiet close tolerates a null/closed store. */
@After
public void tearDown() {
    Closeable.closeQuietly(tableStore);
}
/**
 * A thread interrupted while blocked in acquireLock() must surface an
 * IllegalStateException rather than acquiring or hanging.
 */
@Test(timeout = 5_000)
public void lockWillThrowIllegalStateExceptionIfInterruptedWhileWaitingForLock() throws InterruptedException {
    try (final TSQueueLock testLock = createTestLock(tableStore, 5_000)) {
        // Hold the lock on the main thread so the waiter has to block.
        testLock.acquireLock();
        final AtomicBoolean sawIllegalState = new AtomicBoolean(false);
        final Thread waiter = new Thread(() -> {
            try {
                testLock.acquireLock();
            } catch (IllegalStateException expected) {
                sawIllegalState.set(true);
            }
        });
        waiter.start();
        // Give the waiter a moment to block, then interrupt it.
        Jvm.pause(10);
        waiter.interrupt();
        waiter.join();
        assertTrue(sawIllegalState.get());
    }
}
/**
 * isLockedByCurrentProcess must report true while this process holds the lock
 * (without invoking the PID callback) and false with PID=UNLOCKED after unlock.
 */
@Test(timeout = 5_000)
public void testIsLockedByCurrentProcess() {
    AtomicLong actualPid = new AtomicLong(-1);
    try (final TSQueueLock testLock = createTestLock()) {
        testLock.acquireLock();
        assertTrue(testLock.isLockedByCurrentProcess(actualPid::set));
        // Callback not invoked while we hold the lock, so the sentinel remains.
        assertEquals(-1, actualPid.get());
        testLock.unlock();
        assertFalse(testLock.isLockedByCurrentProcess(actualPid::set));
        // After unlock the callback receives the UNLOCKED marker value.
        assertEquals(TSQueueLock.UNLOCKED, actualPid.get());
    }
}
/**
 * With a short (50 ms) timeout, a second acquireLock must force-unlock the
 * previous holder and succeed, logging a "Forced unlock" warning.
 */
@Test(timeout = 5_000)
public void lockWillBeAcquiredAfterTimeoutWithAWarning() throws InterruptedException {
    try (final TSQueueLock testLock = createTestLock(tableStore, 50)) {
        // Acquire on a different thread so this thread isn't the lock holder.
        Thread t = new Thread(testLock::acquireLock);
        t.start();
        t.join();
        // Exceeds the 50 ms timeout, so this acquires via forced unlock.
        testLock.acquireLock();
        assertTrue(exceptions.keySet().stream()
                .anyMatch(ek -> ek.level == LogLevel.WARN && ek.clazz == TSQueueLock.class && ek.message.startsWith("Forced unlock")));
        // Mark the warnings as expected so QueueTestCommon's teardown doesn't fail.
        expectException("Unlocking forcibly");
        expectException("Forced unlock");
    }
}
@Test(timeout = 5_000, expected = UnrecoverableTimeoutException.class)
public void lockWillThrowExceptionAfterTimeoutWhenDontRecoverLockTimeoutIsTrue() throws InterruptedException {
expectException("queue.dont.recover.lock.timeout property is deprecated and will be removed");
System.setProperty("queue.dont.recover.lock.timeout", "true");
try (final TSQueueLock testLock = createTestLock(tableStore, 50)) {
Thread t = new Thread(testLock::acquireLock);
t.start();
t.join();
testLock.acquireLock();
fail("Should have thrown trying to lock()");
} finally {
System.clearProperty("queue.dont.recover.lock.timeout");
}
}
@Test(timeout = 5_000, expected = UnrecoverableTimeoutException.class)
public void lockWillThrowExceptionAfterTimeoutWhenOnlyUnlockIfProcessDeadIsTrue() throws InterruptedException {
System.setProperty("queue.force.unlock.mode", "LOCKING_PROCESS_DEAD");
expectException("Couldn't acquire lock after");
try (final TSQueueLock testLock = createTestLock(tableStore, 50)) {
Thread t = new Thread(testLock::acquireLock);
t.start();
t.join();
testLock.acquireLock();
fail("Should have thrown trying to lock()");
} finally {
System.clearProperty("queue.force.unlock.mode");
}
}
@Test(timeout = 5_000)
public void unlockWillWarnIfNotLocked() {
try (final TSQueueLock testLock = createTestLock()) {
testLock.unlock();
assertTrue(exceptions.keySet().stream()
.anyMatch(ek -> ek.level == LogLevel.WARN && ek.clazz == TSQueueLock.class && ek.message.startsWith("Queue lock was locked by another thread")));
expectException("Queue lock was locked by another thread");
}
}
@Test(timeout = 5_000)
public void unlockWillNotUnlockAndWarnIfLockedByAnotherProcess() throws IOException, InterruptedException, TimeoutException {
try (final TSQueueLock testLock = createTestLock()) {
final Process process = runLockingProcess(true);
waitForLockToBecomeLocked(testLock);
testLock.unlock();
assertTrue(testLock.isLocked());
assertTrue(exceptions.keySet().stream()
.anyMatch(ek -> ek.level == LogLevel.WARN && ek.clazz == TSQueueLock.class && ek.message.startsWith("Queue lock was locked by another thread")));
expectException("Queue lock was locked by another thread");
process.destroy();
process.waitFor();
}
}
@Test(timeout = 15_000)
public void lockPreventsConcurrentAcquisition() {
AtomicBoolean lockIsAcquired = new AtomicBoolean(false);
try (final TSQueueLock testLock = createTestLock(tableStore, 10_000)) {
int numThreads = Math.min(6, Runtime.getRuntime().availableProcessors());
ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
CyclicBarrier barrier = new CyclicBarrier(numThreads);
final Collection<Future<?>> futures = IntStream.range(0, numThreads)
.mapToObj(v -> executorService.submit(new LockAcquirer(testLock, lockIsAcquired, 30, barrier)))
.collect(Collectors.toList());
futures.forEach(fut -> {
try {
fut.get();
} catch (Exception e) {
throw new RuntimeException(e);
}
});
Threads.shutdown(executorService);
}
}
@Test(timeout = 5_000)
public void forceUnlockIfProcessIsDeadWillFailWhenLockingProcessIsAlive() throws IOException, TimeoutException, InterruptedException {
Process lockingProcess = runLockingProcess(true);
try (TSQueueLock lock = createTestLock()) {
waitForLockToBecomeLocked(lock);
assertFalse(lock.forceUnlockIfProcessIsDead());
assertTrue(lock.isLocked());
}
lockingProcess.destroy();
lockingProcess.waitFor(5_000, TimeUnit.SECONDS);
}
@Test(timeout = 5_000)
public void forceUnlockIfProcessIsDeadWillSucceedWhenLockingProcessIsDead() throws IOException, TimeoutException, InterruptedException {
Process lockingProcess = runLockingProcess(false);
try (TSQueueLock lock = createTestLock()) {
waitForLockToBecomeLocked(lock);
lockingProcess.destroy();
lockingProcess.waitFor(5_000, TimeUnit.SECONDS);
assertTrue(lock.forceUnlockIfProcessIsDead());
assertFalse(lock.isLocked());
}
}
@Test(timeout = 5_000)
public void forceUnlockIfProcessIsDeadWillSucceedWhenLockIsNotLocked() {
try (TSQueueLock lock = createTestLock()) {
assertTrue(lock.forceUnlockIfProcessIsDead());
assertFalse(lock.isLocked());
}
}
private void waitForLockToBecomeLocked(TSQueueLock lock) throws TimeoutException {
Pauser p = Pauser.balanced();
while (!lock.isLocked()) {
p.pause(5_000, TimeUnit.SECONDS);
if (Thread.currentThread().isInterrupted()) {
throw new InterruptedRuntimeException("Interrupted waiting for lock to lock");
}
}
}
private TSQueueLock createTestLock() {
return createTestLock(tableStore, TIMEOUT_MS);
}
@NotNull
private static TSQueueLock createTestLock(TableStore<Metadata.NoMeta> tableStore, long timeoutMilliseconds) {
return new TSQueueLock(tableStore, Pauser::balanced, timeoutMilliseconds);
}
private Process runLockingProcess(boolean releaseAfterInterrupt) throws IOException {
return ProcessRunner.runClass(LockAndHoldUntilInterrupted.class,
tableStore.file().getAbsolutePath(), String.valueOf(releaseAfterInterrupt));
}
private static void lockAndHoldUntilInterrupted(String tableStorePath, boolean releaseWhenInterrupted) {
try (TableStore<Metadata.NoMeta> tableStore = SingleTableBuilder.binary(tableStorePath, Metadata.NoMeta.INSTANCE).build();
TSQueueLock lock = createTestLock(tableStore, 15_000)) {
lock.acquireLock();
while (!Thread.currentThread().isInterrupted()) {
Jvm.pause(100);
}
if (releaseWhenInterrupted) {
lock.unlock();
}
}
}
static class LockAndHoldUntilInterrupted {
public static void main(String[] args) {
lockAndHoldUntilInterrupted(args[0], Boolean.parseBoolean(args[1]));
}
}
static class LockAcquirer implements Runnable {
private final TSQueueLock TSQueueLock;
private final AtomicBoolean lockIsAcquired;
private final int numberOfIterations;
private final CyclicBarrier barrier;
LockAcquirer(TSQueueLock TSQueueLock, AtomicBoolean lockIsAcquired, int numberOfIterations, CyclicBarrier barrier) {
this.TSQueueLock = TSQueueLock;
this.lockIsAcquired = lockIsAcquired;
this.numberOfIterations = numberOfIterations;
this.barrier = barrier;
}
@Override
public void run() {
try {
barrier.await();
for (int i = 0; i < numberOfIterations; i++) {
TSQueueLock.acquireLock();
try {
lockIsAcquired.compareAndSet(false, true);
Jvm.pause(10);
lockIsAcquired.compareAndSet(true, false);
} finally {
TSQueueLock.unlock();
Jvm.pause(1);
}
}
} catch (Exception e) {
throw new AssertionError(e);
}
}
}
}
| |
/*******************************************************************************
* Copyright 2012 bmanuel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.blastedstudios.freeboot.ui.postprocessing.filters;
import com.badlogic.gdx.utils.IntMap;
import com.blastedstudios.freeboot.ui.postprocessing.utils.PingPongBuffer;
/**
 * Multi-pass blur filter: selects a separable convolution (3x3 or 5x5 taps) for the
 * current {@link BlurType} and runs it {@code passes} times over a ping-pong buffer.
 */
public final class Blur extends MultipassFilter {
    // @formatter:off
    private enum Tap {
        Tap3x3(1), Tap5x5(2),
        // Tap7x7( 3 )
        ;
        /** Convolution radius; kernel size is 2 * radius + 1. */
        public final int radius;
        private Tap (int radius) {
            this.radius = radius;
        }
    }

    public enum BlurType {
        Gaussian3x3(Tap.Tap3x3), Gaussian3x3b(Tap.Tap3x3), // R=5 (11x11, policy "higher-then-discard")
        Gaussian5x5(Tap.Tap5x5), Gaussian5x5b(Tap.Tap5x5), // R=9 (19x19, policy "higher-then-discard")
        ;
        public final Tap tap;
        private BlurType (Tap tap) {
            this.tap = tap;
        }
    }
    // @formatter:on

    // blur parameters
    private BlurType type;
    private float amount;
    private int passes;

    // reciprocal viewport size, used to convert kernel offsets into texture space
    private float invWidth, invHeight;
    private final IntMap<Convolve2D> convolve = new IntMap<Convolve2D>(Tap.values().length);

    /**
     * @param width viewport width in pixels
     * @param height viewport height in pixels
     */
    public Blur (int width, int height) {
        // precompute constants
        this.invWidth = 1f / (float)width;
        this.invHeight = 1f / (float)height;
        this.passes = 1;
        this.amount = 1f;

        // one convolution filter per supported tap radius
        for (Tap tap : Tap.values()) {
            convolve.put(tap.radius, new Convolve2D(tap.radius));
        }

        setType(BlurType.Gaussian5x5);
    }

    public void dispose () {
        for (Convolve2D c : convolve.values()) {
            c.dispose();
        }
    }

    public void setPasses (int passes) {
        this.passes = passes;
    }

    public void setType (BlurType type) {
        if (this.type != type) {
            this.type = type;
            computeBlurWeightings();
        }
    }

    // not all blur types support custom amounts at this time
    public void setAmount (float amount) {
        this.amount = amount;
        computeBlurWeightings();
    }

    public int getPasses () {
        return passes;
    }

    public BlurType getType () {
        return type;
    }

    // not all blur types support custom amounts at this time
    public float getAmount () {
        return amount;
    }

    @Override
    public void render (PingPongBuffer buffer) {
        Convolve2D c = convolve.get(this.type.tap.radius);
        for (int i = 0; i < this.passes; i++) {
            c.render(buffer);
        }
    }

    /** Recomputes weights/offsets for the current type and uploads them to the filter. */
    private void computeBlurWeightings () {
        boolean hasdata = true;
        Convolve2D c = convolve.get(this.type.tap.radius);

        switch (this.type) {
        case Gaussian3x3:
        case Gaussian5x5:
            // generic Gaussian kernel driven by 'amount' (used as sigma)
            computeKernel(this.type.tap.radius, this.amount, c.weights);
            computeOffsets(this.type.tap.radius, this.invWidth, this.invHeight, c.offsetsHor, c.offsetsVert);
            break;

        case Gaussian3x3b:
            loadBinomial3x3(c);
            scaleOffsetsToScreenSpace(c);
            break;

        case Gaussian5x5b:
            loadBinomial5x5(c);
            scaleOffsetsToScreenSpace(c);
            break;

        default:
            hasdata = false;
            break;
        }

        if (hasdata) {
            c.upload();
        }
    }

    /**
     * Weights and offsets computed from a binomial distribution (radius 1) and reduced
     * to be used *only* with bilinearly-filtered texture lookups.
     */
    private static void loadBinomial3x3 (Convolve2D c) {
        final float[] weights = {0.352941f, 0.294118f, 0.352941f};
        final float[] offsetsH = {-1.33333f, 0f, 0f, 0f, 1.33333f, 0f};
        final float[] offsetsV = {0f, -1.33333f, 0f, 0f, 0f, 1.33333f};
        System.arraycopy(weights, 0, c.weights, 0, weights.length);
        System.arraycopy(offsetsH, 0, c.offsetsHor, 0, offsetsH.length);
        System.arraycopy(offsetsV, 0, c.offsetsVert, 0, offsetsV.length);
    }

    /**
     * Weights and offsets computed from a binomial distribution (radius 2) and reduced
     * to be used *only* with bilinearly-filtered texture lookups.
     */
    private static void loadBinomial5x5 (Convolve2D c) {
        final float[] weights = {0.0702703f, 0.316216f, 0.227027f, 0.316216f, 0.0702703f};
        final float[] offsetsH = {-3.23077f, 0f, -1.38462f, 0f, 0f, 0f, 1.38462f, 0f, 3.23077f, 0f};
        final float[] offsetsV = {0f, -3.23077f, 0f, -1.38462f, 0f, 0f, 0f, 1.38462f, 0f, 3.23077f};
        System.arraycopy(weights, 0, c.weights, 0, weights.length);
        System.arraycopy(offsetsH, 0, c.offsetsHor, 0, offsetsH.length);
        System.arraycopy(offsetsV, 0, c.offsetsVert, 0, offsetsV.length);
    }

    /** Scales offsets from binomial space to screen space (shared by both *b variants). */
    private void scaleOffsetsToScreenSpace (Convolve2D c) {
        for (int i = 0; i < c.length * 2; i++) {
            c.offsetsHor[i] *= invWidth;
            c.offsetsVert[i] *= invHeight;
        }
    }

    /** Fills {@code outKernel} with a normalized Gaussian of the given radius and sigma. */
    private void computeKernel (int blurRadius, float blurAmount, float[] outKernel) {
        int radius = blurRadius;

        // float sigma = (float)radius / amount;
        float sigma = blurAmount;

        float twoSigmaSquare = 2.0f * sigma * sigma;
        float sigmaRoot = (float)Math.sqrt(twoSigmaSquare * Math.PI);
        float total = 0.0f;
        float distance = 0.0f;
        int index = 0;

        for (int i = -radius; i <= radius; ++i) {
            distance = i * i;
            index = i + radius;
            outKernel[index] = (float)Math.exp(-distance / twoSigmaSquare) / sigmaRoot;
            total += outKernel[index];
        }

        // normalize so the kernel sums to 1
        int size = (radius * 2) + 1;
        for (int i = 0; i < size; ++i) {
            outKernel[i] /= total;
        }
    }

    /** Interleaved (x, y) texture-space offsets for horizontal and vertical passes. */
    private void computeOffsets (int blurRadius, float dx, float dy, float[] outOffsetH, float[] outOffsetV) {
        int radius = blurRadius;

        final int X = 0, Y = 1;
        for (int i = -radius, j = 0; i <= radius; ++i, j += 2) {
            outOffsetH[j + X] = i * dx;
            outOffsetH[j + Y] = 0;

            outOffsetV[j + X] = 0;
            outOffsetV[j + Y] = i * dy;
        }
    }

    @Override
    public void rebind () {
        computeBlurWeightings();
    }
}
| |
package org.ovirt.engine.core.common.businessentities;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkInterface;
import org.ovirt.engine.core.common.businessentities.pm.FenceAgent;
import org.ovirt.engine.core.common.businessentities.pm.FenceProxySourceType;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.RpmVersion;
import org.ovirt.engine.core.compat.Version;
public class VDS implements IVdcQueryable, BusinessEntityWithStatus<Guid, VDSStatus>, HasStoragePool<Guid>, HasErrata, Commented, Nameable, Cloneable {
    private static final long serialVersionUID = -7893976203379789926L;
    // Host state is split across three sub-entities sharing this host's id (see setId()):
    // static (configuration), dynamic (runtime state) and statistics (counters).
    private VdsStatic vdsStatic;
    private VdsDynamic vdsDynamic;
    private VdsStatistics vdsStatistics;
    private ArrayList<VdsNetworkInterface> interfaces;
    private Set<String> networkNames;
    private String activeNic;
    private boolean balloonEnabled;
    private boolean countThreadsAsCores;
    private List<FenceAgent> fenceAgents;
    private VdsSpmStatus spmStatus;
    // Cluster/storage-pool fields denormalized onto the host entity for convenience.
    private Version clusterCompatibilityVersion;
    private String clusterName;
    private String clusterDescription;
    private String clusterCpuName;
    private Boolean clusterVirtService;
    private Guid storagePoolId;
    private String storagePoolName;
    private int maxVdsMemoryOverCommit;
    private ArrayList<VDSDomainsData> privateDomains;
    private Boolean clusterGlusterService;
    private Double imagesLastCheck;
    private Double imagesLastDelay;
    private ServerCpu cpuName;
    private Integer vdsSpmId;
    // Cached value recomputed by calculateFreeSchedulingMemoryCache() whenever the
    // memory-related setters below are called.
    private float maxSchedulingMemory;
    /**
     * This map holds the disk usage reported by the host. The mapping is path to usage (in MB).
     */
    private Map<String, Long> localDisksUsage;
    /** Creates an empty host with non-null sub-entities/collections and neutral defaults. */
    public VDS() {
        vdsStatic = new VdsStatic();
        vdsDynamic = new VdsDynamic();
        vdsStatistics = new VdsStatistics();
        storagePoolId = Guid.Empty;
        spmStatus = VdsSpmStatus.None;
        interfaces = new ArrayList<>();
        networkNames = new HashSet<>();
        fenceAgents = new LinkedList<>();
    }
    /** Hash over the host's identifying, cluster and capacity fields. */
    @Override
    public int hashCode() {
        return Objects.hash(
                vdsStatic,
                cpuName,
                spmStatus,
                imagesLastCheck,
                imagesLastDelay,
                interfaces,
                networkNames,
                maxVdsMemoryOverCommit,
                privateDomains,
                vdsSpmId,
                storagePoolId,
                storagePoolName,
                clusterCompatibilityVersion,
                clusterCpuName,
                clusterDescription,
                clusterName,
                clusterVirtService,
                clusterGlusterService,
                balloonEnabled,
                countThreadsAsCores
        );
    }
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof VDS)) {
return false;
}
VDS other = (VDS) obj;
return Objects.equals(vdsStatic, other.vdsStatic)
&& Objects.equals(cpuName, other.cpuName)
&& spmStatus == other.spmStatus
&& Objects.equals(imagesLastCheck, other.imagesLastCheck)
&& Objects.equals(imagesLastDelay, other.imagesLastDelay)
&& Objects.equals(interfaces, other.interfaces)
&& Objects.equals(networkNames, other.networkNames)
&& maxVdsMemoryOverCommit == other.maxVdsMemoryOverCommit
&& balloonEnabled == other.balloonEnabled
&& Objects.equals(privateDomains, other.privateDomains)
&& Objects.equals(vdsSpmId, other.vdsSpmId)
&& Objects.equals(storagePoolId, other.storagePoolId)
&& Objects.equals(storagePoolName, other.storagePoolName)
&& Objects.equals(clusterCompatibilityVersion, other.clusterCompatibilityVersion)
&& Objects.equals(clusterCpuName, other.clusterCpuName)
&& Objects.equals(clusterDescription, other.clusterDescription)
&& Objects.equals(clusterName, other.clusterName)
&& Objects.equals(clusterVirtService, other.clusterVirtService)
&& Objects.equals(clusterGlusterService, other.clusterGlusterService);
}
    /**
     * Manual field-by-field copy (does not call {@code super.clone()}).
     * Only the properties set below are carried over; anything else keeps the
     * defaults established by the {@code VDS()} constructor.
     * NOTE(review): collection-valued properties (HBAs, NUMA nodes, fence agents, …)
     * appear to be copied by reference — presumably a shallow copy; confirm callers
     * do not mutate them through the clone.
     */
    public VDS clone() {
        VDS vds = new VDS();
        vds.setClusterId(getClusterId());
        vds.setClusterCpuName(getClusterCpuName());
        vds.setCpuName(getCpuName());
        vds.setClusterDescription(getClusterDescription());
        vds.setId(getId());
        vds.setVdsName(getName());
        vds.setHostName(getHostName());
        vds.setComment(getComment());
        vds.setPort(getPort());
        vds.setProtocol(getProtocol());
        vds.setSshPort(getSshPort());
        vds.setSshUsername(getSshUsername());
        vds.setStatus(getStatus());
        vds.setExternalStatus(getExternalStatus());
        vds.setHardwareManufacturer(getHardwareManufacturer());
        vds.setHardwareProductName(getHardwareProductName());
        vds.setHardwareVersion(getHardwareVersion());
        vds.setHardwareSerialNumber(getHardwareSerialNumber());
        vds.setHardwareUUID(getHardwareUUID());
        vds.setHardwareFamily(getHardwareFamily());
        vds.setCpuCores(getCpuCores());
        vds.setCpuThreads(getCpuThreads());
        vds.setCpuModel(getCpuModel());
        vds.setOnlineCpus(getOnlineCpus());
        vds.setCpuSpeedMh(getCpuSpeedMh());
        vds.setIfTotalSpeed(getIfTotalSpeed());
        vds.setKvmEnabled(getKvmEnabled());
        vds.setPhysicalMemMb(getPhysicalMemMb());
        vds.setCpuIdle(getCpuIdle());
        vds.setCpuLoad(getCpuLoad());
        vds.setCpuSys(getCpuSys());
        vds.setCpuUser(getCpuUser());
        vds.setMemCommited(getMemCommited());
        vds.setVmActive(getVmActive());
        vds.setVmCount(getVmCount());
        vds.setVmMigrating(getVmMigrating());
        vds.setUsageMemPercent(getUsageMemPercent());
        vds.setUsageCpuPercent(getUsageCpuPercent());
        vds.setUsageNetworkPercent(getUsageNetworkPercent());
        vds.setReservedMem(getReservedMem());
        vds.setBootTime(getBootTime());
        vds.setGuestOverhead(getGuestOverhead());
        vds.setPreviousStatus(getPreviousStatus());
        vds.setMemAvailable(getMemAvailable());
        vds.setMemShared(getMemShared());
        vds.setSoftwareVersion(getSoftwareVersion());
        vds.setVersionName(getVersionName());
        vds.setVersion(getVersion());
        vds.setServerSslEnabled(isServerSslEnabled());
        vds.setCpuFlags(getCpuFlags());
        vds.setNetConfigDirty(getNetConfigDirty());
        vds.setPmEnabled(isPmEnabled());
        vds.setPmKdumpDetection(isPmKdumpDetection());
        vds.setConsoleAddress(getConsoleAddress());
        vds.setHBAs(getHBAs());
        vds.setVdsSpmPriority(getVdsSpmPriority());
        vds.setOtpValidity(getOtpValidity());
        vds.setKernelVersion(getKernelVersion());
        vds.setKvmVersion(getKvmVersion());
        vds.setLibvirtVersion(getLibvirtVersion());
        vds.setGlusterfsCliVersion(getGlusterfsCliVersion());
        vds.setGlusterVersion(getGlusterVersion());
        vds.setLibrbdVersion(getLibrbdVersion());
        vds.setHooksStr(getHooksStr());
        vds.setActiveNic(getActiveNic());
        vds.setPowerManagementControlledByPolicy(isPowerManagementControlledByPolicy());
        vds.setDisablePowerManagementPolicy(isDisablePowerManagementPolicy());
        vds.setHighlyAvailableScore(getHighlyAvailableScore());
        vds.setHighlyAvailableIsConfigured(getHighlyAvailableIsConfigured());
        vds.setHighlyAvailableIsActive(getHighlyAvailableIsActive());
        vds.setHighlyAvailableGlobalMaintenance(getHighlyAvailableGlobalMaintenance());
        vds.setHighlyAvailableLocalMaintenance(getHighlyAvailableLocalMaintenance());
        vds.setBalloonEnabled(isBalloonEnabled());
        vds.setNumaNodeList(getNumaNodeList());
        vds.setAutoNumaBalancing(getAutoNumaBalancing());
        vds.setFenceAgents(getFenceAgents());
        vds.setClusterCompatibilityVersion(getClusterCompatibilityVersion());
        vds.setUpdateAvailable(isUpdateAvailable());
        vds.setHostDevicePassthroughEnabled(isHostDevicePassthroughEnabled());
        return vds;
    }
public Version getClusterCompatibilityVersion() {
return clusterCompatibilityVersion;
}
public boolean isContainingHooks() {
// As VDSM reports the hooks in XMLRPCStruct that represents map of maps, we can assume that the string form of
// the map begins with
// { and ends with }
String hooksStr = getHooksStr();
return hooksStr != null && hooksStr.length() > 2;
}
public void setHooksStr(String hooksStr) {
getDynamicData().setHooksStr(hooksStr);
}
public String getHooksStr() {
return getDynamicData().getHooksStr();
}
public void setClusterCompatibilityVersion(Version value) {
clusterCompatibilityVersion = value;
}
public Guid getClusterId() {
return vdsStatic.getClusterId();
}
public void setClusterId(Guid value) {
vdsStatic.setClusterId(value);
}
public String getClusterName() {
return clusterName;
}
public void setClusterName(String value) {
clusterName = value;
}
public String getClusterDescription() {
return clusterDescription;
}
public void setClusterDescription(String value) {
clusterDescription = value;
}
public String getClusterCpuName() {
return clusterCpuName;
}
public void setClusterCpuName(String value) {
clusterCpuName = value;
}
public Boolean getClusterSupportsVirtService() {
return clusterVirtService;
}
public void setClusterSupportsVirtService(Boolean value) {
clusterVirtService = value;
}
public Boolean getClusterSupportsGlusterService() {
return clusterGlusterService;
}
public void setClusterSupportsGlusterService(Boolean value) {
clusterGlusterService = value;
}
@Override
public Guid getId() {
return vdsStatic.getId();
}
@Override
public void setId(Guid value) {
vdsStatic.setId(value);
vdsDynamic.setId(value);
vdsStatistics.setId(value);
}
@Override
public String getName() {
return vdsStatic.getName();
}
public void setVdsName(String value) {
vdsStatic.setName(value);
}
public String getUniqueId() {
return vdsStatic.getUniqueID();
}
public void setUniqueId(String value) {
vdsStatic.setUniqueID(value);
}
public String getHostName() {
return vdsStatic.getHostName();
}
public void setHostName(String value) {
vdsStatic.setHostName(value);
}
@Override
public String getComment() {
return vdsStatic.getComment();
}
@Override
public void setComment(String value) {
vdsStatic.setComment(value);
}
public int getPort() {
return vdsStatic.getPort();
}
public void setPort(int value) {
vdsStatic.setPort(value);
}
public VdsProtocol getProtocol() {
return vdsStatic.getProtocol();
}
public void setProtocol(VdsProtocol value) {
vdsStatic.setProtocol(value);
}
public int getSshPort() {
return vdsStatic.getSshPort();
}
public void setSshPort(int value) {
vdsStatic.setSshPort(value);
}
public String getSshUsername() {
return vdsStatic.getSshUsername();
}
public void setSshUsername(String value) {
vdsStatic.setSshUsername(value);
}
public boolean isServerSslEnabled() {
return vdsStatic.isServerSslEnabled();
}
public void setServerSslEnabled(boolean value) {
vdsStatic.setServerSslEnabled(value);
}
public VDSType getVdsType() {
return vdsStatic.getVdsType();
}
public void setVdsType(VDSType value) {
vdsStatic.setVdsType(value);
}
@Override
public VDSStatus getStatus() {
return vdsDynamic.getStatus();
}
@Override
public void setStatus(VDSStatus value) {
vdsDynamic.setStatus(value);
}
public ExternalStatus getExternalStatus() {
return vdsDynamic.getExternalStatus();
}
public void setExternalStatus(ExternalStatus externalStatus) {
vdsDynamic.setExternalStatus(externalStatus);
}
public Integer getCpuCores() {
return vdsDynamic.getCpuCores();
}
public void setCpuCores(Integer value) {
vdsDynamic.setCpuCores(value);
}
public Integer getCpuThreads() {
return vdsDynamic.getCpuThreads();
}
public void setCpuThreads(Integer value) {
vdsDynamic.setCpuThreads(value);
}
public String getHardwareUUID() {
return vdsDynamic.getHardwareUUID();
}
public String getHardwareManufacturer() {
return vdsDynamic.getHardwareManufacturer();
}
public String getHardwareFamily() {
return vdsDynamic.getHardwareFamily();
}
public String getHardwareSerialNumber() {
return vdsDynamic.getHardwareSerialNumber();
}
public String getHardwareProductName() {
return vdsDynamic.getHardwareProductName();
}
public String getHardwareVersion() {
return vdsDynamic.getHardwareVersion();
}
public void setHardwareUUID(String value) {
vdsDynamic.setHardwareUUID(value);
}
public void setHardwareFamily(String value) {
vdsDynamic.setHardwareFamily(value);
}
public void setHardwareSerialNumber(String value) {
vdsDynamic.setHardwareSerialNumber(value);
}
public void setHardwareVersion(String value) {
vdsDynamic.setHardwareVersion(value);
}
public void setHardwareProductName(String value) {
vdsDynamic.setHardwareProductName(value);
}
public void setHardwareManufacturer(String value) {
vdsDynamic.setHardwareManufacturer(value);
}
public Integer getCpuSockets() {
return vdsDynamic.getCpuSockets();
}
public void setCpuSockets(Integer value) {
vdsDynamic.setCpuSockets(value);
}
public String getCpuModel() {
return vdsDynamic.getCpuModel();
}
public void setCpuModel(String value) {
vdsDynamic.setCpuModel(value);
}
public String getOnlineCpus() {
return vdsDynamic.getOnlineCpus();
}
public void setOnlineCpus(String value) {
vdsDynamic.setOnlineCpus(value);
}
public Double getCpuSpeedMh() {
return vdsDynamic.getCpuSpeedMh();
}
public void setCpuSpeedMh(Double value) {
vdsDynamic.setCpuSpeedMh(value);
}
public String getIfTotalSpeed() {
return vdsDynamic.getIfTotalSpeed();
}
public void setIfTotalSpeed(String value) {
vdsDynamic.setIfTotalSpeed(value);
}
public Boolean getKvmEnabled() {
return vdsDynamic.getKvmEnabled();
}
public void setKvmEnabled(Boolean value) {
vdsDynamic.setKvmEnabled(value);
}
public Integer getPhysicalMemMb() {
return vdsDynamic.getPhysicalMemMb();
}
public void setPhysicalMemMb(Integer value) {
vdsDynamic.setPhysicalMemMb(value);
calculateFreeSchedulingMemoryCache();
}
public String getSupportedClusterLevels() {
return vdsDynamic.getSupportedClusterLevels();
}
public void setSupportedClusterLevels(String value) {
vdsDynamic.setSupportedClusterLevels(value);
}
public HashSet<Version> getSupportedClusterVersionsSet() {
return vdsDynamic.getSupportedClusterVersionsSet();
}
public String getSupportedEngines() {
return vdsDynamic.getSupportedEngines();
}
public void setSupportedEngines(String value) {
vdsDynamic.setSupportedEngines(value);
}
public HashSet<Version> getSupportedENGINESVersionsSet() {
return vdsDynamic.getSupportedEngineVersionsSet();
}
public Double getCpuIdle() {
return vdsStatistics.getCpuIdle();
}
public void setCpuIdle(Double value) {
vdsStatistics.setCpuIdle(value);
}
public Double getCpuLoad() {
return vdsStatistics.getCpuLoad();
}
public void setCpuLoad(Double value) {
vdsStatistics.setCpuLoad(value);
}
public Double getCpuSys() {
return vdsStatistics.getCpuSys();
}
public void setCpuSys(Double value) {
vdsStatistics.setCpuSys(value);
}
public Double getCpuUser() {
return vdsStatistics.getCpuUser();
}
public void setCpuUser(Double value) {
vdsStatistics.setCpuUser(value);
}
public Integer getMemCommited() {
return vdsDynamic.getMemCommited();
}
public void setMemCommited(Integer value) {
vdsDynamic.setMemCommited(value);
calculateFreeSchedulingMemoryCache();
}
public Integer getVmActive() {
return vdsDynamic.getVmActive();
}
public void setVmActive(Integer value) {
vdsDynamic.setVmActive(value);
}
public int getHighlyAvailableScore() {
return vdsStatistics.getHighlyAvailableScore();
}
public void setHighlyAvailableScore(int value) {
vdsStatistics.setHighlyAvailableScore(value);
}
public boolean getHighlyAvailableIsConfigured() {
return vdsStatistics.getHighlyAvailableIsConfigured();
}
public void setHighlyAvailableIsConfigured(boolean value) {
vdsStatistics.setHighlyAvailableIsConfigured(value);
}
public boolean getHighlyAvailableIsActive() {
return vdsStatistics.getHighlyAvailableIsActive();
}
public void setHighlyAvailableIsActive(boolean value) {
vdsStatistics.setHighlyAvailableIsActive(value);
}
public boolean getHighlyAvailableGlobalMaintenance() {
return vdsStatistics.getHighlyAvailableGlobalMaintenance();
}
public void setHighlyAvailableGlobalMaintenance(boolean value) {
vdsStatistics.setHighlyAvailableGlobalMaintenance(value);
}
public boolean getHighlyAvailableLocalMaintenance() {
return vdsStatistics.getHighlyAvailableLocalMaintenance();
}
public void setHighlyAvailableLocalMaintenance(boolean value) {
vdsStatistics.setHighlyAvailableLocalMaintenance(value);
}
public int getVmCount() {
return vdsDynamic.getVmCount();
}
public void setVmCount(int value) {
vdsDynamic.setVmCount(value);
}
public Integer getVmsCoresCount() {
return vdsDynamic.getVmsCoresCount();
}
public void setVmsCoresCount(Integer value) {
vdsDynamic.setVmsCoresCount(value);
}
public Integer getVmMigrating() {
return vdsDynamic.getVmMigrating();
}
public void setVmMigrating(Integer value) {
vdsDynamic.setVmMigrating(value);
}
public int getIncomingMigrations() {
return vdsDynamic.getIncomingMigrations();
}
public void setIncomingMigrations(int value) {
vdsDynamic.setIncomingMigrations(value);
}
public int getOutgoingMigrations() {
return vdsDynamic.getOutgoingMigrations();
}
public void setOutgoingMigrations(int value) {
vdsDynamic.setOutgoingMigrations(value);
}
public Integer getUsageMemPercent() {
return vdsStatistics.getUsageMemPercent();
}
public void setUsageMemPercent(Integer value) {
vdsStatistics.setUsageMemPercent(value);
}
public Integer getUsageCpuPercent() {
return vdsStatistics.getUsageCpuPercent();
}
public void setUsageCpuPercent(Integer value) {
vdsStatistics.setUsageCpuPercent(value);
}
public Integer getUsageNetworkPercent() {
return vdsStatistics.getUsageNetworkPercent();
}
public void setUsageNetworkPercent(Integer value) {
vdsStatistics.setUsageNetworkPercent(value);
}
public Integer getGuestOverhead() {
return vdsDynamic.getGuestOverhead();
}
public void setGuestOverhead(Integer value) {
vdsDynamic.setGuestOverhead(value);
}
public Integer getReservedMem() {
return vdsDynamic.getReservedMem();
}
public void setReservedMem(Integer value) {
vdsDynamic.setReservedMem(value);
calculateFreeSchedulingMemoryCache();
}
public Long getBootTime() {
return vdsStatistics.getBootTime();
}
public void setBootTime(Long value) {
vdsStatistics.setBootTime(value);
}
public VDSStatus getPreviousStatus() {
return vdsDynamic.getPreviousStatus();
}
public void setPreviousStatus(VDSStatus value) {
vdsDynamic.setPreviousStatus(value);
}
public Long getMemAvailable() {
return vdsStatistics.getMemAvailable();
}
public void setMemAvailable(Long value) {
vdsStatistics.setMemAvailable(value);
}
public Long getMemFree() {
return vdsStatistics.getMemFree();
}
public void setMemFree(Long value) {
vdsStatistics.setMemFree(value);
}
public Long getMemShared() {
return vdsStatistics.getMemShared();
}
public void setMemShared(Long value) {
vdsStatistics.setMemShared(value);
}
public String getConsoleAddress() {
return vdsStatic.getConsoleAddress();
}
public void setConsoleAddress(String value) {
vdsStatic.setConsoleAddress(value);
}
public Integer getMemCommitedPercent() {
Integer commited = vdsDynamic.getMemCommited();
Integer physical = vdsDynamic.getPhysicalMemMb();
if (commited == null || physical == null || physical == 0) {
return 0;
}
return (commited * 100) / physical;
}
/**
* This method is created for SOAP serialization of primitives that are readonly but sent by the client. The setter
* implementation is empty and the field is not being changed.
*/
@Deprecated
public void setMemCommitedPercent(Integer value) {
}
public Integer getMemSharedPercent() {
Long shared = vdsStatistics.getMemShared();
Integer physical = vdsDynamic.getPhysicalMemMb();
if (shared == null || physical == null || physical == 0) {
return 0;
}
return (int) ((shared * 100) / physical);
}
/**
* This method is created for SOAP serialization of primitives that are readonly but sent by the client. The setter
* implementation is empty and the field is not being changed.
*/
@Deprecated
public void setMemSharedPercent(Integer value) {
}
// Swap statistics from the last monitoring sample.
public Long getSwapFree() {
    return vdsStatistics.getSwapFree();
}
public void setSwapFree(Long value) {
    vdsStatistics.setSwapFree(value);
}
public Long getSwapTotal() {
    return vdsStatistics.getSwapTotal();
}
public void setSwapTotal(Long value) {
    vdsStatistics.setSwapTotal(value);
}
// KSM (kernel same-page merging) statistics reported by the host.
public Integer getKsmCpuPercent() {
    return vdsStatistics.getKsmCpuPercent();
}
public void setKsmCpuPercent(Integer value) {
    vdsStatistics.setKsmCpuPercent(value);
}
public Long getKsmPages() {
    return vdsStatistics.getKsmPages();
}
public void setKsmPages(Long value) {
    vdsStatistics.setKsmPages(value);
}
public Boolean getKsmState() {
    return vdsStatistics.getKsmState();
}
public void setKsmState(Boolean value) {
    vdsStatistics.setKsmState(value);
}
// Software/version identification strings reported by the host agent.
public String getSoftwareVersion() {
    return vdsDynamic.getSoftwareVersion();
}
public void setSoftwareVersion(String value) {
    vdsDynamic.setSoftwareVersion(value);
}
public String getVersionName() {
    return vdsDynamic.getVersionName();
}
public void setVersionName(String value) {
    vdsDynamic.setVersionName(value);
}
public String getBuildName() {
    return vdsDynamic.getBuildName();
}
public void setBuildName(String value) {
    vdsDynamic.setBuildName(value);
}
public String getCpuFlags() {
    return vdsDynamic.getCpuFlags();
}
public void setCpuFlags(String value) {
    vdsDynamic.setCpuFlags(value);
}
// Note: this property spells "Timestamp" while the backing entity uses
// "TimeStamp"; both refer to the same statistics field.
public Date getCpuOverCommitTimestamp() {
    return vdsStatistics.getCpuOverCommitTimeStamp();
}
public void setCpuOverCommitTimestamp(Date value) {
    vdsStatistics.setCpuOverCommitTimeStamp(value);
}
public int getVdsStrength() {
    return vdsStatic.getVdsStrength();
}
public void setVdsStrength(int value) {
    vdsStatic.setVdsStrength(value);
}
// Storage-pool (data center) association; held directly on this bean rather
// than on one of the backing entities.
@Override
public Guid getStoragePoolId() {
    return storagePoolId;
}
@Override
public void setStoragePoolId(Guid value) {
    storagePoolId = value;
}
public String getStoragePoolName() {
    return storagePoolName;
}
public void setStoragePoolName(String value) {
    storagePoolName = value;
}
public int getMaxVdsMemoryOverCommit() {
    return maxVdsMemoryOverCommit;
}
public void setMaxVdsMemoryOverCommit(int value) {
    maxVdsMemoryOverCommit = value;
    // the over-commit ratio is an input of the scheduling-memory cache
    calculateFreeSchedulingMemoryCache();
}
/**
 * Get the number of CPUs that were scheduled but not yet
 * assigned to a running VM.
 *
 * This field is a cache, use for reporting only.
 * The authoritative source for current value is the
 * {@link org.ovirt.engine.core.bll.scheduling.pending.PendingResourceManager}
 */
public Integer getPendingVcpusCount() {
    return vdsDynamic.getPendingVcpusCount();
}
public void setPendingVcpusCount(Integer value) {
    vdsDynamic.setPendingVcpusCount(value);
}
/**
 * Get the amount of memory that was scheduled but not yet
 * assigned to a running VM.
 *
 * This field is a cache, use for reporting only.
 * The authoritative source for current value is the
 * {@link org.ovirt.engine.core.bll.scheduling.pending.PendingResourceManager}
 */
public int getPendingVmemSize() {
    return vdsDynamic.getPendingVmemSize();
}
public void setPendingVmemSize(int value) {
    vdsDynamic.setPendingVmemSize(value);
    // pending memory is an input of the scheduling-memory cache
    calculateFreeSchedulingMemoryCache();
}
public Boolean getNetConfigDirty() {
    return vdsDynamic.getNetConfigDirty();
}
public void setNetConfigDirty(Boolean value) {
    vdsDynamic.setNetConfigDirty(value);
}
// Power-management (fencing) configuration, stored on the static entity.
public boolean isPmKdumpDetection() {
    return vdsStatic.isPmKdumpDetection();
}
public void setPmKdumpDetection(boolean pmKdumpDetection) {
    vdsStatic.setPmKdumpDetection(pmKdumpDetection);
}
public boolean isPmEnabled() {
    return vdsStatic.isPmEnabled();
}
public void setPmEnabled(boolean value) {
    vdsStatic.setPmEnabled(value);
}
public List<FenceProxySourceType> getFenceProxySources() {
    return vdsStatic.getFenceProxySources();
}
public void setFenceProxySources(List<FenceProxySourceType> fenceProxySources) {
    vdsStatic.setFenceProxySources(fenceProxySources);
}
// Installed-software details reported by the host.
public String getHostOs() {
    return vdsDynamic.getHostOs();
}
public void setHostOs(String value) {
    vdsDynamic.setHostOs(value);
}
public String getKvmVersion() {
    return vdsDynamic.getKvmVersion();
}
public void setKvmVersion(String value) {
    vdsDynamic.setKvmVersion(value);
}
public RpmVersion getLibvirtVersion() {
    return vdsDynamic.getLibvirtVersion();
}
public void setLibvirtVersion(RpmVersion value) {
    vdsDynamic.setLibvirtVersion(value);
}
public String getSpiceVersion() {
    return vdsDynamic.getSpiceVersion();
}
public void setSpiceVersion(String value) {
    vdsDynamic.setSpiceVersion(value);
}
public RpmVersion getGlusterVersion() {
    return vdsDynamic.getGlusterVersion();
}
public void setGlusterVersion(RpmVersion value) {
    vdsDynamic.setGlusterVersion(value);
}
public RpmVersion getLibrbdVersion() {
    return vdsDynamic.getLibrbdVersion();
}
public void setLibrbdVersion(RpmVersion value) {
    vdsDynamic.setLibrbdVersion(value);
}
public RpmVersion getGlusterfsCliVersion() {
    return vdsDynamic.getGlusterfsCliVersion();
}
public void setGlusterfsCliVersion(RpmVersion value) {
    vdsDynamic.setGlusterfsCliVersion(value);
}
public String getKernelVersion() {
    return vdsDynamic.getKernelVersion();
}
public void setKernelVersion(String value) {
    vdsDynamic.setKernelVersion(value);
}
public void setIScsiInitiatorName(String value) {
    vdsDynamic.setIScsiInitiatorName(value);
}
public String getIScsiInitiatorName() {
    return vdsDynamic.getIScsiInitiatorName();
}
// Host bus adapters, keyed by adapter type; each entry is a list of
// per-device property maps.
public Map<String, List<Map<String, String>>> getHBAs() {
    return vdsDynamic.getHBAs();
}
public void setHBAs(Map<String, List<Map<String, String>>> HBAs) {
    vdsDynamic.setHBAs(HBAs);
}
public void setTransparentHugePagesState(VdsTransparentHugePagesState value) {
    vdsDynamic.setTransparentHugePagesState(value);
}
public VdsTransparentHugePagesState getTransparentHugePagesState() {
    return vdsDynamic.getTransparentHugePagesState();
}
public int getAnonymousHugePages() {
    return vdsStatistics.getAnonymousHugePages();
}
public void setAnonymousHugePages(int value) {
    vdsStatistics.setAnonymousHugePages(value);
}
// Direct access to the three backing entities; the setters replace the whole
// entity reference.
public VdsStatic getStaticData() {
    return vdsStatic;
}
public void setStaticData(VdsStatic value) {
    vdsStatic = value;
}
public VdsDynamic getDynamicData() {
    return vdsDynamic;
}
public void setDynamicData(VdsDynamic value) {
    vdsDynamic = value;
}
public VdsStatistics getStatisticsData() {
    return vdsStatistics;
}
public void setStatisticsData(VdsStatistics value) {
    vdsStatistics = value;
}
// NOTE(review): the collections below are returned by reference, so callers
// can mutate internal state; kept as-is for backward compatibility.
public Set<String> getNetworkNames() {
    return networkNames;
}
public ArrayList<VdsNetworkInterface> getInterfaces() {
    return interfaces;
}
public ArrayList<VDSDomainsData> getDomains() {
    return privateDomains;
}
public void setDomains(ArrayList<VDSDomainsData> value) {
    privateDomains = value;
}
// Storage-domain health probe results.
public Double getImagesLastCheck() {
    return imagesLastCheck;
}
public void setImagesLastCheck(Double value) {
    imagesLastCheck = value;
}
public Double getImagesLastDelay() {
    return imagesLastDelay;
}
public void setImagesLastDelay(Double value) {
    imagesLastDelay = value;
}
public void setVersion(RpmVersion value) {
    vdsDynamic.setVersion(value);
}
public RpmVersion getVersion() {
    return vdsDynamic.getVersion();
}
public ServerCpu getCpuName() {
    return cpuName;
}
public void setCpuName(ServerCpu value) {
    cpuName = value;
}
// SPM (storage pool manager) related properties.
public Integer getVdsSpmId() {
    return vdsSpmId;
}
public void setVdsSpmId(Integer value) {
    vdsSpmId = value;
}
public long getOtpValidity() {
    return vdsStatic.getOtpValidity();
}
public void setOtpValidity(long value) {
    vdsStatic.setOtpValidity(value);
}
public int getVdsSpmPriority() {
    return vdsStatic.getVdsSpmPriority();
}
public void setVdsSpmPriority(int value) {
    vdsStatic.setVdsSpmPriority(value);
}
@Override
public Object getQueryableId() {
    return getId();
}
public VdsSpmStatus getSpmStatus() {
    return spmStatus;
}
public void setSpmStatus(VdsSpmStatus value) {
    spmStatus = value;
}
// true when this host currently holds the SPM role
public boolean isSpm() {
    return spmStatus == VdsSpmStatus.SPM;
}
public NonOperationalReason getNonOperationalReason() {
    return vdsDynamic.getNonOperationalReason();
}
public void setNonOperationalReason(NonOperationalReason nonOperationalReason) {
    vdsDynamic.setNonOperationalReason(nonOperationalReason);
}
// Local disk usage keyed by path (units not visible here -- TODO confirm).
public Map<String, Long> getLocalDisksUsage() {
    return localDisksUsage;
}
public void setLocalDisksUsage(Map<String, Long> localDiskUsage) {
    localDisksUsage = localDiskUsage;
}
public boolean isAutoRecoverable() {
    return vdsStatic.isAutoRecoverable();
}
public void setAutoRecoverable(boolean autoRecoverable) {
    vdsStatic.setAutoRecoverable(autoRecoverable);
}
public String getSshKeyFingerprint() {
    return vdsStatic.getSshKeyFingerprint();
}
public void setSshKeyFingerprint(String sshKeyFingerprint) {
    vdsStatic.setSshKeyFingerprint(sshKeyFingerprint);
}
public Guid getHostProviderId() {
    return vdsStatic.getHostProviderId();
}
public void setHostProviderId(Guid hostProviderId) {
    vdsStatic.setHostProviderId(hostProviderId);
}
public List<FenceAgent> getFenceAgents() {
    return fenceAgents;
}
public void setFenceAgents(List<FenceAgent> fenceAgents) {
    this.fenceAgents = fenceAgents;
}
/**
 * Refreshes the cached max-scheduling-memory value from the current
 * committed/physical/reserved memory figures minus pending VM memory.
 * Invoked whenever one of its inputs changes (see setMaxVdsMemoryOverCommit
 * and setPendingVmemSize); a no-op while any input is still unknown (null).
 */
private void calculateFreeSchedulingMemoryCache() {
    if (getMemCommited() != null && getPhysicalMemMb() != null && getReservedMem() != null) {
        maxSchedulingMemory = getFreeVirtualMemory() - getPendingVmemSize();
        // avoid negative values
        maxSchedulingMemory = maxSchedulingMemory > 0 ? maxSchedulingMemory : 0;
    }
}
/**
 * Memory still available for new VMs under the configured over-commit ratio:
 * (overCommit% of physical memory) minus committed minus reserved memory.
 *
 * @return the free virtual memory, clamped to a minimum of {@code 0}; also
 *         {@code 0} while any of the inputs is still unknown (null)
 */
public float getFreeVirtualMemory() {
    if (getMemCommited() == null || getPhysicalMemMb() == null || getReservedMem() == null) {
        return 0;
    }
    final float overCommitCapacity = getMaxVdsMemoryOverCommit() * getPhysicalMemMb() / 100.0f;
    final float free = overCommitCapacity - getMemCommited() - getReservedMem();
    // avoid negative values
    return free > 0 ? free : 0;
}
// Cached scheduling-memory value maintained by calculateFreeSchedulingMemoryCache().
public float getMaxSchedulingMemory() {
    return maxSchedulingMemory;
}
@Override
public String toString() {
    // note that vdsStatic may be null, so the getName with no null protection
    // is not enough, remove this once vdsStatic can not be null
    return "Host[" + (vdsStatic == null ? "null" : (vdsStatic.getName() + "," + vdsStatic.getId())) + "]";
}
public String getActiveNic() {
    return activeNic;
}
public void setActiveNic(String activeNic) {
    this.activeNic = activeNic;
}
public void setSupportedEmulatedMachines(String supportedEmulatedMachines) {
    vdsDynamic.setSupportedEmulatedMachines(supportedEmulatedMachines);
}
public String getSupportedEmulatedMachines() {
    return vdsDynamic.getSupportedEmulatedMachines();
}
// Power-management policy flags.
public boolean isPowerManagementControlledByPolicy() {
    return vdsDynamic.isPowerManagementControlledByPolicy();
}
public void setPowerManagementControlledByPolicy(boolean powerManagementControlledByPolicy) {
    vdsDynamic.setPowerManagementControlledByPolicy(powerManagementControlledByPolicy);
}
public boolean isDisablePowerManagementPolicy() {
    return vdsStatic.isDisablePowerManagementPolicy();
}
public void setDisablePowerManagementPolicy(boolean disablePowerManagementPolicy) {
    vdsStatic.setDisablePowerManagementPolicy(disablePowerManagementPolicy);
}
public Set<VmRngDevice.Source> getSupportedRngSources() {
    return vdsDynamic.getSupportedRngSources();
}
public KdumpStatus getKdumpStatus() {
    return vdsDynamic.getKdumpStatus();
}
public void setKdumpStatus(KdumpStatus kdumpStatus) {
    vdsDynamic.setKdumpStatus(kdumpStatus);
}
// SELinux mode: note the asymmetry -- the getter returns the enum while the
// setter takes the raw Integer value.
public SELinuxMode getSELinuxEnforceMode() {
    return vdsDynamic.getSELinuxEnforceMode();
}
public void setSELinuxEnforceMode(Integer value) {
    vdsDynamic.setSELinuxEnforceMode(value);
}
public void setNumaNodeList(List<VdsNumaNode> numaNodeList) {
    vdsDynamic.setNumaNodeList(numaNodeList);
}
public List<VdsNumaNode> getNumaNodeList() {
    return vdsDynamic.getNumaNodeList();
}
/**
 * If host enables the feature of auto numa balancing.
 */
public AutoNumaBalanceStatus getAutoNumaBalancing() {
    return vdsDynamic.getAutoNumaBalancing();
}
public void setAutoNumaBalancing(AutoNumaBalanceStatus autoNumaBalancing) {
    vdsDynamic.setAutoNumaBalancing(autoNumaBalancing);
}
/**
 * If host supports numa.
 */
public boolean isNumaSupport() {
    return vdsDynamic.isNumaSupport();
}
public void setNumaSupport(boolean numaSupport) {
    vdsDynamic.setNumaSupport(numaSupport);
}
public void setLiveSnapshotSupport(Boolean value) {
    vdsDynamic.setLiveSnapshotSupport(value);
}
public Boolean getLiveSnapshotSupport() {
    return vdsDynamic.getLiveSnapshotSupport();
}
public void setLiveMergeSupport(boolean value) {
    vdsDynamic.setLiveMergeSupport(value);
}
public boolean getLiveMergeSupport() {
    return vdsDynamic.getLiveMergeSupport();
}
// Scheduling-related flags held directly on this bean.
public boolean isBalloonEnabled() {
    return balloonEnabled;
}
public void setBalloonEnabled(boolean enableBalloon) {
    balloonEnabled = enableBalloon;
}
public void setCountThreadsAsCores(boolean value) {
    countThreadsAsCores = value;
}
public boolean getCountThreadsAsCores() {
    return countThreadsAsCores;
}
// true when at least one fence agent is configured for this host
public boolean isFenceAgentsExist() {
    return !getFenceAgents().isEmpty();
}
public String getMaintenanceReason() {
    return vdsDynamic.getMaintenanceReason();
}
public void setMaintenanceReason(String value) {
    vdsDynamic.setMaintenanceReason(value);
}
public boolean isUpdateAvailable() {
    return vdsDynamic.isUpdateAvailable();
}
public void setUpdateAvailable(boolean updateAvailable) {
    vdsDynamic.setUpdateAvailable(updateAvailable);
}
public Set<String> getAdditionalFeatures() {
    return vdsDynamic.getAdditionalFeatures();
}
public void setAdditionalFeatures(Set<String> additionalFeatures) {
    vdsDynamic.setAdditionalFeatures(additionalFeatures);
}
// true when the host runs the oVirt Node (image-based) distribution
public boolean isOvirtNode() {
    return getVdsType() == VDSType.oVirtNode;
}
// virt-v2v conversion jobs currently tracked for this host
public List<V2VJobInfo> getV2VJobs() {
    return vdsStatistics.getV2VJobs();
}
public void setV2VJobs(List<V2VJobInfo> v2vJobs) {
    vdsStatistics.setV2VJobs(v2vJobs);
}
public void setHostDevicePassthroughEnabled(boolean value) {
    vdsDynamic.setHostDevicePassthroughEnabled(value);
}
public boolean isHostDevicePassthroughEnabled() {
    return vdsDynamic.isHostDevicePassthroughEnabled();
}
/**
 * Whether this host is in a state that makes it eligible for fencing
 * (power-management restart).
 *
 * @return {@code true} for the Down, InstallFailed, Maintenance,
 *         NonOperational, NonResponsive and Kdumping statuses;
 *         {@code false} for any other status
 */
public boolean shouldVdsBeFenced() {
    switch (this.getStatus()) {
    case Down:
    case InstallFailed:
    case Maintenance:
    case NonOperational:
    case NonResponsive:
    case Kdumping: // only expected after engine restart while the host is stuck in Kdumping
        return true;
    default:
        return false;
    }
}
}
| |
package com.jeremyfeinstein.slidingmenu.lib;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.util.Log;
import android.util.TypedValue;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu.CanvasTransformer;
import com.yintro.lazyandroid.app.R;
/**
 * The view that sits behind the sliding content view ({@link CustomViewAbove})
 * and hosts the menu -- and, in {@code SlidingMenu.LEFT_RIGHT} mode, a
 * secondary (right) menu as well. It also draws the decorations that track the
 * open percentage: drop shadow, fade overlay and the selector bitmap.
 */
public class CustomViewBehind extends ViewGroup {

    private static final String TAG = "CustomViewBehind";

    /** Default width of the touch-sensitive edge margin, in dips. */
    private static final int MARGIN_THRESHOLD = 48; // dips

    private final Paint mFadePaint = new Paint();

    private int mTouchMode = SlidingMenu.TOUCHMODE_MARGIN;

    private CustomViewAbove mViewAbove;

    private View mContent;
    private View mSecondaryContent;
    private int mMarginThreshold;
    private int mWidthOffset;
    private CanvasTransformer mTransformer;
    private boolean mChildrenEnabled;

    private int mMode;
    private boolean mFadeEnabled;
    private float mScrollScale;
    private Drawable mShadowDrawable;
    private Drawable mSecondaryShadowDrawable;
    private int mShadowWidth;
    private float mFadeDegree;
    private boolean mSelectorEnabled = true;
    private Bitmap mSelectorDrawable;
    private View mSelectedView;

    public CustomViewBehind(Context context) {
        this(context, null);
    }

    public CustomViewBehind(Context context, AttributeSet attrs) {
        super(context, attrs);
        // convert the default margin from dips to pixels for this display
        mMarginThreshold = (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
                MARGIN_THRESHOLD, getResources().getDisplayMetrics());
    }

    public void setCustomViewAbove(CustomViewAbove customViewAbove) {
        mViewAbove = customViewAbove;
    }

    public void setCanvasTransformer(CanvasTransformer t) {
        mTransformer = t;
    }

    /** Sets the width (px) the menu is narrower than the full view. */
    public void setWidthOffset(int i) {
        mWidthOffset = i;
        requestLayout();
    }

    public int getMarginThreshold() {
        return mMarginThreshold;
    }

    public void setMarginThreshold(int marginThreshold) {
        mMarginThreshold = marginThreshold;
    }

    /** Width (px) of the menu content. Requires setContent to have been called. */
    public int getBehindWidth() {
        return mContent.getWidth();
    }

    public View getContent() {
        return mContent;
    }

    /** Replaces the (left) menu view, removing any previous one. */
    public void setContent(View v) {
        if (mContent != null)
            removeView(mContent);
        mContent = v;
        addView(mContent);
    }

    public View getSecondaryContent() {
        return mSecondaryContent;
    }

    /**
     * Sets the secondary (right) menu for use when setMode is called with SlidingMenu.LEFT_RIGHT.
     *
     * @param v the right menu
     */
    public void setSecondaryContent(View v) {
        if (mSecondaryContent != null)
            removeView(mSecondaryContent);
        mSecondaryContent = v;
        addView(mSecondaryContent);
    }

    public void setChildrenEnabled(boolean enabled) {
        mChildrenEnabled = enabled;
    }

    @Override
    public void scrollTo(int x, int y) {
        super.scrollTo(x, y);
        // a canvas transformer depends on the scroll position, so redraw
        if (mTransformer != null)
            invalidate();
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent e) {
        // while children are disabled, swallow all touches
        return !mChildrenEnabled;
    }

    @Override
    public boolean onTouchEvent(MotionEvent e) {
        return !mChildrenEnabled;
    }

    @Override
    protected void dispatchDraw(Canvas canvas) {
        if (mTransformer != null) {
            // apply the transformer scaled by how far the menu is open
            canvas.save();
            mTransformer.transformCanvas(canvas, mViewAbove.getPercentOpen());
            super.dispatchDraw(canvas);
            canvas.restore();
        } else
            super.dispatchDraw(canvas);
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        final int width = r - l;
        final int height = b - t;
        mContent.layout(0, 0, width - mWidthOffset, height);
        if (mSecondaryContent != null)
            mSecondaryContent.layout(0, 0, width - mWidthOffset, height);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int width = getDefaultSize(0, widthMeasureSpec);
        int height = getDefaultSize(0, heightMeasureSpec);
        setMeasuredDimension(width, height);
        // children are measured mWidthOffset narrower than this view
        final int contentWidth = getChildMeasureSpec(widthMeasureSpec, 0, width - mWidthOffset);
        final int contentHeight = getChildMeasureSpec(heightMeasureSpec, 0, height);
        mContent.measure(contentWidth, contentHeight);
        if (mSecondaryContent != null)
            mSecondaryContent.measure(contentWidth, contentHeight);
    }

    public int getMode() {
        return mMode;
    }

    /** Sets LEFT / RIGHT / LEFT_RIGHT mode and resets child visibility for single-menu modes. */
    public void setMode(int mode) {
        if (mode == SlidingMenu.LEFT || mode == SlidingMenu.RIGHT) {
            if (mContent != null)
                mContent.setVisibility(View.VISIBLE);
            if (mSecondaryContent != null)
                mSecondaryContent.setVisibility(View.INVISIBLE);
        }
        mMode = mode;
    }

    public float getScrollScale() {
        return mScrollScale;
    }

    public void setScrollScale(float scrollScale) {
        mScrollScale = scrollScale;
    }

    public void setShadowDrawable(Drawable shadow) {
        mShadowDrawable = shadow;
        invalidate();
    }

    public void setSecondaryShadowDrawable(Drawable shadow) {
        mSecondaryShadowDrawable = shadow;
        invalidate();
    }

    public void setShadowWidth(int width) {
        mShadowWidth = width;
        invalidate();
    }

    public void setFadeEnabled(boolean b) {
        mFadeEnabled = b;
    }

    /**
     * @param degree fade strength in [0, 1]
     * @throws IllegalStateException when the value is out of range
     */
    public void setFadeDegree(float degree) {
        if (degree > 1.0f || degree < 0.0f)
            throw new IllegalStateException("The BehindFadeDegree must be between 0.0f and 1.0f");
        mFadeDegree = degree;
    }

    /**
     * Clamps a page index to the pages valid for the current mode
     * (0 = left menu, 1 = content, 2 = right menu).
     */
    public int getMenuPage(int page) {
        page = (page > 1) ? 2 : ((page < 1) ? 0 : page);
        if (mMode == SlidingMenu.LEFT && page > 1) {
            return 0;
        } else if (mMode == SlidingMenu.RIGHT && page < 1) {
            return 2;
        } else {
            return page;
        }
    }

    /** Scrolls the menu in parallax with the content view's position. */
    public void scrollBehindTo(View content, int x, int y) {
        int vis = View.VISIBLE;
        if (mMode == SlidingMenu.LEFT) {
            if (x >= content.getLeft()) vis = View.INVISIBLE;
            scrollTo((int) ((x + getBehindWidth()) * mScrollScale), y);
        } else if (mMode == SlidingMenu.RIGHT) {
            if (x <= content.getLeft()) vis = View.INVISIBLE;
            scrollTo((int) (getBehindWidth() - getWidth() +
                    (x - getBehindWidth()) * mScrollScale), y);
        } else if (mMode == SlidingMenu.LEFT_RIGHT) {
            // show only the side the content has revealed
            mContent.setVisibility(x >= content.getLeft() ? View.INVISIBLE : View.VISIBLE);
            mSecondaryContent.setVisibility(x <= content.getLeft() ? View.INVISIBLE : View.VISIBLE);
            vis = x == 0 ? View.INVISIBLE : View.VISIBLE;
            if (x <= content.getLeft()) {
                scrollTo((int) ((x + getBehindWidth()) * mScrollScale), y);
            } else {
                scrollTo((int) (getBehindWidth() - getWidth() +
                        (x - getBehindWidth()) * mScrollScale), y);
            }
        }
        if (vis == View.INVISIBLE)
            Log.v(TAG, "behind INVISIBLE");
        setVisibility(vis);
    }

    /** Left coordinate the content view should have at the given page. */
    public int getMenuLeft(View content, int page) {
        if (mMode == SlidingMenu.LEFT) {
            switch (page) {
            case 0:
                return content.getLeft() - getBehindWidth();
            case 2:
                return content.getLeft();
            }
        } else if (mMode == SlidingMenu.RIGHT) {
            switch (page) {
            case 0:
                return content.getLeft();
            case 2:
                return content.getLeft() + getBehindWidth();
            }
        } else if (mMode == SlidingMenu.LEFT_RIGHT) {
            switch (page) {
            case 0:
                return content.getLeft() - getBehindWidth();
            case 2:
                return content.getLeft() + getBehindWidth();
            }
        }
        return content.getLeft();
    }

    public int getAbsLeftBound(View content) {
        if (mMode == SlidingMenu.LEFT || mMode == SlidingMenu.LEFT_RIGHT) {
            return content.getLeft() - getBehindWidth();
        } else if (mMode == SlidingMenu.RIGHT) {
            return content.getLeft();
        }
        return 0;
    }

    public int getAbsRightBound(View content) {
        if (mMode == SlidingMenu.LEFT) {
            return content.getLeft();
        } else if (mMode == SlidingMenu.RIGHT || mMode == SlidingMenu.LEFT_RIGHT) {
            return content.getLeft() + getBehindWidth();
        }
        return 0;
    }

    /** Whether a touch at x falls inside the edge margin that may start a drag. */
    public boolean marginTouchAllowed(View content, int x) {
        int left = content.getLeft();
        int right = content.getRight();
        if (mMode == SlidingMenu.LEFT) {
            return (x >= left && x <= mMarginThreshold + left);
        } else if (mMode == SlidingMenu.RIGHT) {
            return (x <= right && x >= right - mMarginThreshold);
        } else if (mMode == SlidingMenu.LEFT_RIGHT) {
            return (x >= left && x <= mMarginThreshold + left) ||
                    (x <= right && x >= right - mMarginThreshold);
        }
        return false;
    }

    public void setTouchMode(int i) {
        mTouchMode = i;
    }

    /** Whether a touch at x may close the currently open menu. */
    public boolean menuOpenTouchAllowed(View content, int currPage, float x) {
        switch (mTouchMode) {
        case SlidingMenu.TOUCHMODE_FULLSCREEN:
            return true;
        case SlidingMenu.TOUCHMODE_MARGIN:
            return menuTouchInQuickReturn(content, currPage, x);
        }
        return false;
    }

    /** Whether the touch landed on the visible strip of the content view. */
    public boolean menuTouchInQuickReturn(View content, int currPage, float x) {
        if (mMode == SlidingMenu.LEFT || (mMode == SlidingMenu.LEFT_RIGHT && currPage == 0)) {
            return x >= content.getLeft();
        } else if (mMode == SlidingMenu.RIGHT || (mMode == SlidingMenu.LEFT_RIGHT && currPage == 2)) {
            return x <= content.getRight();
        }
        return false;
    }

    /** Whether a drag of dx may open the (closed) menu in the current mode. */
    public boolean menuClosedSlideAllowed(float dx) {
        if (mMode == SlidingMenu.LEFT) {
            return dx > 0;
        } else if (mMode == SlidingMenu.RIGHT) {
            return dx < 0;
        } else if (mMode == SlidingMenu.LEFT_RIGHT) {
            return true;
        }
        return false;
    }

    /** Whether a drag of dx may close the (open) menu in the current mode. */
    public boolean menuOpenSlideAllowed(float dx) {
        if (mMode == SlidingMenu.LEFT) {
            return dx < 0;
        } else if (mMode == SlidingMenu.RIGHT) {
            return dx > 0;
        } else if (mMode == SlidingMenu.LEFT_RIGHT) {
            return true;
        }
        return false;
    }

    /** Draws the drop shadow along the content edge(s). */
    public void drawShadow(View content, Canvas canvas) {
        if (mShadowDrawable == null || mShadowWidth <= 0) return;
        int left = 0;
        if (mMode == SlidingMenu.LEFT) {
            left = content.getLeft() - mShadowWidth;
        } else if (mMode == SlidingMenu.RIGHT) {
            left = content.getRight();
        } else if (mMode == SlidingMenu.LEFT_RIGHT) {
            // the secondary shadow hugs the right edge of the content
            if (mSecondaryShadowDrawable != null) {
                left = content.getRight();
                mSecondaryShadowDrawable.setBounds(left, 0, left + mShadowWidth, getHeight());
                mSecondaryShadowDrawable.draw(canvas);
            }
            left = content.getLeft() - mShadowWidth;
        }
        mShadowDrawable.setBounds(left, 0, left + mShadowWidth, getHeight());
        mShadowDrawable.draw(canvas);
    }

    /** Draws a black overlay whose alpha grows as the menu closes. */
    public void drawFade(View content, Canvas canvas, float openPercent) {
        if (!mFadeEnabled) return;
        final int alpha = (int) (mFadeDegree * 255 * Math.abs(1 - openPercent));
        mFadePaint.setColor(Color.argb(alpha, 0, 0, 0));
        int left = 0;
        int right = 0;
        if (mMode == SlidingMenu.LEFT) {
            left = content.getLeft() - getBehindWidth();
            right = content.getLeft();
        } else if (mMode == SlidingMenu.RIGHT) {
            left = content.getRight();
            right = content.getRight() + getBehindWidth();
        } else if (mMode == SlidingMenu.LEFT_RIGHT) {
            left = content.getLeft() - getBehindWidth();
            right = content.getLeft();
            canvas.drawRect(left, 0, right, getHeight(), mFadePaint);
            left = content.getRight();
            right = content.getRight() + getBehindWidth();
        }
        canvas.drawRect(left, 0, right, getHeight(), mFadePaint);
    }

    /** Draws the selector bitmap next to the tagged selected view, clipped by openPercent. */
    public void drawSelector(View content, Canvas canvas, float openPercent) {
        if (!mSelectorEnabled) return;
        if (mSelectorDrawable != null && mSelectedView != null) {
            String tag = (String) mSelectedView.getTag(R.id.selected_view);
            // Constant-first comparison: getTag() returns null when the tag was
            // never set or has been cleared, which previously caused an NPE here.
            if ((TAG + "SelectedView").equals(tag)) {
                canvas.save();
                int left, right, offset;
                offset = (int) (mSelectorDrawable.getWidth() * openPercent);
                if (mMode == SlidingMenu.LEFT) {
                    right = content.getLeft();
                    left = right - offset;
                    canvas.clipRect(left, 0, right, getHeight());
                    canvas.drawBitmap(mSelectorDrawable, left, getSelectorTop(), null);
                } else if (mMode == SlidingMenu.RIGHT) {
                    left = content.getRight();
                    right = left + offset;
                    canvas.clipRect(left, 0, right, getHeight());
                    canvas.drawBitmap(mSelectorDrawable, right - mSelectorDrawable.getWidth(), getSelectorTop(), null);
                }
                canvas.restore();
            }
        }
    }

    public void setSelectorEnabled(boolean b) {
        mSelectorEnabled = b;
    }

    /** Marks v as the selected view (via tag); pass null to clear the selection. */
    public void setSelectedView(View v) {
        if (mSelectedView != null) {
            mSelectedView.setTag(R.id.selected_view, null);
            mSelectedView = null;
        }
        if (v != null && v.getParent() != null) {
            mSelectedView = v;
            mSelectedView.setTag(R.id.selected_view, TAG + "SelectedView");
            invalidate();
        }
    }

    /** Y coordinate that vertically centers the selector on the selected view. */
    private int getSelectorTop() {
        int y = mSelectedView.getTop();
        y += (mSelectedView.getHeight() - mSelectorDrawable.getHeight()) / 2;
        return y;
    }

    public void setSelectorBitmap(Bitmap b) {
        mSelectorDrawable = b;
        refreshDrawableState();
    }
}
| |
package com.mybatis.model;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class UserExample {
// MyBatis-Generator style "example" (dynamic WHERE clause) state:
// orderByClause -> optional ORDER BY fragment, distinct -> SELECT DISTINCT
// flag, oredCriteria -> criteria groups combined with OR.
protected String orderByClause;
protected boolean distinct;
protected List<Criteria> oredCriteria;
/** Creates an empty example with no criteria groups. */
public UserExample() {
    oredCriteria = new ArrayList<Criteria>();
}
public void setOrderByClause(String orderByClause) {
    this.orderByClause = orderByClause;
}
public String getOrderByClause() {
    return orderByClause;
}
public void setDistinct(boolean distinct) {
    this.distinct = distinct;
}
public boolean isDistinct() {
    return distinct;
}
public List<Criteria> getOredCriteria() {
    return oredCriteria;
}
/** Adds an existing criteria group, OR-ed with the previously added groups. */
public void or(Criteria criteria) {
    oredCriteria.add(criteria);
}
/** Creates a new criteria group, registers it (OR-ed) and returns it. */
public Criteria or() {
    Criteria criteria = createCriteriaInternal();
    oredCriteria.add(criteria);
    return criteria;
}
/**
 * Creates a new criteria group; it is registered automatically only when it
 * is the first group (generator convention) -- use {@link #or()} for
 * subsequent groups.
 */
public Criteria createCriteria() {
    Criteria criteria = createCriteriaInternal();
    if (oredCriteria.size() == 0) {
        oredCriteria.add(criteria);
    }
    return criteria;
}
protected Criteria createCriteriaInternal() {
    Criteria criteria = new Criteria();
    return criteria;
}
/** Resets the example to its freshly-constructed state. */
public void clear() {
    oredCriteria.clear();
    orderByClause = null;
    distinct = false;
}
protected abstract static class GeneratedCriteria {
// Conditions within one group; rendered AND-ed together by the mapper XML.
protected List<Criterion> criteria;
protected GeneratedCriteria() {
    super();
    criteria = new ArrayList<Criterion>();
}
/** A group is only rendered when it contains at least one condition. */
public boolean isValid() {
    return criteria.size() > 0;
}
public List<Criterion> getAllCriteria() {
    return criteria;
}
public List<Criterion> getCriteria() {
    return criteria;
}
/** Adds a no-value condition such as "col is null". */
protected void addCriterion(String condition) {
    if (condition == null) {
        throw new RuntimeException("Value for condition cannot be null");
    }
    criteria.add(new Criterion(condition));
}
/** Adds a single-value condition; {@code property} is only used in the error message. */
protected void addCriterion(String condition, Object value, String property) {
    if (value == null) {
        throw new RuntimeException("Value for " + property + " cannot be null");
    }
    criteria.add(new Criterion(condition, value));
}
/** Adds a two-value (between) condition. */
protected void addCriterion(String condition, Object value1, Object value2, String property) {
    if (value1 == null || value2 == null) {
        throw new RuntimeException("Between values for " + property + " cannot be null");
    }
    criteria.add(new Criterion(condition, value1, value2));
}
// --- Generated condition builders for column rec_id ---
public Criteria andRecIdIsNull() {
    addCriterion("rec_id is null");
    return (Criteria) this;
}
public Criteria andRecIdIsNotNull() {
    addCriterion("rec_id is not null");
    return (Criteria) this;
}
public Criteria andRecIdEqualTo(Long value) {
    addCriterion("rec_id =", value, "recId");
    return (Criteria) this;
}
public Criteria andRecIdNotEqualTo(Long value) {
    addCriterion("rec_id <>", value, "recId");
    return (Criteria) this;
}
public Criteria andRecIdGreaterThan(Long value) {
    addCriterion("rec_id >", value, "recId");
    return (Criteria) this;
}
public Criteria andRecIdGreaterThanOrEqualTo(Long value) {
    addCriterion("rec_id >=", value, "recId");
    return (Criteria) this;
}
public Criteria andRecIdLessThan(Long value) {
    addCriterion("rec_id <", value, "recId");
    return (Criteria) this;
}
public Criteria andRecIdLessThanOrEqualTo(Long value) {
    addCriterion("rec_id <=", value, "recId");
    return (Criteria) this;
}
public Criteria andRecIdIn(List<Long> values) {
    addCriterion("rec_id in", values, "recId");
    return (Criteria) this;
}
public Criteria andRecIdNotIn(List<Long> values) {
    addCriterion("rec_id not in", values, "recId");
    return (Criteria) this;
}
public Criteria andRecIdBetween(Long value1, Long value2) {
    addCriterion("rec_id between", value1, value2, "recId");
    return (Criteria) this;
}
public Criteria andRecIdNotBetween(Long value1, Long value2) {
    addCriterion("rec_id not between", value1, value2, "recId");
    return (Criteria) this;
}
// --- Generated condition builders for column name ---
public Criteria andNameIsNull() {
    addCriterion("name is null");
    return (Criteria) this;
}
public Criteria andNameIsNotNull() {
    addCriterion("name is not null");
    return (Criteria) this;
}
public Criteria andNameEqualTo(String value) {
    addCriterion("name =", value, "name");
    return (Criteria) this;
}
public Criteria andNameNotEqualTo(String value) {
    addCriterion("name <>", value, "name");
    return (Criteria) this;
}
public Criteria andNameGreaterThan(String value) {
    addCriterion("name >", value, "name");
    return (Criteria) this;
}
public Criteria andNameGreaterThanOrEqualTo(String value) {
    addCriterion("name >=", value, "name");
    return (Criteria) this;
}
public Criteria andNameLessThan(String value) {
    addCriterion("name <", value, "name");
    return (Criteria) this;
}
public Criteria andNameLessThanOrEqualTo(String value) {
    addCriterion("name <=", value, "name");
    return (Criteria) this;
}
// Note: LIKE patterns ('%', '_') must be supplied by the caller.
public Criteria andNameLike(String value) {
    addCriterion("name like", value, "name");
    return (Criteria) this;
}
public Criteria andNameNotLike(String value) {
    addCriterion("name not like", value, "name");
    return (Criteria) this;
}
public Criteria andNameIn(List<String> values) {
    addCriterion("name in", values, "name");
    return (Criteria) this;
}
public Criteria andNameNotIn(List<String> values) {
    addCriterion("name not in", values, "name");
    return (Criteria) this;
}
public Criteria andNameBetween(String value1, String value2) {
    addCriterion("name between", value1, value2, "name");
    return (Criteria) this;
}
public Criteria andNameNotBetween(String value1, String value2) {
    addCriterion("name not between", value1, value2, "name");
    return (Criteria) this;
}
// --- Generated condition builders for column password ---
public Criteria andPasswordIsNull() {
    addCriterion("password is null");
    return (Criteria) this;
}
public Criteria andPasswordIsNotNull() {
    addCriterion("password is not null");
    return (Criteria) this;
}
public Criteria andPasswordEqualTo(String value) {
    addCriterion("password =", value, "password");
    return (Criteria) this;
}
public Criteria andPasswordNotEqualTo(String value) {
    addCriterion("password <>", value, "password");
    return (Criteria) this;
}
public Criteria andPasswordGreaterThan(String value) {
    addCriterion("password >", value, "password");
    return (Criteria) this;
}
public Criteria andPasswordGreaterThanOrEqualTo(String value) {
    addCriterion("password >=", value, "password");
    return (Criteria) this;
}
public Criteria andPasswordLessThan(String value) {
    addCriterion("password <", value, "password");
    return (Criteria) this;
}
public Criteria andPasswordLessThanOrEqualTo(String value) {
    addCriterion("password <=", value, "password");
    return (Criteria) this;
}
public Criteria andPasswordLike(String value) {
    addCriterion("password like", value, "password");
    return (Criteria) this;
}
public Criteria andPasswordNotLike(String value) {
    addCriterion("password not like", value, "password");
    return (Criteria) this;
}
public Criteria andPasswordIn(List<String> values) {
    addCriterion("password in", values, "password");
    return (Criteria) this;
}
public Criteria andPasswordNotIn(List<String> values) {
    addCriterion("password not in", values, "password");
    return (Criteria) this;
}
public Criteria andPasswordBetween(String value1, String value2) {
    addCriterion("password between", value1, value2, "password");
    return (Criteria) this;
}
public Criteria andPasswordNotBetween(String value1, String value2) {
    addCriterion("password not between", value1, value2, "password");
    return (Criteria) this;
}
// --- Generated condition builders for column type ---
public Criteria andTypeIsNull() {
    addCriterion("type is null");
    return (Criteria) this;
}
public Criteria andTypeIsNotNull() {
    addCriterion("type is not null");
    return (Criteria) this;
}
public Criteria andTypeEqualTo(Integer value) {
    addCriterion("type =", value, "type");
    return (Criteria) this;
}
public Criteria andTypeNotEqualTo(Integer value) {
    addCriterion("type <>", value, "type");
    return (Criteria) this;
}
public Criteria andTypeGreaterThan(Integer value) {
    addCriterion("type >", value, "type");
    return (Criteria) this;
}
public Criteria andTypeGreaterThanOrEqualTo(Integer value) {
    addCriterion("type >=", value, "type");
    return (Criteria) this;
}
public Criteria andTypeLessThan(Integer value) {
    addCriterion("type <", value, "type");
    return (Criteria) this;
}
public Criteria andTypeLessThanOrEqualTo(Integer value) {
    addCriterion("type <=", value, "type");
    return (Criteria) this;
}
public Criteria andTypeIn(List<Integer> values) {
    addCriterion("type in", values, "type");
    return (Criteria) this;
}
public Criteria andTypeNotIn(List<Integer> values) {
    addCriterion("type not in", values, "type");
    return (Criteria) this;
}
public Criteria andTypeBetween(Integer value1, Integer value2) {
    addCriterion("type between", value1, value2, "type");
    return (Criteria) this;
}
public Criteria andTypeNotBetween(Integer value1, Integer value2) {
addCriterion("type not between", value1, value2, "type");
return (Criteria) this;
}
public Criteria andAddTimeIsNull() {
addCriterion("add_time is null");
return (Criteria) this;
}
public Criteria andAddTimeIsNotNull() {
addCriterion("add_time is not null");
return (Criteria) this;
}
public Criteria andAddTimeEqualTo(Date value) {
addCriterion("add_time =", value, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeNotEqualTo(Date value) {
addCriterion("add_time <>", value, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeGreaterThan(Date value) {
addCriterion("add_time >", value, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeGreaterThanOrEqualTo(Date value) {
addCriterion("add_time >=", value, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeLessThan(Date value) {
addCriterion("add_time <", value, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeLessThanOrEqualTo(Date value) {
addCriterion("add_time <=", value, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeIn(List<Date> values) {
addCriterion("add_time in", values, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeNotIn(List<Date> values) {
addCriterion("add_time not in", values, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeBetween(Date value1, Date value2) {
addCriterion("add_time between", value1, value2, "addTime");
return (Criteria) this;
}
public Criteria andAddTimeNotBetween(Date value1, Date value2) {
addCriterion("add_time not between", value1, value2, "addTime");
return (Criteria) this;
}
public Criteria andUpdateTimeIsNull() {
addCriterion("update_time is null");
return (Criteria) this;
}
public Criteria andUpdateTimeIsNotNull() {
addCriterion("update_time is not null");
return (Criteria) this;
}
public Criteria andUpdateTimeEqualTo(Date value) {
addCriterion("update_time =", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeNotEqualTo(Date value) {
addCriterion("update_time <>", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeGreaterThan(Date value) {
addCriterion("update_time >", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeGreaterThanOrEqualTo(Date value) {
addCriterion("update_time >=", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeLessThan(Date value) {
addCriterion("update_time <", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeLessThanOrEqualTo(Date value) {
addCriterion("update_time <=", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeIn(List<Date> values) {
addCriterion("update_time in", values, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeNotIn(List<Date> values) {
addCriterion("update_time not in", values, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeBetween(Date value1, Date value2) {
addCriterion("update_time between", value1, value2, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeNotBetween(Date value1, Date value2) {
addCriterion("update_time not between", value1, value2, "updateTime");
return (Criteria) this;
}
}
// Concrete criteria type returned by the fluent and...() builder methods.
// Generated as an (initially empty) subclass of GeneratedCriteria so that
// hand-written criteria methods can be added here without touching the
// generated base class.
public static class Criteria extends GeneratedCriteria {
protected Criteria() {
super();
}
}
/**
 * A single rendered predicate of a criteria query.
 *
 * Holds the SQL fragment (e.g. {@code "name ="}, {@code "add_time between"})
 * together with zero, one, or two operand values. Exactly one of the
 * {@code noValue} / {@code singleValue} / {@code betweenValue} /
 * {@code listValue} flags is set by the constructors, telling the mapper
 * template how to render the operand(s).
 */
public static class Criterion {
    // SQL fragment, e.g. "password like" or "type is null".
    private String condition;
    // First (or only) operand; a List when listValue is set.
    private Object value;
    // Second operand, only used for "between" predicates.
    private Object secondValue;
    private boolean noValue;
    private boolean singleValue;
    private boolean betweenValue;
    private boolean listValue;
    // Optional MyBatis type handler name; null means "use the default".
    private String typeHandler;

    /** Creates an operand-less predicate such as {@code "name is null"}. */
    protected Criterion(String condition) {
        this.condition = condition;
        this.typeHandler = null;
        this.noValue = true;
    }

    /**
     * Creates a one-operand predicate. A {@code List} operand marks the
     * criterion as a list value (for {@code IN} clauses), anything else as a
     * single value.
     */
    protected Criterion(String condition, Object value, String typeHandler) {
        this.condition = condition;
        this.value = value;
        this.typeHandler = typeHandler;
        boolean operandIsList = value instanceof List<?>;
        this.listValue = operandIsList;
        this.singleValue = !operandIsList;
    }

    /** One-operand predicate with the default type handler. */
    protected Criterion(String condition, Object value) {
        this(condition, value, null);
    }

    /** Creates a two-operand ("between") predicate. */
    protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
        this.condition = condition;
        this.value = value;
        this.secondValue = secondValue;
        this.typeHandler = typeHandler;
        this.betweenValue = true;
    }

    /** Two-operand ("between") predicate with the default type handler. */
    protected Criterion(String condition, Object value, Object secondValue) {
        this(condition, value, secondValue, null);
    }

    public String getCondition() {
        return condition;
    }

    public Object getValue() {
        return value;
    }

    public Object getSecondValue() {
        return secondValue;
    }

    public boolean isNoValue() {
        return noValue;
    }

    public boolean isSingleValue() {
        return singleValue;
    }

    public boolean isBetweenValue() {
        return betweenValue;
    }

    public boolean isListValue() {
        return listValue;
    }

    public String getTypeHandler() {
        return typeHandler;
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.compress;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.RandomDatum;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
/**
 * Round-trip tests for the Hadoop compression codecs (Default/zlib, Gzip,
 * BZip2), for compressor pooling/re-initialisation, and for SequenceFile
 * block compression.
 */
public class TestCodec extends TestCase {

  private static final Log LOG=
    LogFactory.getLog(TestCodec.class);

  private Configuration conf = new Configuration();
  private int count = 10000;
  // Fresh random seed per run so different data is exercised each time.
  private int seed = new Random().nextInt();

  public void testDefaultCodec() throws IOException {
    codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.DefaultCodec");
    codecTest(conf, seed, count, "org.apache.hadoop.io.compress.DefaultCodec");
  }

  public void testGzipCodec() throws IOException {
    codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.GzipCodec");
    codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
  }

  public void testBZip2Codec() throws IOException {
    codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
    codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
  }

  public void testGzipCodecWithParam() throws IOException {
    Configuration conf = new Configuration(this.conf);
    ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_COMPRESSION);
    ZlibFactory.setCompressionStrategy(conf, CompressionStrategy.HUFFMAN_ONLY);
    codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.GzipCodec");
    codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
  }

  /**
   * Generates {@code count} random key/value records, compresses them with
   * the given codec, decompresses the result, and verifies the round-tripped
   * records equal the originals.
   *
   * @param conf       configuration handed to the codec instance
   * @param seed       seed for the random record generator
   * @param count      number of key/value pairs to generate
   * @param codecClass fully-qualified codec class name
   * @throws IOException if the codec class cannot be loaded or a stream fails
   */
  private static void codecTest(Configuration conf, int seed, int count,
                                String codecClass)
    throws IOException {

    // Create the codec reflectively so the test works for any codec name.
    CompressionCodec codec = null;
    try {
      codec = (CompressionCodec)
        ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
    } catch (ClassNotFoundException cnfe) {
      throw new IOException("Illegal codec!");
    }
    LOG.info("Created a Codec object of type: " + codecClass);

    // Generate data.
    DataOutputBuffer data = new DataOutputBuffer();
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    for(int i = 0; i < count; ++i) {
      generator.next();
      RandomDatum key = generator.getKey();
      RandomDatum value = generator.getValue();
      key.write(data);
      value.write(data);
    }
    DataInputBuffer originalData = new DataInputBuffer();
    DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
    originalData.reset(data.getData(), 0, data.getLength());
    LOG.info("Generated " + count + " records");

    // Compress data.
    DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
    CompressionOutputStream deflateFilter =
      codec.createOutputStream(compressedDataBuffer);
    DataOutputStream deflateOut =
      new DataOutputStream(new BufferedOutputStream(deflateFilter));
    deflateOut.write(data.getData(), 0, data.getLength());
    deflateOut.flush();
    // finish() flushes the codec's internal state without closing the stream.
    deflateFilter.finish();
    LOG.info("Finished compressing data");

    // De-compress data.
    DataInputBuffer deCompressedDataBuffer = new DataInputBuffer();
    deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
                                 compressedDataBuffer.getLength());
    CompressionInputStream inflateFilter =
      codec.createInputStream(deCompressedDataBuffer);
    DataInputStream inflateIn =
      new DataInputStream(new BufferedInputStream(inflateFilter));

    // Check: every decompressed record must equal the original record.
    // (The previous version of this loop only read both streams and never
    // compared them, so a corrupting codec would still have passed.)
    for(int i = 0; i < count; ++i) {
      RandomDatum k1 = new RandomDatum();
      RandomDatum v1 = new RandomDatum();
      k1.readFields(originalIn);
      v1.readFields(originalIn);
      RandomDatum k2 = new RandomDatum();
      RandomDatum v2 = new RandomDatum();
      k2.readFields(inflateIn);
      v2.readFields(inflateIn);
      assertTrue("original and compressed-then-decompressed-output keys not equal",
                 k1.equals(k2));
      assertTrue("original and compressed-then-decompressed-output values not equal",
                 v1.equals(v2));
    }
    LOG.info("SUCCESS! Completed checking " + count + " records");
  }

  /**
   * Pooled gzip compressors must not be handed out for a different codec:
   * returning a DefaultCodec compressor to the pool must not satisfy a
   * subsequent GzipCodec request. Skipped when native zlib is unavailable.
   */
  public void testCodecPoolGzipReuse() throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean("hadoop.native.lib", true);
    if (!ZlibFactory.isNativeZlibLoaded(conf)) {
      LOG.warn("testCodecPoolGzipReuse skipped: native libs not loaded");
      return;
    }
    GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
    DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
    Compressor c1 = CodecPool.getCompressor(gzc);
    Compressor c2 = CodecPool.getCompressor(dfc);
    CodecPool.returnCompressor(c1);
    CodecPool.returnCompressor(c2);
    assertTrue("Got mismatched ZlibCompressor", c2 != CodecPool.getCompressor(gzc));
  }

  /**
   * Verifies that a pooled compressor picks up new configuration when it is
   * borrowed again: a compressor cached at BEST_COMPRESSION must honour a
   * later NO_COMPRESSION setting (output at least as large as the input).
   */
  private static void gzipReinitTest(Configuration conf, CompressionCodec codec)
      throws IOException {
    // Add codec to cache.
    ZlibFactory.setCompressionLevel(conf, CompressionLevel.BEST_COMPRESSION);
    ZlibFactory.setCompressionStrategy(conf,
        CompressionStrategy.DEFAULT_STRATEGY);
    Compressor c1 = CodecPool.getCompressor(codec);
    CodecPool.returnCompressor(c1);
    // Reset compressor's compression level to perform no compression.
    ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
    Compressor c2 = CodecPool.getCompressor(codec, conf);
    // Ensure the same compressor placed earlier is handed back.
    assertTrue("Got mismatched ZlibCompressor", c1 == c2);
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    CompressionOutputStream cos = null;
    // Write trivially compressible data.
    byte[] b = new byte[1 << 15];
    Arrays.fill(b, (byte) 43);
    try {
      cos = codec.createOutputStream(bos, c2);
      cos.write(b);
    } finally {
      if (cos != null) {
        cos.close();
      }
      CodecPool.returnCompressor(c2);
    }
    byte[] outbytes = bos.toByteArray();
    // Verify data were not compressed.
    assertTrue("Compressed bytes contrary to configuration",
               outbytes.length >= b.length);
  }

  public void testCodecPoolCompressorReinit() throws Exception {
    Configuration conf = new Configuration();
    conf.setBoolean("hadoop.native.lib", true);
    if (ZlibFactory.isNativeZlibLoaded(conf)) {
      GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
      gzipReinitTest(conf, gzc);
    } else {
      LOG.warn("testCodecPoolCompressorReinit skipped: native libs not loaded");
    }
    // Also exercise the pure-Java path.
    conf.setBoolean("hadoop.native.lib", false);
    DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
    gzipReinitTest(conf, dfc);
  }

  public void testSequenceFileDefaultCodec() throws IOException, ClassNotFoundException,
      InstantiationException, IllegalAccessException {
    sequenceFileCodecTest(conf, 100, "org.apache.hadoop.io.compress.DefaultCodec", 100);
    sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.DefaultCodec", 1000000);
  }

  public void testSequenceFileBZip2Codec() throws IOException, ClassNotFoundException,
      InstantiationException, IllegalAccessException {
    sequenceFileCodecTest(conf, 0, "org.apache.hadoop.io.compress.BZip2Codec", 100);
    sequenceFileCodecTest(conf, 100, "org.apache.hadoop.io.compress.BZip2Codec", 100);
    sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.BZip2Codec", 1000000);
  }

  /**
   * Writes {@code lines} key/value pairs to a block-compressed SequenceFile
   * using the given codec and block size, reads them back, and verifies both
   * content and record count. The temporary file is removed afterwards.
   */
  private static void sequenceFileCodecTest(Configuration conf, int lines,
                                            String codecClass, int blockSize)
    throws IOException, ClassNotFoundException, InstantiationException, IllegalAccessException {

    Path filePath = new Path("SequenceFileCodecTest." + codecClass);
    // Configuration: block size controls how much data each compressed block holds.
    conf.setInt("io.seqfile.compress.blocksize", blockSize);

    // Create the SequenceFile.
    FileSystem fs = FileSystem.get(conf);
    LOG.info("Creating SequenceFile with codec \"" + codecClass + "\"");
    SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, filePath,
        Text.class, Text.class, CompressionType.BLOCK,
        (CompressionCodec)Class.forName(codecClass).newInstance());

    // Write some data.
    LOG.info("Writing to SequenceFile...");
    for (int i = 0; i < lines; i++) {
      Text key = new Text("key" + i);
      Text value = new Text("value" + i);
      writer.append(key, value);
    }
    writer.close();

    // Read the data back and check.
    LOG.info("Reading from the SequenceFile...");
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, filePath, conf);
    Writable key = (Writable)reader.getKeyClass().newInstance();
    Writable value = (Writable)reader.getValueClass().newInstance();
    int lc = 0;
    try {
      while (reader.next(key, value)) {
        assertEquals("key" + lc, key.toString());
        assertEquals("value" + lc, value.toString());
        lc++;
      }
    } finally {
      reader.close();
    }
    assertEquals(lines, lc);

    // Delete temporary files.
    fs.delete(filePath, false);
    LOG.info("SUCCESS! Completed SequenceFileCodecTest with codec \"" + codecClass + "\"");
  }

  /**
   * Command-line entry point: {@code TestCodec [-count N] [-codec <codec class>]}.
   * Runs a single round-trip codecTest with a fixed seed.
   */
  public static void main(String[] args) {
    int count = 10000;
    String codecClass = "org.apache.hadoop.io.compress.DefaultCodec";

    String usage = "TestCodec [-count N] [-codec <codec class>]";
    if (args.length == 0) {
      System.err.println(usage);
      System.exit(-1);
    }

    try {
      for (int i = 0; i < args.length; ++i) {       // parse command line
        if (args[i] == null) {
          continue;
        } else if (args[i].equals("-count")) {
          count = Integer.parseInt(args[++i]);
        } else if (args[i].equals("-codec")) {
          codecClass = args[++i];
        }
      }

      Configuration conf = new Configuration();
      int seed = 0;
      codecTest(conf, seed, count, codecClass);
    } catch (Exception e) {
      System.err.println("Caught: " + e);
      e.printStackTrace();
    }
  }

  public TestCodec(String name) {
    super(name);
  }
}
| |
/*
* Copyright 2012 The Netty Project
*
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.buffer;
import io.netty.util.Recycler;
import io.netty.util.internal.PlatformDependent;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.GatheringByteChannel;
import java.nio.channels.ScatteringByteChannel;
// Pooled, heap-backed (non-direct) ByteBuf whose storage is a byte[] chunk
// handed out by the pooled allocator. Instances themselves are recycled via
// RECYCLER instead of being allocated fresh. All multi-byte accessors below
// use big-endian byte order (most significant byte first), as the shift
// patterns show. NOTE(review): memory, offset, idx(...), internalNioBuffer()
// and the index-check helpers come from the PooledByteBuf superclass, which
// is not visible here; idx(index) presumably translates a buffer index into
// an offset within the shared backing array -- confirm against PooledByteBuf.
final class PooledHeapByteBuf extends PooledByteBuf<byte[]> {
// Object pool for buffer instances; newObject is only invoked when the
// recycler has no spare instance to hand out.
private static final Recycler<PooledHeapByteBuf> RECYCLER = new Recycler<PooledHeapByteBuf>() {
@Override
protected PooledHeapByteBuf newObject(Handle<PooledHeapByteBuf> handle) {
return new PooledHeapByteBuf(handle, 0);
}
};
// Obtains a (possibly recycled) instance, resetting its reference count to 1
// and applying the requested maximum capacity.
static PooledHeapByteBuf newInstance(int maxCapacity) {
PooledHeapByteBuf buf = RECYCLER.get();
buf.setRefCnt(1);
buf.maxCapacity(maxCapacity);
return buf;
}
private PooledHeapByteBuf(Recycler.Handle<PooledHeapByteBuf> recyclerHandle, int maxCapacity) {
super(recyclerHandle, maxCapacity);
}
@Override
public boolean isDirect() {
// Heap buffer: data lives in a Java byte[], not in native memory.
return false;
}
@Override
protected byte _getByte(int index) {
return memory[idx(index)];
}
@Override
protected short _getShort(int index) {
// Big-endian: high byte first.
index = idx(index);
return (short) (memory[index] << 8 | memory[index + 1] & 0xFF);
}
@Override
protected int _getUnsignedMedium(int index) {
// Big-endian 24-bit value assembled from three bytes.
index = idx(index);
return (memory[index] & 0xff) << 16 |
(memory[index + 1] & 0xff) << 8 |
memory[index + 2] & 0xff;
}
@Override
protected int _getInt(int index) {
index = idx(index);
return (memory[index] & 0xff) << 24 |
(memory[index + 1] & 0xff) << 16 |
(memory[index + 2] & 0xff) << 8 |
memory[index + 3] & 0xff;
}
@Override
protected long _getLong(int index) {
index = idx(index);
return ((long) memory[index] & 0xff) << 56 |
((long) memory[index + 1] & 0xff) << 48 |
((long) memory[index + 2] & 0xff) << 40 |
((long) memory[index + 3] & 0xff) << 32 |
((long) memory[index + 4] & 0xff) << 24 |
((long) memory[index + 5] & 0xff) << 16 |
((long) memory[index + 6] & 0xff) << 8 |
(long) memory[index + 7] & 0xff;
}
// Copies into a destination ByteBuf, choosing the fastest available path:
// raw memory copy, backing-array copy, or the destination's own setBytes.
@Override
public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) {
checkDstIndex(index, length, dstIndex, dst.capacity());
if (dst.hasMemoryAddress()) {
PlatformDependent.copyMemory(memory, idx(index), dst.memoryAddress() + dstIndex, length);
} else if (dst.hasArray()) {
getBytes(index, dst.array(), dst.arrayOffset() + dstIndex, length);
} else {
dst.setBytes(dstIndex, memory, idx(index), length);
}
return this;
}
@Override
public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) {
checkDstIndex(index, length, dstIndex, dst.length);
System.arraycopy(memory, idx(index), dst, dstIndex, length);
return this;
}
@Override
public ByteBuf getBytes(int index, ByteBuffer dst) {
// Copies as much as fits: bounded by both this buffer's remaining
// capacity from index and the destination's remaining space.
checkIndex(index);
dst.put(memory, idx(index), Math.min(capacity() - index, dst.remaining()));
return this;
}
@Override
public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException {
checkIndex(index, length);
out.write(memory, idx(index), length);
return this;
}
@Override
public int getBytes(int index, GatheringByteChannel out, int length) throws IOException {
return getBytes(index, out, length, false);
}
// internal == true reuses the cached internal NIO buffer (caller must not
// retain it); internal == false wraps the array in a fresh ByteBuffer so
// the view can escape safely.
private int getBytes(int index, GatheringByteChannel out, int length, boolean internal) throws IOException {
checkIndex(index, length);
index = idx(index);
ByteBuffer tmpBuf;
if (internal) {
tmpBuf = internalNioBuffer();
} else {
tmpBuf = ByteBuffer.wrap(memory);
}
return out.write((ByteBuffer) tmpBuf.clear().position(index).limit(index + length));
}
@Override
public int readBytes(GatheringByteChannel out, int length) throws IOException {
checkReadableBytes(length);
// Safe to use the internal temp buffer here since it never escapes.
int readBytes = getBytes(readerIndex, out, length, true);
readerIndex += readBytes;
return readBytes;
}
@Override
protected void _setByte(int index, int value) {
memory[idx(index)] = (byte) value;
}
@Override
protected void _setShort(int index, int value) {
// Big-endian: high byte first.
index = idx(index);
memory[index] = (byte) (value >>> 8);
memory[index + 1] = (byte) value;
}
@Override
protected void _setMedium(int index, int value) {
index = idx(index);
memory[index] = (byte) (value >>> 16);
memory[index + 1] = (byte) (value >>> 8);
memory[index + 2] = (byte) value;
}
@Override
protected void _setInt(int index, int value) {
index = idx(index);
memory[index] = (byte) (value >>> 24);
memory[index + 1] = (byte) (value >>> 16);
memory[index + 2] = (byte) (value >>> 8);
memory[index + 3] = (byte) value;
}
@Override
protected void _setLong(int index, long value) {
index = idx(index);
memory[index] = (byte) (value >>> 56);
memory[index + 1] = (byte) (value >>> 48);
memory[index + 2] = (byte) (value >>> 40);
memory[index + 3] = (byte) (value >>> 32);
memory[index + 4] = (byte) (value >>> 24);
memory[index + 5] = (byte) (value >>> 16);
memory[index + 6] = (byte) (value >>> 8);
memory[index + 7] = (byte) value;
}
// Mirror of getBytes(ByteBuf): picks the fastest copy path into this buffer.
@Override
public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) {
checkSrcIndex(index, length, srcIndex, src.capacity());
if (src.hasMemoryAddress()) {
PlatformDependent.copyMemory(src.memoryAddress() + srcIndex, memory, idx(index), length);
} else if (src.hasArray()) {
setBytes(index, src.array(), src.arrayOffset() + srcIndex, length);
} else {
src.getBytes(srcIndex, memory, idx(index), length);
}
return this;
}
@Override
public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) {
checkSrcIndex(index, length, srcIndex, src.length);
System.arraycopy(src, srcIndex, memory, idx(index), length);
return this;
}
@Override
public ByteBuf setBytes(int index, ByteBuffer src) {
int length = src.remaining();
checkIndex(index, length);
src.get(memory, idx(index), length);
return this;
}
@Override
public int setBytes(int index, InputStream in, int length) throws IOException {
// Returns the number of bytes actually read (may be < length, or -1 at EOF).
checkIndex(index, length);
return in.read(memory, idx(index), length);
}
@Override
public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException {
checkIndex(index, length);
index = idx(index);
try {
return in.read((ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length));
} catch (ClosedChannelException e) {
// Treat a closed channel like end-of-stream rather than failing.
return -1;
}
}
@Override
public ByteBuf copy(int index, int length) {
// Deep copy into a newly allocated (pooled) heap buffer.
checkIndex(index, length);
ByteBuf copy = alloc().heapBuffer(length, maxCapacity());
copy.writeBytes(memory, idx(index), length);
return copy;
}
@Override
public int nioBufferCount() {
return 1;
}
@Override
public ByteBuffer[] nioBuffers(int index, int length) {
return new ByteBuffer[] { nioBuffer(index, length) };
}
@Override
public ByteBuffer nioBuffer(int index, int length) {
// slice() yields an independent view so later position/limit changes by
// the caller cannot affect other users of the backing array.
checkIndex(index, length);
index = idx(index);
ByteBuffer buf = ByteBuffer.wrap(memory, index, length);
return buf.slice();
}
@Override
public ByteBuffer internalNioBuffer(int index, int length) {
// Shared cached view; callers must not retain it across calls.
checkIndex(index, length);
index = idx(index);
return (ByteBuffer) internalNioBuffer().clear().position(index).limit(index + length);
}
@Override
public boolean hasArray() {
return true;
}
@Override
public byte[] array() {
return memory;
}
@Override
public int arrayOffset() {
return offset;
}
@Override
public boolean hasMemoryAddress() {
// No native address: storage is a Java array.
return false;
}
@Override
public long memoryAddress() {
throw new UnsupportedOperationException();
}
@Override
protected ByteBuffer newInternalNioBuffer(byte[] memory) {
return ByteBuffer.wrap(memory);
}
}
| |
/*
* Based on Steven Spencer's Java Tip in JavaWorld:
* http://www.javaworld.com/javaworld/javatips/jw-javatip66.html
*/
package net.sourceforge.ganttproject.util;
import java.io.IOException;
import java.net.URL;
/**
* A simple, static class to display an URL in the system browser.
*
* Under Windows, this will bring up the default browser,
* usually either Netscape or Microsoft IE. The default browser is
* determined by the OS. This has been tested under: Windows 95/98/NT/2000.
*
* Under MacOS, this will bring up the default browser.
* The default browser is determined by the OS.
* This has been tested under: n/a
*
* In other cases (and under Unix),
* the system browser is hard-coded to be 'netscape'.
* Netscape must be in your PATH for this to work. This has been
* tested with the following platforms: AIX, HP-UX and Solaris.
*
* Examples:
* * BrowserControl.displayURL("http://www.javaworld.com")
*
* BrowserControl.displayURL("file://c:\\docs\\index.html")
*
 * BrowserControl.displayURL("file:///user/joe/index.html");
*
* Note - you must include the url type -- either "http://" or
* "file://".
*/
/**
 * A simple, static class to display a URL in the system browser.
 *
 * Under Windows, this will bring up the default browser as determined by the
 * OS. This has been tested under: Windows 95/98/NT/2000.
 *
 * Under MacOS, this will bring up the default browser as determined by the
 * OS. This has been tested under: n/a
 *
 * In other cases (and under Unix), a list of well-known browsers (mozilla,
 * htmlview, netscape) is tried in order; the browser executable must be in
 * your PATH for this to work. This has been tested with the following
 * platforms: AIX, HP-UX and Solaris.
 *
 * Examples:
 *   BrowserControl.displayURL("http://www.javaworld.com")
 *   BrowserControl.displayURL("file://c:\\docs\\index.html")
 *   BrowserControl.displayURL("file:///user/joe/index.html");
 *
 * Note - you must include the url type -- either "http://" or "file://".
 */
public class BrowserControl {

    /** Placeholder in the command-line templates, replaced by the actual URL. */
    private static final String URLTOKEN = "%URLTOKEN%";

    /** Used to identify the windows platform. */
    private static final int WIN_ID = 1;
    /** "os.name" prefix used to discover the windows platform. */
    private static final String WIN_PREFIX = "Windows";
    // The default system browser under windows.
    // Once upon a time:
    //   for 'Windows 9' and 'Windows M': start
    //   for 'Windows': cmd /c start
    private static final String[] WIN_CMDLINE = {"rundll32", "url.dll,FileProtocolHandler", URLTOKEN};

    /** Used to identify the mac platform. */
    private static final int MAC_ID = 2;
    /** "os.name" prefix used to discover the mac platform. */
    private static final String MAC_PREFIX = "Mac";
    /** The default system browser under mac. */
    private static final String[] MAC_CMDLINE = {"open", URLTOKEN};

    /** Used to identify any other platform (mostly Unix). */
    private static final int OTHER_ID = -1;

    /** Browser command lines tried in order on other platforms. */
    private static final String[][] OTHER_CMDLINES = {
        // The first guess for a browser under other systems (and unix):
        // Remote controlling mozilla (http://www.mozilla.org/unix/remote.html)
        {"mozilla", "-remote", "openURL(" + URLTOKEN + ",new-window)"},
        // The second guess for a browser under other systems (and unix):
        // The RedHat script htmlview
        {"htmlview", URLTOKEN},
        // The third guess for a browser under other systems (and unix):
        // Remote controlling netscape (http://wp.netscape.com/newsref/std/x-remote.html)
        {"netscape", "-remote", "openURL(" + URLTOKEN + ")"}
    };

    /** Fallback command lines, index-aligned with OTHER_CMDLINES (null = no fallback). */
    private static final String[][] OTHER_FALLBACKS = {
        // Fallback for remote controlling mozilla: starting up a new mozilla
        {"mozilla", URLTOKEN},
        // No fallback for htmlview
        null,
        // Fallback for remote controlling netscape: starting up a new netscape
        {"netscape", URLTOKEN}
    };

    /**
     * Display a URL in the system browser. If you want to display a
     * file, you must include the absolute path name.
     *
     * @param url the document's url (the url must start with either "http://"
     *            or "file://").
     * @return true if some browser invocation appears to have succeeded
     */
    public static boolean displayURL(String url) {
        // First try the JavaWebStart BasicService, which can open a browser
        // even when running sandbox-restricted. Accessed reflectively so this
        // class still loads when javax.jnlp is absent.
        try {
            Class<?> serManClass = Class.forName("javax.jnlp.ServiceManager");
            Class<?> basSerClass = Class.forName("javax.jnlp.BasicService");
            Class<?>[] stringParam = {String.class};
            Class<?>[] urlParam = {URL.class};
            Object basicService = serManClass.getMethod("lookup", stringParam)
                .invoke(serManClass, new Object[] {"javax.jnlp.BasicService"});
            basSerClass.getMethod("showDocument", urlParam)
                .invoke(basicService, new Object[] {new URL(url)});
            return true;
        } catch (Exception e) {
            // Not running in JavaWebStart or service is not supported.
            // We continue with the platform-specific command lines below.
        }
        switch (getPlatform()) {
        case WIN_ID:
            return runCmdLine(replaceToken(WIN_CMDLINE, URLTOKEN, url));
        case MAC_ID:
            return runCmdLine(replaceToken(MAC_CMDLINE, URLTOKEN, url));
        default:
            for (int i = 0; i < OTHER_CMDLINES.length; i++) {
                if (runCmdLine(replaceToken(OTHER_CMDLINES[i], URLTOKEN, url),
                               replaceToken(OTHER_FALLBACKS[i], URLTOKEN, url))) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Try to determine whether this application is running under Windows,
     * MacOS or some other platform by examining the "os.name" property.
     *
     * @return the ID of the platform (WIN_ID, MAC_ID or OTHER_ID)
     */
    private static int getPlatform() {
        String os = System.getProperty("os.name");
        if (os != null && os.startsWith(WIN_PREFIX)) {
            return WIN_ID;
        }
        if (os != null && os.startsWith(MAC_PREFIX)) {
            return MAC_ID;
        }
        return OTHER_ID;
    }

    /** Joins a command-line array into a single space-separated string for logging. */
    private static String connectStringArray(String[] a) {
        if (a == null) {
            return null;
        }
        StringBuilder s = new StringBuilder();
        for (int i = 0; i < a.length; i++) {
            if (i > 0) {
                s.append(' ');
            }
            s.append(a[i]);
        }
        return s.toString();
    }

    /** Returns a copy of {@code target} with every occurrence of {@code token} replaced. */
    private static String[] replaceToken(String[] target, String token, String replacement) {
        if (null == target) {
            return null;
        }
        String[] result = new String[target.length];
        for (int i = 0; i < target.length; i++) {
            result[i] = target[i].replaceAll(token, replacement);
        }
        return result;
    }

    private static boolean runCmdLine(String[] cmdLine) {
        return runCmdLine(cmdLine, null);
    }

    /**
     * Runs {@code cmdLine}; if a fallback is given, waits for the first
     * command's exit code and starts {@code fallBackCmdLine} when it is
     * non-zero.
     *
     * @return true if a command was started without an exception
     */
    private static boolean runCmdLine(String[] cmdLine, String[] fallBackCmdLine) {
        try {
            System.err.println(
                "Trying to invoke browser, cmd='" +
                connectStringArray(cmdLine) + "' ... ");
            Process p = Runtime.getRuntime().exec(cmdLine);
            if (null != fallBackCmdLine) {
                // wait for exit code -- if it's 0, command worked,
                // otherwise we need to start fallBackCmdLine.
                int exitCode = p.waitFor();
                if (exitCode != 0) {
                    System.err.println(exitCode);
                    System.err.println();
                    System.err.println(
                        "Trying to invoke browser, cmd='" +
                        connectStringArray(fallBackCmdLine) + "' ...");
                    Runtime.getRuntime().exec(fallBackCmdLine);
                }
            }
            System.err.println();
            return true;
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers can observe it; the
            // previous version swallowed the interrupt entirely.
            Thread.currentThread().interrupt();
            System.err.println("Caught: " + e);
        } catch (IOException e) {
            System.err.println("Caught: " + e);
        }
        System.err.println();
        return false;
    }
}
| |
/*
* Copyright (c) 2014 Spotify AB.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.helios.client;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.FutureFallback;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.spotify.helios.common.HeliosException;
import com.spotify.helios.common.Json;
import com.spotify.helios.common.Resolver;
import com.spotify.helios.common.Version;
import com.spotify.helios.common.VersionCompatibility;
import com.spotify.helios.common.VersionCompatibility.Status;
import com.spotify.helios.common.descriptors.Deployment;
import com.spotify.helios.common.descriptors.DeploymentGroup;
import com.spotify.helios.common.descriptors.HostStatus;
import com.spotify.helios.common.descriptors.Job;
import com.spotify.helios.common.descriptors.JobId;
import com.spotify.helios.common.descriptors.JobStatus;
import com.spotify.helios.common.descriptors.RolloutOptions;
import com.spotify.helios.common.protocol.CreateDeploymentGroupResponse;
import com.spotify.helios.common.protocol.CreateJobResponse;
import com.spotify.helios.common.protocol.DeploymentGroupStatusResponse;
import com.spotify.helios.common.protocol.HostDeregisterResponse;
import com.spotify.helios.common.protocol.JobDeleteResponse;
import com.spotify.helios.common.protocol.JobDeployResponse;
import com.spotify.helios.common.protocol.JobUndeployResponse;
import com.spotify.helios.common.protocol.RemoveDeploymentGroupResponse;
import com.spotify.helios.common.protocol.RollingUpdateRequest;
import com.spotify.helios.common.protocol.RollingUpdateResponse;
import com.spotify.helios.common.protocol.SetGoalResponse;
import com.spotify.helios.common.protocol.TaskStatusEvents;
import com.spotify.helios.common.protocol.VersionResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.net.ConnectException;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.zip.GZIPInputStream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.util.concurrent.Futures.immediateFuture;
import static com.google.common.util.concurrent.Futures.transform;
import static com.google.common.util.concurrent.Futures.withFallback;
import static com.google.common.util.concurrent.MoreExecutors.getExitingExecutorService;
import static com.spotify.helios.common.VersionCompatibility.HELIOS_SERVER_VERSION_HEADER;
import static com.spotify.helios.common.VersionCompatibility.HELIOS_VERSION_STATUS_HEADER;
import static java.lang.String.format;
import static java.lang.System.currentTimeMillis;
import static java.net.HttpURLConnection.HTTP_BAD_METHOD;
import static java.net.HttpURLConnection.HTTP_BAD_REQUEST;
import static java.net.HttpURLConnection.HTTP_FORBIDDEN;
import static java.net.HttpURLConnection.HTTP_NOT_FOUND;
import static java.net.HttpURLConnection.HTTP_OK;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Arrays.asList;
import static java.util.concurrent.Executors.newFixedThreadPool;
import static java.util.concurrent.TimeUnit.SECONDS;
public class HeliosClient implements AutoCloseable {
private static final Logger log = LoggerFactory.getLogger(HeliosClient.class);
// Overall budget for retrying a request across all master endpoints.
private static final long RETRY_TIMEOUT_MILLIS = SECONDS.toMillis(60);
// Per-attempt connect/read timeout applied to each HttpURLConnection.
private static final long HTTP_TIMEOUT_MILLIS = SECONDS.toMillis(10);
// Ensures the client/server version-skew warning is logged at most once.
private final AtomicBoolean versionWarningLogged = new AtomicBoolean();
// User identity appended to every request as the "user" query parameter.
private final String user;
// Supplies the current list of master endpoints; may be dynamic (DNS-based).
private final Supplier<List<URI>> endpointSupplier;
// Executor on which all HTTP requests are performed.
private final ListeningExecutorService executorService;
// Primary constructor: all collaborators injected; null arguments rejected.
HeliosClient(final String user,
final Supplier<List<URI>> endpointSupplier,
final ListeningExecutorService executorService) {
this.user = checkNotNull(user);
this.endpointSupplier = checkNotNull(endpointSupplier);
this.executorService = checkNotNull(executorService);
}
// Convenience constructor: wraps a fixed endpoint list in a constant supplier.
HeliosClient(final String user, final List<URI> endpoints,
final ListeningExecutorService executorService) {
this(user, Suppliers.ofInstance(endpoints), executorService);
}
// Convenience constructor: default 4-thread executor that exits with the JVM.
HeliosClient(final String user, final Supplier<List<URI>> endpointSupplier) {
this(user, endpointSupplier, MoreExecutors.listeningDecorator(getExitingExecutorService(
(ThreadPoolExecutor) newFixedThreadPool(4), 0, SECONDS)));
}
// Convenience constructor: fixed endpoints with the default executor.
HeliosClient(final String user, final List<URI> endpoints) {
this(user, Suppliers.ofInstance(endpoints));
}
// Shuts down the request executor immediately; in-flight requests may be
// interrupted. The client is unusable afterwards.
@Override
public void close() {
executorService.shutdownNow();
}
// Builds a request URI with no extra query parameters (beyond "user").
private URI uri(final String path) {
return uri(path, Collections.<String, String>emptyMap());
}
// Builds a logical request URI. The "helios" host is a placeholder; connect()
// substitutes a real master endpoint later. The calling user is always added
// as a "user" query parameter.
// NOTE(review): query values are joined verbatim and not percent-encoded —
// presumably callers only pass URL-safe values; verify before adding new ones.
private URI uri(final String path, final Map<String, String> query) {
// TODO(dano): use a uri builder and clean this mess up
checkArgument(path.startsWith("/"));
final Map<String, String> queryWithUser = Maps.newHashMap(query);
queryWithUser.put("user", user);
final String queryPart = Joiner.on('&').withKeyValueSeparator("=").join(queryWithUser);
try {
return new URI("http", "helios", path, queryPart, null);
} catch (URISyntaxException e) {
throw Throwables.propagate(e);
}
}
/**
 * Expands a format-style path template, percent-encoding each parameter.
 * Slashes inside a parameter are pre-encoded as %2F so a parameter can never
 * introduce additional path segments.
 */
private String path(final String resource, final Object... params) {
  if (params.length == 0) {
    return resource;
  }
  final List<String> encoded = Lists.newArrayList();
  for (final Object param : params) {
    try {
      final String escaped = param.toString().replace("/", "%2F");
      // URI does path encoding right, but using it is painful; prepend "/" to
      // force path context, then strip it from the raw (encoded) result.
      final URI helper = new URI("http", "ignore", "/" + escaped, "");
      encoded.add(helper.getRawPath().substring(1));
    } catch (URISyntaxException e) {
      throw Throwables.propagate(e);
    }
  }
  return format(resource, encoded.toArray());
}
// Issues a body-less request (GET/DELETE/etc.).
private ListenableFuture<Response> request(final URI uri, final String method) {
return request(uri, method, null);
}
/**
 * Submits an HTTP request to the master cluster on the client's executor.
 *
 * @param uri    Logical request URI; the real endpoint host is substituted in
 *               {@code connect()}.
 * @param method HTTP method.
 * @param entity Optional request body, JSON-serialized when non-null.
 * @return a future completing with the raw {@link Response}.
 */
private ListenableFuture<Response> request(final URI uri, final String method,
                                           final Object entity) {
  final Map<String, List<String>> headers = Maps.newHashMap();
  final byte[] entityBytes;
  headers.put(VersionCompatibility.HELIOS_VERSION_HEADER, asList(Version.POM_VERSION));
  if (entity != null) {
    headers.put("Content-Type", asList("application/json"));
    headers.put("Charset", asList("utf-8"));
    entityBytes = Json.asBytesUnchecked(entity);
  } else {
    entityBytes = new byte[]{};
  }
  return executorService.submit(new Callable<Response>() {
    @Override
    public Response call() throws Exception {
      final HttpURLConnection connection = connect(uri, method, entityBytes, headers);
      final int status = connection.getResponseCode();
      final InputStream rawStream;
      if (status / 100 != 2) {
        // Non-2xx payloads come from the error stream, which may be null.
        rawStream = connection.getErrorStream();
      } else {
        rawStream = connection.getInputStream();
      }
      final boolean gzip = isGzipCompressed(connection);
      // Guard against a gzip header with a null error stream: GZIPInputStream
      // would otherwise throw an NPE instead of yielding an empty payload.
      final InputStream stream =
          (gzip && rawStream != null) ? new GZIPInputStream(rawStream) : rawStream;
      final ByteArrayOutputStream payload = new ByteArrayOutputStream();
      if (stream != null) {
        // Close the stream when done so the underlying connection can be
        // released/reused; the original code leaked it.
        try {
          int n;
          final byte[] buffer = new byte[4096];
          while ((n = stream.read(buffer, 0, buffer.length)) != -1) {
            payload.write(buffer, 0, n);
          }
        } finally {
          stream.close();
        }
      }
      final URI realUri = connection.getURL().toURI();
      if (log.isTraceEnabled()) {
        log.trace("rep: {} {} {} {} {} gzip:{}",
                  method, realUri, status, payload.size(), decode(payload), gzip);
      } else {
        log.debug("rep: {} {} {} {} gzip:{}",
                  method, realUri, status, payload.size(), gzip);
      }
      checkprotocolVersionStatus(connection);
      return new Response(method, uri, status, payload.toByteArray());
    }

    /** True when any Content-Encoding header value equals "gzip". */
    private boolean isGzipCompressed(final HttpURLConnection connection) {
      final List<String> encodings = connection.getHeaderFields().get("Content-Encoding");
      if (encodings == null) {
        return false;
      }
      for (final String encoding : encodings) {
        if ("gzip".equals(encoding)) {
          return true;
        }
      }
      return false;
    }
  });
}
// Inspects the version-compatibility headers returned by the master and warns
// (once per client) when the client is ahead of the server.
// NOTE(review): method name should be checkProtocolVersionStatus per Java
// conventions; left as-is since it is called elsewhere in this class.
private void checkprotocolVersionStatus(final HttpURLConnection connection) {
final Status versionStatus = getVersionStatus(connection);
if (versionStatus == null) {
log.debug("Server didn't return a version header!");
return; // shouldn't happen really
}
final String serverVersion = connection.getHeaderField(HELIOS_SERVER_VERSION_HEADER);
if ((versionStatus == VersionCompatibility.Status.MAYBE) &&
(versionWarningLogged.compareAndSet(false, true))) {
log.warn("Your Helios client version [{}] is ahead of the server [{}]. This will"
+ " probably work ok but there is the potential for weird things. If in doubt,"
+ " contact the Helios team if you think the cluster you're connecting to is out"
+ " of date and should be upgraded.", Version.POM_VERSION, serverVersion);
}
}
/**
 * Parses the version-status header from a master response.
 * Returns null when the server sent no such header.
 */
private Status getVersionStatus(final HttpURLConnection connection) {
  final String headerValue = connection.getHeaderField(HELIOS_VERSION_STATUS_HEADER);
  return headerValue == null ? null : VersionCompatibility.Status.valueOf(headerValue);
}
// Renders a response payload for trace logging: pretty-printed JSON when the
// bytes parse as a JSON object, otherwise the raw bytes as UTF-8 text.
private String decode(final ByteArrayOutputStream payload) {
final byte[] bytes = payload.toByteArray();
try {
return Json.asPrettyString(Json.read(bytes, new TypeReference<Map<String, Object>>() {}));
} catch (IOException e) {
return new String(bytes, UTF_8);
}
}
/**
 * Sets up a connection, retrying on connect failure.
 *
 * Cycles through the supplied endpoints starting at a random offset, sleeping
 * briefly between failed attempts, until {@link #RETRY_TIMEOUT_MILLIS} elapses.
 */
private HttpURLConnection connect(final URI uri, final String method, final byte[] entity,
final Map<String, List<String>> headers)
throws URISyntaxException, IOException, TimeoutException, InterruptedException,
HeliosException {
final long deadline = currentTimeMillis() + RETRY_TIMEOUT_MILLIS;
// Random offset spreads load across endpoints between client instances.
final int offset = ThreadLocalRandom.current().nextInt();
while (currentTimeMillis() < deadline) {
final List<URI> endpoints = endpointSupplier.get();
if (endpoints.isEmpty()) {
throw new RuntimeException("failed to resolve master");
}
log.debug("endpoint uris are {}", endpoints);
for (int i = 0; i < endpoints.size() && currentTimeMillis() < deadline; i++) {
final URI endpoint = endpoints.get(positive(offset + i) % endpoints.size());
final String fullpath = endpoint.getPath() + uri.getPath();
final String host = endpoint.getHost();
final int port = endpoint.getPort();
if (host == null || port == -1) {
throw new HeliosException("Master endpoints must be of the form "
+ "\"http[s]://heliosmaster.domain.net:<port>\"");
}
final URI realUri = new URI("http", host + ":" + port, fullpath, uri.getQuery(), null);
try {
log.debug("connecting to {}", realUri);
return connect0(realUri, method, entity, headers);
} catch (ConnectException | SocketTimeoutException | UnknownHostException e) {
// UnknownHostException happens if we can't resolve hostname into IP address.
// UnknownHostException's getMessage method returns just the hostname which is a useless
// message, so log the exception class name to provide more info.
log.debug(e.getClass().getSimpleName() + " - " + e.getMessage());
// Connecting failed, sleep a bit to avoid hammering and then try another endpoint
Thread.sleep(200);
}
}
log.warn("Failed to connect, retrying in 5 seconds.");
Thread.sleep(5000);
}
throw new TimeoutException("Timed out connecting to master");
}
// Performs a single HTTP request attempt against a concrete endpoint URI.
// getResponseCode() at the end forces the request to actually be sent so
// connection failures surface here rather than in the caller.
private HttpURLConnection connect0(final URI uri, final String method, final byte[] entity,
final Map<String, List<String>> headers)
throws IOException {
if (log.isTraceEnabled()) {
log.trace("req: {} {} {} {} {} {}", method, uri,
headers.size(),
Joiner.on(',').withKeyValueSeparator("=").join(headers),
entity.length, Json.asPrettyStringUnchecked(entity));
} else {
log.debug("req: {} {} {} {}", method, uri, headers.size(), entity.length);
}
final HttpURLConnection connection;
connection = (HttpURLConnection) uri.toURL().openConnection();
connection.setRequestProperty("Accept-Encoding", "gzip");
connection.setInstanceFollowRedirects(false);
connection.setConnectTimeout((int) HTTP_TIMEOUT_MILLIS);
connection.setReadTimeout((int) HTTP_TIMEOUT_MILLIS);
for (Map.Entry<String, List<String>> header : headers.entrySet()) {
for (final String value : header.getValue()) {
connection.addRequestProperty(header.getKey(), value);
}
}
if (entity.length > 0) {
connection.setDoOutput(true);
connection.getOutputStream().write(entity);
}
// Must be set after setDoOutput: see setRequestMethod's reflection workaround.
setRequestMethod(connection, method);
connection.getResponseCode();
return connection;
}
/**
 * Maps any int to a non-negative value, used to turn a random offset into a
 * valid endpoint index via modulo.
 *
 * Fix: the previous implementation ({@code value + Integer.MAX_VALUE} for
 * negatives) still returned -1 for {@code Integer.MIN_VALUE}, which would make
 * the endpoint index negative and crash endpoint selection. Masking off the
 * sign bit is total: it returns the value unchanged when non-negative and a
 * non-negative result for every negative input, while still increasing with
 * consecutive inputs so endpoints are cycled in order.
 */
private static int positive(final int value) {
  return value & Integer.MAX_VALUE;
}
// Forces an arbitrary HTTP method (e.g. PATCH) onto HttpURLConnection by
// writing its private "method" field via reflection, since the JDK API only
// whitelists a few methods.
// NOTE(review): this assumes the "method" field lives on the direct superclass
// of the concrete connection class — verify against the JDK/HTTP stack in use;
// newer JDKs restrict such reflective access.
private void setRequestMethod(final HttpURLConnection connection, final String method) {
// Nasty workaround for ancient HttpURLConnection only supporting few methods
final Class<?> httpURLConnectionClass = connection.getClass();
try {
final Field methodField = httpURLConnectionClass.getSuperclass().getDeclaredField("method");
methodField.setAccessible(true);
methodField.set(connection, method);
} catch (NoSuchFieldException | IllegalAccessException e) {
throw Throwables.propagate(e);
}
}
// GET returning a value described by a Jackson TypeReference.
private <T> ListenableFuture<T> get(final URI uri, final TypeReference<T> typeReference) {
return get(uri, Json.type(typeReference));
}
// GET returning a value of a plain class.
private <T> ListenableFuture<T> get(final URI uri, final Class<T> clazz) {
return get(uri, Json.type(clazz));
}
// GET with JSON deserialization of the response body into the given type.
private <T> ListenableFuture<T> get(final URI uri, final JavaType javaType) {
return transform(request(uri, "GET"), new ConvertResponseToPojo<T>(javaType));
}
// PUT returning only the HTTP status code.
private ListenableFuture<Integer> put(final URI uri) {
return status(request(uri, "PUT"));
}
// Deploys a job to a host without a job token.
public ListenableFuture<JobDeployResponse> deploy(final Deployment job, final String host) {
return deploy(job, host, "");
}
// Deploys a job to a host. The listed status codes carry a JSON body that is
// decoded into JobDeployResponse; any other status fails the future.
public ListenableFuture<JobDeployResponse> deploy(final Deployment job, final String host,
final String token) {
final Set<Integer> deserializeReturnCodes = ImmutableSet.of(HTTP_OK, HTTP_NOT_FOUND,
HTTP_BAD_METHOD,
HTTP_BAD_REQUEST,
HTTP_FORBIDDEN);
return transform(request(uri(path("/hosts/%s/jobs/%s", host, job.getJobId()),
ImmutableMap.of("token", token)),
"PUT", job),
ConvertResponseToPojo.create(JobDeployResponse.class, deserializeReturnCodes));
}
// Updates a deployment's goal on a host without a job token.
public ListenableFuture<SetGoalResponse> setGoal(final Deployment job, final String host) {
return setGoal(job, host, "");
}
// Updates a deployment's goal on a host via PATCH.
public ListenableFuture<SetGoalResponse> setGoal(final Deployment job, final String host,
final String token) {
return transform(request(uri(path("/hosts/%s/jobs/%s", host, job.getJobId()),
ImmutableMap.of("token", token)),
"PATCH", job),
ConvertResponseToPojo.create(SetGoalResponse.class,
ImmutableSet.of(HTTP_OK, HTTP_NOT_FOUND,
HTTP_FORBIDDEN)));
}
/** Reduces a response future to just its HTTP status code. */
private ListenableFuture<Integer> status(final ListenableFuture<Response> req) {
  final Function<Response, Integer> extractStatus = new Function<Response, Integer>() {
    @Override
    public Integer apply(final Response response) {
      return response.status;
    }
  };
  return transform(req, extractStatus);
}
// Fetches the deployment of a job on a host.
public ListenableFuture<Deployment> deployment(final String host, final JobId job) {
return get(uri(path("/hosts/%s/jobs/%s", host, job)), Deployment.class);
}
// Fetches the status of a single host.
public ListenableFuture<HostStatus> hostStatus(final String host) {
return get(uri(path("/hosts/%s/status", host)), HostStatus.class);
}
// Fetches statuses for many hosts in a single POST; response is a map keyed
// by host name.
public ListenableFuture<Map<String, HostStatus>> hostStatuses(final List<String> hosts) {
final ConvertResponseToPojo<Map<String, HostStatus>> converter = ConvertResponseToPojo.create(
TypeFactory.defaultInstance().constructMapType(Map.class, String.class, HostStatus.class),
ImmutableSet.of(HTTP_OK));
return transform(request(uri("/hosts/statuses"), "POST", hosts), converter);
}
// Registers a host with the master; resolves to the HTTP status code.
public ListenableFuture<Integer> registerHost(final String host, final String id) {
return put(uri(path("/hosts/%s", host), ImmutableMap.of("id", id)));
}
// Deletes a job without a job token.
public ListenableFuture<JobDeleteResponse> deleteJob(final JobId id) {
return deleteJob(id, "");
}
// Deletes a job, authorizing with the given token.
public ListenableFuture<JobDeleteResponse> deleteJob(final JobId id, final String token) {
return transform(request(uri(path("/jobs/%s", id),
ImmutableMap.of("token", token)),
"DELETE"),
ConvertResponseToPojo.create(JobDeleteResponse.class,
ImmutableSet.of(HTTP_OK, HTTP_NOT_FOUND,
HTTP_BAD_REQUEST,
HTTP_FORBIDDEN)));
}
// Undeploys a job from a host without a job token.
public ListenableFuture<JobUndeployResponse> undeploy(final JobId jobId, final String host) {
return undeploy(jobId, host, "");
}
// Undeploys a job from a host, authorizing with the given token.
public ListenableFuture<JobUndeployResponse> undeploy(final JobId jobId, final String host,
final String token) {
return transform(request(uri(path("/hosts/%s/jobs/%s", host, jobId),
ImmutableMap.of("token", token)),
"DELETE"),
ConvertResponseToPojo.create(JobUndeployResponse.class,
ImmutableSet.of(HTTP_OK, HTTP_NOT_FOUND,
HTTP_BAD_REQUEST,
HTTP_FORBIDDEN)));
}
// Deregisters a host from the master.
public ListenableFuture<HostDeregisterResponse> deregisterHost(final String host) {
return transform(request(uri(path("/hosts/%s", host)), "DELETE"),
ConvertResponseToPojo.create(HostDeregisterResponse.class,
ImmutableSet.of(HTTP_OK, HTTP_NOT_FOUND)));
}
// Lists the names of all registered hosts.
public ListenableFuture<List<String>> listHosts() {
return get(uri("/hosts/"), new TypeReference<List<String>>() {
});
}
// Lists the names of all masters in the cluster.
public ListenableFuture<List<String>> listMasters() {
return get(uri("/masters/"), new TypeReference<List<String>>() {
});
}
// Returns the client and master versions. Connection failures are converted
// into a placeholder master-version string rather than a failed future.
public ListenableFuture<VersionResponse> version() {
// Create a fallback in case we fail to connect to the master. Return null if this happens.
// The transform below will handle this and return an appropriate error message to the caller.
final ListenableFuture<Response> futureWithFallback = withFallback(
request(uri("/version/"), "GET"),
new FutureFallback<Response>() {
@Override
public ListenableFuture<Response> create(Throwable t) throws Exception {
return immediateFuture(null);
}
}
);
return transform(
futureWithFallback,
new AsyncFunction<Response, VersionResponse>() {
@Override
public ListenableFuture<VersionResponse> apply(Response reply) throws Exception {
// null => fallback fired; non-OK => report the status; OK => parse body.
final String masterVersion =
reply == null ? "Unable to connect to master" :
reply.status == HTTP_OK ? Json.read(reply.payload, String.class) :
"Master replied with error code " + reply.status;
return immediateFuture(new VersionResponse(Version.POM_VERSION, masterVersion));
}
});
}
// Creates a new job from the given descriptor.
public ListenableFuture<CreateJobResponse> createJob(final Job descriptor) {
return transform(request(uri("/jobs/"), "POST", descriptor),
ConvertResponseToPojo.create(CreateJobResponse.class,
ImmutableSet.of(HTTP_OK, HTTP_BAD_REQUEST)));
}
// Lists jobs matching a query string.
public ListenableFuture<Map<JobId, Job>> jobs(final String query) {
return get(uri("/jobs", ImmutableMap.of("q", query)), new TypeReference<Map<JobId, Job>>() {
});
}
// Lists all jobs.
public ListenableFuture<Map<JobId, Job>> jobs() {
return get(uri("/jobs"), new TypeReference<Map<JobId, Job>>() {});
}
// Fetches the task status event history for a job.
public ListenableFuture<TaskStatusEvents> jobHistory(final JobId jobId) {
return transform(
request(uri(path("/history/jobs/%s", jobId.toString())), "GET"),
ConvertResponseToPojo.create(TaskStatusEvents.class,
ImmutableSet.of(HTTP_OK, HTTP_NOT_FOUND)));
}
// Fetches the status of a single job.
public ListenableFuture<JobStatus> jobStatus(final JobId jobId) {
return get(uri(path("/jobs/%s/status", jobId)), JobStatus.class);
}
// Fetches statuses for many jobs in a single POST; response is keyed by JobId.
public ListenableFuture<Map<JobId, JobStatus>> jobStatuses(final Set<JobId> jobs) {
final ConvertResponseToPojo<Map<JobId, JobStatus>> converter = ConvertResponseToPojo.create(
TypeFactory.defaultInstance().constructMapType(Map.class, JobId.class, JobStatus.class),
ImmutableSet.of(HTTP_OK));
return transform(request(uri("/jobs/statuses"), "POST", jobs), converter);
}
// Fetches a deployment group by name.
public ListenableFuture<DeploymentGroup> deploymentGroup(final String name) {
return get(uri("/deployment-group/" + name), new TypeReference<DeploymentGroup>() {
});
}
// Lists the names of all deployment groups.
public ListenableFuture<List<String>> listDeploymentGroups() {
return get(uri("/deployment-group/"), new TypeReference<List<String>>() {
});
}
// Fetches the rollout status of a deployment group.
public ListenableFuture<DeploymentGroupStatusResponse> deploymentGroupStatus(final String name) {
return get(uri(path("/deployment-group/%s/status", name)),
new TypeReference<DeploymentGroupStatusResponse>() {});
}
// Creates a new deployment group from the given descriptor.
public ListenableFuture<CreateDeploymentGroupResponse>
createDeploymentGroup(final DeploymentGroup descriptor) {
return transform(request(uri("/deployment-group/"), "POST", descriptor),
ConvertResponseToPojo.create(CreateDeploymentGroupResponse.class,
ImmutableSet.of(HTTP_OK, HTTP_BAD_REQUEST)));
}
// Removes a deployment group by name.
public ListenableFuture<RemoveDeploymentGroupResponse> removeDeploymentGroup(final String name) {
return transform(request(uri("/deployment-group/" + name), "DELETE"),
ConvertResponseToPojo.create(RemoveDeploymentGroupResponse.class,
ImmutableSet.of(HTTP_OK, HTTP_BAD_REQUEST)));
}
// Starts a rolling update of a deployment group to the given job.
public ListenableFuture<RollingUpdateResponse> rollingUpdate(
final String deploymentGroupName, final JobId job, final RolloutOptions options) {
return transform(
request(uri(path("/deployment-group/%s/rolling-update", deploymentGroupName)),
"POST", new RollingUpdateRequest(job, options)),
ConvertResponseToPojo.create(RollingUpdateResponse.class,
ImmutableSet.of(HTTP_OK, HTTP_BAD_REQUEST)));
}
// Aborts an in-progress rolling update; resolves to the HTTP status code.
public ListenableFuture<Integer> abortRollingUpdate(final String deploymentGroupName) {
return status(request(
uri(path("/deployment-group/%s/rolling-update/abort", deploymentGroupName)), "POST"));
}
// Async transform that JSON-decodes a Response body into a POJO of the given
// type, but only for the whitelisted status codes. A 404 that is not
// whitelisted maps to a null result; any other non-whitelisted status fails
// the future with HeliosException.
private static final class ConvertResponseToPojo<T> implements AsyncFunction<Response, T> {
private final JavaType javaType;
// Status codes whose bodies are expected to be decodable JSON.
private final Set<Integer> decodeableStatusCodes;
private ConvertResponseToPojo(final JavaType javaType) {
this(javaType, ImmutableSet.of(HTTP_OK));
}
public ConvertResponseToPojo(final JavaType type, final Set<Integer> decodeableStatusCodes) {
this.javaType = type;
this.decodeableStatusCodes = decodeableStatusCodes;
}
// Factory for an arbitrary Jackson JavaType target.
public static <T> ConvertResponseToPojo<T> create(final JavaType type,
final Set<Integer> decodeableStatusCodes) {
return new ConvertResponseToPojo<>(type, decodeableStatusCodes);
}
// Factory for a plain class target.
public static <T> ConvertResponseToPojo<T> create(final Class<T> clazz,
final Set<Integer> decodeableStatusCodes) {
return new ConvertResponseToPojo<>(Json.type(clazz), decodeableStatusCodes);
}
@Override
public ListenableFuture<T> apply(final Response reply)
throws HeliosException {
// Unlisted 404 => "not found" maps to null rather than an error.
if (reply.status == HTTP_NOT_FOUND && !decodeableStatusCodes.contains(HTTP_NOT_FOUND)) {
return immediateFuture(null);
}
if (!decodeableStatusCodes.contains(reply.status)) {
throw new HeliosException("request failed: " + reply);
}
if (reply.payload.length == 0) {
throw new HeliosException("bad reply: " + reply);
}
final T result;
try {
result = Json.read(reply.payload, javaType);
} catch (IOException e) {
throw new HeliosException("bad reply: " + reply, e);
}
return immediateFuture(result);
}
}
// Entry point for fluent client construction.
public static Builder newBuilder() {
return new Builder();
}
// Fluent builder for HeliosClient. Endpoints may be given directly (URIs or
// strings) or resolved from a domain via the Resolver.
public static class Builder {
private String user;
private Supplier<List<URI>> endpointSupplier;
public Builder setUser(final String user) {
this.user = user;
return this;
}
// Resolves master endpoints dynamically from the given domain.
public Builder setDomain(final String domain) {
return setEndpointSupplier(Resolver.supplier("helios", domain));
}
public Builder setEndpoints(final List<URI> endpoints) {
return setEndpointSupplier(Suppliers.ofInstance(endpoints));
}
public Builder setEndpoints(final URI... endpoints) {
return setEndpointSupplier(Suppliers.ofInstance(asList(endpoints)));
}
public Builder setEndpoints(final String... endpoints) {
return setEndpointStrings(asList(endpoints));
}
public Builder setEndpointStrings(final List<String> endpoints) {
final List<URI> uris = Lists.newArrayList();
for (String endpoint : endpoints) {
uris.add(URI.create(endpoint));
}
return setEndpoints(uris);
}
public Builder setEndpointSupplier(final Supplier<List<URI>> endpointSupplier) {
this.endpointSupplier = endpointSupplier;
return this;
}
// Builds the client; the HeliosClient constructor rejects null user/endpoints.
public HeliosClient build() {
return new HeliosClient(user, endpointSupplier);
}
}
/**
 * Create a new helios client as a specific user, connecting to a helios master cluster in a
 * specific domain.
 *
 * @param domain The target domain.
 * @param user The user to identify as.
 * @return A helios client.
 */
public static HeliosClient create(final String domain, final String user) {
return HeliosClient.newBuilder()
.setDomain(domain)
.setUser(user)
.build();
}
// Immutable carrier for a raw HTTP exchange result: method, request URI,
// status code and response body bytes.
private static class Response {
private final String method;
private final URI uri;
private final int status;
private final byte[] payload;
public Response(final String method, final URI uri, final int status, final byte[] payload) {
this.method = method;
this.uri = uri;
this.status = status;
this.payload = payload;
}
@Override
public String toString() {
return "Response{" +
"method='" + method + '\'' +
", uri=" + uri +
", status=" + status +
", payload=" + decode(payload) +
'}';
}
// Renders at most the first 1 KiB of the payload as UTF-8 for toString().
private String decode(final byte[] payload) {
if (payload == null) {
return "";
}
final int length = Math.min(payload.length, 1024);
return new String(payload, 0, length, UTF_8);
}
}
}
| |
package test.UserFragments;
import android.app.AlertDialog;
import android.app.Fragment;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Message;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Adapter;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import android.widget.ViewSwitcher;
import com.afollestad.materialdialogs.MaterialDialog;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationServices;
import com.google.api.client.http.javanet.NetHttpTransport;
import java.util.ArrayList;
import java.util.List;
import test.Fragments.CameraPictureFragment;
import test.Network.CurrentAddressService;
import test.assesortron5.R;
import test.objects.Address;
import test.objects.Project;
import test.persistence.Constants;
import test.persistence.State;
import test.persistence.Storage;
/**
* Created by otf on 7/18/15.
*/
public class NewProjectRequired extends Fragment implements CameraPictureFragment.CameraPictureFragmentCallback {
// Callback invoked when a completed project is submitted.
ProjectListener projectListener;
// Receives geocoded addresses broadcast by CurrentAddressService.
AddressReceiver addressReceiver;
// Project being edited; created lazily on first successful submit.
Project project;
String userId;
private EditText name, floors, basementFloors;
private EditText streetAddress, city, zip;
private AutoCompleteTextView state;
private Button submit, takePicture, findCurrentAddress;
private ViewSwitcher viewSwitcher;
private ImageView imageView;
// Id of the captured site picture, set by the camera fragment callback.
private String sitePictureId;
public NewProjectRequired() {} // Required empty constructor for Fragment re-instantiation.
// Factory: creates the fragment with the owning user's id in its arguments.
public static NewProjectRequired newInstance(String userId) {
NewProjectRequired newProjectRequired = new NewProjectRequired();
Bundle bundle = new Bundle();
bundle.putString(Constants.USER_ID, userId);
newProjectRequired.setArguments(bundle);
return newProjectRequired;
}
// Factory that also wires a submit listener.
// NOTE(review): the listener is held in a plain field, not in arguments, so it
// presumably does not survive fragment re-creation — verify against callers.
public static NewProjectRequired newInstance(ProjectListener projectListener, String userId) {
NewProjectRequired newProjectRequired = new NewProjectRequired();
newProjectRequired.setProjectListener(projectListener);
Bundle bundle = new Bundle();
bundle.putString(Constants.USER_ID, userId);
newProjectRequired.setArguments(bundle);
return newProjectRequired;
}
// Retains the instance across config changes and registers for address
// broadcasts from CurrentAddressService (unregistered in onStop).
@Override
public void onCreate(Bundle savedInstance) {
super.onCreate(savedInstance);
setRetainInstance(true);
Log.i("NewProject OnCreate", "OnCreate Called...");
addressReceiver = new AddressReceiver();
IntentFilter intentFilter = new IntentFilter(CurrentAddressService.ACTION);
intentFilter.addCategory(Intent.CATEGORY_DEFAULT);
getActivity().registerReceiver(addressReceiver, intentFilter);
}
// Inflates the form layout, reads the user id from arguments and wires views.
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup vg, Bundle saved) {
super.onCreateView(inflater, vg, saved);
View view = inflater.inflate(R.layout.fragment_project_required, null);
userId = getArguments().getString(Constants.USER_ID);
setVariables(view);
setListeners();
return view;
}
// Unregisters the address receiver registered in onCreate.
@Override
public void onStop() {
getActivity().unregisterReceiver(addressReceiver);
super.onStop();
}
// Stores the submit callback (used by the two-arg newInstance factory).
private void setProjectListener(ProjectListener projectListener) {
this.projectListener = projectListener;
}
// Binds all form widgets from the inflated layout and configures the state
// field's autocomplete with US state abbreviations.
private void setVariables(View view) {
name = (EditText)view.findViewById(R.id.project_enter_project_name);
streetAddress = (EditText)view.findViewById(R.id.project_enter_street_address);
city = (EditText)view.findViewById(R.id.project__enter_city);
state = (AutoCompleteTextView)view.findViewById(R.id.project_enter_state);
zip = (EditText)view.findViewById(R.id.project_enter_zip);
floors = (EditText)view.findViewById(R.id.project_enter_above_floors);
basementFloors = (EditText)view.findViewById(R.id.project_enter_below_floors);
takePicture = (Button)view.findViewById(R.id.project_enter_take_picture);
imageView = (ImageView)view.findViewById(R.id.project_enter_site_picture);
viewSwitcher = (ViewSwitcher)view.findViewById(R.id.project_enter_view_switcher);
submit = (Button)view.findViewById(R.id.project_enter_submit);
findCurrentAddress = (Button)view.findViewById(R.id.project_enter_find_current_address);
ArrayAdapter<String> adapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_dropdown_item, State.getAbbreviationList());
state.setAdapter(adapter);
state.setThreshold(1);
}
/**
 * Wires click handlers for submit, picture capture and current-address lookup,
 * and restores a previously captured picture if one exists.
 *
 * Fix: the original submit handler had an if/else on {@code project == null}
 * whose two branches executed identical code apart from project creation; the
 * duplication is collapsed into a single lazy-creation guard (same behavior).
 */
private void setListeners() {
  submit.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View view) {
      if (complete()) {
        // Create the project on first successful submit, then reuse it.
        if (project == null) {
          project = new Project(userId);
        }
        setProject();
        projectListener.submitProject(project);
        clearFields();
      } else {
        new MaterialDialog.Builder(getActivity())
            .title("More Information Needed")
            .content("Projects must have " +
                "\n -Name" +
                "\n -Street Address" +
                "\n -either City & State OR Zip Code" +
                "\n -either Above Ground Floors or Below Ground Floors" +
                "\n\nProject Picture is optional")
            .positiveText("OK")
            .show();
      }
    }
  });
  takePicture.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View view) {
      startCamera();
    }
  });
  // Restore the thumbnail when a picture was already taken (e.g. after rotate).
  if (sitePictureId != null) {
    setImageViewImage(sitePictureId);
  }
  findCurrentAddress.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View view) {
      Intent intent = new Intent(getActivity(), CurrentAddressService.class);
      getActivity().startService(intent);
    }
  });
}
// Launches the headless camera fragment; the captured image id comes back via
// returnImageId().
private void startCamera() {
getFragmentManager()
.beginTransaction()
.add(CameraPictureFragment.newInstance(this), null)
.commit();
}
// Validates the form. Deliberately uses non-short-circuit & and | so that
// check() runs on EVERY field and highlights all missing ones at once,
// instead of stopping at the first failure.
private boolean complete() {
return check(name) &
check(streetAddress) &
(
(check(city) & check(state)) |
check(zip)
) &
(check(floors) | check(basementFloors));
}
// Returns true when the field is non-empty; otherwise highlights it as an
// error and returns false.
private boolean check(EditText editText) {
if (editText.getText().toString().equals("")) {
setError(editText);
return false;
} else {
return true;
}
}
// Populates the address fields from a geocoded Address.
private void setAddress(Address address) {
streetAddress.setText(address.getStreeAddress());
city.setText(address.getCity());
state.setText(address.getState());
zip.setText(address.getZip());
}
// Highlights a missing field in red; tapping the field clears the highlight.
private void setError(final EditText view) {
view.setBackgroundColor(Color.RED);
view.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
view.setBackgroundColor(Color.TRANSPARENT);
Log.i("ONCLICKC", "Clicked");
}
});
}
// Copies all form values into the project, reusing its existing Address when
// present so other address data is preserved.
private void setProject() {
project.setName(name.getText().toString());
Address address;
if (project.getAddress() != null) {
address = project.getAddress();
} else {
address = new Address();
}
address.setStreetAddress(streetAddress.getText().toString());
address.setCity(city.getText().toString());
address.setState(state.getText().toString());
address.setZip(zip.getText().toString());
project.setAddress(address);
project.setNumAGFloors(floors.getText().toString());
project.setNumBasementFloors(basementFloors.getText().toString());
project.addSitePicture(sitePictureId);
}
/**
 * Resets every form field to empty text and removes any error highlight
 * left behind by check()/setError().
 */
private void clearFields() {
    // Same fields, same order as before — just driven by a loop.
    EditText[] formFields = {name, streetAddress, city, state, zip, floors, basementFloors};
    for (EditText field : formFields) {
        field.setText("");
        field.setBackgroundColor(Color.TRANSPARENT);
    }
}
/**
 * Callback from CameraPictureFragment delivering the storage id of the
 * captured picture; remembers the id for setProject() and shows the image.
 *
 * @param imageId storage id of the captured picture
 */
@Override
public void returnImageId(String imageId) {
sitePictureId = imageId;
setImageViewImage(imageId);
}
/**
 * Loads the picture with the given id from Storage off the UI thread and
 * displays it in the image view.
 *
 * NOTE(review): viewSwitcher.showNext() toggles the switcher on every
 * call — confirm this method runs at most once while the placeholder is
 * visible, otherwise a second load would flip the image away again.
 * NOTE(review): getActivity() is captured for the background call and may
 * be null if the fragment is detached before the task runs — verify.
 *
 * @param imageId storage id of the picture to display
 */
private void setImageViewImage(String imageId) {
AsyncTask<String, Void, Bitmap> setImage = new AsyncTask<String, Void, Bitmap>() {
@Override
protected Bitmap doInBackground(String... strings) {
// Storage lookup happens on the AsyncTask worker thread.
return Storage.getPictureByOwnerId(getActivity(), strings[0]);
}
@Override
protected void onPostExecute(Bitmap bitmap) {
// Back on the UI thread: show the bitmap and reveal the image pane.
imageView.setImageBitmap(bitmap);
viewSwitcher.showNext();
}
};
setImage.execute(imageId);
}
/**
 * Receives geocoded address candidates broadcast by CurrentAddressService
 * and lets the user pick one from a dialog; the chosen address is copied
 * into the form via setAddress().
 */
public class AddressReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        // Optional status/error text from the service.
        String message = intent.getStringExtra(CurrentAddressService.MESSAGE);
        if (message != null && !"".equals(message)) {
            Toast.makeText(getActivity(), message, Toast.LENGTH_SHORT).show();
        }
        List<android.location.Address> addressList = intent.getParcelableArrayListExtra(CurrentAddressService.ADDRESS);
        if (addressList != null) {
            final List<Address> addresses = new ArrayList<>();
            for (android.location.Address address : addressList) {
                // FIX: getMaxAddressLineIndex() returns the largest VALID
                // index, so the bound must be inclusive; the original '<'
                // silently skipped the last address line.
                for (int i = 0; i <= address.getMaxAddressLineIndex(); i++) {
                    Log.i("Address line " + i, address.getAddressLine(i));
                }
                Address address1 = Address.fromAndroidAddress(address);
                // Keep only candidates that have at least a street and a city.
                if (address1.getStreeAddress() != null && address1.getCity() != null) {
                    addresses.add(address1);
                }
            }
            if (addresses.size() > 0) {
                new MaterialDialog.Builder(getActivity())
                        .title(addresses.size() + " possible address" + (addresses.size() > 1 ? "es" : ""))
                        .adapter(new AddressListAdapter(addresses),
                                new MaterialDialog.ListCallback() {
                                    @Override
                                    public void onSelection(MaterialDialog materialDialog, View view, int i, CharSequence charSequence) {
                                        setAddress(addresses.get(i));
                                        materialDialog.dismiss();
                                    }
                                })
                        .negativeText("cancel")
                        .show();
            } else {
                Toast.makeText(getActivity(), "No address found, Sorry!", Toast.LENGTH_SHORT).show();
            }
        }
    }
}
/**
 * List adapter that renders each candidate Address as its full-address
 * string; used inside the address-selection dialog shown by AddressReceiver.
 */
private class AddressListAdapter extends BaseAdapter {
    // Immutable reference; the list itself is built once by the receiver.
    private final List<Address> addresses;

    public AddressListAdapter(List<Address> addresses) {
        this.addresses = addresses;
    }

    @Override
    public int getCount() {
        return addresses.size();
    }

    @Override
    public Object getItem(int i) {
        return addresses.get(i);
    }

    @Override
    public long getItemId(int i) {
        // Positions are stable for the lifetime of the dialog.
        return i;
    }

    @Override
    public View getView(int i, View view, ViewGroup viewGroup) {
        if (view == null) {
            LayoutInflater inflater = LayoutInflater.from(getActivity());
            // FIX: inflate against the parent (without attaching) so the
            // item view gets correct LayoutParams; the original passed a
            // null root, which drops the layout's own width/height.
            view = inflater.inflate(R.layout.list_empty, viewGroup, false);
        }
        TextView text = (TextView) view.findViewById(R.id.list_empty_message);
        text.setText(addresses.get(i).getFullAddress());
        return view;
    }
}
/**
 * Callback implemented by the hosting activity to receive the project
 * assembled by this fragment.
 */
public interface ProjectListener {
    /**
     * Called when the user submits a completed project form.
     *
     * @param project the fully populated project, never null
     */
    void submitProject(Project project);
}
}
| |
package com.igormaznitsa.jbbp.mapper;
import static com.igormaznitsa.jbbp.mapper.JBBPMapper.MAKE_CLASS_INSTANCE_METHOD_NAME;
import com.igormaznitsa.jbbp.exceptions.JBBPMapperException;
import com.igormaznitsa.jbbp.io.JBBPBitNumber;
import com.igormaznitsa.jbbp.io.JBBPBitOrder;
import com.igormaznitsa.jbbp.model.JBBPAbstractArrayField;
import com.igormaznitsa.jbbp.model.JBBPAbstractField;
import com.igormaznitsa.jbbp.model.JBBPFieldArrayBit;
import com.igormaznitsa.jbbp.model.JBBPFieldArrayByte;
import com.igormaznitsa.jbbp.model.JBBPFieldArrayInt;
import com.igormaznitsa.jbbp.model.JBBPFieldArrayLong;
import com.igormaznitsa.jbbp.model.JBBPFieldArrayShort;
import com.igormaznitsa.jbbp.model.JBBPFieldArrayStruct;
import com.igormaznitsa.jbbp.model.JBBPFieldArrayUByte;
import com.igormaznitsa.jbbp.model.JBBPFieldArrayUShort;
import com.igormaznitsa.jbbp.model.JBBPFieldInt;
import com.igormaznitsa.jbbp.model.JBBPFieldLong;
import com.igormaznitsa.jbbp.model.JBBPFieldString;
import com.igormaznitsa.jbbp.model.JBBPFieldStruct;
import com.igormaznitsa.jbbp.model.JBBPNumericField;
import com.igormaznitsa.jbbp.utils.Function;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
/**
 * Internal descriptor of one mapped field of a mapping class. It caches the
 * reflection objects (field, optional getter/setter, optional instance-maker
 * method), resolves the effective bin type and bit number from the Bin
 * annotation, and selects the processor (array or scalar) used to push parsed
 * JBBP field values into the mapping field. Records are ordered by their Bin
 * annotation order, then by field name.
 */
public final class MappedFieldRecord implements Comparable<MappedFieldRecord> {
  /**
   * Instantiator that looks for a static makeClassInstance(Class) method in
   * the target class (walking up enclosing classes for local classes) and
   * invokes it; returns null when no such static method exists.
   */
  private static final Function<Class<?>, Object> STATIC_MAKE_CLASS_INSTANCE_INSTANTIATOR =
      (Class<?> klazz) -> {
        Class<?> currentClass = klazz;
        Object result = null;
        boolean find;
        do {
          try {
            final Method method =
                currentClass.getMethod(MAKE_CLASS_INSTANCE_METHOD_NAME, Class.class);
            if (Modifier.isStatic(method.getModifiers())) {
              result = method.invoke(null, klazz);
            }
          } catch (IllegalAccessException ex) {
            // FIX: format string had a typo "(%ss)".
            throw new RuntimeException(String
                .format("Can't get access to static method %s(%s) in %s",
                    MAKE_CLASS_INSTANCE_METHOD_NAME, klazz, currentClass), ex);
          } catch (InvocationTargetException ex) {
            throw new RuntimeException(String
                .format("Can't call static method %s(%s) in %s", MAKE_CLASS_INSTANCE_METHOD_NAME,
                    klazz, currentClass), ex);
          } catch (NoSuchMethodException ex) {
            // do nothing! absence of the method is a normal case
          }
          if (result == null) {
            // Local classes may inherit the maker from an enclosing class.
            if (currentClass.isLocalClass()) {
              currentClass = currentClass.getEnclosingClass();
              find = currentClass != null;
            } else {
              find = false;
            }
          } else {
            find = false;
          }
        } while (find);
        return result;
      };
  /**
   * Instantiator calling the no-arg constructor; returns null for non-static
   * local classes or when no default constructor is declared.
   */
  private static final Function<Class<?>, Object> DEFAULT_CONSTRUCTOR_INSTANTIATOR =
      (Class<?> aClass) -> {
        try {
          if (!aClass.isLocalClass() || Modifier.isStatic(aClass.getModifiers())) {
            return aClass.getConstructor().newInstance();
          } else {
            return null;
          }
        } catch (NoSuchMethodException ex) {
          return null;
        } catch (InvocationTargetException ex) {
          throw new RuntimeException(
              String.format("Error during default constructor call, class %s", aClass), ex);
        } catch (IllegalAccessException ex) {
          throw new RuntimeException(
              String.format("Can't get access to default constructor , class %s", aClass), ex);
        } catch (InstantiationException ex) {
          throw new RuntimeException(String.format("Can't make instance of class %s", aClass), ex);
        }
      };
  /**
   * Processor for mapping fields whose declared Java type is an array:
   * structure arrays are mapped element by element, primitive arrays are
   * copied through mapArrayField().
   */
  private static final FieldProcessor PROC_ARRAYS =
      (record, rootStructure, instance, customFieldProcessor, binField, flags, instantiators) -> {
        if (binField instanceof JBBPAbstractArrayField) {
          if (binField instanceof JBBPFieldArrayStruct) {
            // structure
            final JBBPFieldArrayStruct structArray = (JBBPFieldArrayStruct) binField;
            final Class<?> componentType = record.mappingField.getType().getComponentType();
            Object valueArray = getFieldValue(instance, record.getter, record.mappingField);
            valueArray = valueArray == null ? Array.newInstance(componentType, structArray.size()) :
                valueArray;
            if (Array.getLength(valueArray) != structArray.size()) {
              throw new JBBPMapperException(
                  "Can't map an array field for different expected size [" +
                      Array.getLength(valueArray) + "!=" + structArray.size() + ']', binField,
                  record.mappingClass, record.mappingField, null);
            }
            for (int i = 0; i < structArray.size(); i++) {
              // Reuse pre-existing elements, create missing ones.
              final Object curInstance = Array.get(valueArray, i);
              if (curInstance == null) {
                Array.set(valueArray, i, JBBPMapper.map(structArray.getElementAt(i),
                    tryMakeInstance(componentType, binField, instance, record.mappingField,
                        instantiators), customFieldProcessor, instantiators));
              } else {
                Array.set(valueArray, i,
                    JBBPMapper.map(structArray.getElementAt(i), curInstance, customFieldProcessor));
              }
            }
            setFieldValue(instance, record.setter, record.mappingField, binField, valueArray);
          } else {
            // primitive
            mapArrayField(instance, record.setter, record.mappingField,
                (JBBPAbstractArrayField<?>) binField,
                record.binAnnotation.bitOrder() == JBBPBitOrder.MSB0);
          }
        } else {
          throw new JBBPMapperException("Can't map a non-array value to an array mapping field",
              binField, record.mappingClass, record.mappingField, null);
        }
      };
  /**
   * Processor for scalar mapping fields: numeric fields, strings, nested
   * structures, and array-to-String conversion as a last resort.
   */
  private static final FieldProcessor PROC_NUM =
      (record, rootStructure, instance, customFieldProcessor, binField, flags, instantiators) -> {
        if (binField instanceof JBBPNumericField) {
          mapNumericField(instance, record.setter, record.mappingField, (JBBPNumericField) binField,
              record.binAnnotation.bitOrder() == JBBPBitOrder.MSB0);
        } else if (binField instanceof JBBPFieldString) {
          if (record.mappingField.getType().isPrimitive()) {
            throw new JBBPMapperException("Can't map string to a primitive mapping field", binField,
                record.mappingClass, record.mappingField, null);
          } else {
            setFieldValue(instance, record.setter, record.mappingField, binField,
                ((JBBPFieldString) binField).getAsString());
          }
        } else if (binField instanceof JBBPFieldStruct) {
          if (record.mappingField.getType().isPrimitive()) {
            throw new JBBPMapperException("Can't map structure to a primitive mapping field",
                binField, record.mappingClass, record.mappingField, null);
          } else {
            final Object curValue = getFieldValue(instance, record.getter, record.mappingField);
            if (curValue == null) {
              if (record.instanceMaker == null) {
                setFieldValue(instance, record.setter, record.mappingField, binField, JBBPMapper
                    .map((JBBPFieldStruct) binField,
                        tryMakeInstance(record.mappingField.getType(), binField, instance,
                            record.mappingField, instantiators), customFieldProcessor));
              } else {
                try {
                  // Instance maker provides the target object; map in place.
                  JBBPMapper.map((JBBPFieldStruct) binField, record.instanceMaker.invoke(instance));
                } catch (Exception ex) {
                  // FIX: message typo "generatet" corrected.
                  throw new JBBPMapperException(
                      "Can't map field which member generated by instance", binField,
                      record.mappingClass, record.mappingField, ex);
                }
              }
            } else {
              setFieldValue(instance, record.setter, record.mappingField, binField,
                  JBBPMapper.map((JBBPFieldStruct) binField, curValue, customFieldProcessor));
            }
          }
        } else {
          boolean processed = false;
          // Last resort: a parsed array can be mapped onto a String field.
          if (record.mappingField.getType() == String.class &&
              binField instanceof JBBPAbstractArrayField) {
            final String convertedValue =
                convertFieldValueToString((JBBPAbstractArrayField<?>) binField);
            if (convertedValue != null) {
              setFieldValue(instance, record.setter, record.mappingField, binField, convertedValue);
              processed = true;
            }
          }
          if (!processed) {
            throw new JBBPMapperException("Can't map a field for its value incompatibility",
                binField, record.mappingClass, record.mappingField, null);
          }
        }
      };
  /** Mapping class field to be filled. */
  public final Field mappingField;
  /** Class declaring the mapping field. */
  public final Class<?> mappingClass;
  /** Optional setter for the field, can be null. */
  public final Method setter;
  /** Optional getter for the field, can be null. */
  public final Method getter;
  /** Optional non-static instance-maker method, can be null. */
  public final Method instanceMaker;
  /** Source Bin annotation of the field. */
  public final Bin binAnnotation;
  /** True if the field is mapped as a bit field (BIT or BIT_ARRAY). */
  public final boolean bitWideField;
  /** Effective bin field name (annotation name, or the Java field name). */
  public final String fieldName;
  /** Path of the bin field, taken from the annotation. */
  public final String fieldPath;
  /** Number of bits for bit-wide fields. */
  public final JBBPBitNumber mappedBitNumber;
  /** Resolved bin type of the field. */
  public final BinType fieldType;
  /** Processor chosen for this field (array or scalar). */
  public final FieldProcessor proc;

  /**
   * Build a record for one mapping field, resolving the effective bin type
   * from the annotation (BIT/BIT_ARRAY when fewer than 8 bits are mapped)
   * and choosing the array or scalar processor.
   *
   * @param mappingField  field of the mapping class, must not be null
   * @param instanceMaker optional instance maker method, can be null
   * @param setter        optional setter, can be null
   * @param getter        optional getter, can be null
   * @param mappingClass  class declaring the field, must not be null
   * @param binAnnotation Bin annotation of the field, must not be null
   */
  MappedFieldRecord(final Field mappingField,
                    final Method instanceMaker,
                    final Method setter,
                    final Method getter,
                    final Class<?> mappingClass,
                    final Bin binAnnotation) {
    this.instanceMaker = instanceMaker;
    this.setter = setter;
    this.getter = getter;
    this.mappingField = mappingField;
    this.mappingClass = mappingClass;
    this.binAnnotation = binAnnotation;
    this.mappedBitNumber = binAnnotation.bitNumber();
    if (binAnnotation.type() == BinType.UNDEFINED) {
      BinType thetype = BinType.findCompatible(mappingField.getType());
      if (thetype == null) {
        throw new IllegalStateException("Can't find compatible mapped type for field");
      } else if (this.mappedBitNumber.getBitNumber() < 8 &&
          !(thetype == BinType.STRUCT || thetype == BinType.STRUCT_ARRAY)) {
        // Fewer than 8 bits forces the bit variants of the resolved type.
        thetype = thetype.isArray() ? BinType.BIT_ARRAY : BinType.BIT;
      }
      this.fieldType = thetype;
    } else {
      this.fieldType = binAnnotation.type();
    }
    this.bitWideField = this.fieldType == BinType.BIT || fieldType == BinType.BIT_ARRAY;
    this.fieldName =
        binAnnotation.name().isEmpty() ? mappingField.getName() : binAnnotation.name();
    this.fieldPath = binAnnotation.path();
    if (this.mappingField.getType().isArray()) {
      this.proc = PROC_ARRAYS;
    } else {
      this.proc = PROC_NUM;
    }
  }

  /**
   * Map a parsed array to an array field in mapping class.
   *
   * @param mappingClassInstance a mapping class instance, must not be null
   * @param setter detected setter for the field, can be null
   * @param mappingField a field in the mapping class to be set, must not be
   * null
   * @param arrayField a binary parsed array field, must not be null
   * @param invertBitOrder flag shows that values of an array must be bit
   * reversed before set
   */
  private static void mapArrayField(final Object mappingClassInstance, final Method setter,
                                    final Field mappingField,
                                    final JBBPAbstractArrayField<?> arrayField,
                                    final boolean invertBitOrder) {
    try {
      final Object value;
      if (arrayField instanceof JBBPFieldArrayLong &&
          mappingField.getType().getComponentType() == double.class) {
        // long[] -> double[] via raw IEEE-754 bit reinterpretation.
        final long[] longarray = (long[]) arrayField.getValueArrayAsObject(invertBitOrder);
        final double[] doublearray = new double[longarray.length];
        for (int i = 0; i < longarray.length; i++) {
          doublearray[i] = Double.longBitsToDouble(longarray[i]);
        }
        value = doublearray;
      } else if (arrayField instanceof JBBPFieldArrayInt &&
          mappingField.getType().getComponentType() == float.class) {
        // int[] -> float[] via raw IEEE-754 bit reinterpretation.
        final int[] intarray = (int[]) arrayField.getValueArrayAsObject(invertBitOrder);
        final float[] floatarray = new float[intarray.length];
        for (int i = 0; i < intarray.length; i++) {
          floatarray[i] = Float.intBitsToFloat(intarray[i]);
        }
        value = floatarray;
      } else if (arrayField instanceof JBBPFieldArrayUShort &&
          mappingField.getType().getComponentType() == char.class) {
        // unsigned short[] -> char[] by direct cast.
        final short[] shortarray = (short[]) arrayField.getValueArrayAsObject(invertBitOrder);
        final char[] chararray = new char[shortarray.length];
        for (int i = 0; i < shortarray.length; i++) {
          chararray[i] = (char) shortarray[i];
        }
        value = chararray;
      } else {
        value = arrayField.getValueArrayAsObject(invertBitOrder);
      }
      if (setter == null) {
        mappingField.set(mappingClassInstance, value);
      } else {
        setter.invoke(mappingClassInstance, value);
      }
    } catch (IllegalAccessException ex) {
      throw new JBBPMapperException("Can't get access to a mapping field", arrayField,
          mappingClassInstance.getClass(), mappingField, ex);
    } catch (IllegalArgumentException ex) {
      throw new JBBPMapperException("Can't set argument to a mapping field", arrayField,
          mappingClassInstance.getClass(), mappingField, ex);
    } catch (InvocationTargetException ex) {
      throw new JBBPMapperException("Can't set argument to field through setter", arrayField,
          mappingClassInstance.getClass(), mappingField, ex);
    }
  }

  /**
   * Convert an array field into its string representation.
   *
   * @param field an array field to be converted, must not be null
   * @return the string representation of the array or null if the field can't
   * be converted
   */
  private static String convertFieldValueToString(final JBBPAbstractArrayField<?> field) {
    final StringBuilder result;
    if (field instanceof JBBPFieldArrayBit) {
      final JBBPFieldArrayBit array = (JBBPFieldArrayBit) field;
      result = new StringBuilder(array.size());
      for (final byte b : array.getArray()) {
        result.append((char) (b & 0xFF));
      }
    } else if (field instanceof JBBPFieldArrayByte) {
      final JBBPFieldArrayByte array = (JBBPFieldArrayByte) field;
      result = new StringBuilder(array.size());
      for (final byte b : array.getArray()) {
        result.append((char) (b & 0xFF));
      }
    } else if (field instanceof JBBPFieldArrayUByte) {
      final JBBPFieldArrayUByte array = (JBBPFieldArrayUByte) field;
      result = new StringBuilder(array.size());
      for (final byte b : array.getArray()) {
        result.append((char) (b & 0xFF));
      }
    } else if (field instanceof JBBPFieldArrayShort) {
      final JBBPFieldArrayShort array = (JBBPFieldArrayShort) field;
      result = new StringBuilder(array.size());
      for (final short b : array.getArray()) {
        result.append((char) b);
      }
    } else if (field instanceof JBBPFieldArrayUShort) {
      final JBBPFieldArrayUShort array = (JBBPFieldArrayUShort) field;
      result = new StringBuilder(array.size());
      for (final short b : array.getArray()) {
        result.append((char) b);
      }
    } else {
      result = null;
    }
    return result == null ? null : result.toString();
  }

  /**
   * Map a parsed primitive numeric field to a primitive field in a mapping
   * class.
   *
   * @param mappingClassInstance the mapping class instance, must not be null
   * @param setter detected setter for field, can be null
   * @param mappingField a mapping field to set the value, must not be null
   * @param numericField a parsed numeric field which value should be used, must
   * not be null
   * @param invertBitOrder flag shows that the parsed numeric field value must
   * be reversed in its bit before setting
   */
  private static void mapNumericField(final Object mappingClassInstance, final Method setter,
                                      final Field mappingField, final JBBPNumericField numericField,
                                      final boolean invertBitOrder) {
    final Class<?> fieldClass = mappingField.getType();
    try {
      if (fieldClass == byte.class) {
        final byte value = (byte) (invertBitOrder ? numericField.getAsInvertedBitOrder() :
            numericField.getAsInt());
        if (setter == null) {
          mappingField.setByte(mappingClassInstance, value);
        } else {
          setter.invoke(mappingClassInstance, value);
        }
      } else if (fieldClass == boolean.class) {
        if (setter == null) {
          mappingField.setBoolean(mappingClassInstance, numericField.getAsBool());
        } else {
          setter.invoke(mappingClassInstance, numericField.getAsBool());
        }
      } else if (fieldClass == char.class) {
        final char value = (char) (invertBitOrder ? numericField.getAsInvertedBitOrder() :
            numericField.getAsInt());
        if (setter == null) {
          mappingField.setChar(mappingClassInstance, value);
        } else {
          setter.invoke(mappingClassInstance, value);
        }
      } else if (fieldClass == short.class) {
        final short value = (short) (invertBitOrder ? numericField.getAsInvertedBitOrder() :
            numericField.getAsInt());
        if (setter == null) {
          mappingField.setShort(mappingClassInstance, value);
        } else {
          setter.invoke(mappingClassInstance, value);
        }
      } else if (fieldClass == int.class) {
        final int value =
            (int) (invertBitOrder ? numericField.getAsInvertedBitOrder() : numericField.getAsInt());
        if (setter == null) {
          mappingField.setInt(mappingClassInstance, value);
        } else {
          setter.invoke(mappingClassInstance, value);
        }
      } else if (fieldClass == long.class) {
        final long value =
            (invertBitOrder ? numericField.getAsInvertedBitOrder() : numericField.getAsLong());
        if (setter == null) {
          mappingField.setLong(mappingClassInstance, value);
        } else {
          setter.invoke(mappingClassInstance, value);
        }
      } else if (fieldClass == float.class) {
        final float value;
        if (numericField instanceof JBBPFieldInt) {
          value =
              invertBitOrder ? Float.intBitsToFloat((int) numericField.getAsInvertedBitOrder()) :
                  Float.intBitsToFloat(numericField.getAsInt());
        } else {
          value =
              invertBitOrder ? Float.intBitsToFloat((int) numericField.getAsInvertedBitOrder()) :
                  numericField.getAsFloat();
        }
        if (setter == null) {
          mappingField.setFloat(mappingClassInstance, value);
        } else {
          setter.invoke(mappingClassInstance, value);
        }
      } else if (fieldClass == double.class) {
        final double value;
        if (numericField instanceof JBBPFieldLong) {
          value = invertBitOrder ? Double.longBitsToDouble(numericField.getAsInvertedBitOrder()) :
              Double.longBitsToDouble(numericField.getAsLong());
        } else {
          value = invertBitOrder ? Double.longBitsToDouble(numericField.getAsInvertedBitOrder()) :
              numericField.getAsDouble();
        }
        if (setter == null) {
          mappingField.setDouble(mappingClassInstance, value);
        } else {
          setter.invoke(mappingClassInstance, value);
        }
      } else {
        throw new JBBPMapperException(
            "Unsupported mapping class field type to be mapped for binary parsed data",
            (JBBPAbstractField) numericField, mappingClassInstance.getClass(), mappingField, null);
      }
    } catch (IllegalAccessException ex) {
      throw new JBBPMapperException("Can't get access to a mapping field",
          (JBBPAbstractField) numericField, mappingClassInstance.getClass(), mappingField, ex);
    } catch (IllegalArgumentException ex) {
      throw new JBBPMapperException("Can't set argument to a mapping field",
          (JBBPAbstractField) numericField, mappingClassInstance.getClass(), mappingField, ex);
    } catch (InvocationTargetException ex) {
      throw new JBBPMapperException("Can't set argument to a mapping field through setter",
          (JBBPAbstractField) numericField, mappingClassInstance.getClass(), mappingField, ex);
    }
  }

  /**
   * Get a value of a field from a class instance.
   *
   * @param classInstance a class instance object
   * @param getter method to get field value, can be null
   * @param classField a class field which value must be returned, must not be
   * null
   * @return the field value for the class instance
   */
  private static Object getFieldValue(final Object classInstance, final Method getter,
                                      final Field classField) {
    try {
      if (getter == null) {
        return classField.get(classInstance);
      } else {
        return getter.invoke(classInstance);
      }
    } catch (IllegalArgumentException ex) {
      // FIX: message used to read "Can't set get value ...".
      throw new JBBPMapperException("Can't get value from a mapping field", null,
          classInstance.getClass(), classField, ex);
    } catch (IllegalAccessException ex) {
      throw new JBBPMapperException("Can't get access to a mapping field", null,
          classInstance.getClass(), classField, ex);
    } catch (InvocationTargetException ex) {
      throw new JBBPMapperException("Can't get field value through getter", null,
          classInstance.getClass(), classField, ex);
    }
  }

  /**
   * Set a value to a field of a class instance. Can't be used for static
   * fields!
   *
   * @param classInstance a class instance
   * @param setter setter for the field, can be null
   * @param classField a mapping class field which should be set by the value,
   * must not be null
   * @param binField a parsed bin field which value will be set, can be null
   * @param value a value to be set to the class field
   */
  static void setFieldValue(final Object classInstance, final Method setter, final Field classField,
                            final JBBPAbstractField binField, final Object value) {
    try {
      if (setter == null) {
        classField.set(classInstance, value);
      } else {
        setter.invoke(classInstance, value);
      }
    } catch (IllegalArgumentException ex) {
      throw new JBBPMapperException("Can't set value to a mapping field", binField,
          classInstance.getClass(), classField, ex);
    } catch (IllegalAccessException ex) {
      throw new JBBPMapperException("Can't get access to a mapping field", binField,
          classInstance.getClass(), classField, ex);
    } catch (InvocationTargetException ex) {
      throw new JBBPMapperException("Can't set field value through setter", binField,
          classInstance.getClass(), classField, ex);
    }
  }

  /**
   * Try to create an instance of a type through, in order: the provided
   * instantiator functions, a non-static makeClassInstance method on the
   * mapping object, a static makeClassInstance method of the type, and
   * finally the default constructor.
   *
   * @param type          class to be instantiated, must not be null
   * @param binField      parsed field the instance is needed for, used in errors
   * @param mappingObject mapping object which may declare an instance maker
   * @param mappingField  mapping field, used for error reporting
   * @param instantiators extra instantiator functions, tried first
   * @param <T>           type of the created instance
   * @return the created instance, never null
   * @throws JBBPMapperException if no way to create the instance was found
   */
  @SuppressWarnings("TryWithIdenticalCatches")
  private static <T> T tryMakeInstance(
      final Class<T> type,
      final JBBPAbstractField binField,
      final Object mappingObject,
      final Field mappingField,
      final Function<Class<?>, Object>[] instantiators
  ) {
    T result = null;
    for (final Function<Class<?>, Object> instantiator : instantiators) {
      result = type.cast(instantiator.apply(type));
      if (result != null) {
        break;
      }
    }
    if (result == null) {
      Exception detectedException = null;
      try {
        final Method method =
            mappingObject.getClass().getMethod(MAKE_CLASS_INSTANCE_METHOD_NAME, Class.class);
        if (!Modifier.isStatic(method.getModifiers())) {
          result = type.cast(
              mappingObject.getClass().getMethod(MAKE_CLASS_INSTANCE_METHOD_NAME, Class.class)
                  .invoke(mappingObject, type));
        }
      } catch (NoSuchMethodException ex) {
        // do nothing
      } catch (IllegalAccessException ex) {
        // WARNING! Don't replace by multicatch for Android compatibility!
        detectedException = ex;
      } catch (InvocationTargetException ex) {
        detectedException = ex;
      }
      if (detectedException != null) {
        throw new RuntimeException(String
            .format("Error during %s(%s) call", MAKE_CLASS_INSTANCE_METHOD_NAME,
                mappingObject.getClass()), detectedException);
      }
      if (result == null) {
        result = type.cast(STATIC_MAKE_CLASS_INSTANCE_INSTANTIATOR.apply(type));
        if (result == null) {
          result = type.cast(DEFAULT_CONSTRUCTOR_INSTANTIATOR.apply(type));
        }
      }
      if (result == null) {
        throw new JBBPMapperException(String.format("Can't create instance of %s", type), binField,
            mappingObject.getClass(), mappingField, null);
      }
    }
    return result;
  }

  /**
   * Order records by their Bin annotation order, falling back to the mapping
   * field name for equal orders.
   */
  @Override
  public int compareTo(final MappedFieldRecord o) {
    final int thisOrder = this.binAnnotation.order();
    final int thatOrder = o.binAnnotation.order();
    final int result;
    if (thisOrder == thatOrder) {
      result = this.mappingField.getName().compareTo(o.mappingField.getName());
    } else {
      result = thisOrder < thatOrder ? -1 : 1;
    }
    return result;
  }

  /**
   * Internal strategy which moves the value of a parsed bin field into the
   * mapping field of an object instance.
   */
  interface FieldProcessor {
    @SuppressWarnings("unchecked")
    void apply(
        MappedFieldRecord record,
        JBBPFieldStruct rootStructure,
        Object instance,
        JBBPMapperCustomFieldProcessor customFieldProcessor,
        JBBPAbstractField binField,
        int flags,
        Function<Class<?>, Object>... instantiators
    );
  }
}
| |
/*
* Copyright 2010 sasc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sasc.smartcard.app.conax;
import java.util.Arrays;
import java.util.List;
import sasc.emv.EMVUtil;
import sasc.smartcard.common.AtrHandler;
import sasc.smartcard.common.SmartCard;
import sasc.terminal.CardConnection;
import sasc.terminal.CardResponse;
import sasc.terminal.TerminalException;
import sasc.util.Log;
import sasc.util.Util;
/**
*
* @author sasc
*/
/**
 * Session handler for Conax Pay TV smart cards: matched via ATR patterns, it
 * walks the proprietary Conax command sequence (init card, init EMM, menu,
 * unique address, return channel, subscription info), logging the traffic and
 * printing the card number when it can be extracted.
 *
 * @author sasc
 */
public class ConaxSession implements AtrHandler {
    //3B 24 00 80 72 A4 45
    private static final List<String> ATR_PATTERNS = Arrays.asList("3B 24 00 .. .. .. 45", "3B 24 00 30 42 30 30", "3B 34 00 00 30 42 30 30", "3B 34 94 00 30 42 30 30");

    public static AtrHandler getAtrHandler() {
        return new ConaxSession();
    }

    @Override
    public List<String> getAtrPatterns() {
        return ATR_PATTERNS;
    }

    /**
     * Runs the Conax probing sequence against the connected card.
     *
     * @return true to handle the card exclusively
     */
    @Override
    public boolean process(SmartCard card, CardConnection terminal) throws TerminalException {
        Log.debug("Found Conax Pay TV Card ATR");
        byte SW1;
        byte SW2;
        String command;
        CardResponse response;
        //Init card
        Log.commandHeader("Send Conax command 'Init card'");
        command = "DD 26 00 00 03 10 01 01 00"; //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        // SW1 == 0x98 signals response data available; it is fetched
        // unconditionally below (an empty 'if (SW1 == 0x98)' was removed here).
        //Get init card response
        Log.commandHeader("Send Conax command 'Get response'");
        command = "DD CA 00 00 " + Util.byte2Hex(response.getSW2()); //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        //Init EMM
        Log.commandHeader("Send Conax command 'Init EMM'");
        command = "DD 82 00 00 14 11 12 01 B0 0F FF FF DD 00 00 09 04 0B 00 E0 30 1B 64 3D FE 00"; //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        //Get init EMM response
        Log.commandHeader("Send Conax command 'Get response'");
        command = "DD CA 00 00 " + Util.byte2Hex(response.getSW2()); //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        byte[] data = response.getData();
        if (data.length == 0x1a) {
            // Card number: 4 bytes at offset 13, left-padded to 11 digits and
            // grouped 3-4-4; the trailing "-X" stands in for a digit not
            // present in this response.
            long cardNumber = Util.byteArrayToLong(data, 13, 4);
            String cardNumberStr = String.valueOf(cardNumber);
            while (cardNumberStr.length() < 11) {
                cardNumberStr = "0" + cardNumberStr;
            }
            StringBuilder sb = new StringBuilder();
            sb.append(cardNumberStr.substring(0, 3));
            sb.append(" ");
            sb.append(cardNumberStr.substring(3, 7));
            sb.append(" ");
            sb.append(cardNumberStr.substring(7, 11));
            sb.append("-X");
            Log.info("Card Number: " + sb.toString());
        }
        //Menu Request
        Log.commandHeader("Send Conax command 'Get Menu'");
        command = "DD B2 00 00 03 15 01 AA 00"; //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        // Bit 4 set in SW1 (0x98/0x9c) means more response data is pending.
        while (Util.isBitSet(SW1, 4)) {
            //Get Menu response
            Log.commandHeader("Send Conax command 'Get response'");
            command = "DD CA 00 00 " + Util.byte2Hex(response.getSW2()); //with Le
            response = EMVUtil.sendCmdNoParse(terminal, command);
            SW1 = response.getSW1();
            SW2 = response.getSW2();
        }
        //Crypt (Get Unique Address)
        Log.commandHeader("Send Conax command 'Get Unique Address'");
        command = "DD C2 00 00 02 66 00 00"; //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        //Get Crypt response
        Log.commandHeader("Send Conax command 'Get response'");
        command = "DD CA 00 00 " + Util.byte2Hex(response.getSW2()); //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        //Get Return Channel Details
        Log.commandHeader("Send Conax command 'Get Return Channel Details'");
        command = "DD C4 00 00 02 1B 00 00"; //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        while (Util.isBitSet(SW1, 4)) { //0x9c || 0x98
            //Get Return Channel response
            Log.commandHeader("Send Conax command 'Get response'");
            command = "DD CA 00 00 " + Util.byte2Hex(response.getSW2()); //with Le
            response = EMVUtil.sendCmdNoParse(terminal, command);
            SW1 = response.getSW1();
            SW2 = response.getSW2();
        }
        //Get Subscription info
        Log.commandHeader("Send Conax command 'Get Subscription Info'");
        command = "DD C6 00 00 03 1C 01 00 00"; //with Le
        response = EMVUtil.sendCmdNoParse(terminal, command);
        SW1 = response.getSW1();
        SW2 = response.getSW2();
        while (Util.isBitSet(SW1, 4)) { //0x9c || 0x98
            //Get Subscription info response
            Log.commandHeader("Send Conax command 'Get response'");
            command = "DD CA 00 00 " + Util.byte2Hex(response.getSW2()); //with Le
            response = EMVUtil.sendCmdNoParse(terminal, command);
            SW1 = response.getSW1();
            SW2 = response.getSW2();
        }
        return true; //Handle exclusively
    }

    public static void main(String[] args) throws Exception {
        // CardConnection cardConnection = TerminalUtil.connect(TerminalUtil.State.CARD_INSERTED);
        //
        // ConaxSession.start(cardConnection);
        System.out.println(parseDate((byte) 0x21, (byte) 0x15));
        System.out.println(parseDateToString((byte) 0x21, (byte) 0x15));
    }

    /**
     * Decodes a packed 2-byte Conax date and returns it as an int formatted
     * 'yyyymmdd'. Layout: year tens = data0 bits 5-7, year ones = data1 high
     * nibble, month = data1 low nibble, day = data0 low 5 bits; base year 1990.
     *
     * @return date as decimal yyyymmdd
     */
    private static int parseDate(byte data0, byte data1) {
        // FIX: mask to 0..255 before the unsigned shift. Bytes are
        // sign-extended to int first, so the previous 'data1 >>> 4' produced
        // a huge value for bytes with the high bit set.
        int y = 1990 + (((data1 & 0xFF) >>> 4) + (((data0 & 0xFF) >>> 5) & 0x7) * 10);
        int m = data1 & 0xf;
        int d = data0 & 0x1f;
        return (y * 100 + m) * 100 + d;
    }

    /**
     * Same decoding as {@link #parseDate}, rendered as 'yyyy-mm-dd' with
     * zero-padded month and day.
     */
    private static String parseDateToString(byte data0, byte data1) {
        // Same sign-extension fix as in parseDate().
        int y = 1990 + (((data1 & 0xFF) >>> 4) + (((data0 & 0xFF) >>> 5) & 0x7) * 10);
        int m = data1 & 0xf;
        int d = data0 & 0x1f;
        return y + "-" + (m < 10 ? "0" + m : m) + "-" + (d < 10 ? "0" + d : d);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode.fsdataset;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsBlocksMetadata;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataStorage;
import org.apache.hadoop.hdfs.server.datanode.FinalizedReplica;
import org.apache.hadoop.hdfs.server.datanode.Replica;
import org.apache.hadoop.hdfs.server.datanode.ReplicaInPipelineInterface;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetFactory;
import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean;
import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import org.apache.hadoop.util.ReflectionUtils;
/**
* This is a service provider interface for the underlying storage that
* stores replicas for a data node.
* The default implementation stores replicas on local drives.
*/
@InterfaceAudience.Private
public interface FsDatasetSpi<V extends FsVolumeSpi> extends FSDatasetMBean {
  /**
   * A factory for creating {@link FsDatasetSpi} objects.
   */
  public static abstract class Factory<D extends FsDatasetSpi<?>> {
    /**
     * @param conf configuration holding the factory class under
     *        {@link DFSConfigKeys#DFS_DATANODE_FSDATASET_FACTORY_KEY}
     * @return the configured factory.
     */
    public static Factory<?> getFactory(Configuration conf) {
      // The implementation class is pluggable; FsDatasetFactory is the default.
      @SuppressWarnings("rawtypes")
      final Class<? extends Factory> clazz = conf.getClass(
          DFSConfigKeys.DFS_DATANODE_FSDATASET_FACTORY_KEY,
          FsDatasetFactory.class,
          Factory.class);
      return ReflectionUtils.newInstance(clazz, conf);
    }

    /** Create a new object. */
    public abstract D newInstance(DataNode datanode, DataStorage storage,
        Configuration conf) throws IOException;

    /** Does the factory create simulated objects? */
    public boolean isSimulated() {
      return false;
    }
  }

  /**
   * Create rolling logs.
   *
   * @param bpid the block pool id
   * @param prefix the prefix of the log names.
   * @return rolling logs
   */
  public RollingLogs createRollingLogs(String bpid, String prefix
      ) throws IOException;

  /** @return a list of volumes. */
  public List<V> getVolumes();

  /** @return a storage with the given storage ID */
  public DatanodeStorage getStorage(final String storageUuid);

  /** @return one or more storage reports for attached volumes. */
  public StorageReport[] getStorageReports(String bpid)
      throws IOException;

  /** @return the volume that contains a replica of the block. */
  public V getVolume(ExtendedBlock b);

  /** @return a volume information map (name => info). */
  public Map<String, Object> getVolumeInfoMap();

  /** @return a list of finalized blocks for the given block pool. */
  public List<FinalizedReplica> getFinalizedBlocks(String bpid);

  /**
   * Check whether the in-memory block record matches the block on the disk,
   * and, in case that they are not matched, update the record or mark it
   * as corrupted.
   */
  public void checkAndUpdate(String bpid, long blockId, File diskFile,
      File diskMetaFile, FsVolumeSpi vol);

  /**
   * @param b - the block
   * @return a stream if the meta-data of the block exists;
   *         otherwise, return null.
   * @throws IOException
   */
  public LengthInputStream getMetaDataInputStream(ExtendedBlock b
      ) throws IOException;

  /**
   * Returns the specified block's on-disk length (excluding metadata)
   * @return the specified block's on-disk length (excluding metadata)
   * @throws IOException on error
   */
  public long getLength(ExtendedBlock b) throws IOException;

  /**
   * Get reference to the replica meta info in the replicasMap.
   * To be called from methods that are synchronized on {@link FSDataset}
   * @return replica from the replicas map
   */
  @Deprecated
  public Replica getReplica(String bpid, long blockId);

  /**
   * @return replica meta information
   */
  public String getReplicaString(String bpid, long blockId);

  /**
   * @return the stored block object (carrying the generation stamp stored
   *         with the block).
   */
  public Block getStoredBlock(String bpid, long blkid) throws IOException;

  /**
   * Returns an input stream at specified offset of the specified block
   * @param b block
   * @param seekOffset offset with in the block to seek to
   * @return an input stream to read the contents of the specified block,
   *         starting at the offset
   * @throws IOException
   */
  public InputStream getBlockInputStream(ExtendedBlock b, long seekOffset)
      throws IOException;

  /**
   * Returns an input stream at specified offset of the specified block
   * The block is still in the tmp directory and is not finalized
   * @param b block
   * @param blkoff offset within the block data to seek to
   * @param ckoff offset within the checksum data to seek to
   * @return an input stream to read the contents of the specified block,
   *         starting at the offset
   * @throws IOException
   */
  public ReplicaInputStreams getTmpInputStreams(ExtendedBlock b, long blkoff,
      long ckoff) throws IOException;

  /**
   * Creates a temporary replica and returns the meta information of the replica
   *
   * @param b block
   * @return the meta info of the replica which is being written to
   * @throws IOException if an error occurs
   */
  public ReplicaInPipelineInterface createTemporary(ExtendedBlock b
      ) throws IOException;

  /**
   * Creates a RBW replica and returns the meta info of the replica
   *
   * @param b block
   * @return the meta info of the replica which is being written to
   * @throws IOException if an error occurs
   */
  public ReplicaInPipelineInterface createRbw(ExtendedBlock b
      ) throws IOException;

  /**
   * Recovers a RBW replica and returns the meta info of the replica
   *
   * @param b block
   * @param newGS the new generation stamp for the replica
   * @param minBytesRcvd the minimum number of bytes that the replica could have
   * @param maxBytesRcvd the maximum number of bytes that the replica could have
   * @return the meta info of the replica which is being written to
   * @throws IOException if an error occurs
   */
  public ReplicaInPipelineInterface recoverRbw(ExtendedBlock b,
      long newGS, long minBytesRcvd, long maxBytesRcvd) throws IOException;

  /**
   * Convert a temporary replica to a RBW.
   * @param temporary the temporary replica being converted
   * @return the result RBW
   */
  public ReplicaInPipelineInterface convertTemporaryToRbw(
      ExtendedBlock temporary) throws IOException;

  /**
   * Append to a finalized replica and returns the meta info of the replica
   *
   * @param b block
   * @param newGS the new generation stamp for the replica
   * @param expectedBlockLen the number of bytes the replica is expected to have
   * @return the meta info of the replica which is being written to
   * @throws IOException
   */
  public ReplicaInPipelineInterface append(ExtendedBlock b, long newGS,
      long expectedBlockLen) throws IOException;

  /**
   * Recover a failed append to a finalized replica
   * and returns the meta info of the replica
   *
   * @param b block
   * @param newGS the new generation stamp for the replica
   * @param expectedBlockLen the number of bytes the replica is expected to have
   * @return the meta info of the replica which is being written to
   * @throws IOException
   */
  public ReplicaInPipelineInterface recoverAppend(ExtendedBlock b, long newGS,
      long expectedBlockLen) throws IOException;

  /**
   * Recover a failed pipeline close
   * It bumps the replica's generation stamp and finalize it if RBW replica
   *
   * @param b block
   * @param newGS the new generation stamp for the replica
   * @param expectedBlockLen the number of bytes the replica is expected to have
   * @return the storage uuid of the replica.
   * @throws IOException
   */
  public String recoverClose(ExtendedBlock b, long newGS, long expectedBlockLen
      ) throws IOException;

  /**
   * Finalizes the block previously opened for writing using writeToBlock.
   * The block size is what is in the parameter b and it must match the amount
   * of data written
   * @throws IOException
   */
  public void finalizeBlock(ExtendedBlock b) throws IOException;

  /**
   * Unfinalizes the block previously opened for writing using writeToBlock.
   * The temporary file associated with this block is deleted.
   * @throws IOException
   */
  public void unfinalizeBlock(ExtendedBlock b) throws IOException;

  /**
   * Returns one block report per volume.
   * @param bpid Block Pool Id
   * @return - a map of DatanodeStorage to block report for the volume.
   */
  public Map<DatanodeStorage, BlockListAsLongs> getBlockReports(String bpid);

  /**
   * Returns the cache report - the full list of cached block IDs of a
   * block pool.
   * @param bpid Block Pool Id
   * @return the cache report - the full list of cached block IDs.
   */
  public List<Long> getCacheReport(String bpid);

  /** Does the dataset contain the block? */
  public boolean contains(ExtendedBlock block);

  /**
   * Is the block valid?
   * @return - true if the specified block is valid
   */
  public boolean isValidBlock(ExtendedBlock b);

  /**
   * Is the block a valid RBW?
   * @return - true if the specified block is a valid RBW
   */
  public boolean isValidRbw(ExtendedBlock b);

  /**
   * Invalidates the specified blocks
   * @param bpid Block pool Id
   * @param invalidBlks - the blocks to be invalidated
   * @throws IOException
   */
  public void invalidate(String bpid, Block invalidBlks[]) throws IOException;

  /**
   * Caches the specified blocks
   * @param bpid Block pool id
   * @param blockIds - block ids to cache
   */
  public void cache(String bpid, long[] blockIds);

  /**
   * Uncaches the specified blocks
   * @param bpid Block pool id
   * @param blockIds - blocks ids to uncache
   */
  public void uncache(String bpid, long[] blockIds);

  /**
   * Determine if the specified block is cached.
   * @param bpid Block pool id
   * @param blockId - block id
   * @return true if the block is cached
   */
  public boolean isCached(String bpid, long blockId);

  /**
   * Check if all the data directories are healthy
   * @throws DiskErrorException
   */
  public void checkDataDir() throws DiskErrorException;

  /**
   * Shutdown the FSDataset
   */
  public void shutdown();

  /**
   * Sets the file pointer of the checksum stream so that the last checksum
   * will be overwritten
   * @param b block
   * @param outs The streams for the data file and checksum file
   * @param checksumSize number of bytes each checksum has
   * @throws IOException
   */
  public void adjustCrcChannelPosition(ExtendedBlock b,
      ReplicaOutputStreams outs, int checksumSize) throws IOException;

  /**
   * Checks how many valid storage volumes there are in the DataNode.
   * @return true if more than the minimum number of valid volumes are left
   *         in the FSDataSet.
   */
  public boolean hasEnoughResource();

  /**
   * Get visible length of the specified replica.
   */
  long getReplicaVisibleLength(final ExtendedBlock block) throws IOException;

  /**
   * Initialize a replica recovery.
   * @return actual state of the replica on this data-node or
   *         null if data-node does not have the replica.
   */
  public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock
      ) throws IOException;

  /**
   * Update replica's generation stamp and length and finalize it.
   * @return the ID of storage that stores the block
   */
  public String updateReplicaUnderRecovery(ExtendedBlock oldBlock,
      long recoveryId, long newLength) throws IOException;

  /**
   * add new block pool ID
   * @param bpid Block pool Id
   * @param conf Configuration
   */
  public void addBlockPool(String bpid, Configuration conf) throws IOException;

  /**
   * Shutdown and remove the block pool from underlying storage.
   * @param bpid Block pool Id to be removed
   */
  public void shutdownBlockPool(String bpid) ;

  /**
   * Deletes the block pool directories. If force is false, directories are
   * deleted only if no block files exist for the block pool. If force
   * is true entire directory for the blockpool is deleted along with its
   * contents.
   * @param bpid BlockPool Id to be deleted.
   * @param force If force is false, directories are deleted only if no
   *        block files exist for the block pool, otherwise entire
   *        directory for the blockpool is deleted along with its contents.
   * @throws IOException
   */
  public void deleteBlockPool(String bpid, boolean force) throws IOException;

  /**
   * Get {@link BlockLocalPathInfo} for the given block.
   */
  public BlockLocalPathInfo getBlockLocalPathInfo(ExtendedBlock b
      ) throws IOException;

  /**
   * Get a {@link HdfsBlocksMetadata} corresponding to the list of blocks in
   * <code>blocks</code>.
   *
   * @param bpid pool to query
   * @param blockIds List of block ids for which to return metadata
   * @return metadata Metadata for the list of blocks
   * @throws IOException
   */
  public HdfsBlocksMetadata getHdfsBlocksMetadata(String bpid,
      long[] blockIds) throws IOException;

  /**
   * Enable 'trash' for the given dataset. When trash is enabled, files are
   * moved to a separate trash directory instead of being deleted immediately.
   * This can be useful for example during rolling upgrades.
   */
  public void enableTrash(String bpid);

  /**
   * Restore trash
   */
  public void restoreTrash(String bpid);

  /**
   * @return true when trash is enabled
   */
  public boolean trashEnabled(String bpid);
}
| |
/*
* Copyright 2014 - 2017 Yannick Watier
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ca.watier.binclassreader.reader;
import ca.watier.binclassreader.abstracts.AbstractPoolData;
import ca.watier.binclassreader.abstracts.Readable;
import ca.watier.binclassreader.annotations.BinClassParser;
import ca.watier.binclassreader.annotations.PoolDataOptions;
import ca.watier.binclassreader.annotations.PoolItemIndex;
import ca.watier.binclassreader.enums.CollectionTypeEnum;
import ca.watier.binclassreader.enums.PoolTypeEnum;
import ca.watier.binclassreader.parsers.*;
import ca.watier.binclassreader.pojos.FieldPojo;
import ca.watier.binclassreader.structs.ConstAttributeInfo;
import ca.watier.binclassreader.structs.ConstClassInfo;
import ca.watier.binclassreader.structs.ConstFieldInfo;
import ca.watier.binclassreader.structs.ConstMethodInfo;
import ca.watier.binclassreader.tree.Tree;
import ca.watier.binclassreader.tree.TreeElement;
import ca.watier.binclassreader.utils.BaseUtils;
import ca.watier.defassert.Assert;
import ca.watier.multiarraymap.MultiArrayMap;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Created by Yannick on 1/25/2016.
*/
@PoolDataOptions(storageType = CollectionTypeEnum.NONE)
public class ClassReader extends AbstractPoolData {
private Map<Short, FieldPojo> fieldSorter;
private InputStream classData;
private Map<Class<?>, Object> sections;
private Map<Integer, Object> constPool;
private MultiArrayMap<PoolTypeEnum, Object> pool;
private Object lastReadStruct;
public ClassReader(InputStream classData, Class<?>... classSections) {
Assert.assertNotEmpty(classSections);
Assert.assertNotNull(classData);
this.classData = classData;
fieldSorter = new TreeMap<Short, FieldPojo>();
sections = this.initSections(BaseUtils.createNewArrayOfObject(classSections));
pool = generateTree();
}
public ClassReader() {
fieldSorter = new TreeMap<Short, FieldPojo>();
}
public Map<Class<?>, Object> initSections(Object... type) {
Map<Class<?>, Object> values = new HashMap<Class<?>, Object>();
if (type != null) {
for (Object obj : type) {
if (obj == null) {
continue;
}
values.put(obj.getClass(), read(obj));
}
}
return values;
}
public void overwriteStreamWithBytes(byte[] bytes) {
if (bytes == null || bytes.length == 0) {
return;
}
this.classData = new ByteArrayInputStream(bytes);
}
/**
* @param obj
* @param values - This parameter is only used when there's a manual value (set via {@link BinClassParser#manualMode()}))
* @return
*/
public <T> T read(T obj, byte[]... values) {
fieldSorter.clear();
if (obj == null) {
return null;
}
lastReadStruct = obj;
Class<?> currentClass = obj.getClass();
//Fetch all readable fields (from child to top parent)
while (currentClass != null) {
for (Field field : currentClass.getDeclaredFields()) {
field.setAccessible(true);
for (Annotation annotation : field.getDeclaredAnnotations()) {
if (annotation instanceof BinClassParser) {
BinClassParser binClassParser = (BinClassParser) annotation;
fieldSorter.put(binClassParser.readOrder(), new FieldPojo(field, binClassParser.byteToRead(), binClassParser.manualMode()));
}
}
}
currentClass = currentClass.getSuperclass();
}
if (!fieldSorter.isEmpty()) { //Sort the fields
short manualInx = 0;
for (FieldPojo value : fieldSorter.values()) {
Field fieldToWrite = value.getFieldToWrite();
boolean isManual = value.isManualMode();
if (isManual && values == null || isManual && values.length == 0) {
continue;
}
fieldToWrite.setAccessible(true);
try {
byte nbByteToRead = value.getNbByteToRead();
byte[] bytes = (isManual) ? values[manualInx++] : readFromCurrentStream(nbByteToRead);
fieldToWrite.set(obj, bytes);
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
if (obj instanceof Readable) {
Readable readable = (Readable) obj;
if (readable.isEventsEnabled()) {
readable.afterFieldsInitialized(this);
}
}
}
return obj;
}
public Object getLastReadStruct() {
return lastReadStruct;
}
/**
* @param nbByteToRead
* @return An array of positives bytes (as short). The values will be -1 for the overflowing values.
* @throws IOException
*/
public byte[] readFromCurrentStream(int nbByteToRead) throws IOException {
if (nbByteToRead <= 0) {
return null;
}
byte[] buffer = new byte[nbByteToRead];
classData.read(buffer);
return buffer;
}
public MultiArrayMap<PoolTypeEnum, Object> generateTree() {
constPool = getPoolByClass(PoolParser.class);
if (constPool == null || constPool.isEmpty()) {
return null;
}
MultiArrayMap<PoolTypeEnum, Object> values = new MultiArrayMap<PoolTypeEnum, Object>();
iterateOnPool((List<Object>) getPoolByClass(InterfacesParser.class), ConstClassInfo.class, PoolTypeEnum.INTERFACE, values);
iterateOnPool((List<Object>) getPoolByClass(FieldsParser.class), ConstFieldInfo.class, PoolTypeEnum.FIELD, values);
iterateOnPool((List<Object>) getPoolByClass(MethodsParser.class), ConstMethodInfo.class, PoolTypeEnum.METHOD, values);
iterateOnPool((List<Object>) getPoolByClass(AttributesParser.class), ConstAttributeInfo.class, PoolTypeEnum.ATTRIBUTE, values);
return values;
}
private void iterateOnPool(List<Object> interfacePool, Class<?> mustBeOfType, PoolTypeEnum poolTypeEnum, MultiArrayMap<PoolTypeEnum, Object> data) {
if (interfacePool != null) {
for (Object interfaceObj : interfacePool) {
if (interfaceObj != null && interfaceObj.getClass().equals(mustBeOfType)) {
TreeElement rootTreeElement = new TreeElement(interfaceObj);
Tree tree = buildTree(poolTypeEnum, new Tree(rootTreeElement), rootTreeElement);
data.put(poolTypeEnum, tree);
if (interfaceObj instanceof Readable) {
Readable readable = (Readable) interfaceObj;
if (readable.isEventsEnabled()) {
readable.afterTreeIsBuilt(tree);
}
}
}
}
}
}
private Tree buildTree(PoolTypeEnum poolTypeEnum, Tree tree, TreeElement currentTreeElement) {
if (tree == null || currentTreeElement == null) {
return null;
}
Object currentObj = currentTreeElement.getCurrent();
Method[] declaredMethods = currentObj.getClass().getDeclaredMethods();
if (declaredMethods != null) {
for (Method declaredMethod : declaredMethods) {
PoolItemIndex annotation = declaredMethod.getAnnotation(PoolItemIndex.class);
if (annotation != null) {
try {
List<? extends Class<?>> mustBeOfTypeArr = Arrays.asList(annotation.mustBeOfType());
Integer invoke = (Integer) declaredMethod.invoke(currentObj);
if (invoke == null || invoke < 1 || invoke > (constPool.size() - 1)) {
continue;
}
Object child = constPool.get((invoke - 1));
if (child instanceof ConstClassInfo && PoolTypeEnum.INTERFACE.equals(poolTypeEnum)) {
child = constPool.get(((ConstClassInfo) child).getNameIndex() - 1); //Get the ConstUtf8Info from The ConstClassInfo
}
if (!mustBeOfTypeArr.contains(child.getClass())) {
break;
}
TreeElement childTreeElement = new TreeElement(child, annotation.type());
currentTreeElement.addChildren(childTreeElement);
childTreeElement.addParent(currentTreeElement);
buildTree(poolTypeEnum, tree, childTreeElement);
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
}
}
}
return tree;
}
public long skipFromCurrentStream(int nbByteToSkip) throws IOException {
if (nbByteToSkip <= 0) {
return 0;
}
return classData.skip(nbByteToSkip);
}
public int readFromCurrentStream() throws IOException {
return classData.read();
}
public Map<Class<?>, Object> getSections() {
return sections;
}
public MultiArrayMap<PoolTypeEnum, Object> getMappedPool() {
return pool;
}
public Map<Integer, Object> getConstPool() {
return constPool;
}
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.reteoo.nodes;
import org.drools.core.base.DroolsQuery;
import org.drools.core.common.BetaConstraints;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.reteoo.BetaMemory;
import org.drools.core.reteoo.ExistsNode;
import org.drools.core.reteoo.LeftTuple;
import org.drools.core.reteoo.LeftTupleMemory;
import org.drools.core.reteoo.LeftTupleSink;
import org.drools.core.reteoo.LeftTupleSource;
import org.drools.core.reteoo.LeftTupleSourceUtils;
import org.drools.core.reteoo.ModifyPreviousTuples;
import org.drools.core.reteoo.ObjectSource;
import org.drools.core.reteoo.ReteooBuilder;
import org.drools.core.reteoo.RightTuple;
import org.drools.core.reteoo.RightTupleMemory;
import org.drools.core.reteoo.RuleRemovalContext;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.rule.ContextEntry;
import org.drools.core.spi.PropagationContext;
import org.drools.core.util.FastIterator;
import org.drools.core.util.Iterator;
import org.drools.core.util.index.RightTupleList;
public class ReteExistsNode extends ExistsNode {
/** No-arg constructor — presumably required for serialization/externalization; verify against Drools conventions. */
public ReteExistsNode() {
}
/**
 * Builds an exists node wiring the given left and right inputs with the
 * supplied beta constraints; all work is delegated to the ExistsNode parent.
 *
 * @param id             the node id
 * @param leftInput      the left (tuple) input source
 * @param rightInput     the right (object) input source
 * @param joinNodeBinder the constraints joining left and right
 * @param context        the build context
 */
public ReteExistsNode(final int id,
                      final LeftTupleSource leftInput,
                      final ObjectSource rightInput,
                      final BetaConstraints joinNodeBinder,
                      final BuildContext context) {
    super( id,
           leftInput,
           rightInput,
           joinNodeBinder,
           context );
}
/** Asserts a fact on the right input; delegates to the shared ReteBetaNodeUtils helper. */
public void assertObject( final InternalFactHandle factHandle,
                          final PropagationContext pctx,
                          final InternalWorkingMemory wm ) {
    ReteBetaNodeUtils.assertObject(this, factHandle, pctx, wm);
}
/** Attaches this node to the network; delegates to the shared ReteBetaNodeUtils helper. */
public void attach(BuildContext context) {
    ReteBetaNodeUtils.attach(this, context);
}
/** Removes this node during rule removal; delegates to the shared ReteBetaNodeUtils helper. */
public boolean doRemove(RuleRemovalContext context, ReteooBuilder builder, InternalWorkingMemory[] workingMemories) {
    return ReteBetaNodeUtils.doRemove(this, context, builder, workingMemories);
}
/** Handles modification of a right-input fact; delegates to the shared ReteBetaNodeUtils helper. */
public void modifyObject(InternalFactHandle factHandle, ModifyPreviousTuples modifyPreviousTuples, PropagationContext context, InternalWorkingMemory workingMemory) {
    ReteBetaNodeUtils.modifyObject(this, factHandle, modifyPreviousTuples, context, workingMemory);
}
/**
 * Handles modification of a left-input fact; delegates to
 * LeftTupleSourceUtils using this node's OTN id and inferred mask.
 */
public void modifyLeftTuple(InternalFactHandle factHandle,
                            ModifyPreviousTuples modifyPreviousTuples,
                            PropagationContext context,
                            InternalWorkingMemory workingMemory) {
    LeftTupleSourceUtils.doModifyLeftTuple(factHandle, modifyPreviousTuples, context, workingMemory,
                                           this, getLeftInputOtnId(), getLeftInferredMask());
}
/**
 * Handles a new left tuple. Scans the right memory for the first matching
 * right tuple (the "blocker"): if one is found the exists condition holds
 * and the tuple is propagated to the sink; otherwise the tuple is stored in
 * the left memory so later right tuples can match it.
 */
public void assertLeftTuple(final LeftTuple leftTuple,
                            final PropagationContext context,
                            final InternalWorkingMemory workingMemory) {
    final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
    RightTupleMemory rightMemory = memory.getRightTupleMemory();
    ContextEntry[] contextEntry = memory.getContext();
    boolean useLeftMemory = true;
    if ( !this.tupleMemoryEnabled ) {
        // This is a hack, to not add closed DroolsQuery objects
        Object object = ((InternalFactHandle) context.getFactHandle()).getObject();
        if ( !(object instanceof DroolsQuery) || !((DroolsQuery) object).isOpen() ) {
            useLeftMemory = false;
        }
    }
    this.constraints.updateFromTuple( contextEntry,
                                      workingMemory,
                                      leftTuple );
    // Find the first right tuple that satisfies the constraints; it becomes the blocker.
    FastIterator it = getRightIterator( rightMemory );
    for ( RightTuple rightTuple = getFirstRightTuple(leftTuple, rightMemory, (InternalFactHandle) context.getFactHandle(), it); rightTuple != null; rightTuple = (RightTuple) it.next(rightTuple)) {
        if ( this.constraints.isAllowedCachedLeft( contextEntry,
                                                   rightTuple.getFactHandle() ) ) {
            leftTuple.setBlocker( rightTuple );
            if ( useLeftMemory ) {
                rightTuple.addBlocked( leftTuple );
            }
            break;
        }
    }
    this.constraints.resetTuple( contextEntry );
    if ( leftTuple.getBlocker() != null ) {
        // blocked means matched: the exists condition holds, so propagate
        this.sink.propagateAssertLeftTuple( leftTuple,
                                            context,
                                            workingMemory,
                                            useLeftMemory );
    } else if ( useLeftMemory ) {
        // LeftTuple is not blocked, so add to memory so other RightTuples can match
        memory.getLeftTupleMemory().add( leftTuple );
    }
}
/**
 * Handles a new right tuple. Adds it to the right memory, then tries to
 * block each unblocked left tuple in memory with it: a newly blocked left
 * tuple now satisfies the exists condition, is removed from left memory
 * and propagated to the sink.
 */
public void assertRightTuple( final RightTuple rightTuple,
                              final PropagationContext context,
                              final InternalWorkingMemory workingMemory ) {
    final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
    memory.getRightTupleMemory().add( rightTuple );
    if ( memory.getLeftTupleMemory() == null || memory.getLeftTupleMemory().size() == 0 ) {
        // do nothing here, as no left memory
        return;
    }
    this.constraints.updateFromFactHandle( memory.getContext(),
                                           workingMemory,
                                           rightTuple.getFactHandle() );
    LeftTupleMemory leftMemory = memory.getLeftTupleMemory();
    FastIterator it = getLeftIterator( leftMemory );
    for (LeftTuple leftTuple = getFirstLeftTuple( rightTuple, leftMemory, it ); leftTuple != null; ) {
        // preserve next now, in case we remove this leftTuple
        LeftTuple temp = (LeftTuple) it.next(leftTuple);
        // we know that only unblocked LeftTuples are still in the memory
        if ( this.constraints.isAllowedCachedRight( memory.getContext(),
                                                    leftTuple ) ) {
            leftTuple.setBlocker( rightTuple );
            rightTuple.addBlocked( leftTuple );
            memory.getLeftTupleMemory().remove( leftTuple );
            this.sink.propagateAssertLeftTuple( leftTuple,
                                                context,
                                                workingMemory,
                                                true );
        }
        leftTuple = temp;
    }
    this.constraints.resetFactHandle( memory.getContext() );
}
/**
 * Handles retraction of a right tuple. If it was blocking left tuples,
 * those tuples must search for a new blocker (delegated to
 * updateLeftTupleToNewBlocker, which also removes the right tuple from
 * memory); otherwise the right tuple is just removed from memory.
 */
public void retractRightTuple(final RightTuple rightTuple,
                              final PropagationContext pctx,
                              final InternalWorkingMemory workingMemory) {
    final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
    RightTupleMemory rtm = memory.getRightTupleMemory();
    if ( rightTuple.getBlocked() != null ) {
        updateLeftTupleToNewBlocker(rightTuple, pctx, workingMemory, memory, memory.getLeftTupleMemory(), rightTuple.getBlocked(), rtm, false);
        rightTuple.nullBlocked();
    } else {
        // it's also removed in the updateLeftTupleToNewBlocker
        rtm.remove(rightTuple);
    }
    this.constraints.resetTuple( memory.getContext() );
}
/**
 * Retract the <code>ReteTuple</code>; any resulting propagated joins are
 * also retracted. An unblocked tuple only needs removing from the left
 * memory; a blocked tuple was propagated, so a retract is propagated to
 * the sink and the blocker's reference to it is cleared.
 *
 * @param leftTuple
 *            The tuple being retracted
 * @param context
 *            The <code>PropagationContext</code>
 * @param workingMemory
 *            The working memory session.
 */
public void retractLeftTuple(final LeftTuple leftTuple,
                             final PropagationContext context,
                             final InternalWorkingMemory workingMemory) {
    RightTuple blocker = leftTuple.getBlocker();
    if ( blocker == null ) {
        // never propagated, so just remove from the left memory
        final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
        memory.getLeftTupleMemory().remove( leftTuple );
    } else {
        this.sink.propagateRetractLeftTuple( leftTuple,
                                             context,
                                             workingMemory );
        blocker.removeBlocked( leftTuple );
    }
}
/**
 * Re-evaluates a modified left tuple. The previous blocker may no longer
 * match after the modification, so a new blocker is searched for from the
 * start of the (possibly changed) bucket; depending on whether the tuple
 * transitioned between blocked and unblocked, an assert, retract or modify
 * is propagated to the sink.
 */
public void modifyLeftTuple(LeftTuple leftTuple,
                            PropagationContext context,
                            InternalWorkingMemory workingMemory) {
    final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
    RightTupleMemory rightMemory = memory.getRightTupleMemory();
    FastIterator rightIt = getRightIterator( rightMemory );
    RightTuple firstRightTuple = getFirstRightTuple(leftTuple, rightMemory, (InternalFactHandle) context.getFactHandle(), rightIt);
    // If in memory, remove it, because we'll need to add it anyway if it's not blocked, to ensure iteration order
    RightTuple blocker = leftTuple.getBlocker();
    if ( blocker == null ) {
        memory.getLeftTupleMemory().remove( leftTuple );
    } else {
        // check if we changed bucket
        if ( rightMemory.isIndexed()&& !rightIt.isFullIterator() ) {
            // if newRightTuple is null, we assume there was a bucket change and that bucket is empty
            if ( firstRightTuple == null || firstRightTuple.getMemory() != blocker.getMemory() ) {
                // we changed bucket, so blocker no longer blocks
                blocker.removeBlocked( leftTuple );
                blocker = null;
            }
        }
    }
    this.constraints.updateFromTuple( memory.getContext(),
                                      workingMemory,
                                      leftTuple );
    if ( blocker != null && !isLeftUpdateOptimizationAllowed() ) {
        // optimization disabled: always drop the old blocker and re-search
        blocker.removeBlocked(leftTuple);
        blocker = null;
    }
    // if we were not blocked before (or changed buckets), or the previous blocker no longer blocks, then find the next blocker
    if ( blocker == null || !this.constraints.isAllowedCachedLeft( memory.getContext(),
                                                                   blocker.getFactHandle() ) ) {
        if ( blocker != null ) {
            // remove previous blocker if it exists, as we know it doesn't block any more
            blocker.removeBlocked( leftTuple );
        }
        // find first blocker, because it's a modify, we need to start from the beginning again
        for ( RightTuple newBlocker = firstRightTuple; newBlocker != null; newBlocker = (RightTuple) rightIt.next(newBlocker) ) {
            if ( this.constraints.isAllowedCachedLeft( memory.getContext(),
                                                       newBlocker.getFactHandle() ) ) {
                leftTuple.setBlocker( newBlocker );
                newBlocker.addBlocked( leftTuple );
                break;
            }
        }
    }
    if ( leftTuple.getBlocker() == null ) {
        // not blocked
        memory.getLeftTupleMemory().add( leftTuple ); // add to memory so other fact handles can attempt to match
        if ( leftTuple.getFirstChild() != null ) {
            // with previous children, retract
            this.sink.propagateRetractLeftTuple( leftTuple,
                                                 context,
                                                 workingMemory );
        }
        // with no previous children. do nothing.
    } else if ( leftTuple.getFirstChild() == null ) {
        // blocked, with no previous children, assert
        this.sink.propagateAssertLeftTuple( leftTuple,
                                            context,
                                            workingMemory,
                                            true );
    } else {
        // blocked, with previous children, modify
        this.sink.propagateModifyChildLeftTuple( leftTuple,
                                                 context,
                                                 workingMemory,
                                                 true );
    }
    this.constraints.resetTuple( memory.getContext() );
}
/**
 * Re-evaluates a modified right tuple. Unblocked left tuples in memory are
 * re-checked against it (newly matched ones are blocked and propagated);
 * left tuples it previously blocked are handed to
 * updateLeftTupleToNewBlocker to find a new blocker.
 */
public void modifyRightTuple(RightTuple rightTuple,
                             PropagationContext context,
                             InternalWorkingMemory workingMemory) {
    final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
    if ( memory.getLeftTupleMemory() == null || (memory.getLeftTupleMemory().size() == 0 && rightTuple.getBlocked() == null) ) {
        // do nothing here, as we know there are no left tuples
        //normally do this at the end, but as we are exiting early, make sure the buckets are still correct.
        memory.getRightTupleMemory().removeAdd( rightTuple );
        return;
    }
    // TODO: wtd with behaviours?
    //        if ( !behavior.assertRightTuple( memory.getBehaviorContext(),
    //                                         rightTuple,
    //                                         workingMemory ) ) {
    //            // destroy right tuple
    //            rightTuple.unlinkFromRightParent();
    //            return;
    //        }
    this.constraints.updateFromFactHandle( memory.getContext(),
                                           workingMemory,
                                           rightTuple.getFactHandle() );
    LeftTupleMemory leftMemory = memory.getLeftTupleMemory();
    FastIterator leftIt = getLeftIterator( leftMemory );
    LeftTuple firstLeftTuple = getFirstLeftTuple( rightTuple, leftMemory, leftIt );
    LeftTuple firstBlocked = rightTuple.getBlocked();
    // we now have reference to the first Blocked, so null it in the rightTuple itself, so we can rebuild
    rightTuple.nullBlocked();
    // first process non-blocked tuples, as we know only those ones are in the left memory.
    for ( LeftTuple leftTuple = firstLeftTuple; leftTuple != null; ) {
        // preserve next now, in case we remove this leftTuple
        LeftTuple temp = (LeftTuple) leftIt.next( leftTuple );
        // we know that only unblocked LeftTuples are still in the memory
        if ( this.constraints.isAllowedCachedRight( memory.getContext(),
                                                    leftTuple ) ) {
            leftTuple.setBlocker( rightTuple );
            rightTuple.addBlocked( leftTuple );
            // this is now blocked so remove from memory
            leftMemory.remove( leftTuple );
            // subclasses like ForallNotNode might override this propagation
            this.sink.propagateAssertLeftTuple( leftTuple,
                                                context,
                                                workingMemory,
                                                true );
        }
        leftTuple = temp;
    }
    RightTupleMemory rightTupleMemory = memory.getRightTupleMemory();
    if ( firstBlocked != null ) {
        updateLeftTupleToNewBlocker(rightTuple, context, workingMemory, memory, leftMemory, firstBlocked, rightTupleMemory, true);
    } else {
        // we had to do this at the end, rather than beginning as this 'if' block needs the next memory tuple
        rightTupleMemory.removeAdd( rightTuple );
    }
    this.constraints.resetFactHandle( memory.getContext() );
    this.constraints.resetTuple( memory.getContext() );
}
    /**
     * Finds a new blocker for each left tuple in the {@code firstBlocked} chain
     * after their previous blocker ({@code rightTuple}) was updated/moved.
     *
     * When the right memory index is neither a unification nor a comparison index,
     * the search can resume from the updated right tuple's position in its current
     * bucket (older right tuples were already checked); otherwise each left tuple
     * restarts from its first matching right tuple. Left tuples for which no new
     * blocker is found are put back into the left memory and a retract is
     * propagated for them.
     *
     * @param removeAdd when true the right tuple is re-added to the end of its
     *                  memory (giving it a chance to re-match in the same bucket),
     *                  otherwise it is removed outright
     */
    private void updateLeftTupleToNewBlocker(RightTuple rightTuple, PropagationContext context, InternalWorkingMemory workingMemory, BetaMemory memory, LeftTupleMemory leftMemory, LeftTuple firstBlocked, RightTupleMemory rightTupleMemory, boolean removeAdd) {// will attempt to resume from the last blocker, if it's not a comparison or unification index.
        boolean resumeFromCurrent = !(indexedUnificationJoin || rightTupleMemory.getIndexType().isComparison());

        FastIterator rightIt;
        RightTuple rootBlocker = null;
        if ( resumeFromCurrent ) {
            RightTupleList currentRtm = rightTuple.getMemory();
            rightIt = currentRtm.fastIterator(); // only needs to iterate the current bucket, works for equality indexed and non indexed.
            rootBlocker = (RightTuple) rightTuple.getNext();

            if ( removeAdd ) {
                // we must do this after we have the next in memory
                // We add to the end to give an opportunity to re-match if in same bucket
                rightTupleMemory.removeAdd( rightTuple );
            } else {
                rightTupleMemory.remove( rightTuple );
            }

            if ( rootBlocker == null && rightTuple.getMemory() == currentRtm) {
                // there was no next root blocker, but the current was re-added to same list, so set for re-match attempt.
                rootBlocker = rightTuple;
            }
        } else {
            // comparison/unification index: cannot resume, use the generic iterator
            rightIt = getRightIterator( rightTupleMemory );
            if ( removeAdd ) {
                rightTupleMemory.removeAdd( rightTuple );
            } else {
                rightTupleMemory.remove( rightTuple );
            }
        }

        // iterate all the existing previous blocked LeftTuples
        for ( LeftTuple leftTuple = firstBlocked; leftTuple != null; ) {
            // preserve the chain link before the blocker state is rebuilt below
            LeftTuple temp = leftTuple.getBlockedNext();

            leftTuple.clearBlocker();

            // refresh the cached left-side values used by isAllowedCachedLeft below
            this.constraints.updateFromTuple( memory.getContext(),
                                              workingMemory,
                                              leftTuple );

            if (!resumeFromCurrent) {
                // restart the scan for this left tuple from its first matching right tuple
                rootBlocker = getFirstRightTuple( leftTuple, rightTupleMemory, (InternalFactHandle) context.getFactHandle(), rightIt );
            }

            // we know that older tuples have been checked so continue next
            for ( RightTuple newBlocker = rootBlocker; newBlocker != null; newBlocker = (RightTuple) rightIt.next( newBlocker ) ) {
                if ( this.constraints.isAllowedCachedLeft( memory.getContext(),
                                                           newBlocker.getFactHandle() ) ) {
                    // found a replacement blocker; stop scanning for this left tuple
                    leftTuple.setBlocker( newBlocker );
                    newBlocker.addBlocked( leftTuple );
                    break;
                }
            }

            if ( leftTuple.getBlocker() == null ) {
                // was previous blocked and not in memory, so add
                leftMemory.add( leftTuple );

                // subclasses like ForallNotNode might override this propagation
                this.sink.propagateRetractLeftTuple( leftTuple,
                                                     context,
                                                     workingMemory );
            }

            leftTuple = temp;
        }
    }
/**
* Updates the given sink propagating all previously propagated tuples to it
*/
public void updateSink(final LeftTupleSink sink,
final PropagationContext context,
final InternalWorkingMemory workingMemory) {
final BetaMemory memory = (BetaMemory) workingMemory.getNodeMemory( this );
Iterator it = memory.getRightTupleMemory().iterator();
// Relies on the fact that any propagated LeftTuples are blocked, but due to lazy blocking
// they will only be blocked once. So we can iterate the right memory to find the left tuples to propagate
for ( RightTuple rightTuple = (RightTuple) it.next(); rightTuple != null; rightTuple = (RightTuple) it.next() ) {
LeftTuple leftTuple = rightTuple.getBlocked();
while ( leftTuple != null ) {
sink.assertLeftTuple( sink.createLeftTuple( leftTuple,
sink,
context, true),
context,
workingMemory );
leftTuple = leftTuple.getBlockedNext();
}
}
}
}
| |
// Copyright (c) 2015 Cloudera, Inc.
package com.cloudera.director.spi.tck;
import com.cloudera.director.spi.tck.util.ClassReference;
import com.cloudera.director.spi.tck.util.ConfigFragmentWrapper;
import com.cloudera.director.spi.tck.util.Stopwatch;
import com.cloudera.director.spi.v1.compute.ComputeProvider;
import com.cloudera.director.spi.v1.database.DatabaseServerProvider;
import com.cloudera.director.spi.v1.model.Instance;
import com.cloudera.director.spi.v1.model.InstanceState;
import com.cloudera.director.spi.v1.model.InstanceStatus;
import com.cloudera.director.spi.v1.model.InstanceTemplate;
import com.cloudera.director.spi.v1.model.LocalizationContext;
import com.cloudera.director.spi.v1.model.util.ChildLocalizationContext;
import com.cloudera.director.spi.v1.provider.CloudProvider;
import com.cloudera.director.spi.v1.provider.CloudProviderMetadata;
import com.cloudera.director.spi.v1.provider.InstanceProvider;
import com.cloudera.director.spi.v1.provider.Launcher;
import com.cloudera.director.spi.v1.provider.ResourceProvider;
import com.cloudera.director.spi.v1.provider.ResourceProviderMetadata;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigValue;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;
/**
 * Validate an implementation of the v1 of the Director SPI.
 *
 * <p>Validation proceeds in two phases: static packaging checks on the plugin
 * jar (shading, no embedded jars), then a live round-trip against every cloud
 * and resource provider declared by the plugin (allocate an instance, wait for
 * it to run, optionally check port connectivity, then delete it).
 */
public class TCKv1 implements TCK {

  private static final Logger LOG = Logger.getLogger(TCKv1.class.getName());

  private static final String JAR_FILE_EXTENSION = ".jar";

  /** Upper bound for each polling loop (port connectivity, instance status). */
  private static final int DEFAULT_TIMEOUT_MINUTES = 10;

  /** Pause between successive polling attempts. */
  private static final int DEFAULT_WAIT_BETWEEN_ATTEMPTS_SECONDS = 5;

  /**
   * Validates packaging, then loads each declared launcher from the plugin jar
   * and exercises every cloud / resource provider it exposes.
   *
   * @param pluginFile the plugin jar file
   * @param metadata   pre-extracted metadata about the plugin contents
   * @param config     TCK test configuration
   * @return a summary of all validation errors found (no errors on success)
   */
  public Summary validate(File pluginFile, PluginMetadata metadata, Config config)
      throws Exception {
    Summary summary = new Summary();

    // Check that the plugin is properly packaged and dependencies are shaded
    LOG.info("Validating plugin jar file internal structure (shading of dependencies)");
    validatePackaging(summary, metadata);
    validateThereAreNoEmbeddedJarFiles(summary, metadata);

    if (summary.hasErrors()) {
      return summary; // no need to continue if we found some errors already
    }

    LOG.info("Loading plugin file via a new ClassLoader from: " + pluginFile.getAbsolutePath());
    final URL pluginUrl;
    try {
      pluginUrl = pluginFile.toURI().toURL();
    } catch (MalformedURLException e) {
      throw new IOException("Failed to convert JAR file to URL", e);
    }

    URLClassLoader classLoader = AccessController.doPrivileged(
        new PrivilegedAction<URLClassLoader>() {
          public URLClassLoader run() {
            return new URLClassLoader(new URL[] { pluginUrl },
                this.getClass().getClassLoader());
          }
        });

    try {
      // Load each launcher and try to create an instance using the default constructor
      for (ClassReference launcherClassRef : metadata.getLauncherClasses("v1")) {
        LOG.info(String.format("Creating an instance of the launcher class: %s",
            launcherClassRef.getCanonicalClassName()));

        Class<?> launcherClass = classLoader.loadClass(
            launcherClassRef.getCanonicalClassName());

        if (!Launcher.class.isAssignableFrom(launcherClass)) {
          summary.addError("%s should implement the SPI Launcher interface",
              launcherClass.getCanonicalName());
          return summary;
        }

        Launcher launcher = (Launcher) launcherClass.newInstance();

        // Initialize with a configuration directory the plugin test config file
        String configurationDirectory = config.getString(Configurations.CONFIGURATION_DIRECTORY_PROPERTY);
        LOG.info(String.format("Initializing the plugin with configuration directory: %s",
            configurationDirectory));

        launcher.initialize(new File(configurationDirectory), null);

        // Sequentially validate all cloud providers that are part of this plugin
        Locale locale = Locale.getDefault();
        LocalizationContext rootLocalizationContext = launcher.getLocalizationContext(locale);
        for (CloudProviderMetadata providerMetadata : launcher.getCloudProviderMetadata()) {
          validateCloudProvider(summary, launcher, providerMetadata,
              config.getConfig(providerMetadata.getId()), rootLocalizationContext);

          if (summary.hasErrors()) {
            break; // no need to continue if we found some errors for one cloud provider
          }
        }

        if (summary.hasErrors()) {
          break; // no need to continue if we found some errors for one launcher
        }
      }

      return summary;

    } finally {
      // Fix: close the class loader so the plugin jar file handle is released
      // (previously it leaked for the lifetime of the JVM).
      classLoader.close();
    }
  }

  /**
   * Validates a single cloud provider: dumps its effective configuration,
   * instantiates it, and validates each of its resource providers in turn.
   */
  private void validateCloudProvider(Summary summary, Launcher launcher,
      CloudProviderMetadata metadata, Config config, LocalizationContext rootLocalizationContext)
      throws Exception {

    LOG.info(String.format("Validating cloud provider ID: %s Name: %s",
        metadata.getId(), metadata.getName(null)));

    LocalizationContext cloudLocalizationContext =
        metadata.getLocalizationContext(rootLocalizationContext);

    ConfigFragmentWrapper configWrapper = new ConfigFragmentWrapper(
        config.getConfig(Configurations.CONFIGS_SECTION),
        metadata.getCredentialsProviderMetadata().getCredentialsConfigurationProperties(),
        metadata.getProviderConfigurationProperties()
    );
    configWrapper.dump("Configuration properties for the cloud provider:", LOG,
        cloudLocalizationContext);

    CloudProvider provider =
        launcher.createCloudProvider(metadata.getId(), configWrapper,
            cloudLocalizationContext.getLocale());

    for (ResourceProviderMetadata current : metadata.getResourceProviderMetadata()) {
      validateResourceProvider(summary, provider, current, config.getConfig(current.getId()),
          cloudLocalizationContext);

      if (summary.hasErrors()) {
        break; // no need to continue if we failed to validate one resource provider
      }
    }
  }

  /**
   * Validates a single resource provider, dispatching on whether it is a
   * compute or a database server provider.
   */
  private void validateResourceProvider(Summary summary, CloudProvider provider,
      ResourceProviderMetadata metadata, Config config,
      LocalizationContext cloudLocalizationContext) throws Exception {

    LOG.info(String.format("Validating resource provider ID: %s Name: %s",
        metadata.getId(), metadata.getDescription(null)));

    LocalizationContext resourceProviderLocalizationContext =
        metadata.getLocalizationContext(cloudLocalizationContext);

    ConfigFragmentWrapper configWrapper = new ConfigFragmentWrapper(
        config.getConfig(Configurations.CONFIGS_SECTION),
        metadata.getProviderConfigurationProperties()
    );
    configWrapper.dump("Configuration properties for the resource provider:", LOG,
        resourceProviderLocalizationContext);

    ResourceProvider resourceProvider =
        provider.createResourceProvider(metadata.getId(), configWrapper);

    if (resourceProvider instanceof ComputeProvider) {
      LOG.info("Attempting to use this provider as a COMPUTE provider");
      validateInstanceProvider(summary, (ComputeProvider) resourceProvider, metadata, config,
          resourceProviderLocalizationContext);

    } else if (resourceProvider instanceof DatabaseServerProvider) {
      LOG.info("Attempting to use this provider as a DATABASE SERVER provider");
      validateInstanceProvider(summary, (DatabaseServerProvider) resourceProvider, metadata, config,
          resourceProviderLocalizationContext);

    } else {
      summary.addError("Unknown resource provider type: %s",
          resourceProvider.getClass().getCanonicalName());
    }
  }

  /**
   * End-to-end instance lifecycle check: allocate one instance, wait for it to
   * reach RUNNING, verify it is findable and (optionally) reachable on the
   * expected port, then delete it and wait for termination. Deletion happens in
   * a finally block so resources are cleaned up even when validation fails.
   */
  private void validateInstanceProvider(Summary summary, InstanceProvider provider,
      ResourceProviderMetadata metadata, Config config,
      LocalizationContext resourceProviderLocalizationContext)
      throws Exception {

    LocalizationContext templateLocalizationContext =
        new ChildLocalizationContext(resourceProviderLocalizationContext, "template");

    ConfigFragmentWrapper configWrapper = new ConfigFragmentWrapper(
        config.getConfig(Configurations.RESOURCE_CONFIGS_SECTION),
        metadata.getResourceTemplateConfigurationProperties()
    );
    configWrapper.dump("Configuration properties for the instance template:", LOG,
        templateLocalizationContext);

    Map<String, String> tags = convertToMap(config.getConfig(Configurations.RESOURCE_TAGS_SECTION));
    InstanceTemplate template = (InstanceTemplate) provider.createResourceTemplate("test", configWrapper, tags);

    // Some providers reject IDs starting with a digit, so re-roll until safe.
    String id;
    do {
      id = UUID.randomUUID().toString();
    } while (Character.isDigit(id.charAt(0)));

    List<String> instanceIds = Collections.singletonList(id);
    LOG.info("Allocating one instance with ID: " + id);
    provider.allocate(template, instanceIds, 1);

    try {
      waitForInstanceStatus(summary, provider, template, id, InstanceStatus.RUNNING);
      if (summary.hasErrors()) {
        return;
      }

      Collection<Instance> instances = provider.find(template, instanceIds);
      if (instances.size() != 1) {
        summary.addError("Expected to find exactly one instance after allocation. Found: %s",
            instances);
        return;
      }

      Instance instance = instances.iterator().next();
      int expectedOpenPort = config.getInt(Configurations.EXPECTED_OPEN_PORT_PROPERTY);
      if (expectedOpenPort == -1) {
        LOG.info(String.format("Skipping check of connectivity to %s because expected open port is %d",
            instance.getPrivateIpAddress(), expectedOpenPort));
      } else {
        LOG.info(String.format("Checking connectivity on port %d to %s",
            expectedOpenPort, instance.getPrivateIpAddress()));
        waitForPort(summary, instance.getPrivateIpAddress(), expectedOpenPort);
      }

    } finally {
      try {
        LOG.info("Deleting allocated resources");
        provider.delete(template, instanceIds);
      } catch (Exception e) {
        LOG.severe("CRITICAL: Failed to delete allocated resources. Manual clean-up is necessary");
        throw e;
      }

      waitForInstanceStatus(summary, provider, template, id, InstanceStatus.DELETED,
          InstanceStatus.UNKNOWN);
    }
  }

  /**
   * Flattens a config section into an unmodifiable string-to-string map.
   */
  private Map<String, String> convertToMap(Config section) {
    Map<String, String> result = new HashMap<String, String>();
    for (Map.Entry<String, ConfigValue> entry : section.entrySet()) {
      result.put(entry.getKey(), entry.getValue().unwrapped().toString());
    }
    return Collections.unmodifiableMap(result);
  }

  /**
   * Polls until a TCP connection to {@code port} on the instance succeeds or
   * the timeout elapses.
   *
   * <p>Fix: an explicit success flag is tracked so that a connection that
   * succeeds right at the timeout boundary is no longer mis-reported as a
   * failure (the old code re-checked the stopwatch after the success break).
   */
  private void waitForPort(Summary summary, InetAddress privateIpAddress, int port)
      throws InterruptedException, IOException {
    Stopwatch stopwatch = Stopwatch.createStarted();
    InetSocketAddress address = new InetSocketAddress(privateIpAddress.getHostName(), port);

    boolean connected = false;
    while (stopwatch.elapsed(TimeUnit.MINUTES) < DEFAULT_TIMEOUT_MINUTES) {
      LOG.info("Attempting connection to " + address);

      Socket socket = new Socket();
      try {
        socket.connect(address, 500);
        LOG.info(String.format("Connection successful. Found port %d open as expected", port));
        connected = true;
        break; // connection successful

      } catch (IOException e) {
        TimeUnit.SECONDS.sleep(DEFAULT_WAIT_BETWEEN_ATTEMPTS_SECONDS);

      } finally {
        socket.close();
      }
    }

    if (!connected) {
      summary.addError("Unable to connect on port %s after %s minutes",
          port, DEFAULT_TIMEOUT_MINUTES);
    }
  }

  /**
   * Polls the provider until the instance reaches one of the expected statuses
   * or the timeout elapses.
   *
   * <p>Fix: an explicit success flag prevents a false timeout error when the
   * expected status is observed right at the timeout boundary, and avoids
   * stacking a timeout error on top of the "missing instance ID" error.
   */
  private void waitForInstanceStatus(Summary summary, InstanceProvider provider,
      InstanceTemplate template, String id, InstanceStatus... expectedStatuses) throws InterruptedException {
    List<InstanceStatus> expectedStatusesList = Arrays.asList(expectedStatuses);
    LOG.info(String.format("Waiting for instance status to be in %s " +
        "(%d seconds between checks, %d minutes timeout)", expectedStatusesList,
        DEFAULT_WAIT_BETWEEN_ATTEMPTS_SECONDS, DEFAULT_TIMEOUT_MINUTES));

    Stopwatch stopwatch = Stopwatch.createStarted();
    List<String> instanceIds = Collections.singletonList(id);

    boolean reachedExpectedStatus = false;
    while (stopwatch.elapsed(TimeUnit.MINUTES) < DEFAULT_TIMEOUT_MINUTES) {
      Map<String, InstanceState> states = provider.getInstanceState(template, instanceIds);
      if (states.containsKey(id)) {
        InstanceStatus status = states.get(id).getInstanceStatus();
        if (expectedStatusesList.contains(status)) {
          LOG.info("Found instance as expected " + status);
          reachedExpectedStatus = true;
          break;

        } else {
          LOG.info("Instance status is " + status);
          TimeUnit.SECONDS.sleep(DEFAULT_WAIT_BETWEEN_ATTEMPTS_SECONDS);
        }
      } else {
        summary.addError("The instance ID was not part of the list of states");
        break;
      }
    }

    if (!reachedExpectedStatus && stopwatch.elapsed(TimeUnit.MINUTES) >= DEFAULT_TIMEOUT_MINUTES) {
      summary.addError("Instance did not transition to status in %s in %s minutes",
          expectedStatusesList, DEFAULT_TIMEOUT_MINUTES);
    }
  }

  /**
   * Checks that everything is shaded properly. The launcher classes should
   * define the root of the Java package hierarchy; every other class in the
   * plugin must be in that package or below it.
   */
  private void validatePackaging(Summary summary, PluginMetadata metadata) {
    // with multiple launcher classes, this only works if the launchers are all
    // in the same package, which is what we want
    for (ClassReference launcherClassRef : metadata.getLauncherClasses("v1")) {
      String expectedNamespace = launcherClassRef.getPackageName();
      for (String candidate : metadata.getClasses()) {
        if (!candidate.startsWith(expectedNamespace)) {
          summary.addError("Class '%s' should be relocated under '%s', due to launcher class '%s'",
              candidate, expectedNamespace, launcherClassRef.getCanonicalClassName());
        }
      }
    }
  }

  /**
   * Checks that the packaged plugin doesn't contain any embedded jar files. The
   * shading process should expand dependencies and relocate them as needed.
   */
  private void validateThereAreNoEmbeddedJarFiles(Summary summary, PluginMetadata metadata) {
    for (String candidate : metadata.getFiles()) {
      if (candidate.toLowerCase(Locale.US).endsWith(JAR_FILE_EXTENSION)) {
        summary.addError("Embedded jar files are not allowed: %s", candidate);
      }
    }
  }
}
| |
package org.apache.solr.schema;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.IOUtils;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.collation.ICUCollationKeyAnalyzer;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocTermOrdsRangeFilter;
import org.apache.lucene.search.FieldCacheRangeFilter;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Version;
import org.apache.lucene.analysis.util.ResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.Base64;
import org.apache.solr.response.TextResponseWriter;
import org.apache.solr.search.QParser;
import com.ibm.icu.text.Collator;
import com.ibm.icu.text.RuleBasedCollator;
import com.ibm.icu.util.ULocale;
/**
* Field for collated sort keys.
* These can be used for locale-sensitive sort and range queries.
* <p>
* This field can be created in two ways:
* <ul>
* <li>Based upon a system collator associated with a Locale.
* <li>Based upon a tailored ruleset.
* </ul>
* <p>
* Using a System collator:
* <ul>
* <li>locale: RFC 3066 locale ID (mandatory)
* <li>strength: 'primary','secondary','tertiary', 'quaternary', or 'identical' (optional)
* <li>decomposition: 'no', or 'canonical' (optional)
* </ul>
* <p>
* Using a Tailored ruleset:
* <ul>
* <li>custom: UTF-8 text file containing rules supported by RuleBasedCollator (mandatory)
* <li>strength: 'primary','secondary','tertiary', 'quaternary', or 'identical' (optional)
* <li>decomposition: 'no' or 'canonical' (optional)
* </ul>
* <p>
* Expert options:
* <ul>
* <li>alternate: 'shifted' or 'non-ignorable'. Can be used to ignore punctuation/whitespace.
* <li>caseLevel: 'true' or 'false'. Useful with strength=primary to ignore accents but not case.
* <li>caseFirst: 'lower' or 'upper'. Useful to control which is sorted first when case is not ignored.
* <li>numeric: 'true' or 'false'. Digits are sorted according to numeric value, e.g. foobar-9 sorts before foobar-10
* <li>variableTop: single character or contraction. Controls what is variable for 'alternate'
* </ul>
*
* @see Collator
* @see ULocale
* @see RuleBasedCollator
*/
public class ICUCollationField extends FieldType {
  // Analyzer producing ICU collation keys; built once in setup() and shared by
  // both the index and query side (see getAnalyzer/getQueryAnalyzer).
  private Analyzer analyzer;

  @Override
  protected void init(IndexSchema schema, Map<String,String> args) {
    properties |= TOKENIZED; // this ensures our analyzer gets hit
    // setup() consumes (removes) all collator-related entries from args before
    // delegating the remaining generic attributes to the superclass.
    setup(schema.getResourceLoader(), args);
    super.init(schema, args);
  }

  /**
   * Setup the field according to the provided parameters.
   * Builds the ICU Collator (from a locale or a custom rules file), applies
   * the optional strength/decomposition settings and the expert options, then
   * wraps the collator in an ICUCollationKeyAnalyzer.
   */
  private void setup(ResourceLoader loader, Map<String,String> args) {
    // each option is removed from the args map as it is read
    String custom = args.remove("custom");
    String localeID = args.remove("locale");
    String strength = args.remove("strength");
    String decomposition = args.remove("decomposition");

    String alternate = args.remove("alternate");
    String caseLevel = args.remove("caseLevel");
    String caseFirst = args.remove("caseFirst");
    String numeric = args.remove("numeric");
    String variableTop = args.remove("variableTop");

    // exactly one of locale / custom must be given
    if (custom == null && localeID == null)
      throw new SolrException(ErrorCode.SERVER_ERROR, "Either custom or locale is required.");

    if (custom != null && localeID != null)
      throw new SolrException(ErrorCode.SERVER_ERROR, "Cannot specify both locale and custom. "
          + "To tailor rules for a built-in language, see the javadocs for RuleBasedCollator. "
          + "Then save the entire customized ruleset to a file, and use with the custom parameter");

    final Collator collator;

    if (localeID != null) {
      // create from a system collator, based on Locale.
      collator = createFromLocale(localeID);
    } else {
      // create from a custom ruleset
      collator = createFromRules(custom, loader);
    }

    // set the strength flag, otherwise it will be the default.
    if (strength != null) {
      if (strength.equalsIgnoreCase("primary"))
        collator.setStrength(Collator.PRIMARY);
      else if (strength.equalsIgnoreCase("secondary"))
        collator.setStrength(Collator.SECONDARY);
      else if (strength.equalsIgnoreCase("tertiary"))
        collator.setStrength(Collator.TERTIARY);
      else if (strength.equalsIgnoreCase("quaternary"))
        collator.setStrength(Collator.QUATERNARY);
      else if (strength.equalsIgnoreCase("identical"))
        collator.setStrength(Collator.IDENTICAL);
      else
        throw new SolrException(ErrorCode.SERVER_ERROR, "Invalid strength: " + strength);
    }

    // set the decomposition flag, otherwise it will be the default.
    if (decomposition != null) {
      if (decomposition.equalsIgnoreCase("no"))
        collator.setDecomposition(Collator.NO_DECOMPOSITION);
      else if (decomposition.equalsIgnoreCase("canonical"))
        collator.setDecomposition(Collator.CANONICAL_DECOMPOSITION);
      else
        throw new SolrException(ErrorCode.SERVER_ERROR, "Invalid decomposition: " + decomposition);
    }

    // expert options: concrete subclasses are always a RuleBasedCollator
    RuleBasedCollator rbc = (RuleBasedCollator) collator;

    if (alternate != null) {
      if (alternate.equalsIgnoreCase("shifted")) {
        rbc.setAlternateHandlingShifted(true);
      } else if (alternate.equalsIgnoreCase("non-ignorable")) {
        rbc.setAlternateHandlingShifted(false);
      } else {
        throw new SolrException(ErrorCode.SERVER_ERROR, "Invalid alternate: " + alternate);
      }
    }
    if (caseLevel != null) {
      rbc.setCaseLevel(Boolean.parseBoolean(caseLevel));
    }
    if (caseFirst != null) {
      if (caseFirst.equalsIgnoreCase("lower")) {
        rbc.setLowerCaseFirst(true);
      } else if (caseFirst.equalsIgnoreCase("upper")) {
        rbc.setUpperCaseFirst(true);
      } else {
        throw new SolrException(ErrorCode.SERVER_ERROR, "Invalid caseFirst: " + caseFirst);
      }
    }
    if (numeric != null) {
      rbc.setNumericCollation(Boolean.parseBoolean(numeric));
    }
    if (variableTop != null) {
      rbc.setVariableTop(variableTop);
    }

    // the fully-configured collator backs the analyzer used by index & query
    analyzer = new ICUCollationKeyAnalyzer(Version.LUCENE_CURRENT, collator);
  }

  /**
   * Create a locale from localeID.
   * Then return the appropriate collator for the locale.
   */
  private Collator createFromLocale(String localeID) {
    return Collator.getInstance(new ULocale(localeID));
  }

  /**
   * Read custom rules from a file, and create a RuleBasedCollator.
   * The file cannot support comments, as # might be in the rules!
   * I/O errors or invalid rules are rethrown as a RuntimeException
   * (surfaced by the caller as a server error).
   */
  private Collator createFromRules(String fileName, ResourceLoader loader) {
    InputStream input = null;
    try {
      input = loader.openResource(fileName);
      String rules = IOUtils.toString(input, "UTF-8");
      return new RuleBasedCollator(rules);
    } catch (Exception e) {
      // io error or invalid rules
      throw new RuntimeException(e);
    } finally {
      IOUtils.closeQuietly(input);
    }
  }

  @Override
  public void write(TextResponseWriter writer, String name, StorableField f) throws IOException {
    // emit the stored (human-readable) value, not the collation key
    writer.writeStr(name, f.stringValue(), true);
  }

  @Override
  public SortField getSortField(SchemaField field, boolean top) {
    // plain string sort works because the indexed terms are collation keys
    return getStringSort(field, top);
  }

  @Override
  public Analyzer getAnalyzer() {
    return analyzer;
  }

  @Override
  public Analyzer getQueryAnalyzer() {
    return analyzer;
  }

  /**
   * analyze the text with the analyzer, instead of the collator.
   * because icu collators are not thread safe, this keeps things
   * simple (we already have a threadlocal clone in the reused TS)
   */
  private BytesRef getCollationKey(String field, String text) {
    try (TokenStream source = analyzer.tokenStream(field, text)) {
      source.reset();

      TermToBytesRefAttribute termAtt = source.getAttribute(TermToBytesRefAttribute.class);
      BytesRef bytes = termAtt.getBytesRef();

      // we control the analyzer here: most errors are impossible
      if (!source.incrementToken())
        throw new IllegalArgumentException("analyzer returned no terms for text: " + text);
      termAtt.fillBytesRef();
      // the analyzer is expected to emit exactly one collation-key token
      assert !source.incrementToken();

      source.end();
      // deep copy: 'bytes' is owned by the (reused) token stream
      return BytesRef.deepCopyOf(bytes);
    } catch (IOException e) {
      throw new RuntimeException("Unable to analyze text: " + text, e);
    }
  }

  @Override
  public Query getRangeQuery(QParser parser, SchemaField field, String part1, String part2, boolean minInclusive, boolean maxInclusive) {
    String f = field.getName();
    // convert the user-supplied endpoints into collation keys first
    BytesRef low = part1 == null ? null : getCollationKey(f, part1);
    BytesRef high = part2 == null ? null : getCollationKey(f, part2);
    if (!field.indexed() && field.hasDocValues()) {
      // docValues-only field: range must be answered from the docValues
      if (field.multiValued()) {
          return new ConstantScoreQuery(DocTermOrdsRangeFilter.newBytesRefRange(
              field.getName(), low, high, minInclusive, maxInclusive));
      } else {
          return new ConstantScoreQuery(FieldCacheRangeFilter.newBytesRefRange(
              field.getName(), low, high, minInclusive, maxInclusive));
      }
    } else {
      return new TermRangeQuery(field.getName(), low, high, minInclusive, maxInclusive);
    }
  }

  @Override
  public void checkSchemaField(SchemaField field) {
    // no-op
  }

  @Override
  public List<StorableField> createFields(SchemaField field, Object value, float boost) {
    if (field.hasDocValues()) {
      List<StorableField> fields = new ArrayList<StorableField>();
      // the stored/indexed representation...
      fields.add(createField(field, value, boost));
      // ...plus the collation key as a docValues field for sorting/faceting
      final BytesRef bytes = getCollationKey(field.getName(), value.toString());
      if (field.multiValued()) {
        fields.add(new SortedSetDocValuesField(field.getName(), bytes));
      } else {
        fields.add(new SortedDocValuesField(field.getName(), bytes));
      }
      return fields;
    } else {
      return Collections.singletonList(createField(field, value, boost));
    }
  }

  @Override
  public Object marshalSortValue(Object value) {
    // encode the binary collation key as Base64 for transport
    if (null == value) {
      return null;
    }
    final BytesRef val = (BytesRef)value;
    return Base64.byteArrayToBase64(val.bytes, val.offset, val.length);
  }

  @Override
  public Object unmarshalSortValue(Object value) {
    // inverse of marshalSortValue: Base64 string back to the binary key
    if (null == value) {
      return null;
    }
    final String val = (String)value;
    final byte[] bytes = Base64.base64ToByteArray(val);
    return new BytesRef(bytes);
  }
}
| |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.openapi.vcs.impl.projectlevelman;
import com.intellij.ide.impl.TrustedProjects;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.BackgroundTaskUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.impl.DirectoryIndex;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.impl.DefaultVcsRootPolicy;
import com.intellij.openapi.vcs.impl.ProjectLevelVcsManagerImpl;
import com.intellij.openapi.vcs.ui.VcsBalloonProblemNotifier;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.pointers.VirtualFilePointer;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;
import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.openapi.wm.impl.ProjectFrameHelper;
import com.intellij.util.Alarm;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.Functions;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.ui.update.DisposableUpdate;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
public final class NewMappings implements Disposable {
private static final Comparator<MappedRoot> ROOT_COMPARATOR = Comparator.comparing(it -> it.root.getPath());
private static final Comparator<VcsDirectoryMapping> MAPPINGS_COMPARATOR = Comparator.comparing(VcsDirectoryMapping::getDirectory);
private final static Logger LOG = Logger.getInstance(NewMappings.class);
private final Object myUpdateLock = new Object();
private FileWatchRequestsManager myFileWatchRequestsManager;
private final ProjectLevelVcsManager myVcsManager;
private final Project myProject;
@NotNull private Disposable myFilePointerDisposable = Disposer.newDisposable();
private volatile List<VcsDirectoryMapping> myMappings = Collections.emptyList(); // sorted by MAPPINGS_COMPARATOR
private volatile List<MappedRoot> myMappedRoots = Collections.emptyList(); // sorted by ROOT_COMPARATOR
private volatile RootMapping myMappedRootsMapping = new RootMapping(Collections.emptyList());
private volatile List<AbstractVcs> myActiveVcses = Collections.emptyList();
private volatile boolean myActivated = false;
@NotNull private final MergingUpdateQueue myRootUpdateQueue;
private final VirtualFilePointerListener myFilePointerListener;
  /**
   * Wires up the mapping component: the merging queue used to coalesce
   * mapped-root updates, a file-pointer listener that re-schedules the update
   * when a watched pointer's validity changes, and a listener that reacts to
   * VcsRootChecker extension-point changes.
   */
  public NewMappings(@NotNull Project project, @NotNull ProjectLevelVcsManagerImpl vcsManager) {
    myProject = project;
    myVcsManager = vcsManager;
    myFileWatchRequestsManager = new FileWatchRequestsManager(myProject, this);

    // Coalesces bursts of update requests (1s quiet period); pass-through mode
    // keeps unit tests synchronous.
    myRootUpdateQueue = new MergingUpdateQueue("NewMappings", 1000, true, null, this, null, Alarm.ThreadToUse.POOLED_THREAD)
      .usePassThroughInUnitTestMode();

    myFilePointerListener = new VirtualFilePointerListener() {
      @Override
      public void validityChanged(VirtualFilePointer @NotNull [] pointers) {
        scheduleMappedRootsUpdate();
      }
    };
    // newly (un)registered root checkers can change which roots are detected
    VcsRootChecker.EXTENSION_POINT_NAME.addChangeListener(() -> scheduleMappedRootsUpdate(), project);
  }
@TestOnly
public void setFileWatchRequestsManager(FileWatchRequestsManager fileWatchRequestsManager) {
assert ApplicationManager.getApplication().isUnitTestMode();
myFileWatchRequestsManager = fileWatchRequestsManager;
}
public AbstractVcs @NotNull [] getActiveVcses() {
return myActiveVcses.toArray(new AbstractVcs[0]);
}
public boolean hasActiveVcss() {
return !myActiveVcses.isEmpty();
}
public void activateActiveVcses() {
synchronized (myUpdateLock) {
if (myActivated) return;
myActivated = true;
LOG.debug("activated");
}
updateActiveVcses();
updateMappedRoots(true);
}
/**
* @return {@link #myActivated} value
*/
private boolean updateActiveVcses() {
MyVcsActivator activator =
ReadAction.compute(() -> {
synchronized (myUpdateLock) {
return myActivated ? createVcsActivator() : null;
}
});
if (activator != null) {
activator.activate();
}
return activator != null;
}
/**
 * Registers or replaces the mapping for the given directory path.
 * An existing mapping for the same directory is overwritten.
 *
 * @param path          directory being mapped
 * @param activeVcsName name of the VCS for that directory, or null
 */
public void setMapping(@NotNull String path, @Nullable String activeVcsName) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("setMapping path = '" + path + "' vcs = " + activeVcsName, new Throwable());
  }
  VcsDirectoryMapping replacement = new VcsDirectoryMapping(path, activeVcsName);
  List<VcsDirectoryMapping> updated = new ArrayList<>(myMappings);
  updated.removeIf(existing -> Objects.equals(existing.getDirectory(), replacement.getDirectory()));
  updated.add(replacement);
  updateVcsMappings(updated);
}
/** Test-only: drains the pending root-update queue synchronously. */
@TestOnly
public void waitMappedRootsUpdate() {
myRootUpdateQueue.flush();
}
/**
 * Synchronously re-activates VCSes and drops the cached roots, then schedules
 * an asynchronous recalculation.
 */
public void updateMappedVcsesImmediately() {
LOG.debug("updateMappingsImmediately");
// Not activated yet: activation will perform the initial update itself.
if (!updateActiveVcses()) return;
synchronized (myUpdateLock) {
Disposer.dispose(myFilePointerDisposable);
myFilePointerDisposable = Disposer.newDisposable();
// Reset to an empty state; listeners see "no roots" until the queued update completes.
myMappedRoots = Collections.emptyList();
myMappedRootsMapping = new RootMapping(Collections.emptyList());
dumpMappedRootsToLog();
}
mappingsChanged();
scheduleMappedRootsUpdate();
}
/** Queues a merged asynchronous recalculation of the mapped roots. */
public void scheduleMappedRootsUpdate() {
myRootUpdateQueue.queue(new DisposableUpdate(this, "update") {
@Override
public void doRun() {
updateMappedRoots(true);
}
});
}
/**
 * Replaces the directory mappings with a de-duplicated, sorted copy and, when
 * they actually changed, re-activates VCSes and recalculates the roots.
 */
private void updateVcsMappings(@NotNull Collection<? extends VcsDirectoryMapping> mappings) {
myRootUpdateQueue.cancelAllUpdates();
List<VcsDirectoryMapping> newMappings = Collections
.unmodifiableList(ContainerUtil.sorted(removeDuplicates(mappings), MAPPINGS_COMPARATOR));
synchronized (myUpdateLock) {
boolean mappingsChanged = !myMappings.equals(newMappings);
if (!mappingsChanged) return; // mappings are up-to-date
myMappings = newMappings;
dumpMappingsToLog();
}
updateActiveVcses();
updateMappedRoots(false);
mappingsChanged();
}
/**
 * Recomputes the mapped roots from the current mappings snapshot.
 *
 * @param fireMappingsChangedEvent whether to notify listeners if the roots changed
 */
private void updateMappedRoots(boolean fireMappingsChangedEvent) {
myRootUpdateQueue.cancelAllUpdates();
if (!myActivated) return;
LOG.debug("updateMappedRoots");
List<VcsDirectoryMapping> mappings = myMappings;
// Root collection is slow and runs without the lock; the identity check below detects races.
Mappings newMappedRoots = collectMappedRoots(mappings);
boolean mappedRootsChanged;
synchronized (myUpdateLock) {
if (myMappings != mappings) {
// Mappings changed concurrently — discard the stale result and its pointers.
Disposer.dispose(newMappedRoots.filePointerDisposable);
return;
}
Disposer.dispose(myFilePointerDisposable);
myFilePointerDisposable = newMappedRoots.filePointerDisposable;
mappedRootsChanged = !myMappedRoots.equals(newMappedRoots.mappedRoots);
if (mappedRootsChanged) {
myMappedRoots = newMappedRoots.mappedRoots;
myMappedRootsMapping = new RootMapping(newMappedRoots.mappedRoots);
dumpMappedRootsToLog();
}
}
if (fireMappingsChangedEvent && mappedRootsChanged) mappingsChanged();
}
/** Refreshes the project frame UI on the EDT (e.g. VCS widgets in the main menu/toolbar). */
private void refreshMainMenu() {
ApplicationManager.getApplication().invokeLater(() -> {
ProjectFrameHelper frame = WindowManagerEx.getInstanceEx().getFrameHelper(myProject);
if (frame != null && frame.getRootPane() != null) {
frame.updateView();
}
}, myProject.getDisposed());
}
/**
 * Drops mappings that share a directory with a later entry, keeping the last
 * occurrence of each directory. The result is in reverse iteration order of
 * the input; callers sort it afterwards.
 */
@NotNull
private static List<VcsDirectoryMapping> removeDuplicates(@NotNull Collection<? extends VcsDirectoryMapping> mappings) {
  List<VcsDirectoryMapping> ordered = new ArrayList<>(mappings);
  Set<String> seenDirectories = new HashSet<>();
  List<VcsDirectoryMapping> unique = new ArrayList<>();
  // Walk from the end so the last mapping wins for a duplicated directory.
  for (int i = ordered.size() - 1; i >= 0; i--) {
    VcsDirectoryMapping mapping = ordered.get(i);
    if (seenDirectories.add(mapping.getDirectory())) {
      unique.add(mapping);
    }
  }
  return unique;
}
/**
 * Resolves the given mappings to concrete roots on disk. Direct mappings take
 * priority over the &lt;Project&gt; default mapping. File pointers are created for
 * every root path so later appearance/disappearance re-triggers an update.
 */
@NotNull
private Mappings collectMappedRoots(@NotNull List<VcsDirectoryMapping> mappings) {
VirtualFilePointerManager pointerManager = VirtualFilePointerManager.getInstance();
Map<VirtualFile, MappedRoot> mappedRoots = new HashMap<>();
Disposable pointerDisposable = Disposer.newDisposable();
// Untrusted projects get no roots at all (no VCS processes are spawned).
if (!TrustedProjects.isTrusted(myProject)) {
return new Mappings(Collections.emptyList(), pointerDisposable);
}
try {
// direct mappings have priority over <Project> mappings
for (VcsDirectoryMapping mapping : mappings) {
if (mapping.isDefaultMapping()) {
continue;
}
AbstractVcs vcs = getMappingsVcs(mapping);
String rootPath = mapping.getDirectory();
ReadAction.run(() -> {
VirtualFile vcsRoot = LocalFileSystem.getInstance().findFileByPath(rootPath);
if (vcsRoot != null && vcsRoot.isDirectory()) {
if (checkMappedRoot(vcs, vcsRoot)) {
mappedRoots.putIfAbsent(vcsRoot, new MappedRoot(vcs, mapping, vcsRoot));
}
else {
// Root exists but failed validation: keep it with a null VCS marker.
mappedRoots.putIfAbsent(vcsRoot, new MappedRoot(null, mapping, vcsRoot));
}
}
// Pointer is created even for missing paths, so root creation triggers a re-scan.
pointerManager.create(VfsUtilCore.pathToUrl(rootPath), pointerDisposable, myFilePointerListener);
});
}
for (VcsDirectoryMapping mapping : mappings) {
if (!mapping.isDefaultMapping()) {
continue;
}
AbstractVcs vcs = getMappingsVcs(mapping);
if (vcs == null) {
continue;
}
// Auto-detect roots for the <Project> mapping, excluding already-mapped dirs.
Collection<VirtualFile> defaultRoots = detectDefaultRootsFor(vcs,
DefaultVcsRootPolicy.getInstance(myProject).getDefaultVcsRoots(),
ContainerUtil.map2Set(mappedRoots.values(), it -> it.root));
ReadAction.run(() -> {
for (VirtualFile vcsRoot : defaultRoots) {
if (vcsRoot != null && vcsRoot.isDirectory()) {
mappedRoots.putIfAbsent(vcsRoot, new MappedRoot(vcs, mapping, vcsRoot));
pointerManager.create(vcsRoot, pointerDisposable, myFilePointerListener);
}
}
});
}
List<MappedRoot> result = Collections.unmodifiableList(ContainerUtil.sorted(mappedRoots.values(), ROOT_COMPARATOR));
for (MappedRoot root : result) {
if (myVcsManager.isIgnored(VcsUtil.getFilePath(root.root))) {
LOG.warn("Root mapping is under ignored root: " + root.root);
}
}
return new Mappings(result, pointerDisposable);
}
catch (Throwable e) {
// Dispose the half-built pointer set before rethrowing; the return is unreachable.
Disposer.dispose(pointerDisposable);
ExceptionUtil.rethrow(e);
return null;
}
}
/** @return the registered VCS for the mapping's name, or null if the plugin is absent. */
@Nullable
private AbstractVcs getMappingsVcs(@NotNull VcsDirectoryMapping mapping) {
return AllVcses.getInstance(myProject).getByName(mapping.getVcs());
}
/**
 * Validates a candidate root via the VCS's root checker.
 * Never throws except for cancellation; checker failures are logged and treated as invalid.
 */
private boolean checkMappedRoot(@Nullable AbstractVcs vcs, @NotNull VirtualFile vcsRoot) {
try {
if (vcs == null) return false;
VcsRootChecker rootChecker = myVcsManager.getRootChecker(vcs);
return rootChecker.validateRoot(vcsRoot.getPath());
}
catch (ProcessCanceledException e) {
throw e;
}
catch (Throwable e) {
LOG.error(e);
return false;
}
}
/**
 * Auto-detects roots for the &lt;Project&gt; mapping: walks up from each project
 * root looking for a directory the root checker accepts, stopping at
 * already-known roots and, normally, at the project boundary.
 *
 * @param projectRoots candidate starting points (content roots etc.)
 * @param mappedDirs   directories already mapped directly; never re-detected
 */
@NotNull
private Collection<VirtualFile> detectDefaultRootsFor(@NotNull AbstractVcs vcs,
@NotNull Collection<VirtualFile> projectRoots,
@NotNull Set<VirtualFile> mappedDirs) {
try {
if (vcs.needsLegacyDefaultMappings()) return projectRoots;
DirectoryIndex directoryIndex = DirectoryIndex.getInstance(myProject);
VcsRootChecker rootChecker = myVcsManager.getRootChecker(vcs);
// Memoizes isRoot() answers across all walks; the check may hit disk.
Map<VirtualFile, Boolean> checkedDirs = new HashMap<>();
Set<VirtualFile> vcsRoots = new HashSet<>();
for (VirtualFile f : projectRoots) {
while (f != null) {
if (vcsRoots.contains(f) || mappedDirs.contains(f)) break;
if (isVcsRoot(rootChecker, checkedDirs, f)) {
vcsRoots.add(f);
break;
}
VirtualFile parent = f.getParent();
// Once the walk would leave the project, only continue for VCSes whose
// roots also cover child directories.
if (parent != null && !isUnderProject(directoryIndex, parent)) {
if (rootChecker.areChildrenValidMappings()) {
while (parent != null) {
if (vcsRoots.contains(parent) || mappedDirs.contains(parent)) break;
if (isVcsRoot(rootChecker, checkedDirs, parent)) {
// NOTE(review): adds 'f' (the in-project dir), not 'parent' — presumably
// intentional since children of this root are valid mappings; confirm.
vcsRoots.add(f);
break;
}
parent = parent.getParent();
}
}
break;
}
f = parent;
}
}
return vcsRoots;
}
catch (ProcessCanceledException e) {
throw e;
}
catch (Throwable e) {
LOG.error(e);
return Collections.emptyList();
}
}
/** Cached, cancellable wrapper around {@link VcsRootChecker#isRoot}. */
private static boolean isVcsRoot(@NotNull VcsRootChecker rootChecker,
@NotNull Map<VirtualFile, Boolean> checkedDirs,
@NotNull VirtualFile file) {
ProgressManager.checkCanceled();
return checkedDirs.computeIfAbsent(file, key -> rootChecker.isRoot(key.getPath()));
}
/** @return whether the file belongs to the project content, under a read action. */
private boolean isUnderProject(@NotNull DirectoryIndex directoryIndex, @NotNull VirtualFile f) {
return ReadAction.compute(() -> {
if (myProject.isDisposed()) throw new ProcessCanceledException();
return directoryIndex.getInfoForFile(f).isInProject(f);
});
}
/** Broadcasts the configuration-changed event and refreshes file-watch requests. */
public void mappingsChanged() {
BackgroundTaskUtil.syncPublisher(myProject, ProjectLevelVcsManager.VCS_CONFIGURATION_CHANGED).directoryMappingChanged();
myFileWatchRequestsManager.ping();
}
/** Logs each configured mapping at INFO level (default mapping shown as "<Project>"). */
private void dumpMappingsToLog() {
for (VcsDirectoryMapping mapping : myMappings) {
String path = mapping.isDefaultMapping() ? "<Project>" : mapping.getDirectory();
String vcs = mapping.getVcs();
LOG.info(String.format("VCS Root: [%s] - [%s]", vcs, path));
}
}
/** Logs the detected roots: full detail at DEBUG, otherwise just the count. */
private void dumpMappedRootsToLog() {
if (LOG.isDebugEnabled()) {
for (MappedRoot root : myMappedRoots) {
LOG.debug(String.format("Mapped Root: [%s] - [%s]", root.vcs, root.root.getPath()));
}
}
// NOTE(review): count is logged only when a <Project> mapping exists — presumably to
// trace auto-detection without spamming for fully explicit setups; confirm.
else if (haveDefaultMapping() != null) {
LOG.info("Mapped Roots: " + myMappedRoots.size());
}
}
/** Replaces all directory mappings at once (e.g. when loading project configuration). */
public void setDirectoryMappings(@NotNull List<? extends VcsDirectoryMapping> items) {
if (LOG.isDebugEnabled()) {
LOG.debug("setDirectoryMappings, size: " + items.size(), new Throwable());
}
updateVcsMappings(items);
}
/**
 * @return the mapped root containing the file, or null for non-local,
 * ignored, or unmapped files
 */
@Nullable
public MappedRoot getMappedRootFor(@Nullable VirtualFile file) {
if (file == null || !file.isInLocalFileSystem()) return null;
if (myMappedRoots.isEmpty()) return null;
if (myVcsManager.isIgnored(file)) return null;
return myMappedRootsMapping.getRootFor(file);
}
/**
 * Path-based variant of {@link #getMappedRootFor(VirtualFile)}; works for
 * paths that do not (yet) exist in the VFS.
 */
@Nullable
public MappedRoot getMappedRootFor(@Nullable FilePath file) {
if (file == null || file.isNonLocal()) return null;
if (myMappedRoots.isEmpty()) return null;
if (myVcsManager.isIgnored(file)) return null;
return myMappedRootsMapping.getRootFor(file);
}
/** @return the root directories currently mapped to the given VCS. */
@NotNull
public List<VirtualFile> getMappingsAsFilesUnderVcs(@NotNull AbstractVcs vcs) {
return ContainerUtil.mapNotNull(myMappedRoots, root -> {
return vcs.equals(root.vcs) ? root.root : null;
});
}
/** Clears all state and deactivates every active VCS. */
@Override
public void dispose() {
LOG.debug("disposed");
MyVcsActivator activator;
synchronized (myUpdateLock) {
Disposer.dispose(myFilePointerDisposable);
myMappings = Collections.emptyList();
myMappedRoots = Collections.emptyList();
myFilePointerDisposable = Disposer.newDisposable();
// With empty mappings the activator only contains deactivations.
activator = createVcsActivator();
}
activator.activate();
}
/** @return the current (immutable, sorted) mappings snapshot. */
public List<VcsDirectoryMapping> getDirectoryMappings() {
return myMappings;
}
/** @return the mappings configured for the given VCS name. */
public List<VcsDirectoryMapping> getDirectoryMappings(String vcsName) {
return ContainerUtil.filter(myMappings, mapping -> Objects.equals(mapping.getVcs(), vcsName));
}
/** @return the VCS name of the &lt;Project&gt; mapping, or null if there is none. */
@Nullable
public String haveDefaultMapping() {
VcsDirectoryMapping defaultMapping = ContainerUtil.find(myMappings, mapping -> mapping.isDefaultMapping());
return defaultMapping != null ? defaultMapping.getVcs() : null;
}
/** @return true when no mapping designates an actual VCS. */
public boolean isEmpty() {
return ContainerUtil.all(myMappings, mapping -> mapping.isNoneMapping());
}
/**
 * Removes the given mapping (by value equality) from the configured mappings,
 * if present, and applies the result.
 */
public void removeDirectoryMapping(@NotNull VcsDirectoryMapping mapping) {
  if (LOG.isDebugEnabled()) {
    LOG.debug("remove mapping: " + mapping.getDirectory(), new Throwable());
  }
  List<VcsDirectoryMapping> remaining = new ArrayList<>(myMappings);
  remaining.remove(mapping);
  updateVcsMappings(remaining);
}
/**
 * Normalizes the mappings: keeps at most one &lt;Project&gt; mapping, drops mappings
 * whose directory no longer exists, and lets each VCS collapse redundant
 * nested roots via {@code filterUniqueRoots}.
 */
public void cleanupMappings() {
LocalFileSystem lfs = LocalFileSystem.getInstance();
List<VcsDirectoryMapping> oldMappings = new ArrayList<>(getDirectoryMappings());
List<VcsDirectoryMapping> filteredMappings = new ArrayList<>();
VcsDirectoryMapping defaultMapping = ContainerUtil.find(oldMappings, it -> it.isDefaultMapping());
if (defaultMapping != null) {
oldMappings.remove(defaultMapping);
filteredMappings.add(defaultMapping);
}
// Group by VCS so each VCS can filter its own roots.
MultiMap<String, VcsDirectoryMapping> groupedMappings = new MultiMap<>();
for (VcsDirectoryMapping mapping : oldMappings) {
groupedMappings.putValue(mapping.getVcs(), mapping);
}
for (Map.Entry<String, Collection<VcsDirectoryMapping>> entry : groupedMappings.entrySet()) {
String vcsName = entry.getKey();
Collection<VcsDirectoryMapping> mappings = entry.getValue();
// Drop mappings whose directory cannot be resolved on disk.
List<Pair<VirtualFile, VcsDirectoryMapping>> objects = ContainerUtil.mapNotNull(mappings, dm -> {
VirtualFile vf = lfs.refreshAndFindFileByPath(dm.getDirectory());
return vf == null ? null : Pair.create(vf, dm);
});
if (StringUtil.isEmptyOrSpaces(vcsName)) {
filteredMappings.addAll(ContainerUtil.map(objects, Functions.pairSecond()));
}
else {
AbstractVcs vcs = myVcsManager.findVcsByName(vcsName);
if (vcs == null) {
// Unknown VCS (plugin unloaded?): warn but keep the mappings untouched.
VcsBalloonProblemNotifier.showOverChangesView(myProject,
VcsBundle.message("impl.notification.content.vcs.plugin.not.found.for.mapping.to", vcsName),
MessageType.ERROR);
filteredMappings.addAll(mappings);
}
else {
filteredMappings.addAll(ContainerUtil.map(vcs.filterUniqueRoots(objects, pair -> pair.getFirst()), Functions.pairSecond()));
}
}
}
updateVcsMappings(filteredMappings);
}
/**
 * Computes the active-VCS delta implied by the current mappings and returns an
 * activator for it. Must be called under {@link #myUpdateLock}; the returned
 * activator must be run outside the lock.
 */
@NotNull
private MyVcsActivator createVcsActivator() {
Set<AbstractVcs> newVcses = !TrustedProjects.isTrusted(myProject)
? Collections.emptySet()
: ContainerUtil.map2SetNotNull(myMappings, mapping -> getMappingsVcs(mapping));
List<AbstractVcs> oldVcses = myActiveVcses;
myActiveVcses = Collections.unmodifiableList(new ArrayList<>(newVcses));
refreshMainMenu();
Collection<AbstractVcs> toAdd = ContainerUtil.subtract(myActiveVcses, oldVcses);
Collection<AbstractVcs> toRemove = ContainerUtil.subtract(oldVcses, myActiveVcses);
return new MyVcsActivator(toAdd, toRemove);
}
/**
 * One-shot command object: activates the VCSes that became used and
 * deactivates the ones no longer referenced by any mapping.
 * Failures are logged, never rethrown.
 */
private static final class MyVcsActivator {
  @NotNull private final Collection<? extends AbstractVcs> myAddVcses;
  @NotNull private final Collection<? extends AbstractVcs> myRemoveVcses;

  private MyVcsActivator(@NotNull Collection<? extends AbstractVcs> addVcses,
                         @NotNull Collection<? extends AbstractVcs> removeVcses) {
    myAddVcses = addVcses;
    myRemoveVcses = removeVcses;
  }

  /** Runs all activations first, then all deactivations. */
  public void activate() {
    for (AbstractVcs vcs : myAddVcses) {
      invokeSafely(vcs, true);
    }
    for (AbstractVcs vcs : myRemoveVcses) {
      invokeSafely(vcs, false);
    }
  }

  // Performs a single (de)activation, swallowing VcsException into the log.
  private static void invokeSafely(@NotNull AbstractVcs vcs, boolean activate) {
    try {
      if (activate) {
        vcs.doActivate();
      }
      else {
        vcs.doDeactivate();
      }
    }
    catch (VcsException e) {
      LOG.error(e);
    }
  }
}
/** @return true when a VCS with the given name is currently active. */
public boolean haveActiveVcs(final String name) {
return ContainerUtil.exists(myActiveVcses, vcs -> Objects.equals(vcs.getName(), name));
}
/** Removes every mapping that references the VCS being unregistered. */
public void beingUnregistered(final String name) {
if (LOG.isDebugEnabled()) {
LOG.debug("beingUnregistered " + name, new Throwable());
}
List<VcsDirectoryMapping> newMappings = new ArrayList<>(myMappings);
newMappings.removeIf(mapping -> Objects.equals(mapping.getVcs(), name));
updateVcsMappings(newMappings);
}
/**
 * A concrete VCS root detected on disk: the originating mapping, the root
 * directory, and the VCS (null when the root failed validation).
 */
public static final class MappedRoot {
  @Nullable public final AbstractVcs vcs;
  @NotNull public final VcsDirectoryMapping mapping;
  @NotNull public final VirtualFile root;

  private MappedRoot(@Nullable AbstractVcs vcs, @NotNull VcsDirectoryMapping mapping, @NotNull VirtualFile root) {
    this.vcs = vcs;
    this.mapping = mapping;
    this.root = root;
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) return true;
    // Class is final, so an instanceof check is equivalent to comparing getClass().
    if (!(o instanceof MappedRoot)) return false;
    MappedRoot that = (MappedRoot)o;
    return Objects.equals(vcs, that.vcs)
           && mapping.equals(that.mapping)
           && root.equals(that.root);
  }

  @Override
  public int hashCode() {
    return Objects.hash(vcs, mapping, root);
  }
}
/** Result of a root-collection pass: the roots plus the disposable owning their file pointers. */
private static final class Mappings {
@NotNull public final List<MappedRoot> mappedRoots;
@NotNull public final Disposable filePointerDisposable;
private Mappings(@NotNull List<MappedRoot> mappedRoots, @NotNull Disposable filePointerDisposable) {
this.mappedRoots = mappedRoots;
this.filePointerDisposable = filePointerDisposable;
}
}
/** Immutable file/path → root lookup built once per root set. */
private static final class RootMapping {
private final Map<VirtualFile, MappedRoot> myVFMap = new HashMap<>();
private final FilePathMapping<MappedRoot> myPathMapping = new FilePathMapping<>(SystemInfo.isFileSystemCaseSensitive);
private RootMapping(@NotNull List<MappedRoot> mappedRoots) {
for (MappedRoot root : mappedRoots) {
myVFMap.put(root.root, root);
myPathMapping.add(root.root.getPath(), root);
}
}
/** Walks up the parent chain until a mapped root is found. */
@Nullable
public MappedRoot getRootFor(@NotNull VirtualFile file) {
while (file != null) {
MappedRoot root = myVFMap.get(file);
if (root != null) return root;
file = file.getParent();
}
return null;
}
/** Path-string variant; ancestor matching is delegated to FilePathMapping. */
@Nullable
public MappedRoot getRootFor(@NotNull FilePath filePath) {
return myPathMapping.getMappingFor(filePath.getPath());
}
}
}
| |
package org.embulk.parser.csv_guessable;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSet;
import com.opencsv.CSVReader; // TODO: use embulk's parser
import org.embulk.config.Config;
import org.embulk.config.ConfigDefault;
import org.embulk.config.ConfigException;
import org.embulk.config.ConfigSource;
import org.embulk.config.Task;
import org.embulk.config.TaskSource;
import org.embulk.spi.Column;
import org.embulk.spi.ColumnConfig;
import org.embulk.spi.ColumnVisitor;
import org.embulk.spi.DataException;
import org.embulk.spi.Exec;
import org.embulk.spi.FileInput;
import org.embulk.spi.PageBuilder;
import org.embulk.spi.PageOutput;
import org.embulk.spi.ParserPlugin;
import org.embulk.spi.Schema;
import org.embulk.spi.SchemaConfig;
import org.embulk.spi.json.JsonParseException;
import org.embulk.spi.json.JsonParser;
import org.embulk.spi.time.TimestampParseException;
import org.embulk.spi.time.TimestampParser;
import org.embulk.spi.type.Types;
import org.embulk.spi.unit.LocalFile;
import org.embulk.spi.util.LineDecoder;
import org.embulk.spi.util.Timestamps;
import org.embulk.standards.CsvParserPlugin;
import org.slf4j.Logger;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
public class CsvGuessableParserPlugin
extends CsvParserPlugin
{
// String values the boolean column visitor treats as true; anything else is false.
private static final ImmutableSet<String> TRUE_STRINGS =
ImmutableSet.of(
"true", "True", "TRUE",
"yes", "Yes", "YES",
"t", "T", "y", "Y",
"on", "On", "ON",
"1");
private final Logger log;
public CsvGuessableParserPlugin()
{
log = Exec.getLogger(CsvGuessableParserPlugin.class);
}
/**
 * Plugin configuration. Mirrors the standard CsvParserPlugin options and adds
 * {@code schema_file}/{@code schema_line} for guessing columns from a header row.
 */
public interface PluginTask
extends Task, LineDecoder.DecoderTask, TimestampParser.Task
{
// Optional when schema_file is given; otherwise required (validated in transaction()).
@Config("columns")
@ConfigDefault("null")
Optional<SchemaConfig> getSchemaConfig();
@Config("header_line")
@ConfigDefault("null")
Optional<Boolean> getHeaderLine();
@Config("skip_header_lines")
@ConfigDefault("0")
int getSkipHeaderLines();
void setSkipHeaderLines(int n);
@Config("delimiter")
@ConfigDefault("\",\"")
String getDelimiter();
@Config("quote")
@ConfigDefault("\"\\\"\"")
Optional<QuoteCharacter> getQuoteChar();
@Config("escape")
@ConfigDefault("\"\\\\\"")
Optional<EscapeCharacter> getEscapeChar();
// Null value handling: if the CsvParser found 'non-quoted empty string's,
// it replaces them to string that users specified like "\N", "NULL".
@Config("null_string")
@ConfigDefault("null")
Optional<String> getNullString();
@Config("trim_if_not_quoted")
@ConfigDefault("false")
boolean getTrimIfNotQuoted();
@Config("max_quoted_size_limit")
@ConfigDefault("131072") //128kB
long getMaxQuotedSizeLimit();
@Config("comment_line_marker")
@ConfigDefault("null")
Optional<String> getCommentLineMarker();
@Config("allow_optional_columns")
@ConfigDefault("false")
boolean getAllowOptionalColumns();
@Config("allow_extra_columns")
@ConfigDefault("false")
boolean getAllowExtraColumns();
@Config("stop_on_invalid_record")
@ConfigDefault("false")
boolean getStopOnInvalidRecord();
// File whose schema_line-th row supplies the column names.
@Config("schema_file")
@ConfigDefault("null")
public Optional<LocalFile> getSchemaFile();
// 1-origin line number of the header row within schema_file.
@Config("schema_line")
@ConfigDefault("1")
public int getSchemaLine();
}
/**
 * Builds the schema — either guessed from the schema_file header row (columns
 * default to string, optionally re-typed/renamed via 'columns') or taken
 * directly from the 'columns' option — and hands control to the runner.
 */
@Override
public void transaction(ConfigSource config, ParserPlugin.Control control)
{
PluginTask task = config.loadConfig(PluginTask.class);
SchemaConfig schemaConfig = null;
if (task.getSchemaFile().isPresent()) {
int schemaLine = task.getSchemaLine();
// Ensure the header row itself is skipped during data parsing.
if (schemaLine > task.getSkipHeaderLines()) {
task.setSkipHeaderLines(schemaLine);
}
String header = readHeader(task.getSchemaFile().get().getPath(), schemaLine, task.getCharset());
log.debug(header);
String delimiter = task.getDelimiter();
ArrayList<ColumnConfig> schema = newColumns(header, config, delimiter);
/* alias and set type */
if (task.getSchemaConfig().isPresent()) {
List<ColumnConfig> columns = task.getSchemaConfig().get().getColumns();
for (ColumnConfig column : columns) {
String name = column.getName();
try {
// 'value_name' selects which guessed column this entry overrides.
name = column.getConfigSource().get(String.class, "value_name");
}
catch (ConfigException e) {
/* only setType */
}
for (int i = 0; i < schema.size(); ++i) {
ColumnConfig c = schema.get(i);
if (c.getName().equals(name)) {
schema.set(i, new ColumnConfig(column.getName(), column.getType(), column.getOption()));
}
}
}
}
log.debug(schema.toString());
schemaConfig = new SchemaConfig(schema);
}
else if (task.getSchemaConfig().isPresent()) { /* original CsvParserPlugin */
// backward compatibility
if (task.getHeaderLine().isPresent()) {
if (task.getSkipHeaderLines() > 0) {
throw new ConfigException("'header_line' option is invalid if 'skip_header_lines' is set.");
}
if (task.getHeaderLine().get()) {
task.setSkipHeaderLines(1);
}
else {
task.setSkipHeaderLines(0);
}
}
schemaConfig = task.getSchemaConfig().get();
}
else {
throw new ConfigException("Field 'columns' or 'schema_file' is required but not set");
}
control.run(task.dump(), schemaConfig.toSchema());
}
/**
 * Parses CSV records from the input and emits pages matching the schema.
 * Invalid records are skipped (or abort the run when stop_on_invalid_record).
 */
@Override
public void run(TaskSource taskSource, final Schema schema,
FileInput input, PageOutput output)
{
PluginTask task = taskSource.loadTask(PluginTask.class);
// Only available when 'columns' was configured; null when schema came from schema_file.
TimestampParser[] timestampParsers = null;
if (task.getSchemaConfig().isPresent()) {
timestampParsers = Timestamps.newTimestampColumnParsers(task, task.getSchemaConfig().get());
}
final JsonParser jsonParser = new JsonParser();
final CsvTokenizer tokenizer = new CsvTokenizer(new LineDecoder(input, task), task);
final boolean allowOptionalColumns = task.getAllowOptionalColumns();
final boolean allowExtraColumns = task.getAllowExtraColumns();
final boolean stopOnInvalidRecord = task.getStopOnInvalidRecord();
int skipHeaderLines = task.getSkipHeaderLines();
try (final PageBuilder pageBuilder = new PageBuilder(Exec.getBufferAllocator(), schema, output)) {
while (tokenizer.nextFile()) {
// skip the header lines for each file
for (; skipHeaderLines > 0; skipHeaderLines--) {
if (!tokenizer.skipHeaderLine()) {
break;
}
}
if (!tokenizer.nextRecord()) {
// empty file
continue;
}
while (true) {
boolean hasNextRecord;
try {
schema.visitColumns(new ColumnVisitor() {
@Override
public void booleanColumn(Column column)
{
String v = nextColumn();
if (v == null) {
pageBuilder.setNull(column);
}
else {
pageBuilder.setBoolean(column, TRUE_STRINGS.contains(v));
}
}
@Override
public void longColumn(Column column)
{
String v = nextColumn();
if (v == null) {
pageBuilder.setNull(column);
}
else {
try {
pageBuilder.setLong(column, Long.parseLong(v));
}
catch (NumberFormatException e) {
// TODO support default value
throw new CsvRecordValidateException(e);
}
}
}
@Override
public void doubleColumn(Column column)
{
String v = nextColumn();
if (v == null) {
pageBuilder.setNull(column);
}
else {
try {
pageBuilder.setDouble(column, Double.parseDouble(v));
}
catch (NumberFormatException e) {
// TODO support default value
throw new CsvRecordValidateException(e);
}
}
}
@Override
public void stringColumn(Column column)
{
String v = nextColumn();
if (v == null) {
pageBuilder.setNull(column);
}
else {
pageBuilder.setString(column, v);
}
}
@Override
public void timestampColumn(Column column)
{
String v = nextColumn();
if (v == null) {
pageBuilder.setNull(column);
}
else {
try {
// NOTE(review): the setTimestamp call below is commented out, so non-null
// timestamp values are never written — presumably because timestampParsers
// is null when the schema comes from schema_file; confirm intended behavior.
// pageBuilder.setTimestamp(column, timestampParsers[column.getIndex()].parse(v));
}
catch (TimestampParseException e) {
// TODO support default value
throw new CsvRecordValidateException(e);
}
}
}
@Override
public void jsonColumn(Column column)
{
String v = nextColumn();
if (v == null) {
pageBuilder.setNull(column);
}
else {
try {
pageBuilder.setJson(column, jsonParser.parse(v));
}
catch (JsonParseException e) {
// TODO support default value
throw new CsvRecordValidateException(e);
}
}
}
// Returns the next field, or null when the record ran out of columns
// and allow_optional_columns is set.
private String nextColumn()
{
if (allowOptionalColumns && !tokenizer.hasNextColumn()) {
//TODO warning
return null;
}
return tokenizer.nextColumnOrNull();
}
});
try {
hasNextRecord = tokenizer.nextRecord();
}
catch (CsvTokenizer.TooManyColumnsException ex) {
if (allowExtraColumns) {
// Extra trailing columns are discarded; the parsed prefix is kept.
String tooManyColumnsLine = tokenizer.skipCurrentLine();
// TODO warning
hasNextRecord = tokenizer.nextRecord();
}
else {
// this line will be skipped at the following catch section
throw ex;
}
}
pageBuilder.addRecord();
}
catch (CsvTokenizer.InvalidFormatException | CsvTokenizer.InvalidValueException | CsvRecordValidateException e) {
String skippedLine = tokenizer.skipCurrentLine();
long lineNumber = tokenizer.getCurrentLineNumber();
if (stopOnInvalidRecord) {
throw new DataException(String.format("Invalid record at line %d: %s", lineNumber, skippedLine), e);
}
log.warn(String.format("Skipped line %d (%s): %s", lineNumber, e.getMessage(), skippedLine));
//exec.notice().skippedLine(skippedLine);
hasNextRecord = tokenizer.nextRecord();
}
if (!hasNextRecord) {
break;
}
}
}
pageBuilder.finish();
}
}
/** Wraps a per-value parse failure so the record-level handler can skip or abort. */
static class CsvRecordValidateException
extends DataException
{
CsvRecordValidateException(Throwable cause)
{
super(cause);
}
}
/**
 * Reads the schemaLine-th line (1-origin) of the schema file.
 *
 * @throws ConfigException when schemaLine is not positive, the file is shorter
 *         than schemaLine lines, or an I/O error occurs
 */
private String readHeader(Path path, int schemaLine, Charset charset)
{
    if (schemaLine <= 0) {
        throw new ConfigException("'schemaLine' must be set '> 0'");
    }
    try (BufferedReader reader = Files.newBufferedReader(path, charset)) {
        String current = null;
        int remaining = schemaLine;
        // Consume lines until the requested one; EOF before that is a config error.
        while (remaining-- > 0) {
            current = reader.readLine();
            if (current == null) {
                throw new ConfigException("not found 'schema_line' in 'schema_file'");
            }
        }
        return current;
    }
    catch (IOException e) {
        throw new ConfigException(e);
    }
}
/**
 * Builds a string-typed ColumnConfig for every field of the given header line.
 * Types/aliases are applied later in transaction() from the 'columns' option.
 *
 * @throws ConfigException when the header cannot be parsed or yields no fields
 */
private ArrayList<ColumnConfig> newColumns(String header, ConfigSource config, String delimiter)
{
    ArrayList<ColumnConfig> columns = new ArrayList<ColumnConfig>();
    // NOTE: CSVReader takes a single char, so a multi-character 'delimiter'
    // option is silently truncated to its first character here.
    try (CSVReader reader = new CSVReader(new StringReader(header), delimiter.charAt(0))) {
        String[] csv = reader.readNext();
        if (csv == null) {
            // readNext() returns null for an empty header line; fail clearly instead of NPE.
            throw new ConfigException("'schema_line' in 'schema_file' is empty");
        }
        for (String column : csv) {
            columns.add(new ColumnConfig(column, Types.STRING, config));
        }
    } catch (IOException e) {
        throw new ConfigException(e);
    }
    return columns;
    // Fixed: removed the unused 'PluginTask task = config.loadConfig(...)' local,
    // which re-parsed the whole configuration for nothing.
}
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.core.view.accessibility;
import android.annotation.SuppressLint;
import android.os.Build;
import android.os.Parcelable;
import android.view.View;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.accessibility.AccessibilityRecord;
import androidx.annotation.DoNotInline;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import java.util.List;
/**
* Helper for accessing {@link AccessibilityRecord}.
*/
public class AccessibilityRecordCompat {
// The wrapped framework record; every method delegates to it.
private final AccessibilityRecord mRecord;
/**
* @deprecated This is not type safe. If you want to modify an
* {@link AccessibilityEvent}'s properties defined in
* {@link AccessibilityRecord} use
* {@link AccessibilityEventCompat#asRecord(AccessibilityEvent)}. This method will be removed
* in a subsequent release of the support library.
*/
@Deprecated
public AccessibilityRecordCompat(Object record) {
mRecord = (AccessibilityRecord) record;
}
/**
* @return The wrapped implementation.
*
* @deprecated This method will be removed in a subsequent release of
* the support library.
*/
@Deprecated
public Object getImpl() {
return mRecord;
}
/**
* Returns a cached instance if such is available or a new one is
* instantiated. The instance is initialized with data from the
* given record.
*
* @return An instance.
*
* @deprecated Use {@link AccessibilityRecord#obtain(AccessibilityRecord)} directly.
*/
@SuppressWarnings("deprecation")
@Deprecated
public static AccessibilityRecordCompat obtain(AccessibilityRecordCompat record) {
// Delegates pooling to the framework and wraps the obtained copy.
return new AccessibilityRecordCompat(AccessibilityRecord.obtain(record.mRecord));
}
/**
* Returns a cached instance if such is available or a new one is
* instantiated.
*
* @return An instance.
*
* @deprecated Use {@link AccessibilityRecord#obtain()} directly.
*/
@SuppressWarnings("deprecation")
@Deprecated
public static AccessibilityRecordCompat obtain() {
return new AccessibilityRecordCompat(AccessibilityRecord.obtain());
}
/**
* Sets the event source.
*
* @param source The source.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setSource(View)} directly.
*/
@SuppressLint("KotlinPropertyAccess")
@Deprecated
public void setSource(View source) {
mRecord.setSource(source);
}
/**
* Sets the source to be a virtual descendant of the given <code>root</code>.
* If <code>virtualDescendantId</code> equals to {@link View#NO_ID} the root
* is set as the source.
* <p>
* A virtual descendant is an imaginary View that is reported as a part of the view
* hierarchy for accessibility purposes. This enables custom views that draw complex
* content to report them selves as a tree of virtual views, thus conveying their
* logical structure.
* </p>
*
* @param root The root of the virtual subtree.
* @param virtualDescendantId The id of the virtual descendant.
*
* @deprecated Use {@link #setSource(AccessibilityRecord, View, int)} instead.
*/
@Deprecated
public void setSource(View root, int virtualDescendantId) {
AccessibilityRecordCompat.setSource(mRecord, root, virtualDescendantId);
}
/**
* Sets the source to be a virtual descendant of the given <code>root</code>.
* If <code>virtualDescendantId</code> equals to {@link View#NO_ID} the root
* is set as the source.
* <p>
* A virtual descendant is an imaginary View that is reported as a part of the view
* hierarchy for accessibility purposes. This enables custom views that draw complex
* content to report them selves as a tree of virtual views, thus conveying their
* logical structure.
* </p>
* <p>
* On SDK &lt; 16 this call is a no-op (the platform API does not exist there).
* </p>
*
* @param record The {@link AccessibilityRecord} instance to use.
* @param root The root of the virtual subtree.
* @param virtualDescendantId The id of the virtual descendant.
*/
public static void setSource(@NonNull AccessibilityRecord record, @Nullable View root,
int virtualDescendantId) {
if (Build.VERSION.SDK_INT >= 16) {
Api16Impl.setSource(record, root, virtualDescendantId);
}
}
/**
* Gets the {@link AccessibilityNodeInfo} of
* the event source.
* <p>
* <strong>Note:</strong> It is a client responsibility to recycle the
* received info by calling
* {@link AccessibilityNodeInfo#recycle()
* AccessibilityNodeInfo#recycle()} to avoid creating of multiple instances.
*</p>
*
* @return The info of the source.
*
* @deprecated Use {@link AccessibilityRecord#getSource()} directly.
*/
@SuppressLint("KotlinPropertyAccess")
@Deprecated
public AccessibilityNodeInfoCompat getSource() {
// wrapNonNullInstance returns null when the framework has no source set.
return AccessibilityNodeInfoCompat.wrapNonNullInstance(mRecord.getSource());
}
/**
* Gets the id of the window from which the event comes from.
*
* @return The window id.
*
* @deprecated Use {@link AccessibilityRecord#getWindowId()} directly.
*/
@Deprecated
public int getWindowId() {
return mRecord.getWindowId();
}
/**
* Gets if the source is checked.
*
* @return True if the view is checked, false otherwise.
*
* @deprecated Use {@link AccessibilityRecord#isChecked()} directly.
*/
@Deprecated
public boolean isChecked() {
    // Pure delegation to the wrapped framework record.
    return mRecord.isChecked();
}
/**
* Sets if the source is checked.
*
* @param isChecked True if the view is checked, false otherwise.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setChecked(boolean)} directly.
*/
@Deprecated
public void setChecked(boolean isChecked) {
    // Pure delegation to the wrapped framework record.
    mRecord.setChecked(isChecked);
}
/**
* Gets if the source is enabled.
*
* @return True if the view is enabled, false otherwise.
*
* @deprecated Use {@link AccessibilityRecord#isEnabled()} directly.
*/
@Deprecated
public boolean isEnabled() {
    // Pure delegation to the wrapped framework record.
    return mRecord.isEnabled();
}
/**
* Sets if the source is enabled.
*
* @param isEnabled True if the view is enabled, false otherwise.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setEnabled(boolean)} directly.
*/
@Deprecated
public void setEnabled(boolean isEnabled) {
    // Pure delegation to the wrapped framework record.
    mRecord.setEnabled(isEnabled);
}
/**
* Gets if the source is a password field.
*
* @return True if the view is a password field, false otherwise.
*
* @deprecated Use {@link AccessibilityRecord#isPassword()} directly.
*/
@Deprecated
public boolean isPassword() {
    // Pure delegation to the wrapped framework record.
    return mRecord.isPassword();
}
/**
* Sets if the source is a password field.
*
* @param isPassword True if the view is a password field, false otherwise.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setPassword(boolean)} directly.
*/
@Deprecated
public void setPassword(boolean isPassword) {
    // Pure delegation to the wrapped framework record.
    mRecord.setPassword(isPassword);
}
/**
* Gets if the source is taking the entire screen.
*
* @return True if the source is full screen, false otherwise.
*
* @deprecated Use {@link AccessibilityRecord#isFullScreen()} directly.
*/
@Deprecated
public boolean isFullScreen() {
    // Pure delegation to the wrapped framework record.
    return mRecord.isFullScreen();
}
/**
* Sets if the source is taking the entire screen.
*
* @param isFullScreen True if the source is full screen, false otherwise.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setFullScreen(boolean)} directly.
*/
@Deprecated
public void setFullScreen(boolean isFullScreen) {
    // Pure delegation to the wrapped framework record.
    mRecord.setFullScreen(isFullScreen);
}
/**
* Gets if the source is scrollable.
*
* @return True if the source is scrollable, false otherwise.
*
* @deprecated Use {@link AccessibilityRecord#isScrollable()} directly.
*/
@Deprecated
public boolean isScrollable() {
    // Pure delegation to the wrapped framework record.
    return mRecord.isScrollable();
}
/**
* Sets if the source is scrollable.
*
* @param scrollable True if the source is scrollable, false otherwise.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setScrollable(boolean)} directly.
*/
@Deprecated
public void setScrollable(boolean scrollable) {
    // Pure delegation to the wrapped framework record.
    mRecord.setScrollable(scrollable);
}
/**
* Gets the number of items that can be visited.
*
* @return The number of items.
*
* @deprecated Use {@link AccessibilityRecord#getItemCount()} directly.
*/
@Deprecated
public int getItemCount() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getItemCount();
}
/**
* Sets the number of items that can be visited.
*
* @param itemCount The number of items.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setItemCount(int)} directly.
*/
@Deprecated
public void setItemCount(int itemCount) {
    // Pure delegation to the wrapped framework record.
    mRecord.setItemCount(itemCount);
}
/**
* Gets the index of the source in the list of items that can be visited.
*
* @return The current item index.
*
* @deprecated Use {@link AccessibilityRecord#getCurrentItemIndex()} directly.
*/
@Deprecated
public int getCurrentItemIndex() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getCurrentItemIndex();
}
/**
* Sets the index of the source in the list of items that can be visited.
*
* @param currentItemIndex The current item index.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setCurrentItemIndex(int)} directly.
*/
@Deprecated
public void setCurrentItemIndex(int currentItemIndex) {
    // Pure delegation to the wrapped framework record.
    mRecord.setCurrentItemIndex(currentItemIndex);
}
/**
* Gets the index of the first character of the changed sequence,
* or the beginning of a text selection or the index of the first
* visible item when scrolling.
*
* @return The index of the first character or selection
* start or the first visible item.
*
* @deprecated Use {@link AccessibilityRecord#getFromIndex()} directly.
*/
@Deprecated
public int getFromIndex() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getFromIndex();
}
/**
* Sets the index of the first character of the changed sequence
* or the beginning of a text selection or the index of the first
* visible item when scrolling.
*
* @param fromIndex The index of the first character or selection
* start or the first visible item.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setFromIndex(int)} directly.
*/
@Deprecated
public void setFromIndex(int fromIndex) {
    // Pure delegation to the wrapped framework record.
    mRecord.setFromIndex(fromIndex);
}
/**
* Gets the index of text selection end or the index of the last
* visible item when scrolling.
*
* @return The index of selection end or last item index.
*
* @deprecated Use {@link AccessibilityRecord#getToIndex()} directly.
*/
@Deprecated
public int getToIndex() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getToIndex();
}
/**
* Sets the index of text selection end or the index of the last
* visible item when scrolling.
*
* @param toIndex The index of selection end or last item index.
*
* @deprecated Use {@link AccessibilityRecord#setToIndex(int)} directly.
*/
@Deprecated
public void setToIndex(int toIndex) {
    // Pure delegation to the wrapped framework record.
    mRecord.setToIndex(toIndex);
}
/**
* Gets the scroll offset of the source left edge in pixels.
*
* @return The scroll.
*
* @deprecated Use {@link AccessibilityRecord#getScrollX()} directly.
*/
@Deprecated
public int getScrollX() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getScrollX();
}
/**
* Sets the scroll offset of the source left edge in pixels.
*
* @param scrollX The scroll.
*
* @deprecated Use {@link AccessibilityRecord#setScrollX(int)} directly.
*/
@Deprecated
public void setScrollX(int scrollX) {
    // Pure delegation to the wrapped framework record.
    mRecord.setScrollX(scrollX);
}
/**
* Gets the scroll offset of the source top edge in pixels.
*
* @return The scroll.
*
* @deprecated Use {@link AccessibilityRecord#getScrollY()} directly.
*/
@Deprecated
public int getScrollY() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getScrollY();
}
/**
* Sets the scroll offset of the source top edge in pixels.
*
* @param scrollY The scroll.
*
* @deprecated Use {@link AccessibilityRecord#setScrollY(int)} directly.
*/
@Deprecated
public void setScrollY(int scrollY) {
    // Pure delegation to the wrapped framework record.
    mRecord.setScrollY(scrollY);
}
/**
* Gets the max scroll offset of the source left edge in pixels.
*
* @return The max scroll.
*
* @deprecated Use {@link #getMaxScrollX(AccessibilityRecord)} instead.
*/
@Deprecated
public int getMaxScrollX() {
    // Delegates to the static compat helper operating on the wrapped record.
    return AccessibilityRecordCompat.getMaxScrollX(mRecord);
}
/**
* Gets the max scroll offset of the source left edge in pixels.
*
* @param record The {@link AccessibilityRecord} instance to use.
* @return The max scroll.
*/
public static int getMaxScrollX(@NonNull AccessibilityRecord record) {
    // The maxScrollX field only exists on API 15+; older platforms report zero.
    if (Build.VERSION.SDK_INT < 15) {
        return 0;
    }
    return Api15Impl.getMaxScrollX(record);
}
/**
* Sets the max scroll offset of the source left edge in pixels.
*
* @param maxScrollX The max scroll.
*
* @deprecated Use {@link #setMaxScrollX(AccessibilityRecord, int)} instead.
*/
@Deprecated
public void setMaxScrollX(int maxScrollX) {
    // Delegates to the static compat helper operating on the wrapped record.
    AccessibilityRecordCompat.setMaxScrollX(mRecord, maxScrollX);
}
/**
* Sets the max scroll offset of the source left edge in pixels.
*
* @param record The {@link AccessibilityRecord} instance to use.
* @param maxScrollX The max scroll.
*/
public static void setMaxScrollX(@NonNull AccessibilityRecord record, int maxScrollX) {
    // The maxScrollX field only exists on API 15+; silently a no-op on older devices.
    if (Build.VERSION.SDK_INT >= 15) {
        Api15Impl.setMaxScrollX(record, maxScrollX);
    }
}
/**
* Gets the max scroll offset of the source top edge in pixels.
*
* @return The max scroll.
*
* @deprecated Use {@link #getMaxScrollY(AccessibilityRecord)} instead.
*/
@Deprecated
public int getMaxScrollY() {
    // Delegates to the static compat helper operating on the wrapped record.
    return AccessibilityRecordCompat.getMaxScrollY(mRecord);
}
/**
* Gets the max scroll offset of the source top edge in pixels.
*
* @param record The {@link AccessibilityRecord} instance to use.
* @return The max scroll.
*/
public static int getMaxScrollY(@NonNull AccessibilityRecord record) {
    // The maxScrollY field only exists on API 15+; older platforms report zero.
    if (Build.VERSION.SDK_INT < 15) {
        return 0;
    }
    return Api15Impl.getMaxScrollY(record);
}
/**
* Sets the max scroll offset of the source top edge in pixels.
*
* @param maxScrollY The max scroll.
*
* @deprecated Use {@link #setMaxScrollY(AccessibilityRecord, int)} instead.
*/
@Deprecated
public void setMaxScrollY(int maxScrollY) {
    // Delegates to the static compat helper operating on the wrapped record.
    AccessibilityRecordCompat.setMaxScrollY(mRecord, maxScrollY);
}
/**
* Sets the max scroll offset of the source top edge in pixels.
*
* @param record The {@link AccessibilityRecord} instance to use.
* @param maxScrollY The max scroll.
*/
public static void setMaxScrollY(@NonNull AccessibilityRecord record, int maxScrollY) {
    // The maxScrollY field only exists on API 15+; silently a no-op on older devices.
    if (Build.VERSION.SDK_INT >= 15) {
        Api15Impl.setMaxScrollY(record, maxScrollY);
    }
}
/**
* Gets the number of added characters.
*
* @return The number of added characters.
*
* @deprecated Use {@link AccessibilityRecord#getAddedCount()} directly.
*/
@Deprecated
public int getAddedCount() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getAddedCount();
}
/**
* Sets the number of added characters.
*
* @param addedCount The number of added characters.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setAddedCount(int)} directly.
*/
@Deprecated
public void setAddedCount(int addedCount) {
    // Pure delegation to the wrapped framework record.
    mRecord.setAddedCount(addedCount);
}
/**
* Gets the number of removed characters.
*
* @return The number of removed characters.
*
* @deprecated Use {@link AccessibilityRecord#getRemovedCount()} directly.
*/
@Deprecated
public int getRemovedCount() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getRemovedCount();
}
/**
* Sets the number of removed characters.
*
* @param removedCount The number of removed characters.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setRemovedCount(int)} directly.
*/
@Deprecated
public void setRemovedCount(int removedCount) {
    // Pure delegation to the wrapped framework record.
    mRecord.setRemovedCount(removedCount);
}
/**
* Gets the class name of the source.
*
* @return The class name.
*
* @deprecated Use {@link AccessibilityRecord#getClassName()} directly.
*/
@Deprecated
public CharSequence getClassName() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getClassName();
}
/**
* Sets the class name of the source.
*
* @param className The class name.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setClassName(CharSequence)} directly.
*/
@Deprecated
public void setClassName(CharSequence className) {
    // Pure delegation to the wrapped framework record.
    mRecord.setClassName(className);
}
/**
* Gets the text of the event. The index in the list represents the priority
* of the text. Specifically, the lower the index the higher the priority.
*
* @return The text.
*
* @deprecated Use {@link AccessibilityRecord#getText()} directly.
*/
@Deprecated
public List<CharSequence> getText() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getText();
}
/**
* Gets the text before a change.
*
* @return The text before the change.
*
* @deprecated Use {@link AccessibilityRecord#getBeforeText()} directly.
*/
@Deprecated
public CharSequence getBeforeText() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getBeforeText();
}
/**
* Sets the text before a change.
*
* @param beforeText The text before the change.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setBeforeText(CharSequence)} directly.
*/
@Deprecated
public void setBeforeText(CharSequence beforeText) {
    // Pure delegation to the wrapped framework record.
    mRecord.setBeforeText(beforeText);
}
/**
* Gets the description of the source.
*
* @return The description.
*
* @deprecated Use {@link AccessibilityRecord#getContentDescription()} directly.
*/
@Deprecated
public CharSequence getContentDescription() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getContentDescription();
}
/**
* Sets the description of the source.
*
* @param contentDescription The description.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setContentDescription(CharSequence)} directly.
*/
@Deprecated
public void setContentDescription(CharSequence contentDescription) {
    // Pure delegation to the wrapped framework record.
    mRecord.setContentDescription(contentDescription);
}
/**
* Gets the {@link Parcelable} data.
*
* @return The parcelable data.
*
* @deprecated Use {@link AccessibilityRecord#getParcelableData()} directly.
*/
@Deprecated
public Parcelable getParcelableData() {
    // Pure delegation to the wrapped framework record.
    return mRecord.getParcelableData();
}
/**
* Sets the {@link Parcelable} data of the event.
*
* @param parcelableData The parcelable data.
*
* @throws IllegalStateException If called from an AccessibilityService.
*
* @deprecated Use {@link AccessibilityRecord#setParcelableData(Parcelable)} directly.
*/
@Deprecated
public void setParcelableData(Parcelable parcelableData) {
    // Pure delegation to the wrapped framework record.
    mRecord.setParcelableData(parcelableData);
}
/**
* Return an instance back to be reused.
* <p>
* <strong>Note:</strong> You must not touch the object after calling this
* function.
* </p>
*
* @throws IllegalStateException If the record is already recycled.
*
* @deprecated Use {@link AccessibilityRecord#recycle()} directly.
*/
@Deprecated
public void recycle() {
    // Returns the wrapped record to the framework pool; the wrapper must not
    // be used afterwards.
    mRecord.recycle();
}
/**
* @deprecated Use {@link AccessibilityRecord#hashCode()} directly.
*/
@Deprecated
@Override
public int hashCode() {
    // Null-safe: a wrapper around a null record hashes to 0, keeping the
    // hashCode/equals contract consistent with equals() below.
    return (mRecord == null) ? 0 : mRecord.hashCode();
}
/**
* @deprecated Use {@link AccessibilityRecord} directly.
*/
@Deprecated
@Override
public boolean equals(Object obj) {
    // Identity shortcut.
    if (this == obj) {
        return true;
    }
    // Only another compat wrapper can compare equal (also rejects null).
    if (!(obj instanceof AccessibilityRecordCompat)) {
        return false;
    }
    AccessibilityRecordCompat that = (AccessibilityRecordCompat) obj;
    // Null-safe comparison of the wrapped framework records.
    return (mRecord == null) ? (that.mRecord == null) : mRecord.equals(that.mRecord);
}
@RequiresApi(16)
static class Api16Impl {
    private Api16Impl() {
        // This class is not instantiable.
    }

    // Calls the API 16 framework overload directly; the SDK_INT guard lives
    // in the caller so this class is only loaded on capable devices.
    @DoNotInline
    static void setSource(AccessibilityRecord accessibilityRecord, View root,
            int virtualDescendantId) {
        accessibilityRecord.setSource(root, virtualDescendantId);
    }
}
@RequiresApi(15)
static class Api15Impl {
    private Api15Impl() {
        // This class is not instantiable.
    }

    // Direct calls into the API 15 framework methods; SDK_INT guards live in
    // the callers so this class is only loaded on capable devices.
    @DoNotInline
    static int getMaxScrollX(AccessibilityRecord accessibilityRecord) {
        return accessibilityRecord.getMaxScrollX();
    }

    @DoNotInline
    static void setMaxScrollX(AccessibilityRecord accessibilityRecord, int maxScrollX) {
        accessibilityRecord.setMaxScrollX(maxScrollX);
    }

    @DoNotInline
    static int getMaxScrollY(AccessibilityRecord accessibilityRecord) {
        return accessibilityRecord.getMaxScrollY();
    }

    @DoNotInline
    static void setMaxScrollY(AccessibilityRecord accessibilityRecord, int maxScrollY) {
        accessibilityRecord.setMaxScrollY(maxScrollY);
    }
}
}
| |
/*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.docsubmission.adapter.component;
import ihe.iti.xds_b._2007.ProvideAndRegisterDocumentSetRequestType;
import java.util.ArrayList;
import java.util.List;
import oasis.names.tc.ebxml_regrep.xsd.rim._3.ExtrinsicObjectType;
import oasis.names.tc.ebxml_regrep.xsd.rim._3.RegistryObjectListType;
import oasis.names.tc.ebxml_regrep.xsd.rim._3.RegistryPackageType;
import oasis.names.tc.ebxml_regrep.xsd.rim._3.SlotType1;
import oasis.names.tc.ebxml_regrep.xsd.rs._3.RegistryError;
import oasis.names.tc.ebxml_regrep.xsd.rs._3.RegistryErrorList;
import oasis.names.tc.ebxml_regrep.xsd.rs._3.RegistryResponseType;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import gov.hhs.fha.nhinc.docsubmission.adapter.component.routing.RoutingObjectFactory;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants;
import gov.hhs.fha.nhinc.properties.PropertyAccessor;
/**
*
* @author dunnek
*/
public class XDRHelper {
private static Log log = null;
public static String XDR_EC_XDSMissingDocument = "XDSMissingDocument";
public static String XDR_EC_XDSMissingDocumentMetadata = "XDSMissingDocumentMetadata";
public static String XDR_EC_XDSNonIdenticalHash = "XDSNonIdenticalHash";
public static String XDR_EC_XDSRegistryDuplicateUniqueIdInMessage = "XDSRegistryDuplicateUniqueIdInMessage";
public static String XDR_EC_XDSRegistryBusy = "XDSRegistryBusy";
public static String XDR_EC_XDSRegistryMetadataError = "XDSRegistryMetadataError";
public static String XDR_EC_XDSUnknownPatientId = "XDSUnknownPatientId";
public static String XDR_EC_XDSPatientIdDoesNotMatch = "XDSPatientIdDoesNotMatch";
public static final String XDS_RETRIEVE_RESPONSE_STATUS_FAILURE = "urn:oasis:names:tc:ebxml-regrep:ResponseStatusType:Failure";
public static final String XDS_RETRIEVE_RESPONSE_STATUS_SUCCESS = "urn:oasis:names:tc:ebxml-regrep:ResponseStatusType:Success";
public static final String XDS_AVAILABLILTY_STATUS_APPROVED = "Active";
public static final String XDS_STATUS = "urn:oasis:names:tc:ebxml-regrep:StatusType:Approved";
public static final String XDS_STATUS_ONLINE = "Online";
public static final String XDS_STATUS_OFFLINE = "Offline";
public static final String XDS_NAME = "Name";
public static final String XDS_CLASSIFIED_OBJECT = "classifiedObject"; // this is the reference to the
// extrinsicObject/document element
public static final String XDS_NODE_REPRESENTATION = "nodeRepresentation"; // this the actual code in a
// classification element
public static final String XDS_CLASSIFICATION_ID = "id"; // this is the id of the classification element
public static final String XDS_DOCUMENT_UNIQUE_ID = "XDSDocumentEntry.uniqueId";
public static final String XDS_PATIENT_ID = "XDSDocumentEntry.patientId";
public static final String XDS_CREATION_TIME_SLOT = "creationTime";
public static final String XDS_START_TIME_SLOT = "serviceStartTime";
public static final String XDS_STOP_TIME_SLOT = "serviceStopTime";
public static final String XDS_SOURCE_PATIENT_ID_SLOT = "sourcePatientId";
public static final String XDS_SOURCE_PATIENT_INFO_SLOT = "sourcePatientInfo";
public static final String XDS_AUTHOR_PERSON_SLOT = "authorPerson";
public static final String XDS_AUTHOR_INSTITUTION_SLOT = "authorInstitution";
public static final String XDS_AUTHOR_ROLE_SLOT = "authorRole";
public static final String XDS_AUTHOR_SPECIALITY_SLOT = "authorSpecialty";
public static final String XDS_CODING_SCHEME_SLOT = "codingScheme";
public static final String XDS_INTENDED_RECIPIENT_SLOT = "intendedRecipient";
public static final String XDS_LANGUAGE_CODE_SLOT = "languageCode";
public static final String XDS_LEGAL_AUTHENTICATOR_SLOT = "legalAuthenticator";
public static final String XDS_SOURCE_PATIENT_INFO_PID3 = "PID-3";
public static final String XDS_SOURCE_PATIENT_INFO_PID5 = "PID-5";
public static final String XDS_SOURCE_PATIENT_INFO_PID7 = "PID-7";
public static final String XDS_SOURCE_PATIENT_INFO_PID8 = "PID-8";
public static final String XDS_SOURCE_PATIENT_INFO_PID11 = "PID-11";
public static final String XDS_AUTHOR_CLASSIFICATION = "urn:uuid:93606bcf-9494-43ec-9b4e-a7748d1a838d";
public static final String XDS_CLASSCODE_CLASSIFICATION = "urn:uuid:41a5887f-8865-4c09-adf7-e362475b143a";
public static final String XDS_CONTENT_TYPE_CODE_CLASSIFICATION = "urn:uuid:aa543740-bdda-424e-8c96-df4873be8500";
public static final String XDS_CONFIDENTIALITY_CODE_CLASSIFICATION = "urn:uuid:f4f85eac-e6cb-4883-b524-f2705394840f";
public static final String XDS_FORMAT_CODE_CLASSIFICATION = "urn:uuid:a09d5840-386c-46f2-b5ad-9c3699a4309d";
public static final String XDS_HEALTHCARE_FACILITY_TYPE_CODE_CLASSIFICATION = "urn:uuid:f33fb8ac-18af-42cc-ae0e-ed0b0bdb91e1";
public static final String XDS_PRACTICE_SETTING_CODE_CLASSIFICATION = "urn:uuid:cccf5598-8b07-4b77-a05e-ae952c785ead";
public static final String XDS_EVENT_CODE_LIST_CLASSIFICATION = "urn:uuid:2c6b8cb7-8b2a-4051-b291-b1ae6a575ef4";
public static final String XDS_CODE_LIST_CLASSIFICATION = "urn:uuid:1ba97051-7806-41a8-a48b-8fce7af683c5";
public static final String XDS_TYPE_CODE_CLASSIFICATION = "urn:uuid:f0306f51-975f-434e-a61c-c59651d33983";
public static final String XDS_ERROR_CODE_MISSING_REQUEST_MESSAGE_DATA = "MISSING_DATA";
public static final String XDS_ERROR_CODE_MISSING_DOCUMENT_METADATA = "MISSING_METADATA";
public static final String XDS_ERROR_CODE_REPOSITORY_ERROR = "REPOSITORY_ERROR";
public static final String XDS_MISSING_REQUEST_MESSAGE_DATA = "The ProvideAndRegisterDocumentSetRequest message did not contain any data to operate on. No documents will be stored.";
public static final String XDS_MISSING_DOCUMENT_METADATA = "A document exists in the submission with no corresponding document metadata. Document will not be stored.";
public static final String XDS_REPOSITORY_ERROR = "An error occurred while storing a document to the repository.";
public static final String XDS_ASSOCIATION_TYPE_REPLACE = "urn:oasis:names:tc:ebxml-regrep:AssociationType:RPLC";
public XDRHelper() {
log = createLogger();
}
public RegistryResponseType createErrorResponse(RegistryErrorList errorList) {
RegistryResponseType result = new RegistryResponseType();
log.debug("begin createErrorResponse()");
result.setStatus(XDS_RETRIEVE_RESPONSE_STATUS_FAILURE);
result.setRegistryErrorList(errorList);
return result;
}
public RegistryResponseType createPositiveAck() {
RegistryResponseType result = new RegistryResponseType();
result.setStatus(XDS_RETRIEVE_RESPONSE_STATUS_SUCCESS);
return result;
}
public RegistryErrorList validateDocumentMetaData(ProvideAndRegisterDocumentSetRequestType body) {
RegistryErrorList result = new RegistryErrorList();
log.debug("begin validateDocumentMetaData()");
if (body == null) {
RegistryError error = createRegistryError(XDR_EC_XDSMissingDocument, NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR,
"ProvideAndRegisterDocumentSetRequestType was null");
result.getRegistryError().add(error);
// Request message was null, cannot continue. Return result.
return processErrorList(result);
}
if (body.getDocument() == null) {
RegistryError error = createRegistryError(XDR_EC_XDSMissingDocument, NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR,
"ProvideAndRegisterDocumentSetRequestType did not contain a DocumentList");
result.getRegistryError().add(error);
} else if (body.getDocument().size() == 0) {
RegistryError error = createRegistryError(XDR_EC_XDSMissingDocument, NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR,
"DocumentList did not contain any documents");
result.getRegistryError().add(error);
}
if (result.getRegistryError().size() > 0) {
return processErrorList(result);
}
RegistryObjectListType regList = body.getSubmitObjectsRequest().getRegistryObjectList();
ArrayList<String> metaDocIds = new ArrayList<String>();
ArrayList<String> metaPatIds = new ArrayList<String>();
for (int x = 0; x < regList.getIdentifiable().size(); x++) {
if (regList.getIdentifiable().get(x).getDeclaredType().equals(ExtrinsicObjectType.class)) {
ExtrinsicObjectType extObj = (ExtrinsicObjectType) regList.getIdentifiable().get(x).getValue();
String mimeType = extObj.getMimeType();
if (isSupportedMimeType(mimeType) == false) {
RegistryError error = createRegistryError(XDR_EC_XDSMissingDocumentMetadata,
NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR, "Unsupported Mime Type: " + mimeType);
result.getRegistryError().add(error);
}
String docId = extObj.getId();
metaDocIds.add(docId);
if (isDocIdPresent(body.getDocument(), docId) == false) {
RegistryError error = createRegistryError(XDR_EC_XDSMissingDocument, NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR,
"Document Id: " + docId + " exists in metadata with no corresponding attached document");
result.getRegistryError().add(error);
}
String localPatId = getPatientId(extObj.getSlot());
if (localPatId.isEmpty()) {
RegistryError error = createRegistryError(XDR_EC_XDSUnknownPatientId, NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR,
"Patient ID referenced in metadata is not known to the Receiving NHIE");
result.getRegistryError().add(error);
}
metaPatIds.add(localPatId);
}
}
if (patientIdsMatch(metaPatIds) == false) {
RegistryError error = createRegistryError(XDR_EC_XDSPatientIdDoesNotMatch, NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR,
"Patient Ids do not match");
result.getRegistryError().add(error);
}
return processErrorList(result);
}
public List<String> getIntendedRecepients(ProvideAndRegisterDocumentSetRequestType body) {
List<String> result = new ArrayList<String>();
log.debug("begin getIntendedRecepients()");
if (body == null || body.getSubmitObjectsRequest() == null) {
return null;
}
try {
RegistryObjectListType regList = body.getSubmitObjectsRequest().getRegistryObjectList();
for (int x = 0; x < regList.getIdentifiable().size(); x++) {
if (regList.getIdentifiable().get(x).getDeclaredType().equals(ExtrinsicObjectType.class)) {
ExtrinsicObjectType extObj = (ExtrinsicObjectType) regList.getIdentifiable().get(x).getValue();
SlotType1 recipSlot = getNamedSlotItem(extObj.getSlot(), XDS_INTENDED_RECIPIENT_SLOT);
if (recipSlot != null) {
result = recipSlot.getValueList().getValue();
}
}
}
} catch (Exception ex) {
log.error("Unable to pull intended recipients" + ex.getMessage());
}
log.debug("Found " + result.size() + " recipients");
return result;
}
public List<String> getRoutingBeans(List<String> intendedRecipients) {
ArrayList<String> result = new ArrayList<String>();
ConfigurationManager configMgr = new ConfigurationManager();
Config config = configMgr.loadConfiguration();
for (String recipient : intendedRecipients) {
// Loop through List of configured beans
for (RoutingConfig rc : config.getRoutingInfo()) {
if (rc.getRecepient().equalsIgnoreCase(recipient)) {
if (result.contains(rc.getBean()) == false) {
result.add(rc.getBean());
}
break;
}
}
}
if (result.isEmpty()) {
result.add(RoutingObjectFactory.BEAN_REFERENCE_IMPLEMENTATION);
}
log.debug("Found " + result.size() + " beans");
return result;
}
protected boolean checkIdsMatch() {
boolean checkIds = false;
try {
checkIds = PropertyAccessor.getInstance().getPropertyBoolean("adapter", "XDR.CheckPatientIdsMatch");
} catch (Exception ex) {
log.error("Unable to load XDR.CheckPatientIdsMatch");
}
return checkIds;
}
protected boolean isSupportedMimeType(String mimeType) {
String[] mimeArray = getSupportedMimeTypes();
boolean result = false;
for (int x = 0; x < mimeArray.length; x++) {
if (mimeArray[x].equalsIgnoreCase(mimeType)) {
result = true;
break;
}
}
return result;
}
protected String[] getSupportedMimeTypes() {
String[] mimeArray = new String[0];
try {
String list = PropertyAccessor.getInstance().getProperty("adapter", "XDR.SupportedMimeTypes");
mimeArray = list.split(";");
} catch (Exception ex) {
}
return mimeArray;
}
protected Log createLogger() {
return ((log != null) ? log : LogFactory.getLog(getClass()));
}
private boolean isDocIdPresent(List<ProvideAndRegisterDocumentSetRequestType.Document> documents, String docId) {
boolean result = false;
for (ProvideAndRegisterDocumentSetRequestType.Document doc : documents) {
if (doc.getId().equals(docId)) {
result = true;
}
}
return result;
}
private RegistryError createRegistryError(String errorCode, String severity, String codeContext) {
RegistryError result = new oasis.names.tc.ebxml_regrep.xsd.rs._3.ObjectFactory().createRegistryError();
result.setSeverity(severity);
result.setCodeContext(codeContext);
result.setErrorCode(errorCode);
return result;
}
public String getSubmissionSetPatientId(ProvideAndRegisterDocumentSetRequestType body) {
String result = "";
RegistryObjectListType object = body.getSubmitObjectsRequest().getRegistryObjectList();
for (int x = 0; x < object.getIdentifiable().size(); x++) {
System.out.println(object.getIdentifiable().get(x).getName());
if (object.getIdentifiable().get(x).getDeclaredType().equals(RegistryPackageType.class)) {
RegistryPackageType registryPackage = (RegistryPackageType) object.getIdentifiable().get(x).getValue();
System.out.println(registryPackage.getSlot().size());
for (int y = 0; y < registryPackage.getExternalIdentifier().size(); y++) {
String test = registryPackage.getExternalIdentifier().get(y).getName().getLocalizedString().get(0)
.getValue();
if (test.equals("XDSSubmissionSet.patientId")) {
result = registryPackage.getExternalIdentifier().get(y).getValue();
}
}
}
}
return result;
}
public String getSourcePatientId(ProvideAndRegisterDocumentSetRequestType body) {
String result = "";
RegistryObjectListType object = body.getSubmitObjectsRequest().getRegistryObjectList();
for (int x = 0; x < object.getIdentifiable().size(); x++) {
System.out.println(object.getIdentifiable().get(x).getName());
if (object.getIdentifiable().get(x).getDeclaredType().equals(ExtrinsicObjectType.class)) {
ExtrinsicObjectType extObj = (ExtrinsicObjectType) object.getIdentifiable().get(x).getValue();
System.out.println(extObj.getSlot().size());
SlotType1 slot = getNamedSlotItem(extObj.getSlot(), "sourcePatientId");
if (slot != null) {
if (slot.getValueList() != null) {
if (slot.getValueList().getValue().size() == 1) {
result = slot.getValueList().getValue().get(0);
}
}
}
}
}
return result;
}
private String getPatientId(List<SlotType1> slots) {
String result = "";
SlotType1 patientIdSlot;
patientIdSlot = getNamedSlotItem(slots, XDS_SOURCE_PATIENT_ID_SLOT);
if (patientIdSlot != null) {
if (patientIdSlot.getValueList().getValue().size() == 1) {
result = patientIdSlot.getValueList().getValue().get(0);
}
}
return result;
}
private SlotType1 getNamedSlotItem(List<SlotType1> slots, String name) {
SlotType1 result = null;
log.debug("begin getNamedSlotItem()");
for (SlotType1 slot : slots) {
if (slot.getName().equalsIgnoreCase(name)) {
result = slot;
log.info("Slot=" + result.getName());
break;
}
}
return result;
}
private boolean patientIdsMatch(List<String> patIds) {
boolean result = true;
if (checkIdsMatch()) {
if (patIds.size() > 1) {
// Get the first id
String patId = patIds.get(0);
// loop through all ids, make sure they all equal
for (String id : patIds) {
if (id.equalsIgnoreCase(patId) == false) {
result = false;
break;
}
}
}
}
return result;
}
private RegistryErrorList processErrorList(RegistryErrorList list) {
int highestError = 0;
if (list == null) {
return null;
}
for (RegistryError error : list.getRegistryError()) {
int currentError = getErrorRanking(error.getSeverity());
if (currentError > highestError) {
highestError = currentError;
}
}
list.setHighestSeverity(getErrorDescription(highestError));
return list;
}
/**
 * Maps a numeric severity rank back to its registry severity description.
 * Rank 1 is a warning, ranks 2 and 3 are errors; anything else (including 0)
 * has no description.
 *
 * @param rank numeric rank produced by getErrorRanking
 * @return the severity constant, or an empty string for unranked values
 */
private String getErrorDescription(int rank) {
    switch (rank) {
        case 1:
            return NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_WARNING;
        case 2:
        case 3:
            return NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR;
        default:
            // Rank 0 and any unexpected rank map to "no description".
            return "";
    }
}
/**
 * Ranks a registry error severity string: empty = 0, warning = 1, error = 2,
 * anything unrecognized (including null) = -1.
 *
 * @param severity severity string from a RegistryError; may be null
 * @return the numeric rank
 */
private int getErrorRanking(String severity) {
    // The original dereferenced severity unconditionally and would throw a
    // NullPointerException for an absent severity; treat null like any other
    // unrecognized value instead.
    if (severity == null) {
        return -1;
    }
    if (severity.isEmpty()) {
        return 0;
    }
    if (severity.equalsIgnoreCase(NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_WARNING)) {
        return 1;
    }
    if (severity.equalsIgnoreCase(NhincConstants.XDS_REGISTRY_ERROR_SEVERITY_ERROR)) {
        return 2;
    }
    return -1;
}
}
| |
/*
* Copyright (c) 2004, PostgreSQL Global Development Group
* See the LICENSE file in the project root for more information.
*/
package org.postgresql.jdbc;
import org.postgresql.Driver;
import org.postgresql.PGNotification;
import org.postgresql.PGProperty;
import org.postgresql.copy.CopyManager;
import org.postgresql.core.BaseConnection;
import org.postgresql.core.BaseStatement;
import org.postgresql.core.CachedQuery;
import org.postgresql.core.ConnectionFactory;
import org.postgresql.core.Encoding;
import org.postgresql.core.Oid;
import org.postgresql.core.Provider;
import org.postgresql.core.Query;
import org.postgresql.core.QueryExecutor;
import org.postgresql.core.ReplicationProtocol;
import org.postgresql.core.ResultHandlerBase;
import org.postgresql.core.ServerVersion;
import org.postgresql.core.SqlCommand;
import org.postgresql.core.TransactionState;
import org.postgresql.core.TypeInfo;
import org.postgresql.core.Utils;
import org.postgresql.core.Version;
import org.postgresql.fastpath.Fastpath;
import org.postgresql.largeobject.LargeObjectManager;
import org.postgresql.replication.PGReplicationConnection;
import org.postgresql.replication.PGReplicationConnectionImpl;
import org.postgresql.util.GT;
import org.postgresql.util.HostSpec;
import org.postgresql.util.LruCache;
import org.postgresql.util.PGBinaryObject;
import org.postgresql.util.PGobject;
import org.postgresql.util.PSQLException;
import org.postgresql.util.PSQLState;
import java.io.IOException;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.ClientInfoStatus;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLPermission;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.sql.Types;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TimeZone;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Executor;
import java.util.logging.Level;
import java.util.logging.Logger;
public class PgConnection implements BaseConnection {
// Shared logger for all connection instances.
private static final Logger LOGGER = Logger.getLogger(PgConnection.class.getName());
// Permissions checked when a SecurityManager is installed (abort / setNetworkTimeout).
private static final SQLPermission SQL_PERMISSION_ABORT = new SQLPermission("callAbort");
private static final SQLPermission SQL_PERMISSION_NETWORK_TIMEOUT = new SQLPermission("setNetworkTimeout");
//
// Data initialized on construction:
//
// Client-info properties; the constructor seeds "ApplicationName" on 9.0+ servers.
private final Properties _clientInfo;
/* URL we were created via */
private final String creatingURL;
// Creation stack trace, captured only when logUnclosedConnections is enabled;
// logged from finalize() if the connection was never closed, cleared by close().
private Throwable openStackTrace;
/* Actual network handler */
private final QueryExecutor queryExecutor;
/* Query that runs COMMIT */
private final Query commitQuery;
/* Query that runs ROLLBACK */
private final Query rollbackQuery;
// Cache of type information (core types, PGobject handler classes).
private final TypeInfo _typeCache;
// When true, column-name sanitising is skipped (see DISABLE_COLUMN_SANITISER property).
private boolean disableColumnSanitiser = false;
// Default statement prepare threshold.
protected int prepareThreshold;
/**
 * Default fetch size for statement
 *
 * @see PGProperty#DEFAULT_ROW_FETCH_SIZE
 */
protected int defaultFetchSize;
// Default forcebinary option.
protected boolean forcebinary = false;
// Default result-set holdability handed to new statements.
private int rsHoldability = ResultSet.CLOSE_CURSORS_AT_COMMIT;
// Counter for generating savepoint names. NOTE(review): not referenced in this
// chunk; presumably incremented by setSavepoint() elsewhere in the class.
private int savepointId = 0;
// Connection's autocommit state.
private boolean autoCommit = true;
// Connection's readonly state.
private boolean readOnly = false;
// Bind String to UNSPECIFIED or VARCHAR?
private final boolean bindStringAsVarchar;
// Current warnings; there might be more on queryExecutor too.
private SQLWarning firstWarning = null;
// Timer for scheduling TimerTasks for this connection.
// Only instantiated if a task is actually scheduled.
private volatile Timer cancelTimer = null;
// NOTE(review): not referenced in this chunk; presumably backs a connection
// validity check (isValid) implemented elsewhere in the class.
private PreparedStatement checkConnectionQuery;
/**
 * Replication protocol in current version postgresql(10devel) supports a limited number of
 * commands.
 */
private final boolean replicationConnection;
// Bounded cache of field metadata, exposed via getFieldMetadataCache().
private final LruCache<FieldMetadata.Key, FieldMetadata> fieldMetadataCache;
/** Borrows a query for the given SQL from the executor's cache. */
final CachedQuery borrowQuery(String sql) throws SQLException {
  return queryExecutor.borrowQuery(sql);
}
/** Borrows a query parsed with callable-statement semantics from the executor. */
final CachedQuery borrowCallableQuery(String sql) throws SQLException {
  return queryExecutor.borrowCallableQuery(sql);
}
/** Borrows a query that returns generated values for the given columns. */
private CachedQuery borrowReturningQuery(String sql, String[] columnNames) throws SQLException {
  return queryExecutor.borrowReturningQuery(sql, columnNames);
}
/** Creates a query object via the executor (not taken from the cache). */
@Override
public CachedQuery createQuery(String sql, boolean escapeProcessing, boolean isParameterized,
    String... columnNames)
    throws SQLException {
  return queryExecutor.createQuery(sql, escapeProcessing, isParameterized, columnNames);
}
/** Returns a previously borrowed query to the executor. */
void releaseQuery(CachedQuery cachedQuery) {
  queryExecutor.releaseQuery(cachedQuery);
}
/** Propagates the flush-cache-on-DEALLOCATE setting to the query executor. */
@Override
public void setFlushCacheOnDeallocate(boolean flushCacheOnDeallocate) {
  queryExecutor.setFlushCacheOnDeallocate(flushCacheOnDeallocate);
  LOGGER.log(Level.FINE, " setFlushCacheOnDeallocate = {0}", flushCacheOnDeallocate);
}
//
// Ctor.
//
/**
 * Opens a new connection: establishes the network connection, negotiates
 * which OIDs use binary transfer, configures string binding, registers core
 * data types and applies the remaining connection properties.
 *
 * @param hostSpecs hosts to try, in order
 * @param user      user name to connect as
 * @param database  database to connect to
 * @param info      connection properties
 * @param url       the JDBC URL this connection was created from
 * @throws SQLException if the connection cannot be established or a property
 *                      value is invalid
 */
public PgConnection(HostSpec[] hostSpecs,
                    String user,
                    String database,
                    Properties info,
                    String url) throws SQLException {
  // Print out the driver version number
  LOGGER.log(Level.FINE, org.postgresql.util.DriverInfo.DRIVER_FULL_NAME);
  this.creatingURL = url;
  setDefaultFetchSize(PGProperty.DEFAULT_ROW_FETCH_SIZE.getInt(info));
  setPrepareThreshold(PGProperty.PREPARE_THRESHOLD.getInt(info));
  // A threshold of -1 means "force binary transfer on the first execution".
  if (prepareThreshold == -1) {
    setForceBinary(true);
  }
  // Now make the initial connection and set up local state
  this.queryExecutor = ConnectionFactory.openConnection(hostSpecs, user, database, info);
  // WARNING for unsupported servers (8.1 and lower are not supported)
  if (LOGGER.isLoggable(Level.WARNING) && !haveMinimumServerVersion(ServerVersion.v8_2)) {
    LOGGER.log(Level.WARNING, "Unsupported Server Version: {0}", queryExecutor.getServerVersion());
  }
  // Set read-only early if requested
  if (PGProperty.READ_ONLY.getBoolean(info)) {
    setReadOnly(true);
  }
  boolean binaryTransfer = PGProperty.BINARY_TRANSFER.getBoolean(info);
  // Formats that currently have binary protocol support
  Set<Integer> binaryOids = new HashSet<Integer>();
  if (binaryTransfer && queryExecutor.getProtocolVersion() >= 3) {
    binaryOids.add(Oid.BYTEA);
    binaryOids.add(Oid.INT2);
    binaryOids.add(Oid.INT4);
    binaryOids.add(Oid.INT8);
    binaryOids.add(Oid.FLOAT4);
    binaryOids.add(Oid.FLOAT8);
    binaryOids.add(Oid.TIME);
    binaryOids.add(Oid.DATE);
    binaryOids.add(Oid.TIMETZ);
    binaryOids.add(Oid.TIMESTAMP);
    binaryOids.add(Oid.TIMESTAMPTZ);
    binaryOids.add(Oid.INT2_ARRAY);
    binaryOids.add(Oid.INT4_ARRAY);
    binaryOids.add(Oid.INT8_ARRAY);
    binaryOids.add(Oid.FLOAT4_ARRAY);
    // NOTE: the original added FLOAT8_ARRAY twice; the duplicate was a no-op
    // (Set semantics) and has been removed.
    binaryOids.add(Oid.FLOAT8_ARRAY);
    binaryOids.add(Oid.VARCHAR_ARRAY);
    binaryOids.add(Oid.TEXT_ARRAY);
    binaryOids.add(Oid.POINT);
    binaryOids.add(Oid.BOX);
    binaryOids.add(Oid.UUID);
  }
  // Apply user-requested additions/removals on top of the defaults.
  binaryOids.addAll(getOidSet(PGProperty.BINARY_TRANSFER_ENABLE.get(info)));
  binaryOids.removeAll(getOidSet(PGProperty.BINARY_TRANSFER_DISABLE.get(info)));
  // split for receive and send for better control
  Set<Integer> useBinarySendForOids = new HashSet<Integer>();
  useBinarySendForOids.addAll(binaryOids);
  Set<Integer> useBinaryReceiveForOids = new HashSet<Integer>();
  useBinaryReceiveForOids.addAll(binaryOids);
  /*
   * Does not pass unit tests because unit tests expect setDate to have millisecond accuracy
   * whereas the binary transfer only supports date accuracy.
   */
  useBinarySendForOids.remove(Oid.DATE);
  queryExecutor.setBinaryReceiveOids(useBinaryReceiveForOids);
  queryExecutor.setBinarySendOids(useBinarySendForOids);
  if (LOGGER.isLoggable(Level.FINEST)) {
    LOGGER.log(Level.FINEST, " types using binary send = {0}", oidsToString(useBinarySendForOids));
    LOGGER.log(Level.FINEST, " types using binary receive = {0}", oidsToString(useBinaryReceiveForOids));
    LOGGER.log(Level.FINEST, " integer date/time = {0}", queryExecutor.getIntegerDateTimes());
  }
  //
  // String -> text or unknown?
  //
  String stringType = PGProperty.STRING_TYPE.get(info);
  if (stringType != null) {
    if (stringType.equalsIgnoreCase("unspecified")) {
      bindStringAsVarchar = false;
    } else if (stringType.equalsIgnoreCase("varchar")) {
      bindStringAsVarchar = true;
    } else {
      throw new PSQLException(
          GT.tr("Unsupported value for stringtype parameter: {0}", stringType),
          PSQLState.INVALID_PARAMETER_VALUE);
    }
  } else {
    bindStringAsVarchar = true;
  }
  // Initialize timestamp stuff
  timestampUtils = new TimestampUtils(!queryExecutor.getIntegerDateTimes(), new Provider<TimeZone>() {
    @Override
    public TimeZone get() {
      return queryExecutor.getTimeZone();
    }
  });
  // Initialize common queries.
  // isParameterized==true so full parse is performed and the engine knows the query
  // is not a compound query with ; inside, so it could use parse/bind/exec messages
  commitQuery = createQuery("COMMIT", false, true).query;
  rollbackQuery = createQuery("ROLLBACK", false, true).query;
  int unknownLength = PGProperty.UNKNOWN_LENGTH.getInt(info);
  // Initialize object handling
  _typeCache = createTypeInfo(this, unknownLength);
  initObjectTypes(info);
  if (PGProperty.LOG_UNCLOSED_CONNECTIONS.getBoolean(info)) {
    openStackTrace = new Throwable("Connection was created at this point:");
  }
  this.disableColumnSanitiser = PGProperty.DISABLE_COLUMN_SANITISER.getBoolean(info);
  // Register the 8.3+ core types. The original fetched the TypeInfo and
  // checked the server version twice in a row; the two identical blocks have
  // been merged into one.
  TypeInfo types = getTypeInfo();
  if (haveMinimumServerVersion(ServerVersion.v8_3)) {
    types.addCoreType("uuid", Oid.UUID, Types.OTHER, "java.util.UUID", Oid.UUID_ARRAY);
    types.addCoreType("xml", Oid.XML, Types.SQLXML, "java.sql.SQLXML", Oid.XML_ARRAY);
  }
  this._clientInfo = new Properties();
  if (haveMinimumServerVersion(ServerVersion.v9_0)) {
    String appName = PGProperty.APPLICATION_NAME.get(info);
    if (appName == null) {
      appName = "";
    }
    this._clientInfo.put("ApplicationName", appName);
  }
  fieldMetadataCache = new LruCache<FieldMetadata.Key, FieldMetadata>(
      Math.max(0, PGProperty.DATABASE_METADATA_CACHE_FIELDS.getInt(info)),
      Math.max(0, PGProperty.DATABASE_METADATA_CACHE_FIELDS_MIB.getInt(info) * 1024 * 1024),
      false);
  replicationConnection = PGProperty.REPLICATION.get(info) != null;
}
/**
 * Parses a comma-separated list of type names into the corresponding set of
 * OID values via {@code Oid.valueOf}.
 *
 * @param oidList comma-separated type names (empty tokens are skipped by the
 *                tokenizer)
 * @return the parsed OIDs
 * @throws PSQLException if a token is not a recognized type name
 */
private Set<Integer> getOidSet(String oidList) throws PSQLException {
    Set<Integer> result = new HashSet<Integer>();
    StringTokenizer tokens = new StringTokenizer(oidList, ",");
    while (tokens.hasMoreTokens()) {
        result.add(Oid.valueOf(tokens.nextToken()));
    }
    return result;
}
/**
 * Renders a set of OIDs as a comma-separated list of type names for logging;
 * an empty set renders as " <none>".
 *
 * @param oids OIDs to render
 * @return comma-joined names, or " <none>" for an empty set
 */
private String oidsToString(Set<Integer> oids) {
    if (oids.isEmpty()) {
        return " <none>";
    }
    StringBuilder out = new StringBuilder();
    for (Integer oid : oids) {
        if (out.length() > 0) {
            out.append(',');
        }
        out.append(Oid.toString(oid));
    }
    return out.toString();
}
// Date/time conversion helper, configured in the constructor from the
// executor's integer-datetimes setting and time zone.
private final TimestampUtils timestampUtils;
/** Returns the date/time conversion helper for this connection. */
public TimestampUtils getTimestampUtils() {
  return timestampUtils;
}
/**
 * The current type mappings
 */
protected Map<String, Class<?>> typemap;
/** Creates a statement with the JDBC default result-set type and concurrency. */
public java.sql.Statement createStatement() throws SQLException {
  // We now follow the spec and default to TYPE_FORWARD_ONLY.
  return createStatement(java.sql.ResultSet.TYPE_FORWARD_ONLY,
      java.sql.ResultSet.CONCUR_READ_ONLY);
}
/** Prepares a statement with the default (forward-only, read-only) result-set settings. */
public java.sql.PreparedStatement prepareStatement(String sql) throws SQLException {
  return prepareStatement(sql, java.sql.ResultSet.TYPE_FORWARD_ONLY,
      java.sql.ResultSet.CONCUR_READ_ONLY);
}
/** Prepares a callable statement with the default (forward-only, read-only) settings. */
public java.sql.CallableStatement prepareCall(String sql) throws SQLException {
  return prepareCall(sql, java.sql.ResultSet.TYPE_FORWARD_ONLY,
      java.sql.ResultSet.CONCUR_READ_ONLY);
}
/** Returns the user-supplied type map (may be null; set via setTypeMapImpl). */
public Map<String, Class<?>> getTypeMap() throws SQLException {
  checkClosed();
  return typemap;
}
/** Exposes the low-level query executor backing this connection. */
public QueryExecutor getQueryExecutor() {
  return queryExecutor;
}
/** Exposes the replication protocol handler of the underlying executor. */
public ReplicationProtocol getReplicationProtocol() {
  return queryExecutor.getReplicationProtocol();
}
/**
 * Appends a warning to this connection's warning chain.
 *
 * @param warn warning to add
 */
public void addWarning(SQLWarning warn) {
    if (firstWarning == null) {
        // First warning seen: it becomes the head of the chain.
        firstWarning = warn;
    } else {
        // Otherwise link it onto the existing chain.
        firstWarning.setNextWarning(warn);
    }
}
/** Executes an internal query with the default result-set type/concurrency. */
public ResultSet execSQLQuery(String s) throws SQLException {
  return execSQLQuery(s, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
}
/**
 * Executes an internal query (suppressing the implicit BEGIN) and returns its
 * first result set. The backing statement is intentionally not closed here:
 * closing it would close the returned ResultSet.
 *
 * @throws SQLException with PSQLState.NO_DATA if the query produced no result set
 */
public ResultSet execSQLQuery(String s, int resultSetType, int resultSetConcurrency)
    throws SQLException {
  BaseStatement stat = (BaseStatement) createStatement(resultSetType, resultSetConcurrency);
  boolean hasResultSet = stat.executeWithFlags(s, QueryExecutor.QUERY_SUPPRESS_BEGIN);
  // Skip over update counts until a result set (or the end) is reached.
  while (!hasResultSet && stat.getUpdateCount() != -1) {
    hasResultSet = stat.getMoreResults();
  }
  if (!hasResultSet) {
    throw new PSQLException(GT.tr("No results were returned by the query."), PSQLState.NO_DATA);
  }
  // Transfer warnings to the connection, since the user never
  // has a chance to see the statement itself.
  SQLWarning warnings = stat.getWarnings();
  if (warnings != null) {
    addWarning(warnings);
  }
  return stat.getResultSet();
}
/**
 * Executes an internal SQL command that must not return results, suppressing
 * the implicit BEGIN. Warnings are transferred to the connection's chain.
 *
 * @param s SQL to execute
 * @throws SQLException with PSQLState.TOO_MANY_RESULTS if a result set was
 *                      returned, or if execution fails
 */
public void execSQLUpdate(String s) throws SQLException {
    BaseStatement stmt = (BaseStatement) createStatement();
    // The original only closed the statement on the success path; close it in
    // a finally block so it is not leaked when execution throws.
    try {
        if (stmt.executeWithFlags(s, QueryExecutor.QUERY_NO_METADATA | QueryExecutor.QUERY_NO_RESULTS
            | QueryExecutor.QUERY_SUPPRESS_BEGIN)) {
            throw new PSQLException(GT.tr("A result was returned when none was expected."),
                PSQLState.TOO_MANY_RESULTS);
        }
        // Transfer warnings to the connection, since the user never
        // has a chance to see the statement itself.
        SQLWarning warnings = stmt.getWarnings();
        if (warnings != null) {
            addWarning(warnings);
        }
    } finally {
        stmt.close();
    }
}
/**
 * In SQL, a result table can be retrieved through a cursor that is named. The current row of a
 * result can be updated or deleted using a positioned update/delete statement that references the
 * cursor name.
 * <p>
 * We do not support positioned update/delete, so this is a no-op.
 *
 * @param cursor the cursor name
 * @throws SQLException if a database access error occurs
 */
public void setCursorName(String cursor) throws SQLException {
  checkClosed();
  // No-op.
}
/**
 * getCursorName gets the cursor name.
 *
 * @return the current cursor name (always null; positioned updates are unsupported)
 * @throws SQLException if a database access error occurs
 */
public String getCursorName() throws SQLException {
  checkClosed();
  return null;
}
/**
 * We are required to bring back certain information by the DatabaseMetaData class. These
 * functions do that.
 * <p>
 * Method getURL() brings back the URL (good job we saved it)
 *
 * @return the url
 * @throws SQLException just in case...
 */
public String getURL() throws SQLException {
  return creatingURL;
}
/**
 * Method getUserName() brings back the User Name (again, we saved it)
 *
 * @return the user name
 * @throws SQLException just in case...
 */
public String getUserName() throws SQLException {
  return queryExecutor.getUser();
}
/** Lazily creates and returns the Fastpath API handle for this connection. */
public Fastpath getFastpathAPI() throws SQLException {
  checkClosed();
  if (fastpath == null) {
    fastpath = new Fastpath(this);
  }
  return fastpath;
}
// This holds a reference to the Fastpath API if already open
private Fastpath fastpath = null;
/** Lazily creates and returns the LargeObject API handle for this connection. */
public LargeObjectManager getLargeObjectAPI() throws SQLException {
  checkClosed();
  if (largeobject == null) {
    largeobject = new LargeObjectManager(this);
  }
  return largeobject;
}
// This holds a reference to the LargeObject API if already open
private LargeObjectManager largeobject = null;
/*
 * This method is used internally to return an object based around org.postgresql's more unique
 * data types.
 *
 * <p>It uses an internal HashMap to get the handling class. If the type is not supported, then an
 * instance of org.postgresql.util.PGobject is returned.
 *
 * You can use the getValue() or setValue() methods to handle the returned object. Custom objects
 * can have their own methods.
 *
 * @return PGobject for this type, and set to value
 *
 * @exception SQLException if value is not correct for this type
 */
public Object getObject(String type, String value, byte[] byteValue) throws SQLException {
  // A user-supplied type map takes precedence, but custom maps are not implemented.
  if (typemap != null) {
    Class<?> c = typemap.get(type);
    if (c != null) {
      // Handle the type (requires SQLInput & SQLOutput classes to be implemented)
      throw new PSQLException(GT.tr("Custom type maps are not supported."),
          PSQLState.NOT_IMPLEMENTED);
    }
  }
  PGobject obj = null;
  if (LOGGER.isLoggable(Level.FINEST)) {
    LOGGER.log(Level.FINEST, "Constructing object from type={0} value=<{1}>", new Object[]{type, value});
  }
  try {
    Class<? extends PGobject> klass = _typeCache.getPGobject(type);
    // If className is not null, then try to instantiate it,
    // It must be basetype PGobject
    // This is used to implement the org.postgresql unique types (like lseg,
    // point, etc).
    // NOTE(review): Class.newInstance() is deprecated since Java 9;
    // getDeclaredConstructor().newInstance() wraps thrown exceptions
    // differently, so the call is left unchanged here.
    if (klass != null) {
      obj = klass.newInstance();
      obj.setType(type);
      // Prefer the binary representation when the object supports it.
      if (byteValue != null && obj instanceof PGBinaryObject) {
        PGBinaryObject binObj = (PGBinaryObject) obj;
        binObj.setByteValue(byteValue, 0);
      } else {
        obj.setValue(value);
      }
    } else {
      // If className is null, then the type is unknown.
      // so return a PGobject with the type set, and the value set
      obj = new PGobject();
      obj.setType(type);
      obj.setValue(value);
    }
    return obj;
  } catch (SQLException sx) {
    // rethrow the exception. Done because we capture any others next
    throw sx;
  } catch (Exception ex) {
    throw new PSQLException(GT.tr("Failed to create object for: {0}.", type),
        PSQLState.CONNECTION_FAILURE, ex);
  }
}
/** Factory for the type-info cache; overridable by subclasses. */
protected TypeInfo createTypeInfo(BaseConnection conn, int unknownLength) {
  return new TypeInfoCache(conn, unknownLength);
}
/** Returns the connection's type-info cache. */
public TypeInfo getTypeInfo() {
  return _typeCache;
}
/**
 * Registers a custom data type handler by class name. The class must extend
 * {@code PGobject}.
 *
 * @param type PostgreSQL type name
 * @param name fully qualified name of the handler class
 * @throws RuntimeException if the class cannot be loaded, is not a PGobject
 *                          subclass, or registration fails
 */
@Override
public void addDataType(String type, String name) {
  try {
    addDataType(type, Class.forName(name).asSubclass(PGobject.class));
  } catch (Exception e) {
    // Keep the original message but preserve the cause — the original threw
    // new RuntimeException("..." + e), discarding the stack trace.
    throw new RuntimeException("Cannot register new type: " + e, e);
  }
}
/** Registers a custom data type handler class in the type cache. */
@Override
public void addDataType(String type, Class<? extends PGobject> klass) throws SQLException {
  checkClosed();
  _typeCache.addDataType(type, klass);
}
// Registers the geometric/utility types that ship with the driver, then any
// additional "datatype.<typeName>=<className>" mappings from the connection
// properties (which may override the built-ins).
private void initObjectTypes(Properties info) throws SQLException {
    // Add in the types that come packaged with the driver.
    // These can be overridden later if desired.
    addDataType("box", org.postgresql.geometric.PGbox.class);
    addDataType("circle", org.postgresql.geometric.PGcircle.class);
    addDataType("line", org.postgresql.geometric.PGline.class);
    addDataType("lseg", org.postgresql.geometric.PGlseg.class);
    addDataType("path", org.postgresql.geometric.PGpath.class);
    addDataType("point", org.postgresql.geometric.PGpoint.class);
    addDataType("polygon", org.postgresql.geometric.PGpolygon.class);
    addDataType("money", org.postgresql.util.PGmoney.class);
    addDataType("interval", org.postgresql.util.PGInterval.class);
    Enumeration<?> propertyNames = info.propertyNames();
    while (propertyNames.hasMoreElements()) {
        String propertyName = (String) propertyNames.nextElement();
        if (!propertyName.startsWith("datatype.")) {
            continue;
        }
        // "datatype.".length() == 9; the remainder is the PostgreSQL type name.
        String typeName = propertyName.substring(9);
        String className = info.getProperty(propertyName);
        Class<?> klass;
        try {
            klass = Class.forName(className);
        } catch (ClassNotFoundException cnfe) {
            throw new PSQLException(
                GT.tr("Unable to load the class {0} responsible for the datatype {1}",
                    className, typeName),
                PSQLState.SYSTEM_ERROR, cnfe);
        }
        addDataType(typeName, klass.asSubclass(PGobject.class));
    }
}
/**
 * <B>Note:</B> even though {@code Statement} is automatically closed when it is garbage
 * collected, it is better to close it explicitly to lower resource consumption.
 *
 * {@inheritDoc}
 */
public void close() throws SQLException {
  // Release the shared cancel timer before tearing down the network connection.
  releaseTimer();
  queryExecutor.close();
  // Clear the unclosed-connection trace so finalize() does not log it.
  openStackTrace = null;
}
/** Translates JDBC escape syntax in the given SQL into native PostgreSQL SQL. */
public String nativeSQL(String sql) throws SQLException {
  checkClosed();
  CachedQuery cachedQuery = queryExecutor.createQuery(sql, false, true);
  return cachedQuery.query.getNativeSql();
}
/**
 * Returns the head of the warning chain, first draining any warnings
 * accumulated on the query executor into the chain.
 */
public synchronized SQLWarning getWarnings() throws SQLException {
  checkClosed();
  SQLWarning newWarnings = queryExecutor.getWarnings(); // NB: also clears them.
  if (firstWarning == null) {
    firstWarning = newWarnings;
  } else {
    firstWarning.setNextWarning(newWarnings); // Chain them on.
  }
  return firstWarning;
}
/** Discards all warnings, both local and those pending on the executor. */
public synchronized void clearWarnings() throws SQLException {
  checkClosed();
  queryExecutor.getWarnings(); // Clear and discard.
  firstWarning = null;
}
/**
 * Sets the session read-only characteristic. Only issues SQL when the value
 * actually changes; must not be called inside an open transaction.
 *
 * @throws SQLException if a transaction is in progress or execution fails
 */
public void setReadOnly(boolean readOnly) throws SQLException {
  checkClosed();
  if (queryExecutor.getTransactionState() != TransactionState.IDLE) {
    throw new PSQLException(
        GT.tr("Cannot change transaction read-only property in the middle of a transaction."),
        PSQLState.ACTIVE_SQL_TRANSACTION);
  }
  if (readOnly != this.readOnly) {
    String readOnlySql
        = "SET SESSION CHARACTERISTICS AS TRANSACTION " + (readOnly ? "READ ONLY" : "READ WRITE");
    execSQLUpdate(readOnlySql); // nb: no BEGIN triggered.
  }
  this.readOnly = readOnly;
  LOGGER.log(Level.FINE, " setReadOnly = {0}", readOnly);
}
/** Returns the locally tracked read-only flag. */
public boolean isReadOnly() throws SQLException {
  checkClosed();
  return readOnly;
}
/**
 * Switches autocommit mode. Per JDBC, leaving manual-commit mode commits the
 * transaction in progress before the switch.
 */
public void setAutoCommit(boolean autoCommit) throws SQLException {
  checkClosed();
  if (this.autoCommit == autoCommit) {
    return;
  }
  // Commit any pending work before changing mode (JDBC-mandated behavior).
  if (!this.autoCommit) {
    commit();
  }
  this.autoCommit = autoCommit;
  LOGGER.log(Level.FINE, " setAutoCommit = {0}", autoCommit);
}
/** Returns the locally tracked autocommit flag. */
public boolean getAutoCommit() throws SQLException {
  checkClosed();
  return this.autoCommit;
}
/**
 * Executes COMMIT/ROLLBACK, suppressing the implicit BEGIN. Retries once when
 * the executor says the failure will heal on retry, but never for compound
 * queries (they might have been partially executed).
 */
private void executeTransactionCommand(Query query) throws SQLException {
  int flags = QueryExecutor.QUERY_NO_METADATA | QueryExecutor.QUERY_NO_RESULTS
      | QueryExecutor.QUERY_SUPPRESS_BEGIN;
  // A prepare threshold of 0 means "never use server-side prepares".
  if (prepareThreshold == 0) {
    flags |= QueryExecutor.QUERY_ONESHOT;
  }
  try {
    getQueryExecutor().execute(query, null, new TransactionCommandHandler(), 0, 0, flags);
  } catch (SQLException e) {
    // Don't retry composite queries as it might get partially executed
    if (query.getSubqueries() != null || !queryExecutor.willHealOnRetry(e)) {
      throw e;
    }
    query.close();
    // retry
    getQueryExecutor().execute(query, null, new TransactionCommandHandler(), 0, 0, flags);
  }
}
/**
 * Commits the current transaction. No-op when the backend reports no
 * transaction in progress.
 *
 * @throws SQLException if autocommit is enabled or the COMMIT fails
 */
public void commit() throws SQLException {
  checkClosed();
  if (autoCommit) {
    throw new PSQLException(GT.tr("Cannot commit when autoCommit is enabled."),
        PSQLState.NO_ACTIVE_SQL_TRANSACTION);
  }
  if (queryExecutor.getTransactionState() != TransactionState.IDLE) {
    executeTransactionCommand(commitQuery);
  }
}
/** Throws if this connection has already been closed. */
protected void checkClosed() throws SQLException {
  if (isClosed()) {
    throw new PSQLException(GT.tr("This connection has been closed."),
        PSQLState.CONNECTION_DOES_NOT_EXIST);
  }
}
/**
 * Rolls back the current transaction. No-op when the backend reports no
 * transaction in progress.
 *
 * @throws SQLException if autocommit is enabled or the ROLLBACK fails
 */
public void rollback() throws SQLException {
  checkClosed();
  if (autoCommit) {
    throw new PSQLException(GT.tr("Cannot rollback when autoCommit is enabled."),
        PSQLState.NO_ACTIVE_SQL_TRANSACTION);
  }
  if (queryExecutor.getTransactionState() != TransactionState.IDLE) {
    executeTransactionCommand(rollbackQuery);
  }
}
/** Returns the backend-reported transaction state. */
public TransactionState getTransactionState() {
  return queryExecutor.getTransactionState();
}
/**
 * Queries the server for the current transaction isolation level and maps it
 * to the JDBC constant. Unknown or missing levels default to READ COMMITTED.
 *
 * @return one of the {@code Connection.TRANSACTION_*} constants
 * @throws SQLException if the query fails
 */
public int getTransactionIsolation() throws SQLException {
  checkClosed();
  String level = null;
  final ResultSet rs = execSQLQuery("SHOW TRANSACTION ISOLATION LEVEL"); // nb: no BEGIN triggered
  // Close the result set even when reading it throws; the original leaked it
  // on that path.
  try {
    if (rs.next()) {
      level = rs.getString(1);
    }
  } finally {
    rs.close();
  }
  // TODO revisit: throw exception instead of silently eating the error in unknown cases?
  if (level == null) {
    return Connection.TRANSACTION_READ_COMMITTED; // Best guess.
  }
  level = level.toUpperCase(Locale.US);
  if (level.equals("READ COMMITTED")) {
    return Connection.TRANSACTION_READ_COMMITTED;
  }
  if (level.equals("READ UNCOMMITTED")) {
    return Connection.TRANSACTION_READ_UNCOMMITTED;
  }
  if (level.equals("REPEATABLE READ")) {
    return Connection.TRANSACTION_REPEATABLE_READ;
  }
  if (level.equals("SERIALIZABLE")) {
    return Connection.TRANSACTION_SERIALIZABLE;
  }
  return Connection.TRANSACTION_READ_COMMITTED; // Best guess.
}
/**
 * Sets the session default transaction isolation level. Must not be called
 * inside an open transaction.
 *
 * @param level one of the {@code Connection.TRANSACTION_*} constants
 * @throws SQLException if a transaction is in progress or the level is unsupported
 */
public void setTransactionIsolation(int level) throws SQLException {
  checkClosed();
  if (queryExecutor.getTransactionState() != TransactionState.IDLE) {
    throw new PSQLException(
        GT.tr("Cannot change transaction isolation level in the middle of a transaction."),
        PSQLState.ACTIVE_SQL_TRANSACTION);
  }
  String isolationLevelName = getIsolationLevelName(level);
  if (isolationLevelName == null) {
    throw new PSQLException(GT.tr("Transaction isolation level {0} not supported.", level),
        PSQLState.NOT_IMPLEMENTED);
  }
  String isolationLevelSQL =
      "SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL " + isolationLevelName;
  execSQLUpdate(isolationLevelSQL); // nb: no BEGIN triggered
  LOGGER.log(Level.FINE, " setTransactionIsolation = {0}", isolationLevelName);
}
/**
 * Maps a JDBC isolation constant to the SQL level name, or null when the
 * constant is not one PostgreSQL supports.
 */
protected String getIsolationLevelName(int level) {
    if (level == Connection.TRANSACTION_READ_COMMITTED) {
        return "READ COMMITTED";
    }
    if (level == Connection.TRANSACTION_SERIALIZABLE) {
        return "SERIALIZABLE";
    }
    if (level == Connection.TRANSACTION_READ_UNCOMMITTED) {
        return "READ UNCOMMITTED";
    }
    if (level == Connection.TRANSACTION_REPEATABLE_READ) {
        return "REPEATABLE READ";
    }
    return null;
}
/** PostgreSQL has no catalogs to switch between; this is a validated no-op. */
public void setCatalog(String catalog) throws SQLException {
  checkClosed();
  // no-op
}
/** Returns the name of the connected database as the catalog. */
public String getCatalog() throws SQLException {
  checkClosed();
  return queryExecutor.getDatabase();
}
/**
 * Overrides finalize(). If called, it closes the connection.
 * <p>
 * This was done at the request of <a href="mailto:rachel@enlarion.demon.co.uk">Rachel
 * Greenham</a> who hit a problem where multiple clients didn't close the connection, and once a
 * fortnight enough clients were open to kill the postgres server.
 * <p>
 * NOTE(review): finalize() is deprecated in modern Java; left unchanged here
 * because replacing it (e.g. with a Cleaner) would be a behavioral change.
 */
protected void finalize() throws Throwable {
  try {
    // Warn about the leak if logUnclosedConnections captured the creation site.
    if (openStackTrace != null) {
      LOGGER.log(Level.WARNING, GT.tr("Finalizing a Connection that was never closed:"), openStackTrace);
    }
    close();
  } finally {
    super.finalize();
  }
}
/**
 * Get server version number
 *
 * @return server version number
 */
public String getDBVersionNumber() {
  return queryExecutor.getServerVersion();
}
/**
 * Get server major version
 *
 * @return server major version, or 0 when it cannot be parsed
 */
public int getServerMajorVersion() {
  try {
    StringTokenizer versionTokens = new StringTokenizer(queryExecutor.getServerVersion(), "."); // aaXbb.ccYdd
    return integerPart(versionTokens.nextToken()); // return X
  } catch (NoSuchElementException e) {
    // Version string had no tokens at all.
    return 0;
  }
}
/**
 * Get server minor version
 *
 * @return server minor version, or 0 when it cannot be parsed
 */
public int getServerMinorVersion() {
  try {
    StringTokenizer versionTokens = new StringTokenizer(queryExecutor.getServerVersion(), "."); // aaXbb.ccYdd
    versionTokens.nextToken(); // Skip aaXbb
    return integerPart(versionTokens.nextToken()); // return Y
  } catch (NoSuchElementException e) {
    // Version string had fewer than two dot-separated tokens.
    return 0;
  }
}
/** Returns true when the server version number is at least {@code ver}. */
@Override
public boolean haveMinimumServerVersion(int ver) {
  return queryExecutor.getServerVersionNum() >= ver;
}
/** Convenience overload taking a {@link Version} constant. */
@Override
public boolean haveMinimumServerVersion(Version ver) {
  return haveMinimumServerVersion(ver.getVersionNum());
}
/** Returns the character encoding negotiated with the server. */
@Override
public Encoding getEncoding() {
  return queryExecutor.getEncoding();
}
/** Encodes a string using the connection's server encoding. */
@Override
public byte[] encodeString(String str) throws SQLException {
  try {
    return getEncoding().encode(str);
  } catch (IOException ioe) {
    throw new PSQLException(GT.tr("Unable to translate data into the desired encoding."),
        PSQLState.DATA_ERROR, ioe);
  }
}
/** Escapes a string for use as a SQL literal, honoring standard_conforming_strings. */
@Override
public String escapeString(String str) throws SQLException {
  return Utils.escapeLiteral(null, str, queryExecutor.getStandardConformingStrings())
      .toString();
}
/** Reports the server's standard_conforming_strings setting. */
@Override
public boolean getStandardConformingStrings() {
  return queryExecutor.getStandardConformingStrings();
}
// This is a cache of the DatabaseMetaData instance for this connection
protected java.sql.DatabaseMetaData metadata;
/** Reports whether the underlying network connection has been closed. */
@Override
public boolean isClosed() throws SQLException {
  return queryExecutor.isClosed();
}
/** Sends an out-of-band cancel request for the currently executing query. */
@Override
public void cancelQuery() throws SQLException {
  checkClosed();
  queryExecutor.sendQueryCancel();
}
/** Returns pending notifications without waiting (-1 timeout passed through). */
@Override
public PGNotification[] getNotifications() throws SQLException {
  return getNotifications(-1);
}
/** Polls the backend for NOTIFY messages, waiting up to the given timeout. */
@Override
public PGNotification[] getNotifications(int timeoutMillis) throws SQLException {
  checkClosed();
  getQueryExecutor().processNotifies(timeoutMillis);
  // Backwards-compatibility hand-holding.
  PGNotification[] notifications = queryExecutor.getNotifications();
  return (notifications.length == 0 ? null : notifications);
}
/**
 * Handler for transaction queries
 */
private class TransactionCommandHandler extends ResultHandlerBase {
  // Transfers any warning produced by COMMIT/ROLLBACK to the connection's
  // warning chain before completing.
  public void handleCompletion() throws SQLException {
    SQLWarning warning = getWarning();
    if (warning != null) {
      PgConnection.this.addWarning(warning);
    }
    super.handleCompletion();
  }
}
/** Returns the statement prepare threshold (executions before server-side prepare). */
public int getPrepareThreshold() {
  return prepareThreshold;
}
/**
 * Sets the default fetch size for statements created by this connection.
 *
 * @throws SQLException if the value is negative
 */
public void setDefaultFetchSize(int fetchSize) throws SQLException {
  if (fetchSize < 0) {
    throw new PSQLException(GT.tr("Fetch size must be a value greater to or equal to 0."),
        PSQLState.INVALID_PARAMETER_VALUE);
  }
  this.defaultFetchSize = fetchSize;
  LOGGER.log(Level.FINE, " setDefaultFetchSize = {0}", fetchSize);
}
/** Returns the default fetch size for new statements. */
public int getDefaultFetchSize() {
  return defaultFetchSize;
}
/** Sets the prepare threshold; no validation is performed (-1 forces binary). */
public void setPrepareThreshold(int newThreshold) {
  this.prepareThreshold = newThreshold;
  LOGGER.log(Level.FINE, " setPrepareThreshold = {0}", newThreshold);
}
/** Returns the force-binary-transfer flag. */
public boolean getForceBinary() {
  return forcebinary;
}
/** Sets the force-binary-transfer flag. */
public void setForceBinary(boolean newValue) {
  this.forcebinary = newValue;
  LOGGER.log(Level.FINE, " setForceBinary = {0}", newValue);
}
/** Stores the user-supplied type map; enforcement happens in getObject. */
public void setTypeMapImpl(Map<String, Class<?>> map) throws SQLException {
  typemap = map;
}
/** Returns the driver logger. */
public Logger getLogger() {
  return LOGGER;
}
/** Returns the wire protocol version in use. */
public int getProtocolVersion() {
  return queryExecutor.getProtocolVersion();
}
/** True when String parameters bind as VARCHAR rather than UNSPECIFIED. */
public boolean getStringVarcharFlag() {
  return bindStringAsVarchar;
}
// Lazily created COPY API handle.
private CopyManager copyManager = null;
/** Lazily creates and returns the COPY API handle for this connection. */
public CopyManager getCopyAPI() throws SQLException {
  checkClosed();
  if (copyManager == null) {
    copyManager = new CopyManager(this);
  }
  return copyManager;
}
/** True when parameters of the given OID are sent in binary format. */
public boolean binaryTransferSend(int oid) {
  return queryExecutor.useBinaryForSend(oid);
}
/** Returns the backend process id, useful for matching server logs. */
public int getBackendPID() {
  return queryExecutor.getBackendPID();
}
/** Reports whether column-name sanitising is disabled for this connection. */
public boolean isColumnSanitiserDisabled() {
  return this.disableColumnSanitiser;
}
/** Enables/disables column-name sanitising for this connection. */
public void setDisableColumnSanitiser(boolean disableColumnSanitiser) {
  this.disableColumnSanitiser = disableColumnSanitiser;
  LOGGER.log(Level.FINE, " setDisableColumnSanitiser = {0}", disableColumnSanitiser);
}
/** Returns the executor's preferred query mode (simple/extended). */
@Override
public PreferQueryMode getPreferQueryMode() {
  return queryExecutor.getPreferQueryMode();
}
/** Returns the executor's autosave setting. */
@Override
public AutoSave getAutosave() {
  return queryExecutor.getAutoSave();
}
/** Propagates the autosave setting to the executor. */
@Override
public void setAutosave(AutoSave autoSave) {
  queryExecutor.setAutoSave(autoSave);
  LOGGER.log(Level.FINE, " setAutosave = {0}", autoSave.value());
}
/** Forcibly aborts the underlying connection via the executor. */
protected void abort() {
  queryExecutor.abort();
}
/**
 * Lazily obtains the driver's shared cancel timer; synchronized so at most
 * one Timer reference is acquired per connection.
 */
private synchronized Timer getTimer() {
  if (cancelTimer == null) {
    cancelTimer = Driver.getSharedTimer().getTimer();
  }
  return cancelTimer;
}
/** Releases the shared timer reference, if this connection acquired one. */
private synchronized void releaseTimer() {
  if (cancelTimer != null) {
    cancelTimer = null;
    Driver.getSharedTimer().releaseTimer();
  }
}
/** Schedules a task (e.g. a query-cancel) on the shared timer. */
@Override
public void addTimerTask(TimerTask timerTask, long milliSeconds) {
  Timer timer = getTimer();
  timer.schedule(timerTask, milliSeconds);
}
/** Purges cancelled tasks from the timer, if one was ever acquired. */
@Override
public void purgeTimerTasks() {
  // Read the volatile field once: releaseTimer() may null it concurrently.
  Timer timer = cancelTimer;
  if (timer != null) {
    timer.purge();
  }
}
/** Escapes a string for use as a (quoted) SQL identifier. */
@Override
public String escapeIdentifier(String identifier) throws SQLException {
  return Utils.escapeIdentifier(null, identifier).toString();
}
/** Escapes a string for use as a SQL literal, honoring standard_conforming_strings. */
@Override
public String escapeLiteral(String literal) throws SQLException {
  return Utils.escapeLiteral(null, literal, queryExecutor.getStandardConformingStrings())
      .toString();
}
/** Returns the bounded cache of per-column field metadata. */
@Override
public LruCache<FieldMetadata.Key, FieldMetadata> getFieldMetadataCache() {
  return fieldMetadataCache;
}
/** Returns a NEW replication API wrapper on every call. */
@Override
public PGReplicationConnection getReplicationAPI() {
  return new PGReplicationConnectionImpl(this);
}
/**
 * Recursively renders {@code elements} (a possibly nested Java array) into
 * PostgreSQL array-literal syntax, e.g. {@code {a,b,{c,d}}}, appending to
 * {@code sb}. Null elements become the literal NULL; scalar elements are
 * escaped via PgArray; primitive sub-arrays use PrimitiveArraySupport.
 */
private static void appendArray(StringBuilder sb, Object elements, char delim) {
  sb.append('{');
  final int length = java.lang.reflect.Array.getLength(elements);
  for (int i = 0; i < length; i++) {
    if (i != 0) {
      sb.append(delim);
    }
    final Object element = java.lang.reflect.Array.get(elements, i);
    if (element == null) {
      sb.append("NULL");
    } else if (!element.getClass().isArray()) {
      // Scalar leaf: escape its string form as an array element.
      PgArray.escapeArrayElement(sb, element.toString());
    } else {
      // Nested array: fast path for primitive component types, else recurse.
      final PrimitiveArraySupport support = PrimitiveArraySupport.getArraySupport(element);
      if (support == null) {
        appendArray(sb, element, delim);
      } else {
        support.appendArray(sb, delim, element);
      }
    }
  }
  sb.append('}');
}
/**
 * Parses the first contiguous run of decimal digits out of a "dirty"
 * string surrounded by non-numeric characters (e.g. "v12beta" -> 12).
 * Returns 0 when the string contains no digits at all.
 */
private static int integerPart(String dirtyString) {
  final int len = dirtyString.length();
  int start = 0;
  while (start < len && !Character.isDigit(dirtyString.charAt(start))) {
    start++;
  }
  int end = start;
  while (end < len && Character.isDigit(dirtyString.charAt(end))) {
    end++;
  }
  return start == end ? 0 : Integer.parseInt(dirtyString.substring(start, end));
}
// Full-arity factory; the 1- and 2-arg overloads funnel into these.
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency,
    int resultSetHoldability) throws SQLException {
  checkClosed();
  return new PgStatement(this, resultSetType, resultSetConcurrency, resultSetHoldability);
}

@Override
public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency,
    int resultSetHoldability) throws SQLException {
  checkClosed();
  return new PgPreparedStatement(this, sql, resultSetType, resultSetConcurrency,
      resultSetHoldability);
}

@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency,
    int resultSetHoldability) throws SQLException {
  checkClosed();
  return new PgCallableStatement(this, sql, resultSetType, resultSetConcurrency,
      resultSetHoldability);
}
// Lazily created, cached metadata object. Not synchronized: a race may
// create two instances, the last assignment wins.
@Override
public DatabaseMetaData getMetaData() throws SQLException {
  checkClosed();
  if (metadata == null) {
    metadata = new PgDatabaseMetaData(this);
  }
  return metadata;
}

// JDBC entry point for the SQLData type map; delegates to setTypeMapImpl.
@Override
public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
  setTypeMapImpl(map);
  LOGGER.log(Level.FINE, " setTypeMap = {0}", map);
}
// Factory hooks for SQL complex types; protected so subclasses can
// substitute their own implementations.

// Array from its textual server representation (fieldString may be null).
protected Array makeArray(int oid, String fieldString) throws SQLException {
  return new PgArray(this, oid, fieldString);
}

// Large-object-backed Blob for the given lo oid.
protected Blob makeBlob(long oid) throws SQLException {
  return new PgBlob(this, oid);
}

// Large-object-backed Clob for the given lo oid.
protected Clob makeClob(long oid) throws SQLException {
  return new PgClob(this, oid);
}

// Fresh, empty SQLXML value bound to this connection.
protected SQLXML makeSQLXML() throws SQLException {
  return new PgSQLXML(this);
}
// Standalone LOB creation (JDBC 4) is not supported by this driver:
// pg large objects must be tied to an existing row/oid.
@Override
public Clob createClob() throws SQLException {
  checkClosed();
  throw org.postgresql.Driver.notImplemented(this.getClass(), "createClob()");
}

@Override
public Blob createBlob() throws SQLException {
  checkClosed();
  throw org.postgresql.Driver.notImplemented(this.getClass(), "createBlob()");
}

@Override
public NClob createNClob() throws SQLException {
  checkClosed();
  throw org.postgresql.Driver.notImplemented(this.getClass(), "createNClob()");
}

// SQLXML is supported; see makeSQLXML().
@Override
public SQLXML createSQLXML() throws SQLException {
  checkClosed();
  return makeSQLXML();
}

// Struct creation is not implemented.
@Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
  checkClosed();
  throw org.postgresql.Driver.notImplemented(this.getClass(), "createStruct(String, Object[])");
}
/**
 * Creates a SQL Array from an arbitrary Java array (including primitive
 * arrays, which may be sent in binary form when the protocol allows).
 *
 * <p>Fix: validate the resolved array type oid BEFORE asking the type info
 * for its delimiter. Previously {@code getArrayDelimiter(oid)} was invoked
 * with {@code Oid.UNSPECIFIED} for unknown type names, running the lookup
 * on an invalid oid before the intended PSQLException could be thrown.</p>
 *
 * @param typeName server element type name (e.g. "int4")
 * @param elements Java array of elements, or null for a NULL array
 * @throws SQLException if the type is unknown, elements is not an array,
 *         or the connection is closed
 */
@Override
public Array createArrayOf(String typeName, Object elements) throws SQLException {
  checkClosed();
  final TypeInfo typeInfo = getTypeInfo();
  final int oid = typeInfo.getPGArrayType(typeName);
  if (oid == Oid.UNSPECIFIED) {
    throw new PSQLException(GT.tr("Unable to find server array type for provided name {0}.", typeName),
        PSQLState.INVALID_NAME);
  }
  final char delim = typeInfo.getArrayDelimiter(oid);
  if (elements == null) {
    return makeArray(oid, null);
  }
  final String arrayString;
  final PrimitiveArraySupport arraySupport = PrimitiveArraySupport.getArraySupport(elements);
  if (arraySupport != null) {
    // if the oid for the given type matches the default type, we might be
    // able to go straight to binary representation
    if (oid == arraySupport.getDefaultArrayTypeOid(typeInfo) && arraySupport.supportBinaryRepresentation()
        && getPreferQueryMode() != PreferQueryMode.SIMPLE) {
      return new PgArray(this, oid, arraySupport.toBinaryRepresentation(this, elements));
    }
    arrayString = arraySupport.toArrayString(delim, elements);
  } else {
    final Class<?> clazz = elements.getClass();
    if (!clazz.isArray()) {
      throw new PSQLException(GT.tr("Invalid elements {0}", elements), PSQLState.INVALID_PARAMETER_TYPE);
    }
    StringBuilder sb = new StringBuilder();
    appendArray(sb, elements, delim);
    arrayString = sb.toString();
  }
  return makeArray(oid, arrayString);
}
/**
 * Creates a SQL Array from an Object[] by rendering it into the server's
 * text array-literal format. A null {@code elements} yields a NULL array.
 *
 * @throws SQLException if the type name resolves to no server array type
 */
@Override
public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
  checkClosed();
  final TypeInfo typeInfo = getTypeInfo();
  final int oid = typeInfo.getPGArrayType(typeName);
  if (oid == Oid.UNSPECIFIED) {
    throw new PSQLException(
        GT.tr("Unable to find server array type for provided name {0}.", typeName),
        PSQLState.INVALID_NAME);
  }
  if (elements == null) {
    return makeArray(oid, null);
  }
  final StringBuilder sb = new StringBuilder();
  appendArray(sb, elements, typeInfo.getArrayDelimiter(oid));
  return makeArray(oid, sb.toString());
}
/**
 * JDBC liveness check. Pings the server: replication connections issue
 * IDENTIFY_SYSTEM; regular connections execute a cached empty prepared
 * statement (a cheap round-trip). A failed-transaction SQLState still
 * counts as "valid" because the socket itself is known to be up.
 *
 * @param timeout seconds to wait; 0 means no timeout; negative is invalid
 * @return true if the connection answered, false if closed or on error
 */
@Override
public boolean isValid(int timeout) throws SQLException {
  if (timeout < 0) {
    throw new PSQLException(GT.tr("Invalid timeout ({0}<0).", timeout),
        PSQLState.INVALID_PARAMETER_VALUE);
  }
  if (isClosed()) {
    return false;
  }
  try {
    if (replicationConnection) {
      Statement statement = createStatement();
      statement.execute("IDENTIFY_SYSTEM");
      statement.close();
    } else {
      if (checkConnectionQuery == null) {
        // Empty query: the lightest possible server round-trip.
        checkConnectionQuery = prepareStatement("");
      }
      checkConnectionQuery.setQueryTimeout(timeout);
      checkConnectionQuery.executeUpdate();
    }
    return true;
  } catch (SQLException e) {
    if (PSQLState.IN_FAILED_SQL_TRANSACTION.getState().equals(e.getSQLState())) {
      // "current transaction aborted", assume the connection is up and running
      return true;
    }
    LOGGER.log(Level.WARNING, GT.tr("Validating connection."), e);
  }
  return false;
}
/**
 * Sets a single ClientInfo property. Only "ApplicationName" is supported,
 * and only against servers >= 9.0 (SET application_name). A null value is
 * normalised to ""; an unchanged value is a no-op. Unsupported properties
 * produce a SQLWarning rather than an exception, per the JDBC spec.
 *
 * @throws SQLClientInfoException if the connection is closed or SET fails
 */
@Override
public void setClientInfo(String name, String value) throws SQLClientInfoException {
  try {
    checkClosed();
  } catch (final SQLException cause) {
    Map<String, ClientInfoStatus> failures = new HashMap<String, ClientInfoStatus>();
    failures.put(name, ClientInfoStatus.REASON_UNKNOWN);
    throw new SQLClientInfoException(GT.tr("This connection has been closed."), failures, cause);
  }
  if (haveMinimumServerVersion(ServerVersion.v9_0) && "ApplicationName".equals(name)) {
    if (value == null) {
      value = "";
    }
    final String oldValue = queryExecutor.getApplicationName();
    if (value.equals(oldValue)) {
      return;
    }
    try {
      // Value is escaped as a literal to prevent SQL injection via SET.
      StringBuilder sql = new StringBuilder("SET application_name = '");
      Utils.escapeLiteral(sql, value, getStandardConformingStrings());
      sql.append("'");
      execSQLUpdate(sql.toString());
    } catch (SQLException sqle) {
      Map<String, ClientInfoStatus> failures = new HashMap<String, ClientInfoStatus>();
      failures.put(name, ClientInfoStatus.REASON_UNKNOWN);
      throw new SQLClientInfoException(
          GT.tr("Failed to set ClientInfo property: {0}", "ApplicationName"), sqle.getSQLState(),
          failures, sqle);
    }
    if (LOGGER.isLoggable(Level.FINE)) {
      LOGGER.log(Level.FINE, " setClientInfo = {0} {1}", new Object[]{name, value});
    }
    _clientInfo.put(name, value);
    return;
  }
  addWarning(new SQLWarning(GT.tr("ClientInfo property not supported."),
      PSQLState.NOT_IMPLEMENTED.getState()));
}
/**
 * Bulk ClientInfo setter. Only "ApplicationName" is consulted from the
 * given properties; per-property failures are collected and re-thrown as a
 * single SQLClientInfoException, per the JDBC contract.
 *
 * <p>Fix: corrected the error message typo "One ore more" -> "One or more".</p>
 *
 * @throws SQLClientInfoException if the connection is closed or any
 *         property could not be applied
 */
@Override
public void setClientInfo(Properties properties) throws SQLClientInfoException {
  try {
    checkClosed();
  } catch (final SQLException cause) {
    // Closed connection: report every requested property as failed.
    Map<String, ClientInfoStatus> failures = new HashMap<String, ClientInfoStatus>();
    for (Map.Entry<Object, Object> e : properties.entrySet()) {
      failures.put((String) e.getKey(), ClientInfoStatus.REASON_UNKNOWN);
    }
    throw new SQLClientInfoException(GT.tr("This connection has been closed."), failures, cause);
  }
  Map<String, ClientInfoStatus> failures = new HashMap<String, ClientInfoStatus>();
  for (String name : new String[]{"ApplicationName"}) {
    try {
      setClientInfo(name, properties.getProperty(name, null));
    } catch (SQLClientInfoException e) {
      failures.putAll(e.getFailedProperties());
    }
  }
  if (!failures.isEmpty()) {
    throw new SQLClientInfoException(GT.tr("One or more ClientInfo failed."),
        PSQLState.NOT_IMPLEMENTED.getState(), failures);
  }
}
// ClientInfo getters refresh ApplicationName from the executor before
// answering, so server-side changes are reflected.
@Override
public String getClientInfo(String name) throws SQLException {
  checkClosed();
  _clientInfo.put("ApplicationName", queryExecutor.getApplicationName());
  return _clientInfo.getProperty(name);
}

// NOTE(review): returns the internal Properties object itself, not a
// copy — callers can mutate connection state through it.
@Override
public Properties getClientInfo() throws SQLException {
  checkClosed();
  _clientInfo.put("ApplicationName", queryExecutor.getApplicationName());
  return _clientInfo;
}

// Legacy JDBC 4.0 ea API; never implemented.
public <T> T createQueryObject(Class<T> ifc) throws SQLException {
  checkClosed();
  throw org.postgresql.Driver.notImplemented(this.getClass(), "createQueryObject(Class<T>)");
}
/** Wrapper check: true when this object is an instance of {@code iface}. */
public boolean isWrapperFor(Class<?> iface) throws SQLException {
  checkClosed();
  return iface.isAssignableFrom(getClass());
}

/** Unwraps this connection to {@code iface}, or throws if incompatible. */
public <T> T unwrap(Class<T> iface) throws SQLException {
  checkClosed();
  if (!iface.isAssignableFrom(getClass())) {
    throw new SQLException("Cannot unwrap to " + iface.getName());
  }
  return iface.cast(this);
}
/**
 * Returns the server's current schema via {@code select current_schema()},
 * or null if the query unexpectedly yields no row. Statement and result
 * set are closed via try-with-resources (same close order as before:
 * result set first, then statement).
 */
public String getSchema() throws SQLException {
  checkClosed();
  try (Statement stmt = createStatement();
       ResultSet rs = stmt.executeQuery("select current_schema()")) {
    return rs.next() ? rs.getString(1) : null;
  }
}
/**
 * Sets the session search_path to the given schema; a null schema resets
 * it to the server default. The schema name is escaped as a string
 * literal to prevent SQL injection.
 */
public void setSchema(String schema) throws SQLException {
  checkClosed();
  try (Statement stmt = createStatement()) {
    if (schema == null) {
      stmt.executeUpdate("SET SESSION search_path TO DEFAULT");
      return;
    }
    StringBuilder sql = new StringBuilder("SET SESSION search_path TO '");
    Utils.escapeLiteral(sql, schema, getStandardConformingStrings());
    sql.append("'");
    stmt.executeUpdate(sql.toString());
    LOGGER.log(Level.FINE, " setSchema = {0}", schema);
  }
}
// Runnable wrapper so abort() can be handed to an Executor.
public class AbortCommand implements Runnable {
  public void run() {
    abort();
  }
}

/**
 * JDBC 4.1 abort: hard-terminates the connection, optionally on the
 * supplied executor. No-op when already closed. Guarded by the
 * "callAbort" SQLPermission check.
 *
 * @param executor executor to run the abort on; null runs it inline
 */
public void abort(Executor executor) throws SQLException {
  if (isClosed()) {
    return;
  }
  SQL_PERMISSION_ABORT.checkGuard(this);
  AbortCommand command = new AbortCommand();
  if (executor != null) {
    executor.execute(command);
  } else {
    command.run();
  }
}
/**
 * JDBC 4.1 socket timeout. The executor parameter is ignored: the
 * timeout is applied synchronously to the underlying socket.
 *
 * @param milliseconds socket timeout; must be >= 0 (0 = infinite)
 * @throws SQLException on invalid value, closed connection, or I/O error
 */
public void setNetworkTimeout(Executor executor /*not used*/, int milliseconds) throws SQLException {
  checkClosed();
  if (milliseconds < 0) {
    throw new PSQLException(GT.tr("Network timeout must be a value greater than or equal to 0."),
        PSQLState.INVALID_PARAMETER_VALUE);
  }
  SecurityManager securityManager = System.getSecurityManager();
  if (securityManager != null) {
    securityManager.checkPermission(SQL_PERMISSION_NETWORK_TIMEOUT);
  }
  try {
    queryExecutor.setNetworkTimeout(milliseconds);
  } catch (IOException ioe) {
    throw new PSQLException(GT.tr("Unable to set network timeout."),
        PSQLState.COMMUNICATION_ERROR, ioe);
  }
}

/** Current socket timeout in milliseconds (0 = infinite). */
public int getNetworkTimeout() throws SQLException {
  checkClosed();
  try {
    return queryExecutor.getNetworkTimeout();
  } catch (IOException ioe) {
    throw new PSQLException(GT.tr("Unable to get network timeout."),
        PSQLState.COMMUNICATION_ERROR, ioe);
  }
}
/**
 * Sets the default ResultSet holdability for statements created without an
 * explicit holdability. Only the two JDBC-defined constants are accepted;
 * anything else throws without modifying the current setting.
 */
@Override
public void setHoldability(int holdability) throws SQLException {
  checkClosed();
  if (holdability != ResultSet.CLOSE_CURSORS_AT_COMMIT
      && holdability != ResultSet.HOLD_CURSORS_OVER_COMMIT) {
    throw new PSQLException(GT.tr("Unknown ResultSet holdability setting: {0}.", holdability),
        PSQLState.INVALID_PARAMETER_VALUE);
  }
  rsHoldability = holdability;
  LOGGER.log(Level.FINE, " setHoldability = {0}", holdability);
}

/** Current default ResultSet holdability. */
@Override
public int getHoldability() throws SQLException {
  checkClosed();
  return rsHoldability;
}
/**
 * Creates an unnamed savepoint (server name derived from a counter).
 * Savepoints are illegal in auto-commit mode per the JDBC spec.
 */
@Override
public Savepoint setSavepoint() throws SQLException {
  checkClosed();
  String pgName;
  if (getAutoCommit()) {
    throw new PSQLException(GT.tr("Cannot establish a savepoint in auto-commit mode."),
        PSQLState.NO_ACTIVE_SQL_TRANSACTION);
  }
  PSQLSavepoint savepoint = new PSQLSavepoint(savepointId++);
  pgName = savepoint.getPGName();
  // Note we can't use execSQLUpdate because we don't want
  // to suppress BEGIN.
  Statement stmt = createStatement();
  stmt.executeUpdate("SAVEPOINT " + pgName);
  stmt.close();
  return savepoint;
}

/** Named variant of {@link #setSavepoint()}; same auto-commit restriction. */
@Override
public Savepoint setSavepoint(String name) throws SQLException {
  checkClosed();
  if (getAutoCommit()) {
    throw new PSQLException(GT.tr("Cannot establish a savepoint in auto-commit mode."),
        PSQLState.NO_ACTIVE_SQL_TRANSACTION);
  }
  PSQLSavepoint savepoint = new PSQLSavepoint(name);
  // Note we can't use execSQLUpdate because we don't want
  // to suppress BEGIN.
  Statement stmt = createStatement();
  stmt.executeUpdate("SAVEPOINT " + savepoint.getPGName());
  stmt.close();
  return savepoint;
}
// Rolls the transaction back to the given savepoint (savepoint stays valid).
@Override
public void rollback(Savepoint savepoint) throws SQLException {
  checkClosed();
  PSQLSavepoint pgSavepoint = (PSQLSavepoint) savepoint;
  execSQLUpdate("ROLLBACK TO SAVEPOINT " + pgSavepoint.getPGName());
}

// Releases the savepoint server-side and invalidates the Java handle.
@Override
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
  checkClosed();
  PSQLSavepoint pgSavepoint = (PSQLSavepoint) savepoint;
  execSQLUpdate("RELEASE SAVEPOINT " + pgSavepoint.getPGName());
  pgSavepoint.invalidate();
}
// Two-arg convenience overloads: delegate to the three-arg factories using
// the connection's current default holdability.
public Statement createStatement(int resultSetType, int resultSetConcurrency)
    throws SQLException {
  checkClosed();
  return createStatement(resultSetType, resultSetConcurrency, getHoldability());
}

public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency)
    throws SQLException {
  checkClosed();
  return prepareStatement(sql, resultSetType, resultSetConcurrency, getHoldability());
}

public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency)
    throws SQLException {
  checkClosed();
  return prepareCall(sql, resultSetType, resultSetConcurrency, getHoldability());
}
// RETURN_GENERATED_KEYS is implemented as "return all columns" (null
// column list -> RETURNING *); NO_GENERATED_KEYS falls back to a plain
// prepared statement.
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
  if (autoGeneratedKeys != Statement.RETURN_GENERATED_KEYS) {
    return prepareStatement(sql);
  }
  return prepareStatement(sql, (String[]) null);
}

// Key retrieval by column index is not supported; an empty index array is
// treated as "no generated keys requested".
@Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
  if (columnIndexes != null && columnIndexes.length == 0) {
    return prepareStatement(sql);
  }
  checkClosed();
  throw new PSQLException(GT.tr("Returning autogenerated keys is not supported."),
      PSQLState.NOT_IMPLEMENTED);
}
/**
 * Prepares a statement that returns generated keys for the named columns
 * (null columnNames = all columns). The SQL is rewritten via
 * borrowReturningQuery to append RETURNING; if the user already wrote
 * RETURNING themselves, the statement always wants generated keys.
 */
public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
  if (columnNames != null && columnNames.length == 0) {
    // Empty list: caller asked for no key columns — plain statement.
    return prepareStatement(sql);
  }
  CachedQuery cachedQuery = borrowReturningQuery(sql, columnNames);
  PgPreparedStatement ps =
      new PgPreparedStatement(this, cachedQuery,
          ResultSet.TYPE_FORWARD_ONLY,
          ResultSet.CONCUR_READ_ONLY,
          getHoldability());
  Query query = cachedQuery.query;
  SqlCommand sqlCommand = query.getSqlCommand();
  if (sqlCommand != null) {
    ps.wantsGeneratedKeysAlways = sqlCommand.isReturningKeywordPresent();
  } else {
    // If composite query is given, just ignore "generated keys" arguments
  }
  return ps;
}
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.cosmosdb.v2020_04_01.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.CloudException;
import com.microsoft.azure.management.cosmosdb.v2020_04_01.ErrorResponseException;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
* An instance of this class provides access to all the operations defined
* in PrivateEndpointConnections.
*/
public class PrivateEndpointConnectionsInner {
    /** The Retrofit service to perform REST calls. */
    private PrivateEndpointConnectionsService service;
    /** The service client containing this operation class. */
    private CosmosDBManagementClientImpl client;
    /**
     * Initializes an instance of PrivateEndpointConnectionsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public PrivateEndpointConnectionsInner(Retrofit retrofit, CosmosDBManagementClientImpl client) {
        this.service = retrofit.create(PrivateEndpointConnectionsService.class);
        this.client = client;
    }
    /**
     * The interface defining all the services for PrivateEndpointConnections to be
     * used by Retrofit to perform actually REST calls.
     */
    // Auto-generated Retrofit service: one method per REST operation.
    // DELETE uses @HTTP(..., hasBody = true) because retrofit's @DELETE does
    // not permit a request body.
    interface PrivateEndpointConnectionsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.cosmosdb.v2020_04_01.PrivateEndpointConnections listByDatabaseAccount" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections")
        Observable<Response<ResponseBody>> listByDatabaseAccount(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.cosmosdb.v2020_04_01.PrivateEndpointConnections get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}")
        Observable<Response<ResponseBody>> get(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("privateEndpointConnectionName") String privateEndpointConnectionName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.cosmosdb.v2020_04_01.PrivateEndpointConnections createOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}")
        Observable<Response<ResponseBody>> createOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("privateEndpointConnectionName") String privateEndpointConnectionName, @Query("api-version") String apiVersion, @Body PrivateEndpointConnectionInner parameters, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.cosmosdb.v2020_04_01.PrivateEndpointConnections beginCreateOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}")
        Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("privateEndpointConnectionName") String privateEndpointConnectionName, @Query("api-version") String apiVersion, @Body PrivateEndpointConnectionInner parameters, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.cosmosdb.v2020_04_01.PrivateEndpointConnections delete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> delete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("privateEndpointConnectionName") String privateEndpointConnectionName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.cosmosdb.v2020_04_01.PrivateEndpointConnections beginDelete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> beginDelete(@Path("subscriptionId") String subscriptionId, @Path("resourceGroupName") String resourceGroupName, @Path("accountName") String accountName, @Path("privateEndpointConnectionName") String privateEndpointConnectionName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }
    /**
     * List all private endpoint connections on a Cosmos DB account.
     * Blocking convenience wrapper around the async variant.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param accountName Cosmos DB database account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;PrivateEndpointConnectionInner&gt; object if successful.
     */
    public List<PrivateEndpointConnectionInner> listByDatabaseAccount(String resourceGroupName, String accountName) {
        return listByDatabaseAccountWithServiceResponseAsync(resourceGroupName, accountName).toBlocking().single().body();
    }
    /**
     * List all private endpoint connections on a Cosmos DB account.
     * Callback-style variant.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param accountName Cosmos DB database account name.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<PrivateEndpointConnectionInner>> listByDatabaseAccountAsync(String resourceGroupName, String accountName, final ServiceCallback<List<PrivateEndpointConnectionInner>> serviceCallback) {
        return ServiceFuture.fromResponse(listByDatabaseAccountWithServiceResponseAsync(resourceGroupName, accountName), serviceCallback);
    }
    /**
     * List all private endpoint connections on a Cosmos DB account.
     * Observable variant that strips the ServiceResponse wrapper.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param accountName Cosmos DB database account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;PrivateEndpointConnectionInner&gt; object
     */
    public Observable<List<PrivateEndpointConnectionInner>> listByDatabaseAccountAsync(String resourceGroupName, String accountName) {
        return listByDatabaseAccountWithServiceResponseAsync(resourceGroupName, accountName).map(new Func1<ServiceResponse<List<PrivateEndpointConnectionInner>>, List<PrivateEndpointConnectionInner>>() {
            @Override
            public List<PrivateEndpointConnectionInner> call(ServiceResponse<List<PrivateEndpointConnectionInner>> response) {
                return response.body();
            }
        });
    }
    /**
     * List all private endpoint connections on a Cosmos DB account.
     * Core implementation: validates parameters, issues the REST call and
     * unwraps the server's paged payload into a plain list.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param accountName Cosmos DB database account name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;PrivateEndpointConnectionInner&gt; object
     */
    public Observable<ServiceResponse<List<PrivateEndpointConnectionInner>>> listByDatabaseAccountWithServiceResponseAsync(String resourceGroupName, String accountName) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        // NOTE(review): api-version is 2019-08-01-preview although the package
        // targets v2020_04_01 — confirm against the service specification.
        final String apiVersion = "2019-08-01-preview";
        return service.listByDatabaseAccount(this.client.subscriptionId(), resourceGroupName, accountName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<PrivateEndpointConnectionInner>>>>() {
                @Override
                public Observable<ServiceResponse<List<PrivateEndpointConnectionInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Unwrap the single page returned by the service.
                        ServiceResponse<PageImpl<PrivateEndpointConnectionInner>> result = listByDatabaseAccountDelegate(response);
                        List<PrivateEndpointConnectionInner> items = null;
                        if (result.body() != null) {
                            items = result.body().items();
                        }
                        ServiceResponse<List<PrivateEndpointConnectionInner>> clientResponse = new ServiceResponse<List<PrivateEndpointConnectionInner>>(items, result.response());
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a 200 response into a page of connections; any other
    // status is raised as CloudException.
    private ServiceResponse<PageImpl<PrivateEndpointConnectionInner>> listByDatabaseAccountDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<PrivateEndpointConnectionInner>, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PageImpl<PrivateEndpointConnectionInner>>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
    /**
     * Gets a private endpoint connection.
     * Blocking convenience wrapper around the async variant.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param accountName Cosmos DB database account name.
     * @param privateEndpointConnectionName The name of the private endpoint connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PrivateEndpointConnectionInner object if successful.
     */
    public PrivateEndpointConnectionInner get(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
        return getWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName).toBlocking().single().body();
    }
    /**
     * Gets a private endpoint connection.
     * Callback-style variant.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param accountName Cosmos DB database account name.
     * @param privateEndpointConnectionName The name of the private endpoint connection.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<PrivateEndpointConnectionInner> getAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, final ServiceCallback<PrivateEndpointConnectionInner> serviceCallback) {
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName), serviceCallback);
    }
    /**
     * Gets a private endpoint connection.
     * Observable variant that strips the ServiceResponse wrapper.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param accountName Cosmos DB database account name.
     * @param privateEndpointConnectionName The name of the private endpoint connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PrivateEndpointConnectionInner object
     */
    public Observable<PrivateEndpointConnectionInner> getAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
        return getWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName).map(new Func1<ServiceResponse<PrivateEndpointConnectionInner>, PrivateEndpointConnectionInner>() {
            @Override
            public PrivateEndpointConnectionInner call(ServiceResponse<PrivateEndpointConnectionInner> response) {
                return response.body();
            }
        });
    }
    /**
     * Gets a private endpoint connection.
     * Core implementation: validates parameters and issues the REST call.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param accountName Cosmos DB database account name.
     * @param privateEndpointConnectionName The name of the private endpoint connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PrivateEndpointConnectionInner object
     */
    public Observable<ServiceResponse<PrivateEndpointConnectionInner>> getWithServiceResponseAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (privateEndpointConnectionName == null) {
            throw new IllegalArgumentException("Parameter privateEndpointConnectionName is required and cannot be null.");
        }
        // NOTE(review): api-version is 2019-08-01-preview although the package
        // targets v2020_04_01 — confirm against the service specification.
        final String apiVersion = "2019-08-01-preview";
        return service.get(this.client.subscriptionId(), resourceGroupName, accountName, privateEndpointConnectionName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<PrivateEndpointConnectionInner>>>() {
                @Override
                public Observable<ServiceResponse<PrivateEndpointConnectionInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<PrivateEndpointConnectionInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes a 200 response into a connection; any other status is
    // raised as CloudException.
    private ServiceResponse<PrivateEndpointConnectionInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PrivateEndpointConnectionInner, CloudException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PrivateEndpointConnectionInner>() { }.getType())
            .registerError(CloudException.class)
            .build(response);
    }
/**
 * Approves or rejects the named private endpoint connection, blocking until the
 * long-running operation reaches its terminal state.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param parameters the PrivateEndpointConnectionInner value
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PrivateEndpointConnectionInner object if successful.
 */
public PrivateEndpointConnectionInner createOrUpdate(String resourceGroupName, String accountName, String privateEndpointConnectionName, PrivateEndpointConnectionInner parameters) {
    // last() waits for the final emission of the LRO polling observable.
    final ServiceResponse<PrivateEndpointConnectionInner> terminal =
            createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName, parameters)
                    .toBlocking()
                    .last();
    return terminal.body();
}
/**
 * Approves or rejects the named private endpoint connection, reporting completion
 * through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param parameters the PrivateEndpointConnectionInner value
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<PrivateEndpointConnectionInner> createOrUpdateAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, PrivateEndpointConnectionInner parameters, final ServiceCallback<PrivateEndpointConnectionInner> serviceCallback) {
    final Observable<ServiceResponse<PrivateEndpointConnectionInner>> responseObservable =
            createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName, parameters);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Approves or rejects the named private endpoint connection, emitting only the
 * response body.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param parameters the PrivateEndpointConnectionInner value
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<PrivateEndpointConnectionInner> createOrUpdateAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, PrivateEndpointConnectionInner parameters) {
    // Strip the ServiceResponse wrapper, keeping only the deserialized body.
    final Func1<ServiceResponse<PrivateEndpointConnectionInner>, PrivateEndpointConnectionInner> unwrapBody =
            new Func1<ServiceResponse<PrivateEndpointConnectionInner>, PrivateEndpointConnectionInner>() {
                @Override
                public PrivateEndpointConnectionInner call(ServiceResponse<PrivateEndpointConnectionInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName, parameters).map(unwrapBody);
}
/**
 * Approve or reject a private endpoint connection with a given name.
 * Starts the long-running PUT and returns an observable that polls until the
 * operation reaches a terminal state.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param parameters the PrivateEndpointConnectionInner value
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<PrivateEndpointConnectionInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, PrivateEndpointConnectionInner parameters) {
// Client-side validation of every required path parameter and the request body;
// checks are performed in declaration order, first failure wins.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (accountName == null) {
throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
}
if (privateEndpointConnectionName == null) {
throw new IllegalArgumentException("Parameter privateEndpointConnectionName is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
// Recursive structural validation of the request payload.
Validator.validate(parameters);
// API version is pinned by the generated client, not caller-selectable.
final String apiVersion = "2019-08-01-preview";
Observable<Response<ResponseBody>> observable = service.createOrUpdate(this.client.subscriptionId(), resourceGroupName, accountName, privateEndpointConnectionName, apiVersion, parameters, this.client.acceptLanguage(), this.client.userAgent());
// Delegate LRO polling of the PUT to the shared Azure client runtime.
return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<PrivateEndpointConnectionInner>() { }.getType());
}
/**
 * Issues the initial (non-polling) approve/reject request for the named private
 * endpoint connection and blocks for its single response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param parameters the PrivateEndpointConnectionInner value
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the PrivateEndpointConnectionInner object if successful.
 */
public PrivateEndpointConnectionInner beginCreateOrUpdate(String resourceGroupName, String accountName, String privateEndpointConnectionName, PrivateEndpointConnectionInner parameters) {
    // single() asserts exactly one emission: the initial service response.
    final ServiceResponse<PrivateEndpointConnectionInner> initial =
            beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName, parameters)
                    .toBlocking()
                    .single();
    return initial.body();
}
/**
 * Issues the initial approve/reject request for the named private endpoint
 * connection, reporting completion through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param parameters the PrivateEndpointConnectionInner value
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<PrivateEndpointConnectionInner> beginCreateOrUpdateAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, PrivateEndpointConnectionInner parameters, final ServiceCallback<PrivateEndpointConnectionInner> serviceCallback) {
    final Observable<ServiceResponse<PrivateEndpointConnectionInner>> responseObservable =
            beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName, parameters);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Issues the initial approve/reject request for the named private endpoint
 * connection, emitting only the response body.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param parameters the PrivateEndpointConnectionInner value
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PrivateEndpointConnectionInner object
 */
public Observable<PrivateEndpointConnectionInner> beginCreateOrUpdateAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, PrivateEndpointConnectionInner parameters) {
    // Strip the ServiceResponse wrapper, keeping only the deserialized body.
    final Func1<ServiceResponse<PrivateEndpointConnectionInner>, PrivateEndpointConnectionInner> unwrapBody =
            new Func1<ServiceResponse<PrivateEndpointConnectionInner>, PrivateEndpointConnectionInner>() {
                @Override
                public PrivateEndpointConnectionInner call(ServiceResponse<PrivateEndpointConnectionInner> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName, parameters).map(unwrapBody);
}
/**
 * Approve or reject a private endpoint connection with a given name.
 * Sends the initial PUT only; no LRO polling is performed (see
 * createOrUpdateWithServiceResponseAsync for the polling variant).
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param parameters the PrivateEndpointConnectionInner value
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the PrivateEndpointConnectionInner object
 */
public Observable<ServiceResponse<PrivateEndpointConnectionInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, PrivateEndpointConnectionInner parameters) {
// Client-side validation; checks run in declaration order, first failure wins.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (accountName == null) {
throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
}
if (privateEndpointConnectionName == null) {
throw new IllegalArgumentException("Parameter privateEndpointConnectionName is required and cannot be null.");
}
if (parameters == null) {
throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
}
// Recursive structural validation of the request payload.
Validator.validate(parameters);
// API version is pinned by the generated client, not caller-selectable.
final String apiVersion = "2019-08-01-preview";
return service.beginCreateOrUpdate(this.client.subscriptionId(), resourceGroupName, accountName, privateEndpointConnectionName, apiVersion, parameters, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<PrivateEndpointConnectionInner>>>() {
@Override
public Observable<ServiceResponse<PrivateEndpointConnectionInner>> call(Response<ResponseBody> response) {
try {
// Deserialize the raw HTTP response; any failure is routed
// into the observable's error channel rather than thrown.
ServiceResponse<PrivateEndpointConnectionInner> clientResponse = beginCreateOrUpdateDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
 * Deserializes the raw REST response of a begin-create-or-update call.
 * 200 -> PrivateEndpointConnectionInner body, 202 -> accepted with no body;
 * any other status is raised as an ErrorResponseException.
 */
private ServiceResponse<PrivateEndpointConnectionInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<PrivateEndpointConnectionInner, ErrorResponseException>newInstance(this.client.serializerAdapter())
            .register(200, new TypeToken<PrivateEndpointConnectionInner>() { }.getType())
            .register(202, new TypeToken<Void>() { }.getType())
            .registerError(ErrorResponseException.class)
            .build(response);
}
/**
 * Deletes the named private endpoint connection, blocking until the long-running
 * operation reaches its terminal state.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void delete(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
    // last() waits for the final emission of the LRO polling observable; the
    // body is Void, so only completion (or an error) matters here.
    final Observable<ServiceResponse<Void>> deletion =
            deleteWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName);
    deletion.toBlocking().last().body();
}
/**
 * Deletes the named private endpoint connection, reporting completion through
 * the supplied callback.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> deleteAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, final ServiceCallback<Void> serviceCallback) {
    final Observable<ServiceResponse<Void>> responseObservable =
            deleteWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Deletes the named private endpoint connection, emitting only the (Void)
 * response body.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<Void> deleteAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
    // Strip the ServiceResponse wrapper; the body of a delete is always null.
    final Func1<ServiceResponse<Void>, Void> unwrapBody =
            new Func1<ServiceResponse<Void>, Void>() {
                @Override
                public Void call(ServiceResponse<Void> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return deleteWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName).map(unwrapBody);
}
/**
 * Deletes a private endpoint connection with a given name.
 * Starts the long-running DELETE and returns an observable that polls until
 * the operation reaches a terminal state.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
// Client-side validation; checks run in declaration order, first failure wins.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (accountName == null) {
throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
}
if (privateEndpointConnectionName == null) {
throw new IllegalArgumentException("Parameter privateEndpointConnectionName is required and cannot be null.");
}
// API version is pinned by the generated client, not caller-selectable.
final String apiVersion = "2019-08-01-preview";
Observable<Response<ResponseBody>> observable = service.delete(this.client.subscriptionId(), resourceGroupName, accountName, privateEndpointConnectionName, apiVersion, this.client.acceptLanguage(), this.client.userAgent());
// Delegate LRO polling of the DELETE to the shared Azure client runtime.
return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
}
/**
 * Issues the initial (non-polling) delete request for the named private
 * endpoint connection and blocks for its single response.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws ErrorResponseException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 */
public void beginDelete(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
    // single() asserts exactly one emission: the initial service response.
    final Observable<ServiceResponse<Void>> deletion =
            beginDeleteWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName);
    deletion.toBlocking().single().body();
}
/**
 * Issues the initial delete request for the named private endpoint connection,
 * reporting completion through the supplied callback.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the {@link ServiceFuture} object
 */
public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName, final ServiceCallback<Void> serviceCallback) {
    final Observable<ServiceResponse<Void>> responseObservable =
            beginDeleteWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName);
    return ServiceFuture.fromResponse(responseObservable, serviceCallback);
}
/**
 * Issues the initial delete request for the named private endpoint connection,
 * emitting only the (Void) response body.
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable for the request
 */
public Observable<Void> beginDeleteAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
    // Strip the ServiceResponse wrapper; the body of a delete is always null.
    final Func1<ServiceResponse<Void>, Void> unwrapBody =
            new Func1<ServiceResponse<Void>, Void>() {
                @Override
                public Void call(ServiceResponse<Void> serviceResponse) {
                    return serviceResponse.body();
                }
            };
    return beginDeleteWithServiceResponseAsync(resourceGroupName, accountName, privateEndpointConnectionName).map(unwrapBody);
}
/**
 * Deletes a private endpoint connection with a given name.
 * Sends the initial DELETE only; no LRO polling is performed (see
 * deleteWithServiceResponseAsync for the polling variant).
 *
 * @param resourceGroupName The name of the resource group. The name is case insensitive.
 * @param accountName Cosmos DB database account name.
 * @param privateEndpointConnectionName The name of the private endpoint connection.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @return the observable to the ServiceResponse object
 */
public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String accountName, String privateEndpointConnectionName) {
// Client-side validation; checks run in declaration order, first failure wins.
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (accountName == null) {
throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
}
if (privateEndpointConnectionName == null) {
throw new IllegalArgumentException("Parameter privateEndpointConnectionName is required and cannot be null.");
}
// API version is pinned by the generated client, not caller-selectable.
final String apiVersion = "2019-08-01-preview";
return service.beginDelete(this.client.subscriptionId(), resourceGroupName, accountName, privateEndpointConnectionName, apiVersion, this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
@Override
public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
try {
// Deserialize the raw HTTP response; any failure is routed
// into the observable's error channel rather than thrown.
ServiceResponse<Void> clientResponse = beginDeleteDelegate(response);
return Observable.just(clientResponse);
} catch (Throwable t) {
return Observable.error(t);
}
}
});
}
/**
 * Deserializes the raw REST response of a begin-delete call.
 * 202 (accepted) and 204 (no content) are treated as success with no body;
 * any other status is raised as an ErrorResponseException.
 */
private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws ErrorResponseException, IOException, IllegalArgumentException {
    return this.client.restClient().responseBuilderFactory().<Void, ErrorResponseException>newInstance(this.client.serializerAdapter())
            .register(202, new TypeToken<Void>() { }.getType())
            .register(204, new TypeToken<Void>() { }.getType())
            .registerError(ErrorResponseException.class)
            .build(response);
}
}
| |
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/* Primitive-type-only definitions (values) */
/*
* Copyright (C) 2002-2013 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.chars;
/** A class providing static methods and objects that do useful things with type-specific functions.
 *
 * <P>All wrappers and singletons produced here are serializable; the empty
 * function and singletons are additionally cloneable. This is generated,
 * type-specific code in the style of {@link java.util.Collections}.
 *
 * @see it.unimi.dsi.fastutil.Function
 * @see java.util.Collections
 */
public class Char2FloatFunctions {
// Static utility holder: never instantiated.
private Char2FloatFunctions() {}
/** An immutable class representing an empty type-specific function.
 *
 * <P>This class may be useful to implement your own in case you subclass
 * a type-specific function.
 */
public static class EmptyFunction extends AbstractChar2FloatFunction implements java.io.Serializable, Cloneable {
private static final long serialVersionUID = -7046029254386353129L;
protected EmptyFunction() {}
// An empty function maps nothing: always the zero default, never a hit.
public float get( final char k ) { return (0); }
public boolean containsKey( final char k ) { return false; }
public float defaultReturnValue() { return (0); }
// Immutable: the default return value cannot be changed.
public void defaultReturnValue( final float defRetValue ) { throw new UnsupportedOperationException(); }
// Map-convention lookup: null signals "no mapping" for any object key.
public Float get( final Object k ) { return null; }
public int size() { return 0; }
public void clear() {}
// Deserialization and cloning both funnel back to the shared singleton.
private Object readResolve() { return EMPTY_FUNCTION; }
public Object clone() { return EMPTY_FUNCTION; }
}
/** An empty type-specific function (immutable). It is serializable and cloneable. */
@SuppressWarnings("rawtypes")
public static final EmptyFunction EMPTY_FUNCTION = new EmptyFunction();
/** An immutable class representing a type-specific singleton function.
 *
 * <P>This class may be useful to implement your own in case you subclass
 * a type-specific function.
 */
public static class Singleton extends AbstractChar2FloatFunction implements java.io.Serializable, Cloneable {
private static final long serialVersionUID = -7046029254386353129L;
protected final char key;
protected final float value;
protected Singleton( final char key, final float value ) {
this.key = key;
this.value = value;
}
public boolean containsKey( final char k ) { return ( (key) == (k) ); }
// Hit on the single stored key; otherwise fall back to the (mutable) default.
public float get( final char k ) { if ( ( (key) == (k) ) ) return value; return defRetValue; }
public int size() { return 1; }
// Immutable, so the instance can serve as its own clone.
public Object clone() { return this; }
}
/** Returns a type-specific immutable function containing only the specified pair. The returned function is serializable and cloneable.
 *
 * <P>Note that albeit the returned function is immutable, its default return value may be changed.
 *
 * @param key the only key of the returned function.
 * @param value the only value of the returned function.
 * @return a type-specific immutable function containing just the pair <code>&lt;key,value&gt;</code>.
 */
public static Char2FloatFunction singleton( final char key, float value ) {
return new Singleton ( key, value );
}
/** Returns a type-specific immutable function containing only the specified pair. The returned function is serializable and cloneable.
 *
 * <P>Note that albeit the returned function is immutable, its default return value may be changed.
 *
 * @param key the only key of the returned function.
 * @param value the only value of the returned function.
 * @return a type-specific immutable function containing just the pair <code>&lt;key,value&gt;</code>.
 */
public static Char2FloatFunction singleton( final Character key, final Float value ) {
return new Singleton ( ((key).charValue()), ((value).floatValue()) );
}
/** A synchronized wrapper class for functions.
 *
 * <P>Every call is delegated to the wrapped function while holding the
 * monitor of {@code sync} (the wrapper itself unless one was supplied).
 */
public static class SynchronizedFunction extends AbstractChar2FloatFunction implements java.io.Serializable {
private static final long serialVersionUID = -7046029254386353129L;
protected final Char2FloatFunction function;
// Monitor object guarding every delegated call.
protected final Object sync;
protected SynchronizedFunction( final Char2FloatFunction f, final Object sync ) {
if ( f == null ) throw new NullPointerException();
this.function = f;
this.sync = sync;
}
protected SynchronizedFunction( final Char2FloatFunction f ) {
if ( f == null ) throw new NullPointerException();
this.function = f;
// No external monitor supplied: synchronize on the wrapper itself.
this.sync = this;
}
public int size() { synchronized( sync ) { return function.size(); } }
public boolean containsKey( final char k ) { synchronized( sync ) { return function.containsKey( k ); } }
public float defaultReturnValue() { synchronized( sync ) { return function.defaultReturnValue(); } }
public void defaultReturnValue( final float defRetValue ) { synchronized( sync ) { function.defaultReturnValue( defRetValue ); } }
public float put( final char k, final float v ) { synchronized( sync ) { return function.put( k, v ); } }
public void clear() { synchronized( sync ) { function.clear(); } }
public String toString() { synchronized( sync ) { return function.toString(); } }
public Float put( final Character k, final Float v ) { synchronized( sync ) { return function.put( k, v ); } }
public Float get( final Object k ) { synchronized( sync ) { return function.get( k ); } }
public Float remove( final Object k ) { synchronized( sync ) { return function.remove( k ); } }
public float remove( final char k ) { synchronized( sync ) { return function.remove( k ); } }
public float get( final char k ) { synchronized( sync ) { return function.get( k ); } }
public boolean containsKey( final Object ok ) { synchronized( sync ) { return function.containsKey( ok ); } }
}
/** Returns a synchronized type-specific function backed by the given type-specific function.
 *
 * @param f the function to be wrapped in a synchronized function.
 * @return a synchronized view of the specified function.
 * @see java.util.Collections#synchronizedMap(java.util.Map)
 */
public static Char2FloatFunction synchronize( final Char2FloatFunction f ) { return new SynchronizedFunction ( f ); }
/** Returns a synchronized type-specific function backed by the given type-specific function, using an assigned object to synchronize.
 *
 * @param f the function to be wrapped in a synchronized function.
 * @param sync an object that will be used to synchronize the access to the function.
 * @return a synchronized view of the specified function.
 * @see java.util.Collections#synchronizedMap(java.util.Map)
 */
public static Char2FloatFunction synchronize( final Char2FloatFunction f, final Object sync ) { return new SynchronizedFunction ( f, sync ); }
/** An unmodifiable wrapper class for functions.
 *
 * <P>Read operations delegate to the wrapped function; every mutator throws
 * {@link UnsupportedOperationException}.
 */
public static class UnmodifiableFunction extends AbstractChar2FloatFunction implements java.io.Serializable {
private static final long serialVersionUID = -7046029254386353129L;
protected final Char2FloatFunction function;
protected UnmodifiableFunction( final Char2FloatFunction f ) {
if ( f == null ) throw new NullPointerException();
this.function = f;
}
public int size() { return function.size(); }
public boolean containsKey( final char k ) { return function.containsKey( k ); }
public float defaultReturnValue() { return function.defaultReturnValue(); }
public void defaultReturnValue( final float defRetValue ) { throw new UnsupportedOperationException(); }
public float put( final char k, final float v ) { throw new UnsupportedOperationException(); }
public void clear() { throw new UnsupportedOperationException(); }
public String toString() { return function.toString(); }
public float remove( final char k ) { throw new UnsupportedOperationException(); }
public float get( final char k ) { return function.get( k ); }
public boolean containsKey( final Object ok ) { return function.containsKey( ok ); }
}
/** Returns an unmodifiable type-specific function backed by the given type-specific function.
 *
 * @param f the function to be wrapped in an unmodifiable function.
 * @return an unmodifiable view of the specified function.
 * @see java.util.Collections#unmodifiableMap(java.util.Map)
 */
public static Char2FloatFunction unmodifiable( final Char2FloatFunction f ) { return new UnmodifiableFunction ( f ); }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.ec2.compute.functions;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Predicates.not;
import static com.google.common.base.Strings.emptyToNull;
import static com.google.common.collect.Iterables.filter;
import static org.jclouds.compute.util.ComputeServiceUtils.addMetadataAndParseTagsFromValuesOfEmptyString;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;
import javax.annotation.Resource;
import javax.inject.Singleton;
import org.jclouds.collect.Memoized;
import org.jclouds.compute.domain.Hardware;
import org.jclouds.compute.domain.HardwareBuilder;
import org.jclouds.compute.domain.Image;
import org.jclouds.compute.domain.NodeMetadata;
import org.jclouds.compute.domain.NodeMetadata.Status;
import org.jclouds.compute.domain.NodeMetadataBuilder;
import org.jclouds.compute.domain.Volume;
import org.jclouds.compute.domain.internal.VolumeImpl;
import org.jclouds.compute.functions.GroupNamingConvention;
import org.jclouds.domain.Credentials;
import org.jclouds.domain.Location;
import org.jclouds.domain.LoginCredentials;
import org.jclouds.ec2.compute.domain.RegionAndName;
import org.jclouds.ec2.domain.BlockDevice;
import org.jclouds.ec2.domain.InstanceState;
import org.jclouds.ec2.domain.RootDeviceType;
import org.jclouds.ec2.domain.RunningInstance;
import org.jclouds.logging.Logger;
import org.jclouds.util.InetAddresses2.IsPrivateIPAddress;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSet.Builder;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.google.inject.Inject;
@Singleton
public class RunningInstanceToNodeMetadata implements Function<RunningInstance, NodeMetadata> {
// Injected logger; defaults to the no-op logger until the container wires one in.
@Resource
protected Logger logger = Logger.NULL;
// Memoized supplier of all known locations (regions/zones).
protected final Supplier<Set<? extends Location>> locations;
// Memoized supplier of the hardware profiles (instance types) to match against.
protected final Supplier<Set<? extends Hardware>> hardware;
// Region+image-id keyed cache used to resolve an instance's image/OS lazily.
protected final Supplier<LoadingCache<RegionAndName, ? extends Image>> imageMap;
// Shared store of login credentials, keyed by node identifier.
protected final Map<String, Credentials> credentialStore;
// Maps EC2 instance states onto the portable NodeMetadata.Status values.
protected final Map<InstanceState, Status> instanceToNodeStatus;
// Convention used to derive a node's group from its naming scheme.
protected final GroupNamingConvention.Factory namingConvention;
/**
 * Guice-injected constructor; all collaborators are required and each is
 * null-checked with a message naming the missing dependency.
 *
 * @param instanceToNodeStatus mapping from EC2 instance states to portable statuses
 * @param credentialStore shared login-credential store keyed by node id
 * @param imageMap cache supplier resolving (region, imageId) to an Image
 * @param locations memoized supplier of known locations
 * @param hardware memoized supplier of hardware profiles
 * @param namingConvention factory for the group-naming convention
 * @throws NullPointerException if any argument is null
 */
@Inject
protected RunningInstanceToNodeMetadata(Map<InstanceState, Status> instanceToNodeStatus,
Map<String, Credentials> credentialStore, Supplier<LoadingCache<RegionAndName, ? extends Image>> imageMap,
@Memoized Supplier<Set<? extends Location>> locations, @Memoized Supplier<Set<? extends Hardware>> hardware,
GroupNamingConvention.Factory namingConvention) {
this.locations = checkNotNull(locations, "locations");
this.hardware = checkNotNull(hardware, "hardware");
this.imageMap = checkNotNull(imageMap, "imageMap");
this.instanceToNodeStatus = checkNotNull(instanceToNodeStatus, "instanceToNodeStatus");
this.credentialStore = checkNotNull(credentialStore, "credentialStore");
this.namingConvention = checkNotNull(namingConvention, "namingConvention");
}
@Override
public NodeMetadata apply(RunningInstance instance) {
if (instance == null || instance.getId() == null)
return null;
NodeMetadataBuilder builder = new NodeMetadataBuilder();
builder.name(instance.getTags().get("Name"));
addMetadataAndParseTagsFromValuesOfEmptyString(builder, instance.getTags());
builder.providerId(instance.getId());
builder.id(instance.getRegion() + "/" + instance.getId());
String group = getGroupForInstance(instance);
builder.group(group);
// standard convention from aws-ec2, which might not be re-used outside.
if (instance.getPrivateDnsName() != null)
builder.hostname(instance.getPrivateDnsName().replaceAll("\\..*", ""));
addCredentialsForInstance(builder, instance);
builder.status(instanceToNodeStatus.get(instance.getInstanceState()));
builder.backendStatus(instance.getRawState());
// collect all ip addresses into one bundle in case the api mistakenly put a private address
// into the public address field
Builder<String> addressesBuilder = ImmutableSet.builder();
if (emptyToNull(instance.getIpAddress()) != null)
addressesBuilder.add(instance.getIpAddress());
if (emptyToNull(instance.getPrivateIpAddress()) != null)
addressesBuilder.add(instance.getPrivateIpAddress());
Set<String> addresses = addressesBuilder.build();
builder.publicAddresses(filter(addresses, not(IsPrivateIPAddress.INSTANCE)));
builder.privateAddresses(filter(addresses, IsPrivateIPAddress.INSTANCE));
builder.hardware(parseHardware(instance));
Location location = getLocationForAvailabilityZoneOrRegion(instance);
builder.location(location);
builder.imageId(instance.getRegion() + "/" + instance.getImageId());
// extract the operating system from the image
RegionAndName regionAndName = new RegionAndName(instance.getRegion(), instance.getImageId());
try {
Image image = imageMap.get().getUnchecked(regionAndName);
if (image != null)
builder.operatingSystem(image.getOperatingSystem());
} catch (CacheLoader.InvalidCacheLoadException e) {
logger.debug("image not found for %s: %s", regionAndName, e);
} catch (UncheckedExecutionException e) {
logger.debug("error getting image for %s: %s", regionAndName, e);
}
return builder.build();
}
protected void addCredentialsForInstance(NodeMetadataBuilder builder, RunningInstance instance) {
builder.credentials(LoginCredentials.fromCredentials(credentialStore.get("node#" + instance.getRegion() + "/"
+ instance.getId())));
}
protected Hardware parseHardware(final RunningInstance instance) {
Hardware hardware = getHardwareForInstance(instance);
if (hardware != null) {
hardware = HardwareBuilder.fromHardware(hardware).volumes(addEBS(instance, hardware.getVolumes())).build();
}
return hardware;
}
@VisibleForTesting
static List<Volume> addEBS(final RunningInstance instance, Iterable<? extends Volume> volumes) {
Iterable<Volume> ebsVolumes = Iterables.transform(instance.getEbsBlockDevices().entrySet(),
new Function<Entry<String, BlockDevice>, Volume>() {
@Override
public Volume apply(Entry<String, BlockDevice> from) {
return new VolumeImpl(from.getValue().getVolumeId(), Volume.Type.SAN, null, from.getKey(),
instance.getRootDeviceName() != null
&& instance.getRootDeviceName().equals(from.getKey()), true);
}
});
if (instance.getRootDeviceType() == RootDeviceType.EBS) {
volumes = Iterables.filter(volumes, new Predicate<Volume>() {
@Override
public boolean apply(Volume input) {
return !input.isBootDevice();
}
});
}
return Lists.newArrayList(Iterables.concat(volumes, ebsVolumes));
}
@VisibleForTesting
String getGroupForInstance(final RunningInstance instance) {
String group = parseGroupFrom(instance, instance.getGroupNames());
if (group == null && instance.getKeyName() != null) {
// when not using a generated security group, e.g. in VPC, try from key:
group = parseGroupFrom(instance, Sets.newHashSet(instance.getKeyName()));
}
return group;
}
private String parseGroupFrom(final RunningInstance instance, final Set<String> data) {
String group = null;
try {
Predicate<String> containsAnyGroup = namingConvention.create().containsAnyGroup();
String encodedGroup = Iterables.getOnlyElement(Iterables.filter(data, containsAnyGroup));
group = namingConvention.create().extractGroup(encodedGroup);
} catch (NoSuchElementException e) {
logger.debug("no group parsed from %s's data: %s", instance.getId(), data);
} catch (IllegalArgumentException e) {
logger.debug("too many groups match naming convention; %s's data: %s", instance.getId(), data);
}
return group;
}
@VisibleForTesting
Hardware getHardwareForInstance(final RunningInstance instance) {
try {
return Iterables.find(hardware.get(), new Predicate<Hardware>() {
@Override
public boolean apply(Hardware input) {
return input.getId().equals(instance.getInstanceType());
}
});
} catch (NoSuchElementException e) {
logger.debug("couldn't match instance type %s in: %s", instance.getInstanceType(), hardware.get());
return null;
}
}
private Location getLocationForAvailabilityZoneOrRegion(final RunningInstance instance) {
Location location = findLocationWithId(instance.getAvailabilityZone());
if (location == null)
location = findLocationWithId(instance.getRegion());
return location;
}
private Location findLocationWithId(final String locationId) {
if (locationId == null)
return null;
try {
Location location = Iterables.find(locations.get(), new Predicate<Location>() {
@Override
public boolean apply(Location input) {
return input.getId().equals(locationId);
}
});
return location;
} catch (NoSuchElementException e) {
logger.debug("couldn't match instance location %s in: %s", locationId, locations.get());
return null;
}
}
}
| |
/**
* Copyright 2012, Big Switch Networks, Inc.
* Originally created by David Erickson, Stanford University
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
**/
package net.floodlightcontroller.core.internal;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.annotation.Nonnull;
import net.floodlightcontroller.core.IOFConnection;
import net.floodlightcontroller.core.IOFConnectionBackend;
import net.floodlightcontroller.core.IOFSwitchBackend;
import net.floodlightcontroller.core.LogicalOFMessageCategory;
import net.floodlightcontroller.core.PortChangeEvent;
import net.floodlightcontroller.core.PortChangeType;
import net.floodlightcontroller.core.SwitchDescription;
import net.floodlightcontroller.core.SwitchDriverSubHandshakeAlreadyStarted;
import net.floodlightcontroller.core.SwitchDriverSubHandshakeCompleted;
import net.floodlightcontroller.core.SwitchDriverSubHandshakeNotStarted;
import net.floodlightcontroller.core.util.AppCookie;
import net.floodlightcontroller.core.util.URIUtil;
import org.projectfloodlight.openflow.protocol.OFActionType;
import org.projectfloodlight.openflow.protocol.OFBsnControllerConnection;
import org.projectfloodlight.openflow.protocol.OFBsnControllerConnectionState;
import org.projectfloodlight.openflow.protocol.OFBsnControllerConnectionsReply;
import org.projectfloodlight.openflow.protocol.OFCapabilities;
import org.projectfloodlight.openflow.protocol.OFControllerRole;
import org.projectfloodlight.openflow.protocol.OFFactory;
import org.projectfloodlight.openflow.protocol.OFFeaturesReply;
import org.projectfloodlight.openflow.protocol.OFFlowWildcards;
import org.projectfloodlight.openflow.protocol.OFMessage;
import org.projectfloodlight.openflow.protocol.OFPortConfig;
import org.projectfloodlight.openflow.protocol.OFPortDesc;
import org.projectfloodlight.openflow.protocol.OFPortDescStatsReply;
import org.projectfloodlight.openflow.protocol.OFPortReason;
import org.projectfloodlight.openflow.protocol.OFPortState;
import org.projectfloodlight.openflow.protocol.OFPortStatus;
import org.projectfloodlight.openflow.protocol.OFRequest;
import org.projectfloodlight.openflow.protocol.OFStatsReply;
import org.projectfloodlight.openflow.protocol.OFStatsRequest;
import org.projectfloodlight.openflow.protocol.OFTableFeatures;
import org.projectfloodlight.openflow.protocol.OFTableFeaturesStatsReply;
import org.projectfloodlight.openflow.protocol.OFType;
import org.projectfloodlight.openflow.protocol.OFVersion;
import org.projectfloodlight.openflow.types.DatapathId;
import org.projectfloodlight.openflow.types.OFAuxId;
import org.projectfloodlight.openflow.types.OFPort;
import org.projectfloodlight.openflow.types.TableId;
import org.projectfloodlight.openflow.types.U64;
import net.floodlightcontroller.util.IterableUtils;
import net.floodlightcontroller.util.LinkedHashSetWrapper;
import net.floodlightcontroller.util.OrderedCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* This is the internal representation of an openflow switch.
*/
public class OFSwitch implements IOFSwitchBackend {
protected static final Logger log =
LoggerFactory.getLogger(OFSwitch.class);
protected final ConcurrentMap<Object, Object> attributes;
protected final IOFSwitchManager switchManager;
/* Switch features from initial featuresReply */
protected Set<OFCapabilities> capabilities;
protected long buffers;
protected Set<OFActionType> actions;
protected Collection<TableId> tables;
protected short nTables;
protected final DatapathId datapathId;
private Map<TableId, TableFeatures> tableFeaturesByTableId;
private boolean startDriverHandshakeCalled = false;
private final Map<OFAuxId, IOFConnectionBackend> connections;
private volatile Map<URI, Map<OFAuxId, OFBsnControllerConnection>> controllerConnections;
protected OFFactory factory;
/**
* Members hidden from subclasses
*/
private final PortManager portManager;
private volatile boolean connected;
private volatile OFControllerRole role;
private boolean flowTableFull = false;
protected SwitchDescription description;
private SwitchStatus status;
public static final int OFSWITCH_APP_ID = ident(5);
private TableId maxTableToGetTableMissFlow = TableId.of(4); /* this should cover most HW switches that have a couple SW flow tables */
static {
AppCookie.registerApp(OFSwitch.OFSWITCH_APP_ID, "switch");
}
/**
 * Creates the switch representation bound to its main OpenFlow connection.
 *
 * @param connection the switch's main connection; must carry aux id {@code OFAuxId.MAIN}
 * @param factory message factory for the negotiated OpenFlow version
 * @param switchManager manager consulted for message categories and callbacks
 * @param datapathId datapath id reported by the switch
 * @throws NullPointerException if connection, factory or switchManager is null
 * @throws IllegalArgumentException if connection is not the MAIN connection
 */
public OFSwitch(IOFConnectionBackend connection, @Nonnull OFFactory factory, @Nonnull IOFSwitchManager switchManager,
        @Nonnull DatapathId datapathId) {
    // Explicit null checks preserve precise error messages for each argument.
    // NOTE(review): datapathId is annotated @Nonnull but is not checked here — confirm callers guarantee it.
    if(connection == null)
        throw new NullPointerException("connection must not be null");
    if(!connection.getAuxId().equals(OFAuxId.MAIN))
        throw new IllegalArgumentException("connection must be the main connection");
    if(factory == null)
        throw new NullPointerException("factory must not be null");
    if(switchManager == null)
        throw new NullPointerException("switchManager must not be null");
    this.connected = true;
    this.factory = factory;
    this.switchManager = switchManager;
    this.datapathId = datapathId;
    this.attributes = new ConcurrentHashMap<Object, Object>();
    this.role = null;
    this.description = new SwitchDescription();
    this.portManager = new PortManager();
    // New switches start in the handshake state until the driver finishes.
    this.status = SwitchStatus.HANDSHAKE;
    // Connections: keyed by aux id; the main connection is registered first.
    this.connections = new ConcurrentHashMap<OFAuxId, IOFConnectionBackend>();
    this.connections.put(connection.getAuxId(), connection);
    // Switch's controller connection map starts empty (immutable sentinel).
    this.controllerConnections = ImmutableMap.of();
    // Defaults properties for an ideal switch
    this.setAttribute(PROP_FASTWILDCARDS, EnumSet.allOf(OFFlowWildcards.class));
    this.setAttribute(PROP_SUPPORTS_OFPP_FLOOD, Boolean.TRUE);
    this.setAttribute(PROP_SUPPORTS_OFPP_TABLE, Boolean.TRUE);
    this.tableFeaturesByTableId = new HashMap<TableId, TableFeatures>();
    this.tables = new ArrayList<TableId>();
}
/**
 * Identity pass-through used when initializing {@code OFSWITCH_APP_ID}.
 * Routing the literal through a method call keeps the field from being a
 * compile-time constant — presumably to prevent inlining into other classes;
 * TODO confirm that intent.
 */
private static int ident(int i) {
    final int value = i;
    return value;
}
/** Returns the OpenFlow message factory for this switch's protocol version. */
@Override
public OFFactory getOFFactory() {
    return factory;
}
/**
* Manages the ports of this switch.
*
* Provides methods to query and update the stored ports. The class ensures
* that every port name and port number is unique. When updating ports
* the class checks if port number <-> port name mappings have change due
* to the update. If a new port P has number and port that are inconsistent
* with the previous mapping(s) the class will delete all previous ports
* with name or number of the new port and then add the new port.
*
* Port names are stored as-is but they are compared case-insensitive
*
* The methods that change the stored ports return a list of
* PortChangeEvents that represent the changes that have been applied
* to the port list so that IOFSwitchListeners can be notified about the
* changes.
*
* Implementation notes:
* - We keep several different representations of the ports to allow for
* fast lookups
* - Ports are stored in unchangeable lists. When a port is modified new
* data structures are allocated.
* - We use a read-write-lock for synchronization, so multiple readers are
* allowed.
*/
protected static class PortManager {
    private final ReentrantReadWriteLock lock;
    private List<OFPortDesc> portList;
    private List<OFPortDesc> enabledPortList;
    private List<OFPort> enabledPortNumbers;
    private Map<OFPort,OFPortDesc> portsByNumber;
    // Invariant: keys of portsByName are always lower-cased, so every lookup
    // must lower-case the queried name as well.
    private Map<String,OFPortDesc> portsByName;

    public PortManager() {
        this.lock = new ReentrantReadWriteLock();
        this.portList = Collections.emptyList();
        this.enabledPortList = Collections.emptyList();
        this.enabledPortNumbers = Collections.emptyList();
        this.portsByName = Collections.emptyMap();
        this.portsByNumber = Collections.emptyMap();
    }

    /**
     * Set the internal data structure storing this switch's port
     * to the ports specified by newPortsByNumber
     *
     * CALLER MUST HOLD WRITELOCK
     *
     * @param newPortsByNumber
     * @throws IllegalStateException if called without holding the
     * writelock
     */
    private void updatePortsWithNewPortsByNumber(
            Map<OFPort,OFPortDesc> newPortsByNumber) {
        if (!lock.writeLock().isHeldByCurrentThread()) {
            throw new IllegalStateException("Method called without " +
                    "holding writeLock");
        }
        Map<String,OFPortDesc> newPortsByName =
                new HashMap<String, OFPortDesc>();
        List<OFPortDesc> newPortList =
                new ArrayList<OFPortDesc>();
        List<OFPortDesc> newEnabledPortList =
                new ArrayList<OFPortDesc>();
        List<OFPort> newEnabledPortNumbers = new ArrayList<OFPort>();

        for(OFPortDesc p: newPortsByNumber.values()) {
            newPortList.add(p);
            // Lower-cased key: see class invariant on portsByName.
            newPortsByName.put(p.getName().toLowerCase(), p);
            // Enabled == neither link-down (state) nor admin-down (config).
            if (!p.getState().contains(OFPortState.LINK_DOWN)
                    && !p.getConfig().contains(OFPortConfig.PORT_DOWN)) {
                if (!newEnabledPortList.contains(p)) {
                    newEnabledPortList.add(p);
                }
                if (!newEnabledPortNumbers.contains(p.getPortNo())) {
                    newEnabledPortNumbers.add(p.getPortNo());
                }
            }
        }
        portsByName = Collections.unmodifiableMap(newPortsByName);
        portsByNumber =
                Collections.unmodifiableMap(newPortsByNumber);
        enabledPortList =
                Collections.unmodifiableList(newEnabledPortList);
        enabledPortNumbers =
                Collections.unmodifiableList(newEnabledPortNumbers);
        portList = Collections.unmodifiableList(newPortList);
    }

    /**
     * Handle a OFPortStatus delete message for the given port.
     * Updates the internal port maps/lists of this switch and returns
     * the PortChangeEvents caused by the delete. If the given port
     * exists as it, it will be deleted. If the name<->number for the
     * given port is inconsistent with the ports stored by this switch
     * the method will delete all ports with the number or name of the
     * given port.
     *
     * This method will increment error/warn counters and log
     *
     * @param delPort the port from the port status message that should
     * be deleted.
     * @return ordered collection of port changes applied to this switch
     */
    private OrderedCollection<PortChangeEvent>
            handlePortStatusDelete(OFPortDesc delPort) {
        OrderedCollection<PortChangeEvent> events =
                new LinkedHashSetWrapper<PortChangeEvent>();
        lock.writeLock().lock();
        try {
            Map<OFPort,OFPortDesc> newPortByNumber =
                    new HashMap<OFPort, OFPortDesc>(portsByNumber);
            OFPortDesc prevPort =
                    portsByNumber.get(delPort.getPortNo());
            if (prevPort == null) {
                // no such port number. Do we have a port with the name?
                // FIX: portsByName is keyed by lower-cased names, so the
                // lookup must lower-case too (previously it did not, and
                // mixed-case port names were silently missed here).
                prevPort = portsByName.get(delPort.getName().toLowerCase());
                if (prevPort != null) {
                    newPortByNumber.remove(prevPort.getPortNo());
                    events.add(new PortChangeEvent(prevPort,
                            PortChangeType.DELETE));
                }
            } else if (prevPort.getName().equals(delPort.getName())) {
                // port exists with consistent name-number mapping
                newPortByNumber.remove(delPort.getPortNo());
                events.add(new PortChangeEvent(delPort,
                        PortChangeType.DELETE));
            } else {
                // port with same number exists but its name differs. This
                // is weird. The best we can do is to delete the existing
                // port(s) that have delPort's name and number.
                newPortByNumber.remove(delPort.getPortNo());
                events.add(new PortChangeEvent(prevPort,
                        PortChangeType.DELETE));
                // is there another port that has delPort's name?
                prevPort = portsByName.get(delPort.getName().toLowerCase());
                if (prevPort != null) {
                    newPortByNumber.remove(prevPort.getPortNo());
                    events.add(new PortChangeEvent(prevPort,
                            PortChangeType.DELETE));
                }
            }
            updatePortsWithNewPortsByNumber(newPortByNumber);
            return events;
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Handle a OFPortStatus message, update the internal data structures
     * that store ports and return the list of OFChangeEvents.
     *
     * This method will increment error/warn counters and log
     *
     * @param ps
     * @return
     */
    @SuppressFBWarnings(value="SF_SWITCH_FALLTHROUGH")
    public OrderedCollection<PortChangeEvent> handlePortStatusMessage(OFPortStatus ps) {
        if (ps == null) {
            throw new NullPointerException("OFPortStatus message must " +
                    "not be null");
        }
        lock.writeLock().lock();
        try {
            OFPortDesc port = ps.getDesc();
            OFPortReason reason = ps.getReason();
            if (reason == null) {
                throw new IllegalArgumentException("Unknown PortStatus " +
                        "reason code " + ps.getReason());
            }

            if (log.isDebugEnabled()) {
                log.debug("Handling OFPortStatus: {} for {}",
                        reason, String.format("%s (%d)", port.getName(), port.getPortNo().getPortNumber()));
            }

            if (reason == OFPortReason.DELETE)
                return handlePortStatusDelete(port);

            // We handle ADD and MODIFY the same way. Since OpenFlow
            // doesn't specify what uniquely identifies a port the
            // notion of ADD vs. MODIFY can also be hazy. So we just
            // compare the new port to the existing ones.
            Map<OFPort,OFPortDesc> newPortByNumber =
                    new HashMap<OFPort, OFPortDesc>(portsByNumber);
            OrderedCollection<PortChangeEvent> events = getSinglePortChanges(port);
            for (PortChangeEvent e: events) {
                switch(e.type) {
                    case DELETE:
                        newPortByNumber.remove(e.port.getPortNo());
                        break;
                    case ADD:
                        if (reason != OFPortReason.ADD) {
                            // weird case
                        }
                        // fall through: ADD is applied the same way as updates
                    case DOWN:
                    case OTHER_UPDATE:
                    case UP:
                        // update or add the port in the map
                        newPortByNumber.put(e.port.getPortNo(), e.port);
                        break;
                }
            }
            updatePortsWithNewPortsByNumber(newPortByNumber);
            return events;
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Given a new or modified port newPort, returns the list of
     * PortChangeEvents to "transform" the current ports stored by
     * this switch to include / represent the new port. The ports stored
     * by this switch are <b>NOT</b> updated.
     *
     * This method acquires the readlock and is thread-safe by itself.
     * Most callers will need to acquire the write lock before calling
     * this method though (if the caller wants to update the ports stored
     * by this switch)
     *
     * @param newPort the new or modified port.
     * @return the list of changes
     */
    public OrderedCollection<PortChangeEvent>
            getSinglePortChanges(OFPortDesc newPort) {
        lock.readLock().lock();
        try {
            OrderedCollection<PortChangeEvent> events =
                    new LinkedHashSetWrapper<PortChangeEvent>();
            // Check if we have a port by the same number in our
            // old map.
            OFPortDesc prevPort =
                    portsByNumber.get(newPort.getPortNo());
            if (newPort.equals(prevPort)) {
                // nothing has changed
                return events;
            }

            if (prevPort != null &&
                    prevPort.getName().equals(newPort.getName())) {
                // A simple modify of an existing port
                // A previous port with this number exists and its name
                // also matches the new port. Find the differences
                if ((!prevPort.getState().contains(OFPortState.LINK_DOWN)
                        && !prevPort.getConfig().contains(OFPortConfig.PORT_DOWN))
                        && (newPort.getState().contains(OFPortState.LINK_DOWN)
                        || newPort.getConfig().contains(OFPortConfig.PORT_DOWN))) {
                    // Port was up and is now down (link or admin).
                    events.add(new PortChangeEvent(newPort,
                            PortChangeType.DOWN));
                } else if ((prevPort.getState().contains(OFPortState.LINK_DOWN)
                        || prevPort.getConfig().contains(OFPortConfig.PORT_DOWN))
                        && (!newPort.getState().contains(OFPortState.LINK_DOWN)
                        && !newPort.getConfig().contains(OFPortConfig.PORT_DOWN))) {
                    // Port was down and is now up.
                    events.add(new PortChangeEvent(newPort,
                            PortChangeType.UP));
                } else {
                    // Some other feature of the port changed.
                    events.add(new PortChangeEvent(newPort,
                            PortChangeType.OTHER_UPDATE));
                }
                return events;
            }

            if (prevPort != null) {
                // There exists a previous port with the same port
                // number but the port name is different (otherwise we would
                // never have gotten here)
                // Remove the port. Name-number mapping(s) have changed
                events.add(new PortChangeEvent(prevPort,
                        PortChangeType.DELETE));
            }

            // We now need to check if there exists a previous port sharing
            // the same name as the new/updated port.
            prevPort = portsByName.get(newPort.getName().toLowerCase());
            if (prevPort != null) {
                // There exists a previous port with the same port
                // name but the port number is different (otherwise we
                // never have gotten here).
                // Remove the port. Name-number mapping(s) have changed
                events.add(new PortChangeEvent(prevPort,
                        PortChangeType.DELETE));
            }

            // We always need to add the new port. Either no previous port
            // existed or we just deleted previous ports with inconsistent
            // name-number mappings
            events.add(new PortChangeEvent(newPort, PortChangeType.ADD));
            return events;
        } finally {
            lock.readLock().unlock();
        }
    }

    /**
     * Compare the current ports of this switch to the newPorts list and
     * return the changes that would be applied to transform the current
     * ports to the new ports. No internal data structures are updated
     * see {@link #compareAndUpdatePorts(List, boolean)}
     *
     * @param newPorts the list of new ports
     * @return The list of differences between the current ports and
     * newPortList
     */
    public OrderedCollection<PortChangeEvent>
            comparePorts(Collection<OFPortDesc> newPorts) {
        return compareAndUpdatePorts(newPorts, false);
    }

    /**
     * Compare the current ports of this switch to the newPorts list,
     * replace the stored ports with newPorts, and return the changes
     * that were applied. Unlike {@link #comparePorts(Collection)} this
     * DOES update the internal data structures.
     * see {@link #compareAndUpdatePorts(List, boolean)}
     *
     * @param newPorts the list of new ports
     * @return The list of differences between the current ports and
     * newPortList
     */
    public OrderedCollection<PortChangeEvent>
            updatePorts(Collection<OFPortDesc> newPorts) {
        return compareAndUpdatePorts(newPorts, true);
    }

    /**
     * Compare the current ports stored in this switch instance with the
     * new port list given and return the differences in the form of
     * PortChangeEvents. If the doUpdate flag is true, newPortList will
     * replace the current list of this switch (and update the port maps)
     *
     * Implementation note:
     * Since this method can optionally modify the current ports and
     * since it's not possible to upgrade a read-lock to a write-lock
     * we need to hold the write-lock for the entire operation. If this
     * becomes a problem and if compares() are common we can consider
     * splitting in two methods but this requires lots of code duplication
     *
     * @param newPorts the list of new ports.
     * @param doUpdate If true the newPortList will replace the current
     * port list for this switch. If false this switch will not be changed.
     * @return The list of differences between the current ports and
     * newPorts
     * @throws NullPointerException if newPortsList is null
     * @throws IllegalArgumentException if either port names or port numbers
     * are duplicated in the newPortsList.
     */
    private OrderedCollection<PortChangeEvent> compareAndUpdatePorts(
            Collection<OFPortDesc> newPorts,
            boolean doUpdate) {
        if (newPorts == null) {
            throw new NullPointerException("newPortsList must not be null");
        }
        lock.writeLock().lock();
        try {
            OrderedCollection<PortChangeEvent> events =
                    new LinkedHashSetWrapper<PortChangeEvent>();

            Map<OFPort,OFPortDesc> newPortsByNumber =
                    new HashMap<OFPort, OFPortDesc>();
            Map<String,OFPortDesc> newPortsByName =
                    new HashMap<String, OFPortDesc>();
            List<OFPortDesc> newEnabledPortList =
                    new ArrayList<OFPortDesc>();
            List<OFPort> newEnabledPortNumbers =
                    new ArrayList<OFPort>();
            List<OFPortDesc> newPortsList =
                    new ArrayList<OFPortDesc>(newPorts);

            for (OFPortDesc p: newPortsList) {
                if (p == null) {
                    throw new NullPointerException("portList must not " +
                            "contain null values");
                }

                // Add the port to the new maps and lists and check
                // that every port is unique
                OFPortDesc duplicatePort;
                duplicatePort = newPortsByNumber.put(p.getPortNo(), p);
                if (duplicatePort != null) {
                    String msg = String.format("Cannot have two ports " +
                            "with the same number: %s <-> %s",
                            String.format("%s (%d)", p.getName(), p.getPortNo().getPortNumber()),
                            String.format("%s (%d)", duplicatePort.getName(), duplicatePort.getPortNo().getPortNumber()));
                    throw new IllegalArgumentException(msg);
                }
                duplicatePort =
                        newPortsByName.put(p.getName().toLowerCase(), p);
                if (duplicatePort != null) {
                    String msg = String.format("Cannot have two ports " +
                            "with the same name: %s <-> %s",
                            String.format("%s (%d)", p.getName(), p.getPortNo().getPortNumber()),
                            String.format("%s (%d)", duplicatePort.getName(), duplicatePort.getPortNo().getPortNumber()));
                    throw new IllegalArgumentException(msg);
                }
                // Enabled = not down admin (config) or phys (state)
                if (!p.getConfig().contains(OFPortConfig.PORT_DOWN)
                        && !p.getState().contains(OFPortState.LINK_DOWN)) {
                    if (!newEnabledPortList.contains(p)) {
                        newEnabledPortList.add(p);
                    }
                    if (!newEnabledPortNumbers.contains(p.getPortNo())) {
                        newEnabledPortNumbers.add(p.getPortNo());
                    }
                }

                // get changes
                events.addAll(getSinglePortChanges(p));
            }
            // find deleted ports
            // We need to do this after looping through all the new ports
            // to we can handle changed name<->number mappings correctly
            // We could pull it into the loop of we address this but
            // it's probably not worth it
            for (OFPortDesc oldPort: this.portList) {
                if (!newPortsByNumber.containsKey(oldPort.getPortNo())) {
                    PortChangeEvent ev =
                            new PortChangeEvent(oldPort,
                                    PortChangeType.DELETE);
                    events.add(ev);
                }
            }

            if (doUpdate) {
                portsByName = Collections.unmodifiableMap(newPortsByName);
                portsByNumber =
                        Collections.unmodifiableMap(newPortsByNumber);
                enabledPortList =
                        Collections.unmodifiableList(newEnabledPortList);
                enabledPortNumbers =
                        Collections.unmodifiableList(newEnabledPortNumbers);
                portList = Collections.unmodifiableList(newPortsList);
            }
            return events;
        } finally {
            lock.writeLock().unlock();
        }
    }

    /** Case-insensitive lookup of a port by name; null when unknown. */
    public OFPortDesc getPort(String name) {
        if (name == null) {
            throw new NullPointerException("Port name must not be null");
        }
        lock.readLock().lock();
        try {
            return portsByName.get(name.toLowerCase());
        } finally {
            lock.readLock().unlock();
        }
    }

    /** Lookup of a port by OpenFlow port number; null when unknown. */
    public OFPortDesc getPort(OFPort portNumber) {
        lock.readLock().lock();
        try {
            return portsByNumber.get(portNumber);
        } finally {
            lock.readLock().unlock();
        }
    }

    /** Unmodifiable snapshot of all ports. */
    public List<OFPortDesc> getPorts() {
        lock.readLock().lock();
        try {
            return portList;
        } finally {
            lock.readLock().unlock();
        }
    }

    /** Unmodifiable snapshot of enabled (up) ports. */
    public List<OFPortDesc> getEnabledPorts() {
        lock.readLock().lock();
        try {
            return enabledPortList;
        } finally {
            lock.readLock().unlock();
        }
    }

    /** Unmodifiable snapshot of enabled (up) port numbers. */
    public List<OFPort> getEnabledPortNumbers() {
        lock.readLock().lock();
        try {
            return enabledPortNumbers;
        } finally {
            lock.readLock().unlock();
        }
    }
}
/**
 * Validates outgoing messages against the controller's role. When the
 * controller is SLAVE, state-mutating message types (per OpenFlow version)
 * must not be sent to the switch and are pruned from outgoing batches.
 */
protected static class SwitchRoleMessageValidator {
    // Message types a SLAVE controller must not send, keyed by OF version.
    private static final Map<OFVersion, Set<OFType>> invalidSlaveMsgsByOFVersion;
    static {
        // Each newer version's set is built from the previous version's set
        // plus the types introduced by that version.
        Map<OFVersion, Set<OFType>> m = new HashMap<OFVersion, Set<OFType>>();
        Set<OFType> s = new HashSet<OFType>();
        s.add(OFType.PACKET_OUT);
        s.add(OFType.FLOW_MOD);
        s.add(OFType.PORT_MOD);
        s.add(OFType.TABLE_MOD);
        s.add(OFType.BARRIER_REQUEST);
        m.put(OFVersion.OF_10, Collections.unmodifiableSet(s));

        s = new HashSet<OFType>();
        s.addAll(m.get(OFVersion.OF_10));
        s.add(OFType.GROUP_MOD);
        s.add(OFType.TABLE_MOD);
        m.put(OFVersion.OF_11, Collections.unmodifiableSet(s));

        s = new HashSet<OFType>();
        s.addAll(m.get(OFVersion.OF_11));
        m.put(OFVersion.OF_12, Collections.unmodifiableSet(s));

        s = new HashSet<OFType>();
        s.addAll(m.get(OFVersion.OF_12));
        s.add(OFType.METER_MOD);
        m.put(OFVersion.OF_13, Collections.unmodifiableSet(s));

        s = new HashSet<OFType>();
        s.addAll(m.get(OFVersion.OF_13));
        s.add(OFType.BUNDLE_ADD_MESSAGE);
        s.add(OFType.BUNDLE_CONTROL);
        m.put(OFVersion.OF_14, Collections.unmodifiableSet(s));
        invalidSlaveMsgsByOFVersion = Collections.unmodifiableMap(m);
    }

    /**
     * Sorts any invalid messages by moving them from the msgList. The net result
     * is a new list returned containing the invalid messages and a pruned msgList
     * containing only those messages that are valid for the given role of the controller
     * and OpenFlow version of the switch.
     *
     * @param msgList the list of messages to sort
     * @param valid the list of valid messages (caller must allocate)
     * @param swVersion the OFVersion of the switch
     * @param isActive true if the controller role is active (master or equal/other);
     *        false if the controller is slave
     * @return list of messages that are not valid, removed from input parameter msgList
     */
    protected static Collection<OFMessage> pruneInvalidMessages(Iterable<OFMessage> msgList, Collection<OFMessage> valid, OFVersion swVersion, boolean isActive) {
        if (isActive) { /* master or equal/other support all */
            valid.addAll(IterableUtils.toCollection(msgList));
            return Collections.emptyList();
        } else { /* slave */
            Set<OFType> invalidSlaveMsgs = invalidSlaveMsgsByOFVersion.get(swVersion);
            List<OFMessage> invalid = new ArrayList<OFMessage>();
            Iterator<OFMessage> itr = msgList.iterator();
            while (itr.hasNext()) {
                OFMessage m = itr.next();
                if (invalidSlaveMsgs.contains(m.getType())) {
                    invalid.add(m);
                } else {
                    valid.add(m);
                }
            }
            return invalid;
        }
    }
}
/**
 * Checks whether the stored attribute equals the given value.
 * Absent attributes never compare equal — not even to null.
 */
@Override
public boolean attributeEquals(String name, Object other) {
    final Object stored = this.attributes.get(name);
    return stored != null && stored.equals(other);
}
/** Returns the attribute stored under {@code name}, or null if absent. */
@Override
public Object getAttribute(String name) {
    // returns null if key doesn't exist
    return this.attributes.get(name);
}
/** Stores {@code value} under {@code name}, replacing any previous value. */
@Override
public void setAttribute(String name, Object value) {
    this.attributes.put(name, value);
}
/** Removes and returns the attribute stored under {@code name} (null if absent). */
@Override
public Object removeAttribute(String name) {
    return this.attributes.remove(name);
}
/** Returns true if an attribute is stored under {@code name}. */
@Override
public boolean hasAttribute(String name) {
    return this.attributes.containsKey(name);
}
/**
 * Registers an auxiliary connection, keyed by its aux id.
 * A connection with the same aux id replaces the previous entry.
 */
@Override
public void registerConnection(IOFConnectionBackend connection) {
    this.connections.put(connection.getAuxId(), connection);
}
/** Returns an immutable snapshot of all registered connections. */
@Override
public ImmutableList<IOFConnection> getConnections() {
    return ImmutableList.<IOFConnection> copyOf(this.connections.values());
}
/** Removes all registered connections (main and auxiliary). */
@Override
public void removeConnections() {
    this.connections.clear();
}
/** Removes the registered connection with the same aux id as {@code connection}. */
@Override
public void removeConnection(IOFConnectionBackend connection) {
    this.connections.remove(connection.getAuxId());
}
/**
 * Gets a connection specified by aux Id.
 *
 * @param auxId the specified aux id for the connection desired.
 * @return the aux connection specified by the auxId
 * @throws IllegalArgumentException if no connection is registered under auxId
 */
public IOFConnection getConnection(OFAuxId auxId) {
    final IOFConnection match = this.connections.get(auxId);
    if (match != null) {
        return match;
    }
    throw new IllegalArgumentException("OF Connection for " + this + " with " + auxId + " does not exist.");
}
/**
 * Gets the connection serving the given logical message category.
 * @param category a category previously registered with the switch manager
 * @return the connection bound to the category's aux id
 * @throws IllegalArgumentException when the category is not registered
 */
public IOFConnection getConnection(LogicalOFMessageCategory category) {
// Guard clause: reject unknown categories before touching the connection map.
if (!switchManager.isCategoryRegistered(category)) {
throw new IllegalArgumentException(category + " is not registered with the floodlight provider service.");
}
return getConnection(category.getAuxId());
}
/**
 * Write a single message to the switch
 *
 * @param m the message to write
 * @return true upon success; false upon failure;
 * failure can occur either from sending a message not supported in the current role, or
 * from the channel being disconnected
 */
@Override
public boolean write(OFMessage m) {
// Success iff the list-variant reports no failed messages.
return this.write(Collections.singletonList(m)).isEmpty();
}
/**
 * Write a list of messages to the switch
 *
 * @param msglist list of messages to write
 * @return list of failed messages; messages can fail if sending the messages is not supported
 * in the current role, or from the channel becoming disconnected
 */
@Override
public Collection<OFMessage> write(Iterable<OFMessage> msglist) {
// Default to the MAIN logical category.
return this.write(msglist, LogicalOFMessageCategory.MAIN);
}
@Override
public boolean write(OFMessage m, LogicalOFMessageCategory category) {
// Single-message convenience wrapper over the category-aware list write.
return this.write(Collections.singletonList(m), category).isEmpty();
}
/**
 * Write a list of messages over the connection for the given logical category.
 * Messages not permitted in the current role, plus any that the channel failed
 * to deliver, are returned to the caller.
 *
 * @param msgList messages to send
 * @param category logical category selecting the connection
 * @return the messages that could not be sent (empty on full success)
 */
@Override
public Collection<OFMessage> write(Iterable<OFMessage> msgList, LogicalOFMessageCategory category) {
IOFConnection conn = this.getConnection(category); /* do first to check for supported category */
/* Split the batch into role-valid and role-invalid messages. */
Collection<OFMessage> validMsgs = new ArrayList<OFMessage>();
Collection<OFMessage> invalidMsgs = SwitchRoleMessageValidator.pruneInvalidMessages(
msgList, validMsgs, this.getOFFactory().getVersion(), this.isActive());
if (log.isDebugEnabled()) {
log.debug("MESSAGES: {}, VALID: {}, INVALID: {}", new Object[] { msgList, validMsgs, invalidMsgs});
}
/* Try to write all valid messages */
Collection<OFMessage> unsent = conn.write(validMsgs);
/* Notify listeners only for the messages that actually went out. */
for (OFMessage m : validMsgs) {
if (!unsent.contains(m)) {
switchManager.handleOutgoingMessage(this, m);
}
}
/* Accumulate everything that failed: undelivered first, then role-invalid. */
Collection<OFMessage> failed = new ArrayList<OFMessage>();
if (!unsent.isEmpty()) {
log.warn("Could not send messages {} due to channel disconnection on switch {}", unsent, this.getId());
failed.addAll(IterableUtils.toCollection(unsent));
}
if (!invalidMsgs.isEmpty()) {
log.warn("Could not send messages {} while in SLAVE role on switch {}", invalidMsgs, this.getId());
failed.addAll(IterableUtils.toCollection(invalidMsgs));
}
if (failed.isEmpty()) {
return Collections.emptyList();
}
return failed;
}
@Override
public OFConnection getConnectionByCategory(LogicalOFMessageCategory category){
// Narrowing cast: assumes the registered backend is an OFConnection
// implementation — TODO confirm against connection registration sites.
return (OFConnection) this.getConnection(category);
}
@Override
public <R extends OFMessage> ListenableFuture<R> writeRequest(OFRequest<R> request, LogicalOFMessageCategory category) {
// Routes the request over the connection for the given category
// (getConnection throws IllegalArgumentException for unknown categories).
return getConnection(category).writeRequest(request);
}
@Override
public <R extends OFMessage> ListenableFuture<R> writeRequest(OFRequest<R> request) {
// Convenience overload: use the MAIN logical category.
return writeRequest(request, LogicalOFMessageCategory.MAIN);
}
/**
 * Disconnect every connection to this switch and mark it disconnected.
 * Fix: the previous code removed entries from {@code connections} while
 * iterating its entry set, which is only safe for concurrent maps; we now
 * disconnect everything first and clear the map afterwards, which is safe
 * for any Map implementation and leaves the same end state.
 */
@Override
public void disconnect() {
// Iterate through connections and perform cleanup
for (IOFConnectionBackend conn : this.connections.values()) {
conn.disconnect();
}
this.connections.clear();
log.debug("~~~~~~~SWITCH DISCONNECTED~~~~~~");
// Remove all counters from the store
connected = false;
}
/**
 * Record capabilities, buffer/table counts and (pre-OF1.3 only) ports and
 * actions from the switch's FEATURES_REPLY.
 */
@Override
public void setFeaturesReply(OFFeaturesReply featuresReply) {
// Pre-OF1.3 replies carry the port list inline; only seed ports when we have
// none yet (later updates arrive via OFPortStatus messages).
if (portManager.getPorts().isEmpty() && featuresReply.getVersion().compareTo(OFVersion.OF_13) < 0) {
/* ports are updated via port status message, so we
 * only fill in ports on initial connection.
 */
List<OFPortDesc> OFPortDescs = featuresReply.getPorts();
portManager.updatePorts(OFPortDescs);
}
this.capabilities = featuresReply.getCapabilities();
this.buffers = featuresReply.getNBuffers();
// Actions are only present in the reply before OF1.3.
if (featuresReply.getVersion().compareTo(OFVersion.OF_13) < 0 ) {
/* OF1.3+ Per-table actions are set later in the OFTableFeaturesRequest/Reply */
this.actions = featuresReply.getActions();
}
this.nTables = featuresReply.getNTables();
}
/** Seed the port manager from an OF1.3+ port-description stats reply. */
@Override
public void setPortDescStats(OFPortDescStatsReply reply) {
/* ports are updated via port status message, so we
 * only fill in ports on initial connection.
 */
List<OFPortDesc> OFPortDescs = reply.getEntries();
portManager.updatePorts(OFPortDescs);
}
@Override
public Collection<OFPortDesc> getEnabledPorts() {
// Delegates to the port manager's view of enabled ports.
return portManager.getEnabledPorts();
}
@Override
public Collection<OFPort> getEnabledPortNumbers() {
return portManager.getEnabledPortNumbers();
}
@Override
public OFPortDesc getPort(OFPort portNumber) {
// Lookup by port number; the port manager's contract governs null behavior.
return portManager.getPort(portNumber);
}
@Override
public OFPortDesc getPort(String portName) {
// Lookup by port name.
return portManager.getPort(portName);
}
@Override
public OrderedCollection<PortChangeEvent>
processOFPortStatus(OFPortStatus ps) {
// Apply a PORT_STATUS message and return the resulting change events.
return portManager.handlePortStatusMessage(ps);
}
/**
 * Record per-table features from one or more TABLE_FEATURES stats replies,
 * updating both the per-table feature map and the set of known table ids.
 */
@Override
public void processOFTableFeatures(List<OFTableFeaturesStatsReply> replies) {
/*
 * Parse out all the individual replies for each table.
 */
for (OFTableFeaturesStatsReply reply : replies) {
/*
 * Add or update the features for a particular table.
 */
List<OFTableFeatures> tfs = reply.getEntries();
for (OFTableFeatures tf : tfs) {
tableFeaturesByTableId.put(tf.getTableId(), TableFeatures.of(tf));
tables.add(tf.getTableId());
log.trace("Received TableFeatures for TableId {}, TableName {}", tf.getTableId().toString(), tf.getName());
}
}
}
/**
 * Returns the switch's ports sorted case-insensitively by name.
 * Sorting operates on a private copy, so the port manager's collection
 * is never mutated.
 */
@Override
public Collection<OFPortDesc> getSortedPorts() {
List<OFPortDesc> byName = new ArrayList<OFPortDesc>(portManager.getPorts());
Collections.sort(byName, new Comparator<OFPortDesc>() {
@Override
public int compare(OFPortDesc left, OFPortDesc right) {
// Case-insensitive ordering on the port name.
return left.getName().compareToIgnoreCase(right.getName());
}
});
return byName;
}
@Override
public Collection<OFPortDesc> getPorts() {
// All ports currently known to the port manager.
return portManager.getPorts();
}
@Override
public OrderedCollection<PortChangeEvent>
comparePorts(Collection<OFPortDesc> ports) {
// Diff the given ports against current state without applying changes.
return portManager.comparePorts(ports);
}
@Override
public OrderedCollection<PortChangeEvent>
setPorts(Collection<OFPortDesc> ports) {
// Replace/update the port set and return the resulting change events.
return portManager.updatePorts(ports);
}
@Override
public boolean portEnabled(OFPort portNumber) {
return isPortStateEnabled(portManager.getPort(portNumber));
}
@Override
public boolean portEnabled(String portName) {
return isPortStateEnabled(portManager.getPort(portName));
}
/**
 * Shared check previously duplicated in both portEnabled overloads:
 * a port is enabled iff it exists and its state is neither BLOCKED,
 * LINK_DOWN, nor STP_BLOCK.
 *
 * @param p the port description, or null when the port is unknown
 * @return true when the port exists and is usable
 */
private static boolean isPortStateEnabled(OFPortDesc p) {
if (p == null) return false;
return (!p.getState().contains(OFPortState.BLOCKED) && !p.getState().contains(OFPortState.LINK_DOWN) && !p.getState().contains(OFPortState.STP_BLOCK));
}
@Override
public DatapathId getId() {
// The DPID is only known after the handshake's features reply.
if (datapathId == null)
throw new RuntimeException("Features reply has not yet been set");
return datapathId;
}
@Override
public String toString() {
// "?" stands in for the DPID until the features reply arrives.
return "OFSwitch DPID[" + ((datapathId != null) ? datapathId.toString() : "?") + "]";
}
@Override
public ConcurrentMap<Object, Object> getAttributes() {
// NOTE: exposes the live attribute map; callers can mutate switch attributes.
return this.attributes;
}
@Override
public Date getConnectedSince() {
// Connection time of the MAIN connection.
return this.connections.get(OFAuxId.MAIN).getConnectedSince();
}
@Override
public <REPLY extends OFStatsReply> ListenableFuture<List<REPLY>> writeStatsRequest(OFStatsRequest<REPLY> request) {
// Send over MAIN and tap the reply to update internal switch state.
return addInternalStatsReplyListener(connections.get(OFAuxId.MAIN).writeStatsRequest(request), request);
}
@Override
public <REPLY extends OFStatsReply> ListenableFuture<List<REPLY>> writeStatsRequest(OFStatsRequest<REPLY> request, LogicalOFMessageCategory category) {
// Same as above but over the connection for the given category.
return addInternalStatsReplyListener(getConnection(category).writeStatsRequest(request), request);
}
/**
 * Append a listener to receive an OFStatsReply and update the
 * internal OFSwitch data structures.
 *
 * This presently taps into the following stats request
 * messages to listen for the corresponding reply:
 * -- OFTableFeaturesStatsRequest
 *
 * Extend this to tap into and update other OFStatsType messages.
 *
 * @param future the pending stats reply future to tap
 * @param request the request that produced the future (selects the tap)
 * @return the future, either unmodified or with an additional listener
 */
private <REPLY extends OFStatsReply> ListenableFuture<List<REPLY>> addInternalStatsReplyListener(final ListenableFuture<List<REPLY>> future, OFStatsRequest<REPLY> request) {
switch (request.getStatsType()) {
case TABLE_FEATURES:
/* case YOUR_CASE_HERE */
future.addListener(new Runnable() {
/*
 * We know the reply will be a list of OFStatsReply.
 */
@SuppressWarnings("unchecked")
@Override
public void run() {
/*
 * The OFConnection handles REPLY_MORE for us in the case there
 * are multiple OFStatsReply messages with the same XID.
 */
try {
List<? extends OFStatsReply> replies = future.get();
if (!replies.isEmpty()) {
/*
 * By checking only the 0th element, we assume all others are the same type.
 * TODO If not, what then?
 */
switch (replies.get(0).getStatsType()) {
case TABLE_FEATURES:
/* Reuse 'replies' — the earlier code called future.get() a second time. */
processOFTableFeatures((List<OFTableFeaturesStatsReply>) replies);
break;
/* case YOUR_CASE_HERE */
default:
throw new Exception("Received an invalid OFStatsReply of "
+ replies.get(0).getStatsType().toString() + ". Expected TABLE_FEATURES.");
}
}
} catch (Exception e) {
/* Log instead of printStackTrace() so the error lands in the normal log stream. */
log.error("Error processing OFStatsReply listener", e);
}
}
}, MoreExecutors.sameThreadExecutor()); /* No need for another thread. */
break; /* explicit break: the old fall-through into default was harmless but fragile */
default:
break;
}
return future; /* either unmodified or with an additional listener */
}
/** Cancel every outstanding request on every connection to this switch. */
@Override
public void cancelAllPendingRequests() {
for (IOFConnectionBackend backend : this.connections.values()) {
backend.cancelAllPendingRequests();
}
}
// If any connections are down consider a switch disconnected
@Override
public boolean isConnected() {
return connected;
}
@Override
public boolean isActive() {
// no lock needed since we use volatile
// Active = connected AND holding a role that permits writes (MASTER/EQUAL).
return isConnected() && (this.role == OFControllerRole.ROLE_MASTER || this.role == OFControllerRole.ROLE_EQUAL);
}
@Override
public OFControllerRole getControllerRole() {
return role;
}
@Override
public void setControllerRole(OFControllerRole role) {
// Role changes take effect immediately for subsequent isActive()/write checks.
this.role = role;
}
/**
 * Get the IP Address for the switch
 * @return the inet address
 */
@Override
public SocketAddress getInetAddress() {
// Remote address of the MAIN connection.
return connections.get(OFAuxId.MAIN).getRemoteInetAddress();
}
@Override
public long getBuffers() {
// Number of packet buffers, as reported in the features reply.
return buffers;
}
@Override
public Set<OFActionType> getActions() {
// Only populated from the features reply for pre-OF1.3 switches.
return actions;
}
@Override
public Set<OFCapabilities> getCapabilities() {
return capabilities;
}
/**
 * This performs a copy on each 'get'.
 * Use sparingly for good performance.
 */
@Override
public Collection<TableId> getTables() {
// Defensive copy so callers cannot mutate the internal table-id set.
return new ArrayList<TableId>(tables);
}
@Override
public short getNumTables() {
// Table count from the features reply.
return this.nTables;
}
@Override
public SwitchDescription getSwitchDescription() {
return description;
}
@Override
public void setTableFull(boolean isFull) {
// Only record an event/log on the transition from not-full to full.
if (isFull && !flowTableFull) {
switchManager.addSwitchEvent(this.datapathId,
"SWITCH_FLOW_TABLE_FULL " +
"Table full error from switch", false);
log.warn("Switch {} flow table is full", datapathId.toString());
}
flowTableFull = isFull;
}
@Override
public void startDriverHandshake() {
// Guard against a driver starting the sub-handshake twice.
if (startDriverHandshakeCalled)
throw new SwitchDriverSubHandshakeAlreadyStarted();
startDriverHandshakeCalled = true;
}
@Override
public boolean isDriverHandshakeComplete() {
// Must not be queried before startDriverHandshake().
if (!startDriverHandshakeCalled)
throw new SwitchDriverSubHandshakeNotStarted();
// This base implementation has no sub-handshake, so it is always complete.
return true;
}
@Override
public void processDriverHandshakeMessage(OFMessage m) {
// Base implementation has no sub-handshake: any message here is an error,
// distinguished only by whether the handshake was ever started.
if (startDriverHandshakeCalled)
throw new SwitchDriverSubHandshakeCompleted(m);
else
throw new SwitchDriverSubHandshakeNotStarted();
}
@Override
public void setSwitchProperties(SwitchDescription description) {
// Description arrives from the DESC stats reply during handshake.
this.description = description;
}
@Override
public SwitchStatus getStatus() {
return status;
}
@Override
public void setStatus(SwitchStatus switchStatus) {
this.status = switchStatus;
}
/**
 * Rebuild the controller-connection view from a BSN controller-connections
 * reply: a map of controller URI to (aux id -> connection info), published
 * as an immutable snapshot.
 */
@Override
public void updateControllerConnections(OFBsnControllerConnectionsReply controllerCxnsReply) {
// Build into a fresh map, then publish an immutable copy at the end.
Map<URI,Map<OFAuxId, OFBsnControllerConnection>> temp = new ConcurrentHashMap<URI,Map<OFAuxId, OFBsnControllerConnection>>();
for (OFBsnControllerConnection update : controllerCxnsReply.getConnections()) {
URI uri = URI.create(update.getUri());
Map<OFAuxId, OFBsnControllerConnection> cxns = temp.get(uri);
if (cxns == null) {
// First connection seen for this controller URI: create its nested map.
cxns = new ConcurrentHashMap<OFAuxId, OFBsnControllerConnection>();
temp.put(uri, cxns);
}
cxns.put(update.getAuxiliaryId(), update);
}
this.controllerConnections = ImmutableMap.<URI,Map<OFAuxId, OFBsnControllerConnection>>copyOf(temp);
}
/**
 * True when, according to the switch's reported controller connections,
 * some OTHER controller currently has a connected MAIN connection in the
 * MASTER role.
 */
@Override
public boolean hasAnotherMaster() {
//TODO: refactor get connection to not throw illegal arg exceptions
IOFConnection mainCxn = this.getConnection(OFAuxId.MAIN);
if (mainCxn == null) {
return false;
}
// Determine the local URI so we can skip our own entry below.
InetSocketAddress address = (InetSocketAddress) mainCxn.getLocalInetAddress();
URI localURI = URIUtil.createURI(address.getHostName(), address.getPort());
for (Entry<URI, Map<OFAuxId, OFBsnControllerConnection>> entry : this.controllerConnections.entrySet()) {
URI uri = entry.getKey();
// Don't check our own controller connections
if (localURI.equals(uri)) {
continue;
}
// We only care for the MAIN connection
OFBsnControllerConnection controllerCxn = entry.getValue().get(OFAuxId.MAIN);
if (controllerCxn == null) {
log.warn("Unable to find controller connection with aux id "
+ "MAIN for switch {} on controller with URI {}.",
this, uri);
continue;
}
// A connected MASTER elsewhere means another controller owns this switch.
if (controllerCxn.getState() == OFBsnControllerConnectionState.BSN_CONTROLLER_CONNECTION_STATE_CONNECTED
&& controllerCxn.getRole() == OFControllerRole.ROLE_MASTER) {
return true;
}
}
return false;
}
@Override
public TableFeatures getTableFeatures(TableId table) {
// May be null for tables we have not received TABLE_FEATURES for.
return tableFeaturesByTableId.get(table);
}
@Override
public TableId getMaxTableForTableMissFlow() {
return maxTableToGetTableMissFlow;
}
@Override
public TableId setMaxTableForTableMissFlow(TableId max) {
// Clamp the requested max to the last existing table
// (or table 0 when the switch reports zero tables).
if (max.getValue() >= nTables) {
maxTableToGetTableMissFlow = TableId.of(nTables - 1 < 0 ? 0 : nTables - 1);
} else {
maxTableToGetTableMissFlow = max;
}
return maxTableToGetTableMissFlow;
}
@Override
public U64 getLatency() {
// Latency as measured on the MAIN connection.
return this.connections.get(OFAuxId.MAIN).getLatency();
}
}
| |
package com.github.pires.obd.reader.io;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.SharedPreferences;
import android.os.Binder;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;
import com.github.pires.obd.commands.ObdCommand;
import com.github.pires.obd.commands.protocol.EchoOffObdCommand;
import com.github.pires.obd.commands.protocol.LineFeedOffObdCommand;
import com.github.pires.obd.commands.protocol.ObdResetCommand;
import com.github.pires.obd.commands.protocol.SelectProtocolObdCommand;
import com.github.pires.obd.commands.protocol.TimeoutObdCommand;
import com.github.pires.obd.commands.temperature.AmbientAirTemperatureObdCommand;
import com.github.pires.obd.enums.ObdProtocols;
import com.github.pires.obd.exceptions.UnsupportedCommandException;
import com.github.pires.obd.reader.R;
import com.github.pires.obd.reader.activity.ConfigActivity;
import com.github.pires.obd.reader.activity.MainActivity;
import com.github.pires.obd.reader.io.ObdCommandJob.ObdCommandJobState;
import com.google.inject.Inject;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.UUID;
/**
 * This service is primarily responsible for establishing and maintaining a
 * permanent connection between the device where the application runs and an
 * OBD Bluetooth interface.
 * <p/>
 * Secondarily, it will serve as a repository of ObdCommandJobs and at the same
 * time the application state-machine.
 */
public class ObdGatewayService extends AbstractGatewayService {
private static final String TAG = ObdGatewayService.class.getName();
/*
 * http://developer.android.com/reference/android/bluetooth/BluetoothDevice.html
 * #createRfcommSocketToServiceRecord(java.util.UUID)
 *
 * "Hint: If you are connecting to a Bluetooth serial board then try using the
 * well-known SPP UUID 00001101-0000-1000-8000-00805F9B34FB. However if you
 * are connecting to an Android peer then please generate your own unique
 * UUID."
 */
private static final UUID MY_UUID = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB");
private final IBinder binder = new ObdGatewayServiceBinder();
@Inject
SharedPreferences prefs;
private BluetoothDevice dev = null;
private BluetoothSocket sock = null;
private BluetoothSocket sockFallback = null;
/**
 * Resolve the configured Bluetooth device and establish the OBD connection.
 *
 * @throws IOException when no device is configured or the connection fails;
 *                     the service is stopped before the exception is thrown.
 */
public void startService() throws IOException {
Log.d(TAG, "Starting service..");
// get the remote Bluetooth device
final String remoteDevice = prefs.getString(ConfigActivity.BLUETOOTH_LIST_KEY, null);
if (remoteDevice == null || "".equals(remoteDevice)) {
Toast.makeText(ctx, getString(R.string.text_bluetooth_nodevice), Toast.LENGTH_LONG).show();
// log error
Log.e(TAG, "No Bluetooth device has been selected.");
// TODO kill this service gracefully
stopService();
throw new IOException();
} else {
final BluetoothAdapter btAdapter = BluetoothAdapter.getDefaultAdapter();
dev = btAdapter.getRemoteDevice(remoteDevice);
/*
 * Establish Bluetooth connection
 *
 * Because discovery is a heavyweight procedure for the Bluetooth adapter,
 * this method should always be called before attempting to connect to a
 * remote device with connect(). Discovery is not managed by the Activity,
 * but is run as a system service, so an application should always call
 * cancel discovery even if it did not directly request a discovery, just to
 * be sure. If Bluetooth state is not STATE_ON, this API will return false.
 *
 * see
 * http://developer.android.com/reference/android/bluetooth/BluetoothAdapter
 * .html#cancelDiscovery()
 */
Log.d(TAG, "Stopping Bluetooth discovery.");
btAdapter.cancelDiscovery();
showNotification(getString(R.string.notification_action), getString(R.string.service_starting), R.drawable.ic_btcar, true, true, false);
try {
startObdConnection();
} catch (Exception e) {
Log.e(
TAG,
"There was an error while establishing connection. -> "
+ e.getMessage()
);
// in case of failure, stop this service.
stopService();
throw new IOException();
}
showNotification(getString(R.string.notification_action), getString(R.string.service_started), R.drawable.ic_btcar, true, true, false);
}
/*
 * TODO clean
 *
 * Get more preferences
 */
ArrayList<ObdCommand> cmds = ConfigActivity.getObdCommands(prefs);
}
/**
 * Start and configure the connection to the OBD interface.
 * <p/>
 * See http://stackoverflow.com/questions/18657427/ioexception-read-failed-socket-might-closed-bluetooth-on-android-4-3/18786701#18786701
 *
 * @throws IOException when neither the standard nor the reflective fallback
 *                     connection attempt succeeds
 */
private void startObdConnection() throws IOException {
Log.d(TAG, "Starting OBD connection..");
isRunning = true;
try {
// Instantiate a BluetoothSocket for the remote device and connect it.
sock = dev.createRfcommSocketToServiceRecord(MY_UUID);
sock.connect();
} catch (Exception e1) {
Log.e(TAG, "There was an error while establishing Bluetooth connection. Falling back..", e1);
// BUGFIX: reflect on 'dev' directly. The previous code used
// sock.getRemoteDevice(), which NPEs when socket creation itself
// threw above ('sock' is still null in that case).
Class<?> clazz = dev.getClass();
Class<?>[] paramTypes = new Class<?>[]{Integer.TYPE};
try {
Method m = clazz.getMethod("createRfcommSocket", paramTypes);
Object[] params = new Object[]{Integer.valueOf(1)};
sockFallback = (BluetoothSocket) m.invoke(dev, params);
sockFallback.connect();
sock = sockFallback;
} catch (Exception e2) {
Log.e(TAG, "Couldn't fallback while establishing Bluetooth connection. Stopping app..", e2);
stopService();
throw new IOException();
}
}
// Let's configure the connection.
Log.d(TAG, "Queueing jobs for connection configuration..");
queueJob(new ObdCommandJob(new ObdResetCommand()));
queueJob(new ObdCommandJob(new EchoOffObdCommand()));
/*
 * Will send second-time based on tests.
 *
 * TODO this can be done w/o having to queue jobs by just issuing
 * command.run(), command.getResult() and validate the result.
 */
queueJob(new ObdCommandJob(new EchoOffObdCommand()));
queueJob(new ObdCommandJob(new LineFeedOffObdCommand()));
queueJob(new ObdCommandJob(new TimeoutObdCommand(62)));
// Get protocol from preferences
final String protocol = prefs.getString(ConfigActivity.PROTOCOLS_LIST_KEY, "AUTO");
queueJob(new ObdCommandJob(new SelectProtocolObdCommand(ObdProtocols.valueOf(protocol))));
// Job for returning dummy data
queueJob(new ObdCommandJob(new AmbientAirTemperatureObdCommand()));
queueCounter = 0L;
Log.d(TAG, "Initialization jobs queued.");
}
/**
 * This method will add a job to the queue while setting its ID to the
 * internal queue counter.
 *
 * @param job the job to queue.
 */
@Override
public void queueJob(ObdCommandJob job) {
// This is a good place to enforce the imperial units option
job.getCommand().useImperialUnits(prefs.getBoolean(ConfigActivity.IMPERIAL_UNITS_KEY, false));
// Now we can pass it along
super.queueJob(job);
}
/**
 * Runs the queue until the service is stopped
 *
 * @throws InterruptedException when the worker thread is interrupted while
 *                              blocked on the queue
 */
protected void executeQueue() throws InterruptedException {
Log.d(TAG, "Executing queue..");
while (!Thread.currentThread().isInterrupted()) {
ObdCommandJob job = null;
try {
job = jobsQueue.take();
// log job
Log.d(TAG, "Taking job[" + job.getId() + "] from queue..");
if (job.getState().equals(ObdCommandJobState.NEW)) {
Log.d(TAG, "Job state is NEW. Run it..");
job.setState(ObdCommandJobState.RUNNING);
job.getCommand().run(sock.getInputStream(), sock.getOutputStream());
} else {
// log not new job
Log.e(TAG,
"Job state was not new, so it shouldn't be in queue. BUG ALERT!");
}
} catch (InterruptedException i) {
// Preserve the interrupt flag so the while-condition terminates the loop.
Thread.currentThread().interrupt();
} catch (UnsupportedCommandException u) {
if (job != null) {
job.setState(ObdCommandJobState.NOT_SUPPORTED);
}
Log.d(TAG, "Command not supported. -> " + u.getMessage());
} catch (Exception e) {
if (job != null) {
job.setState(ObdCommandJobState.EXECUTION_ERROR);
}
Log.e(TAG, "Failed to run command. -> " + e.getMessage());
}
if (job != null) {
// Publish job state on the UI thread.
final ObdCommandJob job2 = job;
((MainActivity) ctx).runOnUiThread(new Runnable() {
@Override
public void run() {
((MainActivity) ctx).stateUpdate(job2);
}
});
}
}
}
/**
 * Stop OBD connection and queue processing.
 */
public void stopService() {
Log.d(TAG, "Stopping service..");
notificationManager.cancel(NOTIFICATION_ID);
// BUGFIX: clear() instead of removeAll(jobsQueue) — removing a collection
// from itself while iterating it is fragile; clear() is the intended operation.
jobsQueue.clear();
isRunning = false;
if (sock != null) {
// close socket
try {
sock.close();
} catch (IOException e) {
// BUGFIX: log the throwable itself; Log.e(TAG, e.getMessage()) NPEs
// when the exception carries a null message.
Log.e(TAG, "Error while closing socket.", e);
}
}
// kill service
stopSelf();
}
/** @return true while the service is connected and processing jobs. */
public boolean isRunning() {
return isRunning;
}
/** Binder giving bound clients direct access to this service instance. */
public class ObdGatewayServiceBinder extends Binder {
public ObdGatewayService getService() {
return ObdGatewayService.this;
}
}
}
| |
package com.tvd12.ezyfox.sfs2x.serializer;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.arrayToList;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.charArrayToByteArray;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.charCollectionToPrimitiveByteArray;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.charWrapperArrayToPrimitiveByteArray;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.collectionToPrimitiveByteArray;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.primitiveArrayToBoolCollection;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.primitiveArrayToDoubleCollection;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.primitiveArrayToFloatCollection;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.primitiveArrayToIntCollection;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.primitiveArrayToLongCollection;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.primitiveArrayToShortCollection;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.stringArrayToCollection;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.toPrimitiveByteArray;
import static com.tvd12.ezyfox.core.reflect.ReflectConvertUtil.wrapperArrayToCollection;
import static com.tvd12.ezyfox.core.reflect.ReflectTypeUtil.*;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.List;
import com.smartfoxserver.v2.entities.data.ISFSArray;
import com.smartfoxserver.v2.entities.data.ISFSObject;
import com.smartfoxserver.v2.entities.data.SFSArray;
import com.smartfoxserver.v2.entities.data.SFSDataType;
import com.smartfoxserver.v2.entities.data.SFSDataWrapper;
import com.smartfoxserver.v2.entities.data.SFSObject;
import com.tvd12.ezyfox.core.structure.ClassUnwrapper;
import com.tvd12.ezyfox.core.structure.GetterMethodCover;
import com.tvd12.ezyfox.core.structure.MethodCover;
/**
* Support to serialize a java object to a SFSObject
*
* @author tavandung12
* Created on Jun 1, 2016
*
*/
public class ParameterSerializer {
/**
 * Serialize a java object to a SFSObject
 *
 * @param unwrapper structure of java class
 * @param object the java object
 * @return a SFSObject
 */
public ISFSObject object2params(ClassUnwrapper unwrapper, Object object) {
// Starts from a fresh SFSObject (see parseMethods).
return parseMethods(unwrapper, object);
}
/**
 * Serialize a java object to a SFSObject
 *
 * @param unwrapper structure of java class
 * @param object java object
 * @param result the SFSObject
 * @return the SFSObject
 */
public ISFSObject object2params(ClassUnwrapper unwrapper,
Object object, ISFSObject result) {
List<GetterMethodCover> methods = unwrapper.getMethods();
for(GetterMethodCover method : methods) {
Object value = method.invoke(object);
// Null getter results are skipped entirely — no key is written for them.
if(value == null)
continue;
parseMethod(value, method, result);
}
return result;
}
/**
 * Invoke getter method and add returned value to SFSObject
 *
 * @param unwrapper structure of java class
 * @param object the java object
 * @return the SFSObject
 */
protected ISFSObject parseMethods(ClassUnwrapper unwrapper,
Object object) {
return object2params(unwrapper, object, new SFSObject());
}
/**
 * Serialize a java object to a SFSObject
 *
 * Converts one getter's value into the SFS representation chosen by the
 * method structure (collection, array, nested object, or char-as-byte)
 * and puts it into the target SFSObject under the method's key.
 *
 * @param value value to parse
 * @param method structure of getter method
 * @param sfsObject the SFSObject
 */
@SuppressWarnings("rawtypes")
protected void parseMethod(Object value, GetterMethodCover method,
ISFSObject sfsObject) {
Object answer = value;
if(method.isColection()) {
answer = parseCollection(method, (Collection)value);
}
else if(method.isTwoDimensionsArray()) {
answer = parseTwoDimensionsArray(method, value);
}
else if(method.isArray()) {
answer = parseArray(method, value);
}
else if(method.isObject()) {
answer = parseObject(method, value);
}
else if(method.isChar()) {
// chars are transported as bytes — presumably because SFS has no
// char data type; TODO confirm against SFSDataType.
answer = (byte)(((Character)value).charValue());
}
SFSDataType type = getSFSDataType(method);
sfsObject.put(method.getKey(), new SFSDataWrapper(type, answer));
}
/**
 * Serialize two-dimensions array to ISFSArray
 *
 * Each row (inner array) is converted via parseArrayOfTwoDimensionsArray
 * and wrapped with the element data type derived from the method structure.
 *
 * @param method method's structure
 * @param array the two-dimensions array
 * @return ISFSArray object
 */
protected Object parseTwoDimensionsArray(GetterMethodCover method,
Object array) {
ISFSArray answer = new SFSArray();
int size = Array.getLength(array);
// Hoisted out of the loop: the data type depends only on the method
// structure, not on the element index (was recomputed every iteration).
SFSDataType dtype = getSFSArrayDataType(method);
for(int i = 0 ; i < size ; i++) {
Object value = parseArrayOfTwoDimensionsArray(method, Array.get(array, i));
answer.add(new SFSDataWrapper(dtype, value));
}
return answer;
}
/**
 * Convert one row (inner array) of a two-dimensions array into its SFS-friendly
 * form, dispatching on the inner component type: objects become SFSArrays,
 * char/Byte/Character arrays become primitive byte arrays, and all other
 * primitive/wrapper/String arrays become collections. Unrecognized component
 * types are returned unchanged.
 *
 * @param method structure of the getter method (supplies the component type)
 * @param array one inner array of the two-dimensions array
 * @return the converted row
 */
protected Object parseArrayOfTwoDimensionsArray(GetterMethodCover method, Object array) {
Class<?> type = method.getComponentType().getComponentType();
if(isObject(type)) {
return parseObjectArray(method, (Object[])array);
}
else if(isPrimitiveBool(type)) {
return primitiveArrayToBoolCollection((boolean[])array);
}
else if(isPrimitiveChar(type)) {
return charArrayToByteArray((char[])array);
}
else if(isPrimitiveDouble(type)) {
return primitiveArrayToDoubleCollection((double[])array);
}
else if(isPrimitiveFloat(type)) {
return primitiveArrayToFloatCollection((float[])array);
}
else if(isPrimitiveInt(type)) {
return primitiveArrayToIntCollection((int[])array);
}
else if(isPrimitiveLong(type)) {
return primitiveArrayToLongCollection((long[])array);
}
else if(isPrimitiveShort(type)) {
return primitiveArrayToShortCollection((short[])array);
}
else if(isString(type)) {
return stringArrayToCollection((String[])array);
}
else if(isWrapperBool(type)) {
return wrapperArrayToCollection((Boolean[])array);
}
else if(isWrapperByte(type)) {
return toPrimitiveByteArray((Byte[])array);
}
else if(isWrapperChar(type)) {
return charWrapperArrayToPrimitiveByteArray((Character[])array);
}
else if(isWrapperDouble(type)) {
return wrapperArrayToCollection((Double[])array);
}
else if(isWrapperFloat(type)) {
return wrapperArrayToCollection((Float[])array);
}
else if(isWrapperInt(type)) {
return wrapperArrayToCollection((Integer[])array);
}
else if(isWrapperLong(type)) {
return wrapperArrayToCollection((Long[])array);
}
else if(isWrapperShort(type)) {
return wrapperArrayToCollection((Short[])array);
}
// Fallback: pass the row through untouched.
return array;
}
/**
 * Convert array of values to collection of values
 *
 * Mirrors parseArrayOfTwoDimensionsArray but dispatches on the method's own
 * array-kind predicates instead of a component type. char/Byte/Character
 * arrays become primitive byte arrays; other primitive/wrapper/String arrays
 * become collections; object arrays become SFSArrays. Unrecognized arrays
 * are returned unchanged.
 *
 * @param method structure of getter method
 * @param array the array of values
 * @return the collection of values
 */
protected Object parseArray(GetterMethodCover method,
Object array) {
if(method.isObjectArray()) {
return parseObjectArray(method, (Object[])array);
}
else if(method.isPrimitiveBooleanArray()) {
return primitiveArrayToBoolCollection((boolean[])array);
}
else if(method.isPrimitiveCharArray()) {
return charArrayToByteArray((char[])array);
}
else if(method.isPrimitiveDoubleArray()) {
return primitiveArrayToDoubleCollection((double[])array);
}
else if(method.isPrimitiveFloatArray()) {
return primitiveArrayToFloatCollection((float[])array);
}
else if(method.isPrimitiveIntArray()) {
return primitiveArrayToIntCollection((int[])array);
}
else if(method.isPrimitiveLongArray()) {
return primitiveArrayToLongCollection((long[])array);
}
else if(method.isPrimitiveShortArray()) {
return primitiveArrayToShortCollection((short[])array);
}
else if(method.isStringArray()) {
return stringArrayToCollection((String[])array);
}
else if(method.isWrapperBooleanArray()) {
return wrapperArrayToCollection((Boolean[])array);
}
else if(method.isWrapperByteArray()) {
return toPrimitiveByteArray((Byte[])array);
}
else if(method.isWrapperCharArray()) {
return charWrapperArrayToPrimitiveByteArray((Character[])array);
}
else if(method.isWrapperDoubleArray()) {
return wrapperArrayToCollection((Double[])array);
}
else if(method.isWrapperFloatArray()) {
return wrapperArrayToCollection((Float[])array);
}
else if(method.isWrapperIntArray()) {
return wrapperArrayToCollection((Integer[])array);
}
else if(method.isWrapperLongArray()) {
return wrapperArrayToCollection((Long[])array);
}
else if(method.isWrapperShortArray()) {
return wrapperArrayToCollection((Short[])array);
}
// Fallback: pass the array through untouched.
return array;
}
/**
 * Parse collection of values and get the value mapped to smartfox value
 *
 * Collections of objects / arrays become SFSArrays; byte and char collections
 * become primitive byte arrays; any other collection is passed through as-is.
 *
 * @param method structure of getter method
 * @param collection collection of value
 * @return the value after parsed
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
protected Object parseCollection(GetterMethodCover method,
Collection collection) {
if(method.isArrayObjectCollection()) {
return parseArrayObjectCollection(method, collection);
}
else if(method.isObjectCollection()) {
return parseObjectCollection(method, collection);
}
else if(method.isByteCollection()) {
return collectionToPrimitiveByteArray(collection);
}
else if(method.isCharCollection()) {
return charCollectionToPrimitiveByteArray(collection);
}
else if(method.isArrayCollection()) {
return parseArrayCollection(method, collection);
}
// Fallback: the collection is already SFS-compatible.
return collection;
}
/**
 * Serializes a plain Java object into an SFSObject, using the class
 * structure of the getter's declared return type.
 *
 * @param method structure of getter method
 * @param object the object to serialize
 * @return the resulting SFSObject
 */
protected ISFSObject parseObject(GetterMethodCover method, Object object) {
    ClassUnwrapper unwrapper = method.getReturnClass();
    return object2params(unwrapper, object);
}
/**
 * Serializes an array of objects into an SFSArray, using the class
 * structure of the getter's declared return type.
 *
 * @param method structure of getter method
 * @param array the objects to serialize
 * @return the resulting SFSArray
 */
protected ISFSArray parseObjectArray(GetterMethodCover method, Object[] array) {
    return parseObjectArray(method.getReturnClass(), array);
}
/**
 * Serializes an array of objects into an SFSArray, one SFSObject entry
 * per element.
 *
 * @param unwrapper structure of the element class
 * @param array the objects to serialize
 * @return the resulting SFSArray
 */
private ISFSArray parseObjectArray(ClassUnwrapper unwrapper, Object[] array) {
    ISFSArray answer = new SFSArray();
    for (int i = 0; i < array.length; ++i) {
        answer.addSFSObject(object2params(unwrapper, array[i]));
    }
    return answer;
}
/**
 * Serializes every element of a collection into an SFSObject and gathers
 * the results in a single SFSArray.
 *
 * @param method structure of getter method
 * @param collection the objects to serialize
 * @return the resulting SFSArray
 */
@SuppressWarnings({ "rawtypes" })
protected ISFSArray parseObjectCollection(GetterMethodCover method,
        Collection collection) {
    ISFSArray answer = new SFSArray();
    for (Object element : collection) {
        answer.addSFSObject(parseObject(method, element));
    }
    return answer;
}
/**
 * Serializes a collection whose elements are themselves object arrays;
 * each element becomes a nested SFSArray inside the result.
 *
 * @param method structure of getter method
 * @param collection the object arrays to serialize
 * @return the resulting SFSArray of SFSArrays
 */
@SuppressWarnings({ "rawtypes" })
protected ISFSArray parseArrayObjectCollection(GetterMethodCover method,
        Collection collection) {
    ISFSArray answer = new SFSArray();
    for (Object element : collection) {
        Object[] innerArray = (Object[]) element;
        answer.addSFSArray(parseObjectArray(method, innerArray));
    }
    return answer;
}
/**
 * Serializes a collection whose elements are arrays of simple values.
 * char/Byte/Character arrays are normalized to primitive byte arrays;
 * any other array is turned into a List before being wrapped.
 *
 * @param method structure of getter method
 * @param collection the arrays to serialize
 * @return the resulting SFSArray of wrapped values
 */
@SuppressWarnings("rawtypes")
protected ISFSArray parseArrayCollection(GetterMethodCover method,
        Collection collection) {
    // Both the wrapper data type and the component type are invariant
    // across elements, so resolve them once up front.
    SFSDataType wrapperType = ParamTypeParser
            .getParamType(method.getGenericType());
    Class<?> componentType = method.getGenericType().getComponentType();
    ISFSArray answer = new SFSArray();
    for (Object element : collection) {
        Object converted;
        if (isPrimitiveChar(componentType))
            converted = charArrayToByteArray((char[]) element);
        else if (isWrapperByte(componentType))
            converted = toPrimitiveByteArray((Byte[]) element);
        else if (isWrapperChar(componentType))
            converted = charWrapperArrayToPrimitiveByteArray((Character[]) element);
        else
            converted = arrayToList(element);
        answer.add(new SFSDataWrapper(wrapperType, converted));
    }
    return answer;
}
/**
 * Maps the value kind reported by a getter method onto the matching
 * {@link SFSDataType} constant.
 *
 * <p>Adjacent checks that yield the same constant are merged with
 * short-circuit OR, which preserves the original predicate order.</p>
 *
 * @param method structure of getter method
 * @return the SmartFox data type used to transport the value
 */
private SFSDataType getSFSDataType(MethodCover method) {
    // Scalars. A char has no SmartFox counterpart and travels as a byte.
    if (method.isBoolean()) return SFSDataType.BOOL;
    if (method.isByte() || method.isChar()) return SFSDataType.BYTE;
    if (method.isDouble()) return SFSDataType.DOUBLE;
    if (method.isFloat()) return SFSDataType.FLOAT;
    if (method.isInt()) return SFSDataType.INT;
    if (method.isLong()) return SFSDataType.LONG;
    if (method.isShort()) return SFSDataType.SHORT;
    if (method.isString()) return SFSDataType.UTF_STRING;
    if (method.isObject()) return SFSDataType.SFS_OBJECT;
    // Arrays. Char arrays are byte arrays on the wire as well.
    if (method.isBooleanArray()) return SFSDataType.BOOL_ARRAY;
    if (method.isByteArray() || method.isCharArray()) return SFSDataType.BYTE_ARRAY;
    if (method.isDoubleArray()) return SFSDataType.DOUBLE_ARRAY;
    if (method.isFloatArray()) return SFSDataType.FLOAT_ARRAY;
    if (method.isIntArray()) return SFSDataType.INT_ARRAY;
    if (method.isLongArray()) return SFSDataType.LONG_ARRAY;
    if (method.isShortArray()) return SFSDataType.SHORT_ARRAY;
    if (method.isStringArray()) return SFSDataType.UTF_STRING_ARRAY;
    if (method.isObjectArray()) return SFSDataType.SFS_ARRAY;
    // Collections share the array wire types.
    if (method.isBooleanCollection()) return SFSDataType.BOOL_ARRAY;
    if (method.isByteCollection() || method.isCharCollection()) return SFSDataType.BYTE_ARRAY;
    if (method.isDoubleCollection()) return SFSDataType.DOUBLE_ARRAY;
    if (method.isFloatCollection()) return SFSDataType.FLOAT_ARRAY;
    if (method.isIntCollection()) return SFSDataType.INT_ARRAY;
    if (method.isLongCollection()) return SFSDataType.LONG_ARRAY;
    if (method.isShortCollection()) return SFSDataType.SHORT_ARRAY;
    if (method.isStringCollection()) return SFSDataType.UTF_STRING_ARRAY;
    if (method.isObjectCollection() || method.isArrayObjectCollection())
        return SFSDataType.SFS_ARRAY;
    // Anything else is shipped as a nested SFSArray.
    return SFSDataType.SFS_ARRAY;
}
/**
 * Maps the innermost component type of a two-dimensional getter value
 * (e.g. the {@code int} in {@code int[][]}) onto the {@link SFSDataType}
 * used for each nested array.
 *
 * @param method structure of getter method
 * @return the SmartFox data type of each nested element array
 */
protected SFSDataType getSFSArrayDataType(MethodCover method) {
    Class<?> element = method.getComponentType().getComponentType();
    if (isBool(element)) { return SFSDataType.BOOL_ARRAY; }
    // byte and char both travel as byte arrays
    if (isByte(element)) { return SFSDataType.BYTE_ARRAY; }
    if (isChar(element)) { return SFSDataType.BYTE_ARRAY; }
    if (isDouble(element)) { return SFSDataType.DOUBLE_ARRAY; }
    if (isFloat(element)) { return SFSDataType.FLOAT_ARRAY; }
    if (isInt(element)) { return SFSDataType.INT_ARRAY; }
    if (isLong(element)) { return SFSDataType.LONG_ARRAY; }
    if (isShort(element)) { return SFSDataType.SHORT_ARRAY; }
    if (isString(element)) { return SFSDataType.UTF_STRING_ARRAY; }
    return SFSDataType.SFS_ARRAY;
}
}
| |
/**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.m3bp.mirror.jna;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Consumer;
import java.util.function.Supplier;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExternalResource;
import org.junit.rules.TestRule;
import com.asakusafw.dag.api.common.ObjectCursor;
import com.asakusafw.dag.api.processor.GroupReader;
import com.asakusafw.dag.api.processor.ObjectReader;
import com.asakusafw.dag.api.processor.ObjectWriter;
import com.asakusafw.dag.api.processor.TaskProcessor;
import com.asakusafw.dag.api.processor.TaskSchedule;
import com.asakusafw.dag.api.processor.VertexProcessor;
import com.asakusafw.dag.api.processor.VertexProcessorContext;
import com.asakusafw.dag.api.processor.basic.BasicProcessorContext;
import com.asakusafw.dag.api.processor.basic.BasicTaskInfo;
import com.asakusafw.dag.api.processor.basic.BasicTaskSchedule;
import com.asakusafw.dag.api.processor.basic.CoGroupReader;
import com.asakusafw.lang.utils.common.Arguments;
import com.asakusafw.lang.utils.common.Lang;
import com.asakusafw.lang.utils.common.Optionals;
import com.asakusafw.lang.utils.common.Suppliers;
import com.asakusafw.lang.utils.common.Tuple;
import com.asakusafw.m3bp.descriptor.Descriptors;
import com.asakusafw.m3bp.descriptor.M3bpEdgeDescriptor;
import com.asakusafw.m3bp.descriptor.M3bpVertexDescriptor;
import com.asakusafw.m3bp.mirror.PortMirror;
import com.asakusafw.m3bp.mirror.VertexMirror;
import com.asakusafw.m3bp.mirror.basic.BasicVertexMirror;
import com.sun.jna.Memory;
/**
 * Test for {@link VertexExecutor}.
 *
 * Each test wires a {@link VertexProcessor} into a {@link VertexMirror}
 * via the static {@link Bridge}, feeds it in-memory JNA buffers through
 * the {@link IO} stub, and runs the executor on a shared thread pool.
 */
public class VertexExecutorTest {
/**
 * cleanup tests.
 */
@Rule
public final TestRule CLEANER = new ExternalResource() {
@Override
protected void after() {
// stop the shared worker pool after every test
threads.shutdownNow();
}
};
// shared by all test cases; torn down by the rule above
final BasicProcessorContext root = new BasicProcessorContext(getClass().getClassLoader());
final ExecutorService threads = Executors.newCachedThreadPool();
/**
 * simple case.
 * @throws Exception if failed
 */
@Test
public void simple() throws Exception {
List<String> results = Collections.synchronizedList(new ArrayList<>());
VertexMirror vertex = new BasicVertexMirror("v", Bridge.wrap(new VertexProcessor() {
@Override
public Optional<? extends TaskSchedule> initialize(VertexProcessorContext c) {
// one explicit task, no inputs
return Optionals.of(new BasicTaskSchedule(new BasicTaskInfo()));
}
@Override
public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
return c -> results.add("OK");
}
}));
IO io = new IO();
VertexExecutor exec = new VertexExecutor(root, vertex, io, threads, 1);
exec.run();
assertThat(results, contains("OK"));
}
/**
 * w/ input.
 * @throws Exception if failed
 */
@Test
public void input() throws Exception {
List<Integer> results = Collections.synchronizedList(new ArrayList<>());
VertexMirror vertex = new BasicVertexMirror("v", Bridge.wrap(new VertexProcessor() {
@Override
public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
return context -> {
// drain the one-to-one input port and record each value
try (ObjectReader reader = (ObjectReader) context.getInput("port")) {
while (reader.nextObject()) {
results.add((Integer) reader.getObject());
}
}
};
}
}));
IO io = new IO();
PortMirror port = vertex.addInput("port", io.newOneToOne());
io.input(port, new int[] { 100, 200, 300, });
VertexExecutor exec = new VertexExecutor(root, vertex, io, threads, 1);
exec.run();
assertThat(results, containsInAnyOrder(100, 200, 300));
}
/**
 * w/ gather input.
 * @throws Exception if failed
 */
@SuppressWarnings("unchecked")
@Test
public void gather() throws Exception {
List<Tuple<List<Integer>, List<Integer>>> results = Collections.synchronizedList(new ArrayList<>());
VertexMirror vertex = new BasicVertexMirror("v", Bridge.wrap(new VertexProcessor() {
@Override
public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
return context -> {
// co-group the two scatter-gather inputs and dump each group pair
try (GroupReader g0 = (GroupReader) context.getInput("p0");
GroupReader g1 = (GroupReader) context.getInput("p1");
CoGroupReader reader = new CoGroupReader(g0, g1)) {
while (reader.nextCoGroup()) {
results.add(new Tuple<>(dump(reader.getGroup(0)), dump(reader.getGroup(1))));
}
}
};
}
}));
IO io = new IO();
PortMirror p0 = vertex.addInput("p0", io.newScatterGather());
PortMirror p1 = vertex.addInput("p1", io.newScatterGather());
// key/value pairs: p0 has keys {1, 2}, p1 has keys {2, 3};
// only key 2 appears on both sides
io.inputPairs(p0, 1, 100, 2, 200);
io.inputPairs(p1, 2, 201, 3, 301);
VertexExecutor exec = new VertexExecutor(root, vertex, io, threads, 1);
exec.run();
assertThat(results, containsInAnyOrder(
new Tuple<>(Arrays.asList(100), Arrays.asList()),
new Tuple<>(Arrays.asList(200), Arrays.asList(201)),
new Tuple<>(Arrays.asList(), Arrays.asList(301))));
}
/**
 * w/ broadcast.
 * @throws Exception if failed
 */
@Test
public void broadcast() throws Exception {
List<Integer> main = Collections.synchronizedList(new ArrayList<>());
List<Integer> broadcast = Collections.synchronizedList(new ArrayList<>());
VertexMirror vertex = new BasicVertexMirror("v", Bridge.wrap(new VertexProcessor() {
@Override
public Optional<? extends TaskSchedule> initialize(VertexProcessorContext context) throws IOException, InterruptedException {
// broadcast inputs are consumed once, during initialization
try (ObjectReader reader = (ObjectReader) context.getInput("broadcast")) {
broadcast.addAll(dump(reader));
}
return Optionals.of(null);
}
@Override
public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
return context -> {
try (ObjectReader reader = (ObjectReader) context.getInput("main")) {
main.addAll(dump(reader));
}
};
}
}));
IO io = new IO();
PortMirror mainPort = vertex.addInput("main", io.newOneToOne());
io.input(mainPort, 100).input(mainPort, 200).input(mainPort, 300);
PortMirror broadcastPort = vertex.addInput("broadcast", io.newBroadcast());
io.input(broadcastPort, 10, 20, 30);
VertexExecutor exec = new VertexExecutor(root, vertex, io, threads, 1);
exec.run();
assertThat(main, containsInAnyOrder(100, 200, 300));
assertThat(broadcast, containsInAnyOrder(10, 20, 30));
}
/**
 * w/ output.
 * @throws Exception if failed
 */
@Test
public void output() throws Exception {
VertexMirror vertex = new BasicVertexMirror("v", Bridge.wrap(new VertexProcessor() {
@Override
public Optional<? extends TaskSchedule> initialize(VertexProcessorContext context) {
return Optionals.of(new BasicTaskSchedule(new BasicTaskInfo()));
}
@Override
public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
return context -> {
try (ObjectWriter writer = (ObjectWriter) context.getOutput("port")) {
writer.putObject(100);
}
};
}
}));
IO io = new IO();
PortMirror output = vertex.addOutput("port", io.newOneToOne());
VertexExecutor exec = new VertexExecutor(root, vertex, io, threads, 1);
exec.run();
List<Integer> values = io.getOutputValues(output);
assertThat(values, containsInAnyOrder(100));
}
/**
 * w/ scatter output.
 * @throws Exception if failed
 */
@SuppressWarnings("unchecked")
@Test
public void scatter() throws Exception {
VertexMirror vertex = new BasicVertexMirror("v", Bridge.wrap(new VertexProcessor() {
@Override
public Optional<? extends TaskSchedule> initialize(VertexProcessorContext context) {
return Optionals.of(new BasicTaskSchedule(new BasicTaskInfo()));
}
@Override
public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
return context -> {
try (ObjectWriter writer = (ObjectWriter) context.getOutput("p0")) {
writer.putObject(100);
}
try (ObjectWriter writer = (ObjectWriter) context.getOutput("p1")) {
writer.putObject(201);
writer.putObject(301);
}
};
}
}));
IO io = new IO();
PortMirror p0 = vertex.addOutput("p0", io.newScatterGather());
PortMirror p1 = vertex.addOutput("p1", io.newScatterGather());
VertexExecutor exec = new VertexExecutor(root, vertex, io, threads, 1);
exec.run();
// the expected tuples pair each value with itself — presumably IntSerDe
// derives the key from the value; verify against IntSerDe if this changes
assertThat(io.getOutputPairs(p0), containsInAnyOrder(new Tuple<>(100, 100)));
assertThat(io.getOutputPairs(p1), containsInAnyOrder(new Tuple<>(201, 201), new Tuple<>(301, 301)));
}
/**
 * A {@link VertexProcessor} bridge.
 *
 * The descriptor created by {@link #wrap(VertexProcessor)} names this
 * class, and the actual processor is handed over through a single static
 * slot. NOTE(review): this assumes tests run sequentially — concurrent
 * use would clobber {@code delegate}.
 */
public static class Bridge implements VertexProcessor {
static VertexProcessor delegate;
static M3bpVertexDescriptor wrap(VertexProcessor processor) {
delegate = processor;
return Descriptors.newVertex(Bridge.class);
}
@Override
public Optional<? extends TaskSchedule> initialize(VertexProcessorContext context) throws IOException, InterruptedException {
return delegate.initialize(context);
}
@Override
public TaskProcessor createTaskProcessor() throws IOException, InterruptedException {
return delegate.createTaskProcessor();
}
@Override
public void close() throws IOException, InterruptedException {
delegate.close();
// release the static slot for the next test
delegate = null;
}
}
// drains a cursor into a list of Integers, tunneling checked exceptions
static List<Integer> dump(ObjectCursor cursor) {
return Lang.let(new ArrayList<>(), results -> Lang.safe(() -> {
while (cursor.nextObject()) {
results.add((Integer) cursor.getObject());
}
}));
}
/**
 * In-memory {@link IoMap} stub backed by raw JNA buffers.
 */
private static class IO implements IoMap {
private final Map<PortMirror, List<InputBufferCursor>> inputs = new HashMap<>();
private final Map<PortMirror, List<OutputBufferFragment>> outputs = new HashMap<>();
/**
 * Creates a new instance.
 */
public IO() {
return;
}
// registers int values on the port: contents holds one 4-byte int per
// entry, offsets holds (entries + 1) record boundaries in bytes
IO input(PortMirror input, int... values) {
int entries = values.length;
Memory contents = new Memory(entries * Integer.BYTES);
Memory offsets = new Memory((entries + 1) * Long.BYTES);
offsets.setLong(0, 0);
for (int i = 0; i < entries; i++) {
contents.setInt(i * Integer.BYTES, values[i]);
offsets.setLong((i + 1) * Long.BYTES, (i + 1) * Integer.BYTES);
}
InputBufferFragment fragment = new InputBufferFragment(contents, offsets, entries);
return input(input, new InputBufferCursor(Suppliers.supplier(fragment)));
}
// registers alternating key/value ints as a keyed (scatter-gather) input,
// building parallel key and value fragments with the same layout as above
IO inputPairs(PortMirror input, int... keyValuePairs) {
Arguments.require(keyValuePairs.length % 2 == 0);
int entries = keyValuePairs.length / 2;
Memory kContents = new Memory(entries * Integer.BYTES);
Memory vContents = new Memory(entries * Integer.BYTES);
Memory kOffsets = new Memory((entries + 1) * Long.BYTES);
Memory vOffsets = new Memory((entries + 1) * Long.BYTES);
kOffsets.setLong(0, 0);
vOffsets.setLong(0, 0);
for (int i = 0; i < entries; i++) {
int k = keyValuePairs[i * 2 + 0];
int v = keyValuePairs[i * 2 + 1];
kContents.setInt(i * Integer.BYTES, k);
vContents.setInt(i * Integer.BYTES, v);
kOffsets.setLong((i + 1) * Long.BYTES, (i + 1) * Integer.BYTES);
vOffsets.setLong((i + 1) * Long.BYTES, (i + 1) * Integer.BYTES);
}
InputBufferFragment kFragments = new InputBufferFragment(kContents, kOffsets, entries);
InputBufferFragment vFragments = new InputBufferFragment(vContents, vOffsets, entries);
return input(input, new InputBufferCursor(Suppliers.supplier(kFragments), Suppliers.supplier(vFragments)));
}
IO input(PortMirror input, InputBufferCursor cursor) {
inputs.computeIfAbsent(input, p -> new ArrayList<>()).add(cursor);
return this;
}
@Override
public Supplier<OutputBufferFragment> getOutputSource(PortMirror port) {
return () -> new OutputBufferFragment(4 * 1024, 1024, port.hasKey());
}
@Override
public Consumer<OutputBufferFragment> getOutputSink(PortMirror port) {
// collect written fragments so the test can inspect them afterwards
return outputs.computeIfAbsent(port, p -> new ArrayList<>())::add;
}
@Override
public List<InputBufferCursor> getInputSource(PortMirror port) {
// each port's input may be consumed only once (removed from the map)
return Optionals.remove(inputs, port).orElseThrow(AssertionError::new);
}
M3bpEdgeDescriptor newOneToOne() {
return Descriptors.newOneToOneEdge(IntSerDe.class);
}
M3bpEdgeDescriptor newScatterGather() {
return Descriptors.newScatterGatherEdge(IntSerDe.class, null, null);
}
M3bpEdgeDescriptor newBroadcast() {
return Descriptors.newBroadcastEdge(IntSerDe.class);
}
// flattens the port's output fragments into the list of written ints,
// asserting every record is exactly one 4-byte int
List<Integer> getOutputValues(PortMirror port) {
List<OutputBufferFragment> fragments = Optionals.remove(outputs, port).orElseThrow(AssertionError::new);
return Lang.let(new ArrayList<>(), results -> {
fragments.stream().forEach(f -> f.forEachEntries(b -> {
assertThat(b.remaining(), is(Integer.BYTES));
results.add(b.getInt());
}));
});
}
// like getOutputValues, but for keyed fragments: yields (key, value) pairs
List<Tuple<Integer, Integer>> getOutputPairs(PortMirror port) {
List<OutputBufferFragment> fragments = Optionals.remove(outputs, port).orElseThrow(AssertionError::new);
return Lang.let(new ArrayList<>(), results -> {
fragments.stream().forEach(f -> f.forEachEntries((k, v) -> {
assertThat(k.remaining(), is(Integer.BYTES));
assertThat(v.remaining(), is(Integer.BYTES));
results.add(new Tuple<>(k.getInt(), v.getInt()));
}));
});
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.io.compress;
import org.apache.cassandra.schema.CompressionParams;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.zip.DataFormatException;
import java.util.zip.Deflater;
import java.util.zip.Inflater;
public class DeflateCompressor implements ICompressor
{
public static final DeflateCompressor instance = new DeflateCompressor();
// Scratch space for the buffer-based paths; one array per thread so
// concurrent (de)compression never contends on it.
private static final ThreadLocal<byte[]> threadLocalScratchBuffer = new ThreadLocal<byte[]>()
{
@Override
protected byte[] initialValue()
{
return new byte[CompressionParams.DEFAULT_CHUNK_LENGTH];
}
};
public static byte[] getThreadLocalScratchBuffer()
{
return threadLocalScratchBuffer.get();
}
// Deflater/Inflater wrap native zlib state and are not thread-safe, so a
// dedicated instance is kept per thread and reset() before each use.
// NOTE(review): they are never end()'d, so their native memory lives for
// the lifetime of each thread — presumably intentional for pooled
// threads; confirm.
private final ThreadLocal<Deflater> deflater;
private final ThreadLocal<Inflater> inflater;
public static DeflateCompressor create(Map<String, String> compressionOptions)
{
// no specific options supported so far
return instance;
}
private DeflateCompressor()
{
deflater = new ThreadLocal<Deflater>()
{
@Override
protected Deflater initialValue()
{
return new Deflater();
}
};
inflater = new ThreadLocal<Inflater>()
{
@Override
protected Inflater initialValue()
{
return new Inflater();
}
};
}
public Set<String> supportedOptions()
{
return Collections.emptySet();
}
public int initialCompressedBufferLength(int sourceLen)
{
// Taken from zlib deflateBound(). See http://www.zlib.net/zlib_tech.html.
return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) + (sourceLen >> 25) + 13;
}
// Compresses input into output, advancing both buffer positions.
// Uses the fast array path when both buffers are heap-backed.
public void compress(ByteBuffer input, ByteBuffer output)
{
if (input.hasArray() && output.hasArray())
{
int length = compressArray(input.array(), input.arrayOffset() + input.position(), input.remaining(),
output.array(), output.arrayOffset() + output.position(), output.remaining());
input.position(input.limit());
output.position(output.position() + length);
}
else
compressBuffer(input, output);
}
// Single-shot deflate of a byte range; returns the compressed length.
// Assumes maxOutputLength is large enough for the whole result (asserted
// via def.finished()).
public int compressArray(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength)
{
Deflater def = deflater.get();
def.reset();
def.setInput(input, inputOffset, inputLength);
def.finish();
if (def.needsInput())
return 0;
int len = def.deflate(output, outputOffset, maxOutputLength);
assert def.finished();
return len;
}
// Streaming deflate for direct (non-array-backed) buffers: data is copied
// through the thread-local scratch buffer in chunks.
public void compressBuffer(ByteBuffer input, ByteBuffer output)
{
Deflater def = deflater.get();
def.reset();
byte[] buffer = getThreadLocalScratchBuffer();
// Use half the buffer for input, half for output.
int chunkLen = buffer.length / 2;
while (input.remaining() > chunkLen)
{
input.get(buffer, 0, chunkLen);
def.setInput(buffer, 0, chunkLen);
while (!def.needsInput())
{
int len = def.deflate(buffer, chunkLen, chunkLen);
output.put(buffer, chunkLen, len);
}
}
// Final (possibly empty) chunk: mark the stream finished and drain
// everything zlib still holds.
int inputLength = input.remaining();
input.get(buffer, 0, inputLength);
def.setInput(buffer, 0, inputLength);
def.finish();
while (!def.finished())
{
int len = def.deflate(buffer, chunkLen, chunkLen);
output.put(buffer, chunkLen, len);
}
}
// Decompresses input into output, advancing both buffer positions.
// Mirrors compress(): array fast path, scratch-buffer fallback.
public void uncompress(ByteBuffer input, ByteBuffer output) throws IOException
{
if (input.hasArray() && output.hasArray())
{
int length = uncompress(input.array(), input.arrayOffset() + input.position(), input.remaining(),
output.array(), output.arrayOffset() + output.position(), output.remaining());
input.position(input.limit());
output.position(output.position() + length);
}
else
uncompressBuffer(input, output);
}
public void uncompressBuffer(ByteBuffer input, ByteBuffer output) throws IOException
{
try
{
Inflater inf = inflater.get();
inf.reset();
byte[] buffer = getThreadLocalScratchBuffer();
// Use half the buffer for input, half for output.
int chunkLen = buffer.length / 2;
while (input.remaining() > chunkLen)
{
input.get(buffer, 0, chunkLen);
inf.setInput(buffer, 0, chunkLen);
while (!inf.needsInput())
{
int len = inf.inflate(buffer, chunkLen, chunkLen);
output.put(buffer, chunkLen, len);
}
}
int inputLength = input.remaining();
input.get(buffer, 0, inputLength);
inf.setInput(buffer, 0, inputLength);
// NOTE(review): if the zlib stream ends while unread trailing bytes
// remain, inflate() returns 0 and needsInput() stays false — this
// looks like it could spin on malformed input; presumably callers
// always pass exactly one well-formed stream — confirm.
while (!inf.needsInput())
{
int len = inf.inflate(buffer, chunkLen, chunkLen);
output.put(buffer, chunkLen, len);
}
}
catch (DataFormatException e)
{
throw new IOException(e);
}
}
public int uncompress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset) throws IOException
{
return uncompress(input, inputOffset, inputLength, output, outputOffset, output.length - outputOffset);
}
// Single-shot inflate of a byte range; returns the decompressed length.
// DataFormatException (corrupt input) is rethrown as IOException.
public int uncompress(byte[] input, int inputOffset, int inputLength, byte[] output, int outputOffset, int maxOutputLength) throws IOException
{
Inflater inf = inflater.get();
inf.reset();
inf.setInput(input, inputOffset, inputLength);
if (inf.needsInput())
return 0;
// We assume output is big enough
try
{
return inf.inflate(output, outputOffset, maxOutputLength);
}
catch (DataFormatException e)
{
throw new IOException(e);
}
}
public boolean supports(BufferType bufferType)
{
return true;
}
public BufferType preferredBufferType()
{
// Prefer array-backed buffers.
return BufferType.ON_HEAP;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.io.sstable;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.SynchronousQueue;
import com.google.common.base.Throwables;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.db.ColumnFamily;
import org.apache.cassandra.db.ColumnFamilyType;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.TreeMapBackedSortedColumns;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.io.compress.CompressionParameters;
import org.apache.cassandra.net.MessagingService;
/**
 * A SSTable writer that doesn't assume rows are in sorted order.
 * This writer buffers rows in memory and then writes them all in sorted order.
 * To avoid loading the entire data set in memory, the amount of rows buffered
 * is configurable. Each time the threshold is met, one SSTable will be
 * created (and the buffer reset).
 *
 * @see AbstractSSTableSimpleWriter
 */
public class SSTableSimpleUnsortedWriter extends AbstractSSTableSimpleWriter
{
// Poison pill: tells the background disk-writer thread to exit.
private static final Buffer SENTINEL = new Buffer();
// Current in-memory buffer of rows, sorted by the TreeMap key order.
private Buffer buffer = new Buffer();
private final long bufferSize;
// Estimated serialized size of the rows currently buffered.
private long currentSize;
// Zero-capacity handoff: the producer blocks in put() until the disk
// writer takes the buffer, so at most one buffer is in flight.
private final BlockingQueue<Buffer> writeQueue = new SynchronousQueue<Buffer>();
private final DiskWriter diskWriter = new DiskWriter();
/**
 * Create a new buffering writer.
 * @param directory the directory where to write the sstables
 * @param partitioner the partitioner
 * @param keyspace the keyspace name
 * @param columnFamily the column family name
 * @param comparator the column family comparator
 * @param subComparator the column family subComparator or null if not a Super column family.
 * @param bufferSizeInMB the data size in MB before which a sstable is written and the buffer reset. This corresponds roughly to the written
 * data size (i.e. the size of the created sstable). The actual size used in memory will be higher (by how much depends on the size of the
 * columns you add). For 1GB of heap, a 128 bufferSizeInMB is probably a reasonable choice. If you experience OOM, this value should be lowered.
 */
public SSTableSimpleUnsortedWriter(File directory,
IPartitioner partitioner,
String keyspace,
String columnFamily,
AbstractType<?> comparator,
AbstractType<?> subComparator,
int bufferSizeInMB,
CompressionParameters compressParameters)
{
this(directory, CFMetaData.denseCFMetaData(keyspace, columnFamily, comparator, subComparator).compressionParameters(compressParameters), partitioner, bufferSizeInMB);
}
public SSTableSimpleUnsortedWriter(File directory,
IPartitioner partitioner,
String keyspace,
String columnFamily,
AbstractType<?> comparator,
AbstractType<?> subComparator,
int bufferSizeInMB)
{
this(directory, partitioner, keyspace, columnFamily, comparator, subComparator, bufferSizeInMB, new CompressionParameters(null));
}
public SSTableSimpleUnsortedWriter(File directory, CFMetaData metadata, IPartitioner partitioner, long bufferSizeInMB)
{
super(directory, metadata, partitioner);
this.bufferSize = bufferSizeInMB * 1024L * 1024L;
// Background thread that turns full buffers into sstables.
this.diskWriter.start();
}
// Accounts for the row's estimated serialized size and flushes the buffer
// to disk once the configured threshold is exceeded. The 1.2 factor is a
// rough correction over the raw serialized size.
protected void writeRow(DecoratedKey key, ColumnFamily columnFamily) throws IOException
{
currentSize += key.key.remaining() + ColumnFamily.serializer.serializedSize(columnFamily, MessagingService.current_version) * 1.2;
if (currentSize > bufferSize)
sync();
}
protected ColumnFamily getColumnFamily()
{
ColumnFamily previous = buffer.get(currentKey);
// If the CF already exist in memory, we'll just continue adding to it
if (previous == null)
{
previous = TreeMapBackedSortedColumns.factory.create(metadata);
buffer.put(currentKey, previous);
}
else
{
// We will reuse a CF that we have counted already. But because it will be easier to add the full size
// of the CF in the next writeRow call than to find out the delta, we just remove the size until that next call
currentSize -= currentKey.key.remaining() + ColumnFamily.serializer.serializedSize(previous, MessagingService.current_version) * 1.2;
}
return previous;
}
// Flushes any remaining rows, signals the disk writer to stop via the
// sentinel, waits for it, then surfaces any error it hit.
public void close() throws IOException
{
sync();
try
{
writeQueue.put(SENTINEL);
diskWriter.join();
}
catch (InterruptedException e)
{
throw new RuntimeException(e);
}
checkForWriterException();
}
// Hands the current buffer to the disk writer (blocking until it is
// accepted) and starts a fresh one.
private void sync() throws IOException
{
if (buffer.isEmpty())
return;
checkForWriterException();
try
{
writeQueue.put(buffer);
}
catch (InterruptedException e)
{
throw new RuntimeException(e);
}
buffer = new Buffer();
currentSize = 0;
}
private void checkForWriterException() throws IOException
{
// slightly lame way to report exception from the writer, but that should be good enough
if (diskWriter.exception != null)
{
if (diskWriter.exception instanceof IOException)
throw (IOException) diskWriter.exception;
else
throw Throwables.propagate(diskWriter.exception);
}
}
// typedef
private static class Buffer extends TreeMap<DecoratedKey, ColumnFamily> {}
// Consumes buffers from writeQueue and writes each one out as a complete
// sstable; exits when the sentinel arrives. Any failure is recorded in
// 'exception' for the producer thread to observe.
private class DiskWriter extends Thread
{
volatile Throwable exception = null;
public void run()
{
SSTableWriter writer = null;
try
{
while (true)
{
Buffer b = writeQueue.take();
if (b == SENTINEL)
return;
writer = getWriter();
// TreeMap iteration order guarantees rows are appended sorted.
for (Map.Entry<DecoratedKey, ColumnFamily> entry : b.entrySet())
writer.append(entry.getKey(), entry.getValue());
writer.close();
}
}
catch (Throwable e)
{
if (writer != null)
writer.abort();
exception = e;
}
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.junit.Test;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.StreamSupport;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
@LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to work with ExtrasFS
public class MetaDataStateFormatTests extends ESTestCase {
    /**
     * Ensure we can read a pre-generated cluster state.
     */
    public void testReadClusterState() throws URISyntaxException, IOException {
        final MetaDataStateFormat<MetaData> format = new MetaDataStateFormat<MetaData>(randomFrom(XContentType.values()), "global-") {
            @Override
            public void toXContent(XContentBuilder builder, MetaData state) throws IOException {
                fail("this test doesn't write");
            }
            @Override
            public MetaData fromXContent(XContentParser parser) throws IOException {
                return MetaData.Builder.fromXContent(parser);
            }
        };
        Path tmp = createTempDir();
        final InputStream resource = this.getClass().getResourceAsStream("global-3.st");
        assertThat(resource, notNullValue());
        Path dst = tmp.resolve("global-3.st");
        Files.copy(resource, dst);
        MetaData read = format.read(dst);
        assertThat(read, notNullValue());
        assertThat(read.clusterUUID(), equalTo("3O1tDF1IRB6fSJ-GrTMUtg"));
        // indices are empty since they are serialized separately
    }

    /**
     * Writes a random state twice and verifies that each write leaves exactly one
     * state file per directory, readable back to an equal state, and that the
     * generation id embedded in the file name increases monotonically.
     */
    public void testReadWriteState() throws IOException {
        Path[] dirs = new Path[randomIntBetween(1, 5)];
        for (int i = 0; i < dirs.length; i++) {
            dirs[i] = createTempDir();
        }
        final long id = addDummyFiles("foo-", dirs);
        Format format = new Format(randomFrom(XContentType.values()), "foo-");
        DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean());
        int version = between(0, Integer.MAX_VALUE/2);
        format.write(state, version, dirs);
        for (Path file : dirs) {
            Path[] list = content("*", file);
            // expected value comes first in JUnit's assertEquals
            assertEquals(1, list.length);
            assertThat(list[0].getFileName().toString(), equalTo(MetaDataStateFormat.STATE_DIR_NAME));
            Path stateDir = list[0];
            assertThat(Files.isDirectory(stateDir), is(true));
            list = content("foo-*", stateDir);
            assertEquals(1, list.length);
            assertThat(list[0].getFileName().toString(), equalTo("foo-" + id + ".st"));
            DummyState read = format.read(list[0]);
            assertThat(read, equalTo(state));
        }
        final int version2 = between(version, Integer.MAX_VALUE);
        DummyState state2 = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean());
        format.write(state2, version2, dirs);
        for (Path file : dirs) {
            Path[] list = content("*", file);
            assertEquals(1, list.length);
            assertThat(list[0].getFileName().toString(), equalTo(MetaDataStateFormat.STATE_DIR_NAME));
            Path stateDir = list[0];
            assertThat(Files.isDirectory(stateDir), is(true));
            list = content("foo-*", stateDir);
            assertEquals(1, list.length);
            // second write must bump the generation id by exactly one
            assertThat(list[0].getFileName().toString(), equalTo("foo-" + (id + 1) + ".st"));
            DummyState read = format.read(list[0]);
            assertThat(read, equalTo(state2));
        }
    }

    /**
     * Writes a state with an arbitrary version and verifies it reads back intact,
     * regardless of pre-existing dummy files with unrelated generation ids.
     */
    @Test
    public void testVersionMismatch() throws IOException {
        Path[] dirs = new Path[randomIntBetween(1, 5)];
        for (int i = 0; i < dirs.length; i++) {
            dirs[i] = createTempDir();
        }
        final long id = addDummyFiles("foo-", dirs);
        Format format = new Format(randomFrom(XContentType.values()), "foo-");
        DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean());
        int version = between(0, Integer.MAX_VALUE/2);
        format.write(state, version, dirs);
        for (Path file : dirs) {
            Path[] list = content("*", file);
            assertEquals(1, list.length);
            assertThat(list[0].getFileName().toString(), equalTo(MetaDataStateFormat.STATE_DIR_NAME));
            Path stateDir = list[0];
            assertThat(Files.isDirectory(stateDir), is(true));
            list = content("foo-*", stateDir);
            assertEquals(1, list.length);
            assertThat(list[0].getFileName().toString(), equalTo("foo-" + id + ".st"));
            DummyState read = format.read(list[0]);
            assertThat(read, equalTo(state));
        }
    }

    /**
     * Verifies that a written state file reads back correctly, and that after the
     * file is corrupted (single bit flip) reading it fails with CorruptStateException.
     */
    public void testCorruption() throws IOException {
        Path[] dirs = new Path[randomIntBetween(1, 5)];
        for (int i = 0; i < dirs.length; i++) {
            dirs[i] = createTempDir();
        }
        final long id = addDummyFiles("foo-", dirs);
        Format format = new Format(randomFrom(XContentType.values()), "foo-");
        DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean());
        int version = between(0, Integer.MAX_VALUE/2);
        format.write(state, version, dirs);
        for (Path file : dirs) {
            Path[] list = content("*", file);
            assertEquals(1, list.length);
            assertThat(list[0].getFileName().toString(), equalTo(MetaDataStateFormat.STATE_DIR_NAME));
            Path stateDir = list[0];
            assertThat(Files.isDirectory(stateDir), is(true));
            list = content("foo-*", stateDir);
            assertEquals(1, list.length);
            assertThat(list[0].getFileName().toString(), equalTo("foo-" + id + ".st"));
            DummyState read = format.read(list[0]);
            assertThat(read, equalTo(state));
            // now corrupt it
            corruptFile(list[0], logger);
            try {
                format.read(list[0]);
                fail("corrupted file");
            } catch (CorruptStateException ex) {
                // expected
            }
        }
    }

    /**
     * Flips a single random byte in the given file and asserts (via assumeTrue) that
     * the flip actually changed either the computed or the recorded checksum; skips
     * the test on the (unlikely) checksum collision.
     */
    public static void corruptFile(Path file, ESLogger logger) throws IOException {
        Path fileToCorrupt = file;
        try (final SimpleFSDirectory dir = new SimpleFSDirectory(fileToCorrupt.getParent())) {
            long checksumBeforeCorruption;
            try (IndexInput input = dir.openInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) {
                checksumBeforeCorruption = CodecUtil.retrieveChecksum(input);
            }
            try (FileChannel raf = FileChannel.open(fileToCorrupt, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
                raf.position(randomIntBetween(0, (int)Math.min(Integer.MAX_VALUE, raf.size()-1)));
                long filePointer = raf.position();
                ByteBuffer bb = ByteBuffer.wrap(new byte[1]);
                raf.read(bb);
                bb.flip();
                byte oldValue = bb.get(0);
                byte newValue = (byte) ~oldValue;
                bb.put(0, newValue);
                raf.write(bb, filePointer);
                logger.debug("Corrupting file {} -- flipping at position {} from {} to {} ", fileToCorrupt.getFileName().toString(), filePointer, Integer.toHexString(oldValue), Integer.toHexString(newValue));
            }
            long checksumAfterCorruption;
            long actualChecksumAfterCorruption;
            try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) {
                assertThat(input.getFilePointer(), is(0L));
                input.seek(input.length() - 8); // one long is the checksum... 8 bytes
                checksumAfterCorruption = input.getChecksum();
                actualChecksumAfterCorruption = input.readLong();
            }
            StringBuilder msg = new StringBuilder();
            msg.append("Checksum before: [").append(checksumBeforeCorruption).append("]");
            msg.append(" after: [").append(checksumAfterCorruption).append("]");
            // balance the brackets so the log line is readable
            msg.append(" checksum value after corruption: [").append(actualChecksumAfterCorruption).append("]");
            msg.append(" file: ").append(fileToCorrupt.getFileName().toString()).append(" length: ").append(dir.fileLength(fileToCorrupt.getFileName().toString()));
            logger.debug(msg.toString());
            assumeTrue("Checksum collision - " + msg.toString(),
                    checksumAfterCorruption != checksumBeforeCorruption // collision
                            || actualChecksumAfterCorruption != checksumBeforeCorruption); // checksum corrupted
        }
    }

    // If the latest version doesn't use the legacy format while previous versions do, then fail hard
    public void testLatestVersionDoesNotUseLegacy() throws IOException {
        final ToXContent.Params params = ToXContent.EMPTY_PARAMS;
        MetaDataStateFormat<MetaData> format = MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params);
        final Path[] dirs = new Path[2];
        dirs[0] = createTempDir();
        dirs[1] = createTempDir();
        for (Path dir : dirs) {
            Files.createDirectories(dir.resolve(MetaDataStateFormat.STATE_DIR_NAME));
        }
        final Path dir1 = randomFrom(dirs);
        final int v1 = randomInt(10);
        // write a first state file in the new format
        format.write(randomMeta(), v1, dir1);
        // write older state files in the old format but with a newer version
        final int numLegacyFiles = randomIntBetween(1, 5);
        for (int i = 0; i < numLegacyFiles; ++i) {
            final Path dir2 = randomFrom(dirs);
            final int v2 = v1 + 1 + randomInt(10);
            try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(format.format(), Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaStateService.GLOBAL_STATE_FILE_PREFIX + v2)))) {
                xcontentBuilder.startObject();
                MetaData.Builder.toXContent(randomMeta(), xcontentBuilder, params);
                xcontentBuilder.endObject();
            }
        }
        try {
            format.loadLatestState(logger, dirs);
            fail("latest version can not be read");
        } catch (IllegalStateException ex) {
            assertThat(ex.getMessage(), startsWith("Could not find a state file to recover from among "));
        }
        // write the next state file in the new format and ensure it gets a higher ID
        final MetaData meta = randomMeta();
        format.write(meta, v1, dirs);
        final MetaData metaData = format.loadLatestState(logger, dirs);
        assertEquals(meta.clusterUUID(), metaData.clusterUUID());
        final Path path = randomFrom(dirs);
        // use the shared constant instead of the raw "_state" literal
        final Path[] files = FileSystemUtils.files(path.resolve(MetaDataStateFormat.STATE_DIR_NAME));
        assertEquals(1, files.length);
        assertEquals("global-" + format.findMaxStateId("global-", dirs) + ".st", files[0].getFileName().toString());
    }

    // If both the legacy and the new format are available for the latest version, prefer the new format
    public void testPrefersNewerFormat() throws IOException {
        final ToXContent.Params params = ToXContent.EMPTY_PARAMS;
        MetaDataStateFormat<MetaData> format = MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params);
        final Path[] dirs = new Path[2];
        dirs[0] = createTempDir();
        dirs[1] = createTempDir();
        for (Path dir : dirs) {
            Files.createDirectories(dir.resolve(MetaDataStateFormat.STATE_DIR_NAME));
        }
        final long v = randomInt(10);
        MetaData meta = randomMeta();
        String uuid = meta.clusterUUID();
        // write a first state file in the old format
        final Path dir2 = randomFrom(dirs);
        MetaData meta2 = randomMeta();
        assertFalse(meta2.clusterUUID().equals(uuid));
        try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(format.format(), Files.newOutputStream(dir2.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve(MetaStateService.GLOBAL_STATE_FILE_PREFIX + v)))) {
            xcontentBuilder.startObject();
            // the legacy file must carry meta2 (the state with the *different* uuid);
            // the original wrote an unrelated randomMeta() and left meta2 unused
            MetaData.Builder.toXContent(meta2, xcontentBuilder, params);
            xcontentBuilder.endObject();
        }
        // write a second state file in the new format but with the same version
        format.write(meta, v, dirs);
        MetaData state = format.loadLatestState(logger, dirs);
        final Path path = randomFrom(dirs);
        assertTrue(Files.exists(path.resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + (v+1) + ".st")));
        assertEquals(state.clusterUUID(), uuid);
    }

    /**
     * Mixes legacy and new-format state files (randomly corrupting non-essential ones)
     * across several directories and verifies that the latest uncorrupted state is
     * loaded; then corrupts all copies of the latest state and expects a failure.
     */
    @Test
    public void testLoadState() throws IOException {
        final ToXContent.Params params = ToXContent.EMPTY_PARAMS;
        final Path[] dirs = new Path[randomIntBetween(1, 5)];
        int numStates = randomIntBetween(1, 5);
        int numLegacy = randomIntBetween(0, numStates);
        List<MetaData> meta = new ArrayList<>();
        for (int i = 0; i < numStates; i++) {
            meta.add(randomMeta());
        }
        Set<Path> corruptedFiles = new HashSet<>();
        MetaDataStateFormat<MetaData> format = MetaStateService.globalStateFormat(randomFrom(XContentType.values()), params);
        for (int i = 0; i < dirs.length; i++) {
            dirs[i] = createTempDir();
            Files.createDirectories(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME));
            for (int j = 0; j < numLegacy; j++) {
                XContentType type = format.format();
                if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) {
                    Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-"+j);
                    Files.createFile(file); // randomly create 0-byte files -- there is extra logic to skip them
                } else {
                    try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(type, Files.newOutputStream(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j)))) {
                        xcontentBuilder.startObject();
                        MetaData.Builder.toXContent(meta.get(j), xcontentBuilder, params);
                        xcontentBuilder.endObject();
                    }
                }
            }
            for (int j = numLegacy; j < numStates; j++) {
                format.write(meta.get(j), j, dirs[i]);
                if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) { // corrupt a file that we do not necessarily need here....
                    Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j + ".st");
                    corruptedFiles.add(file);
                    MetaDataStateFormatTests.corruptFile(file, logger);
                }
            }
        }
        List<Path> dirList = Arrays.asList(dirs);
        Collections.shuffle(dirList, getRandom());
        MetaData loadedMetaData = format.loadLatestState(logger, dirList.toArray(new Path[0]));
        MetaData latestMetaData = meta.get(numStates-1);
        assertThat(loadedMetaData.clusterUUID(), not(equalTo("_na_")));
        assertThat(loadedMetaData.clusterUUID(), equalTo(latestMetaData.clusterUUID()));
        ImmutableOpenMap<String,IndexMetaData> indices = loadedMetaData.indices();
        assertThat(indices.size(), equalTo(latestMetaData.indices().size()));
        for (IndexMetaData original : latestMetaData) {
            IndexMetaData deserialized = indices.get(original.getIndex());
            assertThat(deserialized, notNullValue());
            assertThat(deserialized.getVersion(), equalTo(original.getVersion()));
            assertThat(deserialized.getNumberOfReplicas(), equalTo(original.getNumberOfReplicas()));
            assertThat(deserialized.getNumberOfShards(), equalTo(original.getNumberOfShards()));
        }
        // now corrupt all the latest ones and make sure we fail to load the state
        if (numStates > numLegacy) {
            for (int i = 0; i < dirs.length; i++) {
                Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + (numStates-1) + ".st");
                if (corruptedFiles.contains(file)) {
                    continue;
                }
                MetaDataStateFormatTests.corruptFile(file, logger);
            }
            try {
                format.loadLatestState(logger, dirList.toArray(new Path[0]));
                fail("latest version can not be read");
            } catch (ElasticsearchException ex) {
                assertThat(ex.getCause(), instanceOf(CorruptStateException.class));
            }
        }
    }

    /** Builds a MetaData instance with a fresh cluster uuid and 1-10 random indices. */
    private MetaData randomMeta() throws IOException {
        int numIndices = randomIntBetween(1, 10);
        MetaData.Builder mdBuilder = MetaData.builder();
        mdBuilder.generateClusterUuidIfNeeded();
        for (int i = 0; i < numIndices; i++) {
            mdBuilder.put(indexBuilder(randomAsciiOfLength(10) + "idx-"+i));
        }
        return mdBuilder.build();
    }

    /** Index metadata builder with random shard (1-10) and replica (0-5) counts. */
    private IndexMetaData.Builder indexBuilder(String index) throws IOException {
        return IndexMetaData.builder(index)
                .settings(settings(Version.CURRENT).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 10)).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 5)));
    }

    /** Test format for DummyState that wraps directories in MockDirectoryWrapper for extra checks. */
    private class Format extends MetaDataStateFormat<DummyState> {
        Format(XContentType format, String prefix) {
            super(format, prefix);
        }
        @Override
        public void toXContent(XContentBuilder builder, DummyState state) throws IOException {
            state.toXContent(builder, null);
        }
        @Override
        public DummyState fromXContent(XContentParser parser) throws IOException {
            return new DummyState().parse(parser);
        }
        @Override
        protected Directory newDirectory(Path dir) throws IOException {
            MockDirectoryWrapper mock = new MockDirectoryWrapper(getRandom(), super.newDirectory(dir));
            closeAfterSuite(mock);
            return mock;
        }
    }

    /** Simple value holder round-tripped through XContent by the tests above. */
    private static class DummyState implements ToXContent {
        String string;
        int aInt;
        long aLong;
        double aDouble;
        boolean aBoolean;
        @Override
        public String toString() {
            return "DummyState{" +
                    "string='" + string + '\'' +
                    ", aInt=" + aInt +
                    ", aLong=" + aLong +
                    ", aDouble=" + aDouble +
                    ", aBoolean=" + aBoolean +
                    '}';
        }
        public DummyState(String string, int aInt, long aLong, double aDouble, boolean aBoolean) {
            this.string = string;
            this.aInt = aInt;
            this.aLong = aLong;
            this.aDouble = aDouble;
            this.aBoolean = aBoolean;
        }
        public DummyState() {
        }
        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field("string", string);
            builder.field("int", aInt);
            builder.field("long", aLong);
            builder.field("double", aDouble);
            builder.field("boolean", aBoolean);
            return builder;
        }
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            DummyState that = (DummyState) o;
            if (aBoolean != that.aBoolean) return false;
            if (Double.compare(that.aDouble, aDouble) != 0) return false;
            if (aInt != that.aInt) return false;
            if (aLong != that.aLong) return false;
            return string.equals(that.string);
        }
        @Override
        public int hashCode() {
            int result;
            long temp;
            result = string.hashCode();
            result = 31 * result + aInt;
            result = 31 * result + (int) (aLong ^ (aLong >>> 32));
            temp = Double.doubleToLongBits(aDouble);
            result = 31 * result + (int) (temp ^ (temp >>> 32));
            result = 31 * result + (aBoolean ? 1 : 0);
            return result;
        }
        /** Reads the fields written by {@link #toXContent} back into this instance. */
        public DummyState parse(XContentParser parser) throws IOException {
            String fieldName = null;
            parser.nextToken(); // start object
            while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                XContentParser.Token token = parser.currentToken();
                if (token == XContentParser.Token.FIELD_NAME) {
                    fieldName = parser.currentName();
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    assertTrue("string".equals(fieldName));
                    string = parser.text();
                } else if (token == XContentParser.Token.VALUE_NUMBER) {
                    switch (fieldName) {
                        case "double":
                            aDouble = parser.doubleValue();
                            break;
                        case "int":
                            aInt = parser.intValue();
                            break;
                        case "long":
                            aLong = parser.longValue();
                            break;
                        default:
                            fail("unexpected numeric value " + token);
                            break;
                    }
                } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
                    assertTrue("boolean".equals(fieldName));
                    aBoolean = parser.booleanValue();
                } else {
                    fail("unexpected value " + token);
                }
            }
            return this;
        }
    }

    /** Lists directory entries of {@code dir} matching the given glob. */
    public Path[] content(String glob, Path dir) throws IOException {
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, glob)) {
            return StreamSupport.stream(stream.spliterator(), false).toArray(length -> new Path[length]);
        }
    }

    /**
     * Randomly drops one-byte state files (some with the real prefix, some with "dummy-")
     * into a subset of the given directories and returns the next free generation id
     * (max real id seen + 1, or 0 if none were created).
     */
    public long addDummyFiles(String prefix, Path... paths) throws IOException {
        int realId = -1;
        for (Path path : paths) {
            if (randomBoolean()) {
                Path stateDir = path.resolve(MetaDataStateFormat.STATE_DIR_NAME);
                Files.createDirectories(stateDir);
                String actualPrefix = prefix;
                int id = randomIntBetween(0, 10);
                if (randomBoolean()) {
                    actualPrefix = "dummy-";
                } else {
                    realId = Math.max(realId, id);
                }
                try (OutputStream stream = Files.newOutputStream(stateDir.resolve(actualPrefix + id + MetaDataStateFormat.STATE_FILE_EXTENSION))) {
                    stream.write(0);
                }
            }
        }
        return realId + 1;
    }
}
| |
/*
* Copyright 2021 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.origin.mysql;
import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.github.shyiko.mysql.binlog.network.SSLMode;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.streamsets.pipeline.api.ConfigIssue;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.lib.jdbc.connection.MySQLConnection;
import com.streamsets.pipeline.stage.origin.mysql.filters.Filter;
import com.streamsets.pipeline.stage.origin.mysql.filters.Filters;
import com.streamsets.pipeline.stage.origin.mysql.filters.IgnoreTableFilter;
import com.streamsets.pipeline.stage.origin.mysql.filters.IncludeTableFilter;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import com.zaxxer.hikari.pool.HikariPool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeoutException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Validates MySQL binlog origin configuration and initializes the shared resources
 * derived from it: a Hikari data source, the table include/ignore event filter and
 * the source offset factory (GTID- or binlog-position-based). All configuration
 * problems encountered during construction are collected into {@link #issues}
 * instead of being thrown, so callers can report them all at once.
 */
public class DataSourceInitializer {
  // log under this class, not MysqlSource (the original logger was a copy-paste from MysqlSource)
  private static final Logger LOG = LoggerFactory.getLogger(DataSourceInitializer.class);
  private static final Pattern MYSQL_PROTO_PREFIX_PATTERN = Pattern.compile("^jdbc:mysql://");
  private static final String FILE_PROTO_PREFIX = "file://";
  private static final int MYSQL_DEFAULT_PORT = 3306;
  // try the Connector/J 8.x driver first, then the legacy 5.x driver
  private static final List<String> MYSQL_DRIVERS = ImmutableList.of(
      "com.mysql.cj.jdbc.Driver", "com.mysql.jdbc.Driver"
  );
  public final DataSourceConfig dataSourceConfig;
  public final HikariDataSource dataSource;
  public final Filter eventFilter;
  public final SourceOffsetFactory offsetFactory;
  public final List<Stage.ConfigIssue> issues;

  /**
   * Parses the connection configuration, verifies binlog connectivity, builds the
   * event filter, loads the JDBC drivers and connects to MySQL. Any failures are
   * recorded in {@link #issues} (unmodifiable) rather than thrown.
   */
  public DataSourceInitializer(
      final String connectionPrefix,
      final MySQLConnection connection,
      final String configPrefix,
      final MySQLBinLogConfig config,
      final ConfigIssueFactory configIssueFactory
  ) {
    List<ConfigIssue> issues = new ArrayList<>();
    this.dataSourceConfig = createDataSourceConfig(configPrefix, connection, config, configIssueFactory, issues);
    checkConnection(connectionPrefix, config, configIssueFactory, issues);
    eventFilter = createEventFilter(configPrefix, config, configIssueFactory, issues);
    loadDrivers();
    // connect to mysql
    dataSource = createDataSource(configIssueFactory, issues);
    offsetFactory = createOffsetFactory(configIssueFactory, issues);
    this.issues = Collections.unmodifiableList(issues);
  }

  /**
   * Builds a {@link DataSourceConfig} from the parsed connection URL; on a parse
   * failure (already recorded in {@code issues}) returns a placeholder config with
   * null host/credentials.
   */
  private DataSourceConfig createDataSourceConfig(
      final String configPrefix,
      final MySQLConnection connection,
      final MySQLBinLogConfig config,
      final ConfigIssueFactory configIssueFactory,
      final List<ConfigIssue> issues
  ) {
    DataSourceConfig dsc = new DataSourceConfig(null, null, null, 0, false, 0);
    URL url = createURL(configPrefix, connection, configIssueFactory, issues);
    if (url != null) {
      dsc = new DataSourceConfig(
          url.getHost(),
          connection.username.get(),
          connection.password.get(),
          url.getPort() == -1 ? MYSQL_DEFAULT_PORT : url.getPort(),
          isSSLEnabled(url.getQuery()),
          getServerId(config)
      );
    }
    return dsc;
  }

  /**
   * Parses the configured "jdbc:mysql://" connection string into a URL, or records
   * a config issue and returns null if the string is missing the prefix or malformed.
   */
  private URL createURL(
      final String configPrefix,
      final MySQLConnection connection,
      final ConfigIssueFactory configIssueFactory,
      final List<ConfigIssue> issues
  ) {
    URL url = null;
    Matcher matcher = MYSQL_PROTO_PREFIX_PATTERN.matcher(connection.connectionString);
    if (matcher.find()) {
      try {
        // We cannot use the connection string as is,
        // since jdbc:mysql protocol URL handler is highly impossible to be installed (if exists at all)
        // So we will replace jdbc:mysql: with file: (which is available by default)
        // to parse the connection string and to extract later URL elements.
        url = new URL(matcher.replaceFirst(FILE_PROTO_PREFIX));
      } catch (final MalformedURLException ex) {
        issues.add(configIssueFactory.create(
            MySQLBinLogConnectionGroups.MYSQL.name(), configPrefix + "connectionString", Errors.MYSQL_011, ex.getMessage(), ex
        ));
      }
    } else {
      issues.add(configIssueFactory.create(
          MySQLBinLogConnectionGroups.MYSQL.name(), configPrefix + "connectionString", Errors.MYSQL_011, connection.connectionString
      ));
    }
    return url;
  }

  /**
   * Returns true when the URL query string requests SSL via useSSL/requireSSL
   * (bare flag or explicitly "=true", case-insensitive).
   */
  private boolean isSSLEnabled(final String query) {
    String q = Optional.ofNullable(query).orElse("");
    q = q.startsWith("?") ? q.substring(1) : q;
    return Arrays.stream(q.split("&", -1))
        .anyMatch(p -> {
          String s = p.toLowerCase();
          return s.equals("usessl") || s.equals("requiressl")
              || s.equals("usessl=true") || s.equals("requiressl=true");
        });
  }

  /**
   * Parses the configured server id, or returns null when none is configured.
   * @throws NumberFormatException if the configured value is not numeric
   */
  private Integer getServerId(final MySQLBinLogConfig config) {
    Integer result = null;
    try {
      if (config.serverId != null && !config.serverId.isEmpty()) {
        result = Integer.valueOf(config.serverId);
      }
    } catch (final NumberFormatException e) {
      throw new NumberFormatException("Server ID must be numeric");
    }
    return result;
  }

  /**
   * Combines the include and ignore table filters; returns null when either
   * sub-filter failed to build (the corresponding issues are already recorded).
   */
  private Filter createEventFilter(
      final String configPrefix,
      final MySQLBinLogConfig config,
      final ConfigIssueFactory configIssueFactory,
      final List<ConfigIssue> issues
  ) {
    Filter result = null;
    // create include/ignore filters
    Filter includeFilter = createIncludeFilter(configPrefix, config, configIssueFactory, issues);
    if (includeFilter != null) {
      Filter ignoreFilter = createIgnoreFilter(configPrefix, config, configIssueFactory, issues);
      if (ignoreFilter != null) {
        result = includeFilter.and(ignoreFilter);
      }
    }
    return result;
  }

  /**
   * Builds a filter dropping events for each comma-separated table in
   * {@code config.ignoreTables}; PASS when none configured. Records a config
   * issue and returns null on an invalid table spec.
   */
  private Filter createIgnoreFilter(
      final String configPrefix,
      final MySQLBinLogConfig config,
      final ConfigIssueFactory configIssueFactory,
      final List<ConfigIssue> issues
  ) {
    Filter ignoreFilter = null;
    try {
      Filter filter = Filters.PASS;
      if (config.ignoreTables != null && !config.ignoreTables.isEmpty()) {
        for (final String table : config.ignoreTables.split(",")) {
          if (!table.isEmpty()) {
            filter = filter.and(new IgnoreTableFilter(table));
          }
        }
      }
      ignoreFilter = filter;
    } catch (final IllegalArgumentException ex) {
      LOG.error("Error creating ignore tables filter: {}", ex.getMessage(), ex);
      issues.add(configIssueFactory.create(
          MySQLBinLogConnectionGroups.ADVANCED.name(), configPrefix + "ignoreTables", Errors.MYSQL_007, ex.getMessage(), ex
      ));
    }
    return ignoreFilter;
  }

  /**
   * Builds a filter accepting only the comma-separated tables in
   * {@code config.includeTables} (everything else is discarded); PASS when none
   * configured. Records a config issue and returns null on an invalid table spec.
   */
  private Filter createIncludeFilter(
      final String configPrefix,
      final MySQLBinLogConfig config,
      final ConfigIssueFactory configIssueFactory,
      final List<ConfigIssue> issues
  ) {
    Filter includeFilter = null;
    try {
      // if there are no include filters - pass
      Filter filter = Filters.PASS;
      if (config.includeTables != null && !config.includeTables.isEmpty()) {
        String[] includeTables = config.includeTables.split(",");
        if (includeTables.length > 0) {
          // ignore all that is not explicitly included
          filter = Filters.DISCARD;
          for (final String table : includeTables) {
            if (!table.isEmpty()) {
              filter = filter.or(new IncludeTableFilter(table));
            }
          }
        }
      }
      includeFilter = filter;
    } catch (final IllegalArgumentException ex) {
      LOG.error("Error creating include tables filter: {}", ex.getMessage(), ex);
      issues.add(configIssueFactory.create(
          MySQLBinLogConnectionGroups.ADVANCED.name(), configPrefix + "includeTables", Errors.MYSQL_008, ex.getMessage(), ex
      ));
    }
    return includeFilter;
  }

  /**
   * Opens a read-only Hikari pool against the configured host/port; records a
   * config issue and returns null when the pool cannot be initialized.
   */
  private HikariDataSource createDataSource(
      final ConfigIssueFactory configIssueFactory,
      final List<ConfigIssue> issues
  ) {
    HikariDataSource result = null;
    HikariConfig hikariConfig = new HikariConfig();
    hikariConfig.setJdbcUrl(String.format("jdbc:mysql://%s:%d", dataSourceConfig.hostname, dataSourceConfig.port));
    hikariConfig.setUsername(dataSourceConfig.username);
    hikariConfig.setPassword(dataSourceConfig.password);
    hikariConfig.setReadOnly(true);
    hikariConfig.addDataSourceProperty("useSSL", dataSourceConfig.useSSL);
    try {
      result = new HikariDataSource(hikariConfig);
    } catch (final HikariPool.PoolInitializationException e) {
      LOG.error("Error connecting to MySql: {}", e.getMessage(), e);
      issues.add(configIssueFactory.create(
          MySQLBinLogConnectionGroups.MYSQL.name(), null, Errors.MYSQL_003, e.getMessage(), e
      ));
    }
    return result;
  }

  /**
   * Picks the offset factory by probing the server's gtid_mode: GTID-based when
   * "ON", binlog-position-based otherwise. Returns null when no data source is
   * available or pool initialization fails (issue recorded).
   */
  private SourceOffsetFactory createOffsetFactory(
      final ConfigIssueFactory configIssueFactory,
      final List<ConfigIssue> issues
  ) {
    SourceOffsetFactory result = null;
    if (dataSource != null) {
      try {
        boolean gtidEnabled = false;
        try {
          gtidEnabled = "ON".equals(Util.getGlobalVariable(dataSource, "gtid_mode"));
        } catch (final SQLException ex) {
          // Guava's Throwables.propagate is deprecated; wrapping directly is equivalent
          throw new RuntimeException(ex);
        }
        result = gtidEnabled ? new GtidSourceOffsetFactory() : new BinLogPositionOffsetFactory();
      } catch (final HikariPool.PoolInitializationException ex) {
        LOG.error("Error connecting to MySql: {}", ex.getMessage(), ex);
        issues.add(configIssueFactory.create(
            MySQLBinLogConnectionGroups.MYSQL.name(), null, Errors.MYSQL_003, ex.getMessage(), ex
        ));
      }
    }
    return result;
  }

  /**
   * Verifies a binlog connection can be established within the configured timeout,
   * recording a config issue on failure.
   */
  private void checkConnection(
      final String connectionPrefix,
      final MySQLBinLogConfig config,
      final ConfigIssueFactory configIssueFactory,
      final List<ConfigIssue> issues
  ) {
    // check if binlog client connection is possible
    // we don't reuse this client later on, it is used just to check that client can connect, it
    // is immediately closed after connection.
    BinaryLogClient client = createBinaryLogClient();
    try {
      client.setKeepAlive(false);
      client.connect(config.connectTimeout);
    } catch (final IOException | TimeoutException ex) {
      LOG.error("Error connecting to MySql binlog: {}", ex.getMessage(), ex);
      issues.add(configIssueFactory.create(
          MySQLBinLogConnectionGroups.MYSQL.name(), connectionPrefix + "connectionString", Errors.MYSQL_003, ex.getMessage(), ex
      ));
    } finally {
      try {
        client.disconnect();
      } catch (final IOException e) {
        LOG.warn("Error disconnecting from MySql: {}", e.getMessage(), e);
      }
    }
  }

  /**
   * Creates a new binlog client for the configured host, honoring the SSL setting
   * and server id derived from the configuration.
   */
  public BinaryLogClient createBinaryLogClient(
  ) {
    BinaryLogClient binLogClient = new BinaryLogClient(
        dataSourceConfig.hostname,
        dataSourceConfig.port,
        dataSourceConfig.username,
        dataSourceConfig.password
    );
    if (dataSourceConfig.useSSL) {
      binLogClient.setSSLMode(SSLMode.REQUIRED);
    } else {
      binLogClient.setSSLMode(SSLMode.DISABLED);
    }
    binLogClient.setServerId(dataSourceConfig.serverId);
    return binLogClient;
  }

  /** Loads the known MySQL JDBC drivers; a missing driver is logged, not fatal. */
  private void loadDrivers() {
    for (final String driverName : MYSQL_DRIVERS) {
      try {
        LOG.info("Loading driver: {}", driverName);
        Class.forName(driverName);
        LOG.info("Loaded driver: {}", driverName);
      } catch (final ClassNotFoundException e) {
        LOG.error("Can't load driver: {}", driverName, e);
      }
    }
  }

  /** Closes the Hikari pool if it was successfully created. */
  public void destroy() {
    if (dataSource != null) {
      dataSource.close();
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.python;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.io.Files;
import com.google.gson.Gson;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.exec.environment.EnvironmentUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.zeppelin.interpreter.BaseZeppelinContext;
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterHookRegistry.HookType;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.InvalidHookException;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterUtils;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.interpreter.util.InterpreterOutputStream;
import org.apache.zeppelin.interpreter.util.ProcessLauncher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import py4j.GatewayServer;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
/**
 * Interpreter for Python. This is the original Python interpreter implementation, so it has
 * fewer features than IPythonInterpreter, but it also has fewer prerequisites than
 * IPythonInterpreter: only a python installation is required.
 */
public class PythonInterpreter extends Interpreter {
  private static final Logger LOGGER = LoggerFactory.getLogger(PythonInterpreter.class);
  // Upper bound (seconds) used both for python process startup and for completion responses.
  private static final int MAX_TIMEOUT_SEC = 30;
  // py4j gateway server running on the JVM side; the python process connects back to it.
  private GatewayServer gatewayServer;
  // Launches and monitors the external python process.
  private PythonProcessLauncher pythonProcessLauncher;
  // Temp directory holding the bootstrap scripts copied from the classpath.
  private File pythonWorkDir;
  // When true, the bundled py4j zip is appended to PYTHONPATH (see setupPythonEnv).
  protected boolean useBuiltinPy4j = true;
  // used to forward output from python process to InterpreterOutput
  private InterpreterOutputStream outputStream;
  // PID reported by the python script via onPythonScriptInitialized(); -1 until then.
  private long pythonPid = -1;
  // Non-null only while delegating to IPython; every public method checks this first.
  private IPythonInterpreter iPythonInterpreter;
  private BaseZeppelinContext zeppelinContext;
  // set by PythonCondaInterpreter
  private String condaPythonExec;
  // Whether a py4j auth secret (PY4J_GATEWAY_SECRET) is shared with the python process.
  private boolean usePy4jAuth = false;

  public PythonInterpreter(Properties property) {
    super(property);
  }

  /**
   * Opens the interpreter. Prefers delegating to IPythonInterpreter when it is enabled
   * (zeppelin.python.useIPython, default true) and its prerequisites are satisfied; otherwise
   * starts the py4j gateway server and launches the native python process.
   */
  @Override
  public void open() throws InterpreterException {
    // try IPythonInterpreter first
    iPythonInterpreter = getIPythonInterpreter();
    if (getProperty("zeppelin.python.useIPython", "true").equals("true") &&
        StringUtils.isEmpty(
            iPythonInterpreter.checkIPythonPrerequisite(getPythonExec()))) {
      try {
        iPythonInterpreter.open();
        LOGGER.info("IPython is available, Use IPythonInterpreter to replace PythonInterpreter");
        return;
      } catch (Exception e) {
        // Fall back to the native implementation below.
        iPythonInterpreter = null;
        LOGGER.warn("Fail to open IPythonInterpreter", e);
      }
    }
    // reset iPythonInterpreter to null as it is not available
    iPythonInterpreter = null;
    LOGGER.info("IPython is not available, use the native PythonInterpreter");
    // Add matplotlib display hook
    InterpreterGroup intpGroup = getInterpreterGroup();
    if (intpGroup != null && intpGroup.getInterpreterHookRegistry() != null) {
      try {
        // just for unit test I believe (zjffdu)
        registerHook(HookType.POST_EXEC_DEV.getName(), "__zeppelin__._displayhook()");
      } catch (InvalidHookException e) {
        throw new InterpreterException(e);
      }
    }
    try {
      this.usePy4jAuth = Boolean.parseBoolean(getProperty("zeppelin.py4j.useAuth", "true"));
      createGatewayServerAndStartScript();
    } catch (IOException e) {
      LOGGER.error("Fail to open PythonInterpreter", e);
      throw new InterpreterException("Fail to open PythonInterpreter", e);
    }
  }

  // Start the gateway server on the JVM side and launch the python process that connects to it.
  private void createGatewayServerAndStartScript() throws IOException {
    // start gateway server in JVM side
    int port = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces();
    // use the FQDN as the server address instead of 127.0.0.1 so that python process in docker
    // container can also connect to this gateway server.
    String serverAddress = PythonUtils.getLocalIP(properties);
    String secret = PythonUtils.createSecret(256);
    this.gatewayServer = PythonUtils.createGatewayServer(this, serverAddress, port, secret,
        usePy4jAuth);
    gatewayServer.start();
    // launch python process to connect to the gateway server in JVM side
    createPythonScript();
    String pythonExec = getPythonExec();
    CommandLine cmd = CommandLine.parse(pythonExec);
    if (!pythonExec.endsWith(".py")) {
      // PythonDockerInterpreter set pythonExec with script
      cmd.addArgument(pythonWorkDir + "/zeppelin_python.py", false);
    }
    // The bootstrap script receives the gateway address and port as positional arguments.
    cmd.addArgument(serverAddress, false);
    cmd.addArgument(Integer.toString(port), false);
    outputStream = new InterpreterOutputStream(LOGGER);
    Map<String, String> env = setupPythonEnv();
    if (usePy4jAuth) {
      env.put("PY4J_GATEWAY_SECRET", secret);
    }
    LOGGER.info("Launching Python Process Command: " + cmd.getExecutable() +
        " " + StringUtils.join(cmd.getArguments(), " "));
    pythonProcessLauncher = new PythonProcessLauncher(cmd, env);
    pythonProcessLauncher.launch();
    pythonProcessLauncher.waitForReady(MAX_TIMEOUT_SEC * 1000);
    if (!pythonProcessLauncher.isRunning()) {
      if (pythonProcessLauncher.isLaunchTimeout()) {
        throw new IOException("Launch python process is time out.\n" +
            pythonProcessLauncher.getErrorMessage());
      } else {
        throw new IOException("Fail to launch python process.\n" +
            pythonProcessLauncher.getErrorMessage());
      }
    }
  }

  @VisibleForTesting
  public PythonProcessLauncher getPythonProcessLauncher() {
    return pythonProcessLauncher;
  }

  // Creates the temp working dir and copies the python bootstrap resources into it.
  private void createPythonScript() throws IOException {
    // set java.io.tmpdir to /tmp on MacOS, because docker can not share the /var folder which will
    // cause PythonDockerInterpreter fails.
    // https://stackoverflow.com/questions/45122459/docker-mounts-denied-the-paths-are-not-shared-
    // from-os-x-and-are-not-known
    if (System.getProperty("os.name", "").contains("Mac")) {
      System.setProperty("java.io.tmpdir", "/tmp");
    }
    this.pythonWorkDir = Files.createTempDir();
    this.pythonWorkDir.deleteOnExit();
    LOGGER.info("Create Python working dir: " + pythonWorkDir.getAbsolutePath());
    copyResourceToPythonWorkDir("python/zeppelin_python.py", "zeppelin_python.py");
    copyResourceToPythonWorkDir("python/zeppelin_context.py", "zeppelin_context.py");
    copyResourceToPythonWorkDir("python/backend_zinline.py", "backend_zinline.py");
    copyResourceToPythonWorkDir("python/mpl_config.py", "mpl_config.py");
    copyResourceToPythonWorkDir("python/py4j-src-0.10.7.zip", "py4j-src-0.10.7.zip");
  }

  /** @return true when this interpreter is delegating to IPythonInterpreter. */
  protected boolean useIPython() {
    return this.iPythonInterpreter != null;
  }

  // Copies a classpath resource into the python working directory.
  private void copyResourceToPythonWorkDir(String srcResourceName,
                                           String dstFileName) throws IOException {
    FileOutputStream out = null;
    try {
      out = new FileOutputStream(pythonWorkDir.getAbsoluteFile() + "/" + dstFileName);
      IOUtils.copy(
          getClass().getClassLoader().getResourceAsStream(srcResourceName),
          out);
    } finally {
      if (out != null) {
        out.close();
      }
    }
  }

  /**
   * Builds the environment for the python process: the current process environment with the
   * working dir (and optionally the bundled py4j zip) appended to PYTHONPATH.
   */
  protected Map<String, String> setupPythonEnv() throws IOException {
    Map<String, String> env = EnvironmentUtils.getProcEnvironment();
    appendToPythonPath(env, pythonWorkDir.getAbsolutePath());
    if (useBuiltinPy4j) {
      appendToPythonPath(env, pythonWorkDir.getAbsolutePath() + "/py4j-src-0.10.7.zip");
    }
    LOGGER.info("PYTHONPATH: " + env.get("PYTHONPATH"));
    return env;
  }

  // Appends path to PYTHONPATH using ':' as separator.
  // NOTE(review): ':' is the UNIX path separator; confirm Windows is not a supported target here.
  private void appendToPythonPath(Map<String, String> env, String path) {
    if (!env.containsKey("PYTHONPATH")) {
      env.put("PYTHONPATH", path);
    } else {
      env.put("PYTHONPATH", env.get("PYTHONPATH") + ":" + path);
    }
  }

  // Run python script
  // Choose python in the order of
  // condaPythonExec > zeppelin.python
  protected String getPythonExec() {
    if (condaPythonExec != null) {
      return condaPythonExec;
    } else {
      return getProperty("zeppelin.python", "python");
    }
  }

  public File getPythonWorkDir() {
    return pythonWorkDir;
  }

  /**
   * Stops the delegate (if any) or the python process and gateway server, then re-creates the
   * hand-off monitor objects so a re-opened interpreter starts from a clean state.
   */
  @Override
  public void close() throws InterpreterException {
    if (iPythonInterpreter != null) {
      iPythonInterpreter.close();
      return;
    }
    if (pythonProcessLauncher != null) {
      if (pythonProcessLauncher.isRunning()) {
        LOGGER.info("Kill python process");
        pythonProcessLauncher.stop();
      }
    }
    if (gatewayServer != null) {
      gatewayServer.shutdown();
    }
    // Reset these 2 monitors, otherwise a restarted PythonInterpreter would fail to execute
    // python code because the objects would be left in an inconsistent state.
    // NOTE(review): a thread still blocked in wait() on the old monitor will never see a notify
    // after this reassignment — confirm no interpret/completion call can be in flight here.
    statementSetNotifier = new Integer(0);
    statementFinishedNotifier = new Integer(0);
  }

  // Pending request handed over to the python process via getStatements();
  // guarded by statementSetNotifier.
  private PythonInterpretRequest pythonInterpretRequest = null;
  // Monitor objects for the JVM <-> python hand-off.
  // NOTE(review): new Integer(0) is a deprecated boxing constructor; any plain Object would do
  // as a monitor, but these fields are also reassigned in close() — keep that in mind.
  private Integer statementSetNotifier = new Integer(0);
  private Integer statementFinishedNotifier = new Integer(0);
  // Output and error flag of the last executed statement; guarded by statementFinishedNotifier.
  private String statementOutput = null;
  private boolean statementError = false;

  public void setPythonExec(String pythonExec) {
    LOGGER.info("Set Python Command : {}", pythonExec);
    this.condaPythonExec = pythonExec;
  }

  /**
   * Request send to Python Daemon
   */
  public class PythonInterpretRequest {
    public String statements;
    public boolean isForCompletion;
    public boolean isCallHooks;

    public PythonInterpretRequest(String statements, boolean isForCompletion) {
      this(statements, isForCompletion, true);
    }

    public PythonInterpretRequest(String statements, boolean isForCompletion, boolean isCallHooks) {
      this.statements = statements;
      this.isForCompletion = isForCompletion;
      this.isCallHooks = isCallHooks;
    }

    public String statements() {
      return statements;
    }

    public boolean isForCompletion() {
      return isForCompletion;
    }

    public boolean isCallHooks() {
      return isCallHooks;
    }
  }

  // called by Python Process
  // Blocks until a request is available, then hands it over (clearing the pending slot).
  public PythonInterpretRequest getStatements() {
    synchronized (statementSetNotifier) {
      while (pythonInterpretRequest == null) {
        try {
          statementSetNotifier.wait(1000);
        } catch (InterruptedException e) {
          e.printStackTrace();
        }
      }
      PythonInterpretRequest req = pythonInterpretRequest;
      pythonInterpretRequest = null;
      return req;
    }
  }

  // called by Python Process
  // Publishes the statement result and wakes up the thread blocked in callPython()/completion().
  public void setStatementsFinished(String out, boolean error) {
    synchronized (statementFinishedNotifier) {
      LOGGER.debug("Setting python statement output: " + out + ", error: " + error);
      statementOutput = out;
      statementError = error;
      statementFinishedNotifier.notify();
    }
  }

  // called by Python Process
  // Records the python PID and marks the launcher RUNNING, releasing waitForReady().
  public void onPythonScriptInitialized(long pid) {
    pythonPid = pid;
    synchronized (pythonProcessLauncher) {
      LOGGER.debug("onPythonScriptInitialized is called");
      pythonProcessLauncher.initialized();
    }
  }

  // called by Python Process
  public void appendOutput(String message) throws IOException {
    LOGGER.debug("Output from python process: " + message);
    outputStream.getInterpreterOutput().write(message);
  }

  // used by subclass such as PySparkInterpreter to set JobGroup before executing spark code
  protected void preCallPython(InterpreterContext context) {
  }

  // blocking call. Send python code to python process and get response
  protected void callPython(PythonInterpretRequest request) {
    synchronized (statementSetNotifier) {
      this.pythonInterpretRequest = request;
      statementOutput = null;
      statementSetNotifier.notify();
    }
    synchronized (statementFinishedNotifier) {
      // Poll with a 1s wait so a dead python process (isRunning() == false) also unblocks us.
      while (statementOutput == null && pythonProcessLauncher.isRunning()) {
        try {
          statementFinishedNotifier.wait(1000);
        } catch (InterruptedException e) {
          // ignore this exception
        }
      }
    }
  }

  /**
   * Executes the given python statements, delegating to IPython when available. Returns ERROR
   * with the captured output when the statement failed or the python process died.
   */
  @Override
  public InterpreterResult interpret(String st, InterpreterContext context)
      throws InterpreterException {
    if (iPythonInterpreter != null) {
      return iPythonInterpreter.interpret(st, context);
    }
    outputStream.setInterpreterOutput(context.out);
    BaseZeppelinContext z = getZeppelinContext();
    z.setInterpreterContext(context);
    z.setGui(context.getGui());
    z.setNoteGui(context.getNoteGui());
    InterpreterContext.set(context);
    preCallPython(context);
    callPython(new PythonInterpretRequest(st, false));
    if (statementError) {
      return new InterpreterResult(Code.ERROR, statementOutput);
    } else {
      try {
        context.out.flush();
      } catch (IOException e) {
        throw new InterpreterException(e);
      }
      if (pythonProcessLauncher.isRunning()) {
        return new InterpreterResult(Code.SUCCESS);
      } else {
        return new InterpreterResult(Code.ERROR,
            "Python process is abnormally exited, please check your code and log.");
      }
    }
  }

  // Sends SIGINT to the python process when its PID is known; otherwise closes the interpreter.
  // NOTE(review): relies on an external `kill` binary, so this path is UNIX-only by design.
  public void interrupt() throws IOException, InterpreterException {
    if (pythonPid > -1) {
      LOGGER.info("Sending SIGINT signal to PID : " + pythonPid);
      Runtime.getRuntime().exec("kill -SIGINT " + pythonPid);
    } else {
      LOGGER.warn("Non UNIX/Linux system, close the interpreter");
      close();
    }
  }

  @Override
  public void cancel(InterpreterContext context) throws InterpreterException {
    if (iPythonInterpreter != null) {
      iPythonInterpreter.cancel(context);
      return;
    }
    try {
      interrupt();
    } catch (IOException e) {
      LOGGER.error("Error", e);
    }
  }

  @Override
  public FormType getFormType() {
    return FormType.NATIVE;
  }

  @Override
  public int getProgress(InterpreterContext context) throws InterpreterException {
    if (iPythonInterpreter != null) {
      return iPythonInterpreter.getProgress(context);
    }
    // The native implementation does not track progress.
    return 0;
  }

  /**
   * Code completion: sends the token before the cursor to the python side helper
   * (__zeppelin_completion__) and parses the JSON array it returns.
   */
  @Override
  public List<InterpreterCompletion> completion(String buf, int cursor,
                                                InterpreterContext interpreterContext)
      throws InterpreterException {
    if (iPythonInterpreter != null) {
      return iPythonInterpreter.completion(buf, cursor, interpreterContext);
    }
    if (buf.length() < cursor) {
      cursor = buf.length();
    }
    String completionString = getCompletionTargetString(buf, cursor);
    String completionCommand = "__zeppelin_completion__.getCompletion('" + completionString + "')";
    LOGGER.debug("completionCommand: " + completionCommand);
    // NOTE(review): unlike callPython(), the request is set and notified here without holding
    // statementSetNotifier for the assignment — confirm this ordering is intended.
    pythonInterpretRequest = new PythonInterpretRequest(completionCommand, true);
    statementOutput = null;
    synchronized (statementSetNotifier) {
      statementSetNotifier.notify();
    }
    String[] completionList = null;
    synchronized (statementFinishedNotifier) {
      long startTime = System.currentTimeMillis();
      while (statementOutput == null
          && pythonProcessLauncher.isRunning()) {
        try {
          if (System.currentTimeMillis() - startTime > MAX_TIMEOUT_SEC * 1000) {
            LOGGER.error("Python completion didn't have response for {}sec.", MAX_TIMEOUT_SEC);
            break;
          }
          statementFinishedNotifier.wait(1000);
        } catch (InterruptedException e) {
          // not working
          LOGGER.info("wait drop");
          return new LinkedList<>();
        }
      }
      if (statementError) {
        return new LinkedList<>();
      }
      Gson gson = new Gson();
      completionList = gson.fromJson(statementOutput, String[].class);
    }
    //end code for completion
    if (completionList == null) {
      return new LinkedList<>();
    }
    List<InterpreterCompletion> results = new LinkedList<>();
    for (String name : completionList) {
      results.add(new InterpreterCompletion(name, name, StringUtils.EMPTY));
    }
    return results;
  }

  // Extracts the token to complete: the substring between the last separator
  // (space, newline, tab) before the cursor and the cursor itself.
  private String getCompletionTargetString(String text, int cursor) {
    String[] completionSeqCharaters = {" ", "\n", "\t"};
    int completionEndPosition = cursor;
    int completionStartPosition = cursor;
    int indexOfReverseSeqPostion = cursor;
    String resultCompletionText = "";
    String completionScriptText = "";
    try {
      completionScriptText = text.substring(0, cursor);
    } catch (Exception e) {
      LOGGER.error(e.toString());
      return null;
    }
    completionEndPosition = completionScriptText.length();
    // Search separators from the cursor backwards by reversing the prefix.
    String tempReverseCompletionText = new StringBuilder(completionScriptText).reverse().toString();
    for (String seqCharacter : completionSeqCharaters) {
      indexOfReverseSeqPostion = tempReverseCompletionText.indexOf(seqCharacter);
      if (indexOfReverseSeqPostion < completionStartPosition && indexOfReverseSeqPostion > 0) {
        completionStartPosition = indexOfReverseSeqPostion;
      }
    }
    if (completionStartPosition == completionEndPosition) {
      // No separator found: complete the whole prefix.
      completionStartPosition = 0;
    } else {
      // Convert the reverse offset back to a forward index.
      completionStartPosition = completionEndPosition - completionStartPosition;
    }
    resultCompletionText = completionScriptText.substring(
        completionStartPosition, completionEndPosition);
    return resultCompletionText;
  }

  protected IPythonInterpreter getIPythonInterpreter() throws InterpreterException {
    return getInterpreterInTheSameSessionByClassName(IPythonInterpreter.class, false);
  }

  protected BaseZeppelinContext createZeppelinContext() {
    return new PythonZeppelinContext(
        getInterpreterGroup().getInterpreterHookRegistry(),
        Integer.parseInt(getProperty("zeppelin.python.maxResult", "1000")));
  }

  // Lazily created; not thread-safe, but interpret() calls are serialized by the framework.
  // NOTE(review): confirm single-threaded access before relying on this.
  public BaseZeppelinContext getZeppelinContext() {
    if (zeppelinContext == null) {
      zeppelinContext = createZeppelinContext();
    }
    return zeppelinContext;
  }

  /**
   * Runs a bootstrap python script bundled as a classpath resource through interpret().
   *
   * @throws IOException when the script cannot be read or fails to execute
   */
  protected void bootstrapInterpreter(String resourceName) throws IOException {
    LOGGER.info("Bootstrap interpreter via " + resourceName);
    String bootstrapCode =
        IOUtils.toString(getClass().getClassLoader().getResourceAsStream(resourceName));
    try {
      // Add hook explicitly, otherwise python will fail to execute the statement
      InterpreterResult result = interpret(bootstrapCode + "\n" + "__zeppelin__._displayhook()",
          InterpreterContext.get());
      if (result.code() != Code.SUCCESS) {
        throw new IOException("Fail to run bootstrap script: " + resourceName);
      }
    } catch (InterpreterException e) {
      throw new IOException(e);
    }
  }

  // Called by Python Process, used for debugging purpose
  public void logPythonOutput(String message) {
    LOGGER.debug("Python Process Output: " + message);
  }

  // Process launcher that is marked RUNNING by onPythonScriptInitialized() (via initialized())
  // rather than by mere process startup, and that wakes waiters when the process exits.
  class PythonProcessLauncher extends ProcessLauncher {
    PythonProcessLauncher(CommandLine commandLine, Map<String, String> envs) {
      super(commandLine, envs);
    }

    // Busy-waits (100ms steps) until the python script reports itself initialized or the
    // timeout (ms) elapses.
    @Override
    public void waitForReady(int timeout) {
      long startTime = System.currentTimeMillis();
      synchronized (this) {
        while (state == State.LAUNCHED) {
          LOGGER.info("Waiting for python process initialized");
          try {
            wait(100);
          } catch (InterruptedException e) {
            throw new RuntimeException(e);
          }
          if ((System.currentTimeMillis() - startTime) > timeout) {
            onTimeout();
            break;
          }
        }
      }
    }

    public void initialized() {
      synchronized (this) {
        this.state = State.RUNNING;
        notify();
      }
    }

    @Override
    public void onProcessFailed(ExecuteException e) {
      super.onProcessFailed(e);
      // Unblock any thread waiting for a statement result; it will observe isRunning() == false.
      synchronized (statementFinishedNotifier) {
        statementFinishedNotifier.notify();
      }
    }

    @Override
    public void onProcessComplete(int exitValue) {
      super.onProcessComplete(exitValue);
      // Unblock any thread waiting for a statement result; it will observe isRunning() == false.
      synchronized (statementFinishedNotifier) {
        statementFinishedNotifier.notify();
      }
    }
  }
}
| |
/*L
* Copyright Washington University in St. Louis, SemanticBits, Persistent Systems, Krishagni.
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/metadata-based-query/LICENSE.txt for details.
*/
package edu.wustl.common.querysuite.utils;
import org.apache.commons.lang.builder.HashCodeBuilder;
import edu.common.dynamicextensions.domaininterface.AttributeInterface;
import edu.wustl.common.querysuite.factory.QueryObjectFactory;
import edu.wustl.common.querysuite.queryobject.ArithmeticOperator;
import edu.wustl.common.querysuite.queryobject.DSInterval;
import edu.wustl.common.querysuite.queryobject.IArithmeticOperand;
import edu.wustl.common.querysuite.queryobject.IConnector;
import edu.wustl.common.querysuite.queryobject.IDateLiteral;
import edu.wustl.common.querysuite.queryobject.IDateOffset;
import edu.wustl.common.querysuite.queryobject.IDateOffsetLiteral;
import edu.wustl.common.querysuite.queryobject.IExpressionAttribute;
import edu.wustl.common.querysuite.queryobject.ILiteral;
import edu.wustl.common.querysuite.queryobject.INumericLiteral;
import edu.wustl.common.querysuite.queryobject.ITerm;
import edu.wustl.common.querysuite.queryobject.TermType;
import edu.wustl.common.querysuite.queryobject.TimeInterval;
/**
* Provides string representation and term type of an {@link ITerm}. It
* requires an {@link IAttributeAliasProvider} and a
* {@link PrimitiveOperationProcessor} for obtaining this information. By
* default, it uses a default alias provider that substitutes the attribute name
* for an {@link IExpressionAttribute}. If an SQL string is to be built, then
* {@link DatabaseSQLSettings} have to be provided; based on the database
* settings, an appropriate {@link PrimitiveOperationProcessor} is used.
*
* @author srinath_k
*/
//TODO remove support of converting numeric to date as appropriate
public class TermProcessor {
    /**
     * Provides an appropriate alias for an {@link IExpressionAttribute} in a
     * query. This alias is used to refer to the attribute when an SQL is built
     * from the query.
     */
    public interface IAttributeAliasProvider {
        String getAliasFor(IExpressionAttribute exprAttr);
    }

    // Default alias: "<unqualified entity class name>.<attribute name>".
    static final IAttributeAliasProvider defaultAliasProvider = new IAttributeAliasProvider() {
        public String getAliasFor(IExpressionAttribute exprAttr) {
            AttributeInterface attribute = exprAttr.getAttribute();
            String entityName = attribute.getEntity().getName();
            entityName = entityName.substring(entityName.lastIndexOf(".") + 1);
            return entityName + "." + attribute.getName();
        }
    };

    private IAttributeAliasProvider aliasProvider;

    // Database-specific handler for primitive (binary) operations and date/interval formatting.
    private PrimitiveOperationProcessor primitiveOperationProcessor;

    /**
     * Configures to use the default alias provider and
     * {@link PrimitiveOperationProcessor}.
     */
    public TermProcessor() {
        this.aliasProvider = defaultAliasProvider;
        this.primitiveOperationProcessor = new PrimitiveOperationProcessor();
    }

    /**
     * Configures to use the specified alias provider and a
     * {@link PrimitiveOperationProcessor} appropriate for the specified
     * database settings.
     */
    public TermProcessor(IAttributeAliasProvider aliasProvider, DatabaseSQLSettings databaseSQLSettings) {
        this.aliasProvider = aliasProvider;
        switch (databaseSQLSettings.getDatabaseType()) {
            case MySQL :
                this.primitiveOperationProcessor = new MySQLPrimitiveOperationProcessor();
                break;
            case Oracle :
                this.primitiveOperationProcessor = new OraclePrimitiveOperationProcessor();
                break;
            case MsSqlServer :
                this.primitiveOperationProcessor = new MsSqlServerPrimitiveOperationProcessor();
                break;
            default :
                // All enum values are covered above; reaching here means a new database type
                // was added without updating this switch.
                throw new RuntimeException("Can't occur.");
        }
    }

    /**
     * The result of using {@link TermProcessor} to process an {@link ITerm}.
     * It contains the string representation and the {@link TermType} of the
     * term.
     *
     * @author srinath_k
     */
    public static class TermString {
        private String string;

        private TermType termType;

        // Sentinel returned for terms that cannot be converted.
        static final TermString INVALID = new TermString("", TermType.Invalid);

        TermString(String s, TermType termType) {
            if (s == null || termType == null) {
                throw new IllegalArgumentException();
            }
            this.string = s;
            this.termType = termType;
        }

        public String getString() {
            return string;
        }

        public TermType getTermType() {
            return termType;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof TermString)) {
                return false;
            }
            TermString o = (TermString) obj;
            return string.equals(o.string) && termType == o.termType;
        }

        @Override
        public int hashCode() {
            return new HashCodeBuilder().append(string).append(termType).toHashCode();
        }

        @Override
        public String toString() {
            return string + "[" + termType + "]";
        }
    }

    // An already-converted operand: a string plus its term type (and, for interval types, the
    // time interval). Implements IArithmeticOperand so it can be fed back into the conversion.
    static class TermStringOpnd implements IArithmeticOperand {
        private static final long serialVersionUID = 7952975305036738122L;

        private final String string;

        private final TermType termType;

        private final TimeInterval<?> timeInterval;

        static final TermStringOpnd INVALID_TERM_STRING_OPND = new TermStringOpnd("", TermType.Invalid);

        TermStringOpnd(String string, TermType termType) {
            this.string = string;
            this.termType = termType;
            if (TermType.isInterval(termType)) {
                // Default interval granularity when only the term type is known.
                timeInterval = TimeInterval.compoundEnum(DSInterval.Day);
            } else {
                timeInterval = null;
            }
        }

        TermStringOpnd(String string, TimeInterval<?> timeInterval) {
            this.string = string;
            this.timeInterval = timeInterval;
            this.termType = TermType.termType(timeInterval);
        }

        public String getString() {
            return string;
        }

        public TermType getTermType() {
            return termType;
        }

        public TimeInterval<?> getTimeInterval() {
            if (!TermType.isInterval(termType)) {
                throw new UnsupportedOperationException();
            }
            return timeInterval;
        }

        // The remaining IArithmeticOperand methods are not applicable to this synthetic operand.
        public void setTermType(TermType termType) {
            throw new UnsupportedOperationException();
        }

        public Long getId() {
            throw new UnsupportedOperationException();
        }

        public void setId(Long id) {
            throw new UnsupportedOperationException();
        }
    }

    // Result of converting a parenthesized sub-term: the converted string (including trailing
    // right parentheses), the index of the last operand consumed, and how many right
    // parentheses close the sub-term.
    private static class SubTerm implements IArithmeticOperand {
        private static final long serialVersionUID = 7342856030098944697L;

        private static final SubTerm INVALID_SUBTERM = new SubTerm(-1, TermStringOpnd.INVALID_TERM_STRING_OPND, -1);

        private final int endIdx;

        private final TermStringOpnd termStringOpnd;

        private final int numRightPs;

        private SubTerm(int endIdx, TermStringOpnd termStringOpnd, int numRightPs) {
            this.endIdx = endIdx;
            this.termStringOpnd = termStringOpnd;
            this.numRightPs = numRightPs;
        }

        private String string() {
            return termStringOpnd.getString();
        }

        // Converted string with the trailing right parentheses stripped.
        private String getOperandString() {
            return string().substring(0, string().length() - numRightPs);
        }

        public TermType getTermType() {
            return termStringOpnd.getTermType();
        }

        public void setTermType(TermType termType) {
            throw new UnsupportedOperationException();
        }

        public Long getId() {
            throw new UnsupportedOperationException();
        }

        public void setId(Long id) {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Converts the given term to its string representation and result type.
     * Single-operand terms are handled directly; multi-operand terms go through
     * {@link #convertSubTerm(ITerm, int)}. Returns {@link TermString#INVALID} for empty or
     * unconvertible terms.
     */
    @SuppressWarnings("unchecked")
    public TermString convertTerm(ITerm term) {
        // Replace date literals with database-specific date strings up front.
        term = replaceDateLiterals(term);
        if (term.numberOfOperands() == 0) {
            return TermString.INVALID;
        }
        if (term.numberOfOperands() == 1) {
            IArithmeticOperand opnd = term.getOperand(0);
            if (opnd.getTermType() == TermType.Invalid) {
                return TermString.INVALID;
            }
            if (opnd.getTermType() == TermType.DSInterval && opnd instanceof IDateOffsetLiteral) {
                IDateOffsetLiteral lit = (IDateOffsetLiteral) opnd;
                String s = primitiveOperationProcessor.getIntervalString(lit.getOffset(), lit.getTimeInterval());
                return new TermString(s, TermType.DSInterval);
            }
            TermStringOpnd termStrOpnd = convertOperand(opnd);
            String s = termStrOpnd.getString();
            TermType termType = termStrOpnd.getTermType();
            if (opnd.getTermType() == TermType.Date) {
                // A lone date is promoted to a timestamp.
                s = primitiveOperationProcessor.dateToTimestamp(s);
                termType = TermType.Timestamp;
            }
            return new TermString(s, termType);
        }
        SubTerm subTerm = convertSubTerm(term, 0);
        String res = subTerm.string();
        if (subTerm != SubTerm.INVALID_SUBTERM) {
            // Strip the outermost pair of parentheses added by convertSubTerm.
            res = res.substring(1, res.length() - 1);
        }
        return new TermString(res, subTerm.getTermType());
    }

    // Returns a copy of the term where every IDateLiteral operand is replaced by a
    // database-specific date string operand; connectors are preserved.
    private ITerm replaceDateLiterals(ITerm term) {
        ITerm res = QueryObjectFactory.createTerm();
        if (term.numberOfOperands() == 0) {
            return res;
        }
        res.addOperand(dateCheckedOperand(term.getOperand(0)));
        for (int i = 1; i < term.numberOfOperands(); i++) {
            res.addOperand(term.getConnector(i - 1, i), dateCheckedOperand(term.getOperand(i)));
        }
        return res;
    }

    // Converts an IDateLiteral operand to a TermStringOpnd of type Date; other operands pass
    // through unchanged.
    private IArithmeticOperand dateCheckedOperand(IArithmeticOperand opnd) {
        // TODO support timestamp literal?
        IArithmeticOperand res = opnd;
        if (opnd instanceof IDateLiteral) {
            IDateLiteral literal = (IDateLiteral) opnd;
            String dateStr = primitiveOperationProcessor.modifyDateLiteral(literal);
            TermStringOpnd newLit = new TermStringOpnd(dateStr, TermType.Date);
            res = newLit;
        }
        return res;
    }

    // Converts the sub-term starting at startIdx, folding operands left to right and recursing
    // when nesting increases. Parenthesis bookkeeping: numLeftPs tracks currently-open left
    // parentheses; right parentheses are emitted when nesting decreases.
    // NOTE(review): for startIdx == 0 this calls term.getConnector(-1, 0) — presumably the ITerm
    // implementation returns a zero-nesting connector for that pre-term position; confirm.
    private SubTerm convertSubTerm(ITerm term, int startIdx) {
        int operatorBeforeTermNesting = term.getConnector(startIdx - 1, startIdx).getNestingNumber();
        if (term.nestingNumberOfOperand(startIdx) <= operatorBeforeTermNesting) {
            // A sub-term must be more deeply nested than the connector preceding it.
            throw new IllegalArgumentException();
        }
        String res = "";
        int numLeftPs = term.nestingNumberOfOperand(startIdx) - operatorBeforeTermNesting;
        res += getLeftParentheses(numLeftPs);
        res += convertOperand(term.getOperand(startIdx)).getString();
        int i = startIdx + 1;
        TermType termType = term.getOperand(startIdx).getTermType();
        while (true) {
            if (i == term.numberOfOperands()) {
                break;
            }
            int currOpndNesting = term.nestingNumberOfOperand(i);
            if (currOpndNesting <= operatorBeforeTermNesting) {
                // Nesting dropped back to (or below) the enclosing level: sub-term ends here.
                break;
            }
            // Everything accumulated so far (minus the open parentheses) is the left operand.
            String leftOpndString = res.substring(numLeftPs);
            TermStringOpnd leftOpnd = new TermStringOpnd(leftOpndString, termType);
            IConnector<ArithmeticOperator> prevConn = term.getConnector(i - 1, i);
            IArithmeticOperand rightOpnd;
            int nextI;
            int numRightPs;
            if (currOpndNesting > prevConn.getNestingNumber()) {
                // Nesting increased: the right operand is itself a parenthesized sub-term.
                SubTerm subTerm = convertSubTerm(term, i);
                rightOpnd = subTerm;
                numRightPs = subTerm.numRightPs;
                nextI = subTerm.endIdx + 1;
            } else {
                rightOpnd = term.getOperand(i);
                numRightPs = currOpndNesting - term.getConnector(i, i + 1).getNestingNumber();
                nextI = i + 1;
            }
            TermStringOpnd resLit = convertBasicTerm(leftOpnd, prevConn.getOperator(), rightOpnd);
            if (resLit == TermStringOpnd.INVALID_TERM_STRING_OPND) {
                return SubTerm.INVALID_SUBTERM;
            }
            res = getLeftParentheses(numLeftPs) + resLit.getString();
            termType = resLit.getTermType();
            res += getRightParentheses(numRightPs);
            numLeftPs -= numRightPs;
            i = nextI;
        }
        TermStringOpnd termStringOpnd = new TermStringOpnd(res, termType);
        // numLeftPs is negative (or zero) here, so -numLeftPs is the count of unmatched right
        // parentheses this sub-term contributes to its caller.
        return new SubTerm(i - 1, termStringOpnd, -numLeftPs);
    }

    private String getLeftParentheses(int i) {
        return getParantheses(i, "(");
    }

    private String getRightParentheses(int i) {
        return getParantheses(i, ")");
    }

    // Repeats the given parenthesis character n times.
    private String getParantheses(int n, String paranthesis) {
        StringBuffer s = new StringBuffer();
        for (int i = 0; i < n; i++) {
            s.append(paranthesis);
        }
        return s.toString();
    }

    // Treats a numeric operand as a day-granularity date offset (interval) for mixed
    // date/numeric arithmetic.
    private TermStringOpnd numToDateOffset(IArithmeticOperand opnd) {
        TermStringOpnd strOpnd = convertOperand(opnd);
        String res = primitiveOperationProcessor.getIntervalString(strOpnd.getString(), TimeInterval
                .compoundEnum(DSInterval.Day));
        return new TermStringOpnd(res, TimeInterval.compoundEnum(DSInterval.Day));
    }

    // Converts a single binary operation left <op> right, coercing a numeric operand to a date
    // offset when combined with a date/timestamp/interval operand. Returns the invalid sentinel
    // when the operand types are incompatible with the operator.
    private TermStringOpnd convertBasicTerm(IArithmeticOperand leftOpnd, ArithmeticOperator operator,
            IArithmeticOperand rightOpnd) {
        TermType leftType = leftOpnd.getTermType();
        TermType rightType = rightOpnd.getTermType();
        TermType termType = TermType.getResultTermType(leftType, rightType, operator);
        if (termType == TermType.Invalid) {
            return TermStringOpnd.INVALID_TERM_STRING_OPND;
        }
        TermStringOpnd leftTermStrOpnd;
        TermStringOpnd rightTermStrOpnd;
        if (isNumericCompatible(leftType) && rightType == TermType.Numeric) {
            leftTermStrOpnd = convertOperand(leftOpnd);
            rightTermStrOpnd = numToDateOffset(rightOpnd);
        } else if (leftType == TermType.Numeric && isNumericCompatible(rightType)) {
            leftTermStrOpnd = numToDateOffset(leftOpnd);
            rightTermStrOpnd = convertOperand(rightOpnd);
        } else {
            leftTermStrOpnd = convertOperand(leftOpnd);
            rightTermStrOpnd = convertOperand(rightOpnd);
        }
        // if (leftType == TermType.DSInterval && rightType ==
        // TermType.DSInterval) {
        // database independent
        // return dsIntervalMath(leftTermStrOpnd, rightTermStrOpnd,
        // operator);
        // }
        return new TermStringOpnd(primitiveOperation(leftTermStrOpnd, operator, rightTermStrOpnd), termType);
    }

    // "Numeric compatible" here means a temporal type that can combine with a plain number.
    private boolean isNumericCompatible(TermType termType) {
        return termType == TermType.Date || termType == TermType.Timestamp || termType == TermType.DSInterval;
    }

    private String primitiveOperation(TermStringOpnd leftTermStrOpnd, ArithmeticOperator operator,
            TermStringOpnd rightTermStrOpnd) {
        return primitiveOperationProcessor.getResultString(leftTermStrOpnd, operator, rightTermStrOpnd);
    }

    // Converts a single operand to its string form, dispatching on its concrete type.
    // Date literals never reach here (see replaceDateLiterals).
    private TermStringOpnd convertOperand(IArithmeticOperand operand) {
        String termStr;
        // date literal won't appear here.
        if (operand instanceof INumericLiteral) {
            INumericLiteral literal = (INumericLiteral) operand;
            termStr = literal.getNumber();
        } else if (operand instanceof IDateOffsetLiteral) {
            IDateOffsetLiteral literal = (IDateOffsetLiteral) operand;
            termStr = literal.getOffset();
        } else if (operand instanceof SubTerm) {
            SubTerm subTerm = (SubTerm) operand;
            termStr = subTerm.getOperandString();
        } else if (operand instanceof IExpressionAttribute) {
            IExpressionAttribute expressionAttribute = (IExpressionAttribute) operand;
            termStr = aliasProvider.getAliasFor(expressionAttribute);
        } else if (operand instanceof TermStringOpnd) {
            TermStringOpnd termStringOpnd = (TermStringOpnd) operand;
            termStr = termStringOpnd.getString();
        } else {
            throw new RuntimeException("Can't occur.");
        }
        if (operand instanceof IDateOffset) {
            IDateOffset offset = (IDateOffset) operand;
            // Literal/attribute offsets still need database-specific interval formatting.
            if (operand instanceof ILiteral || operand instanceof IExpressionAttribute)
                termStr = primitiveOperationProcessor.getIntervalString(termStr, offset.getTimeInterval());
            return new TermStringOpnd(termStr, offset.getTimeInterval());
        } else {
            return new TermStringOpnd(termStr, operand.getTermType());
        }
    }

    // for testing
    final void setPrimitiveOperationProcessor(PrimitiveOperationProcessor primitiveOperationProcessor) {
        this.primitiveOperationProcessor = primitiveOperationProcessor;
    }

    final IAttributeAliasProvider getAliasProvider() {
        return aliasProvider;
    }

    final PrimitiveOperationProcessor getPrimitiveOperationProcessor() {
        return primitiveOperationProcessor;
    }

    // DS INTERVAL MATH
    /*
     * private static final int[] multipliers = {60, 24, 7};
     *
     * final TermStringOpnd dsIntervalMath(TermStringOpnd leftOpnd,
     * TermStringOpnd rightOpnd, ArithmeticOperator oper) { DSInterval
     * smallInterval = (DSInterval) leftOpnd.getTimeInterval(); DSInterval
     * bigInterval = (DSInterval) rightOpnd.getTimeInterval(); String smallS =
     * leftOpnd.getString(); String bigS = rightOpnd.getString(); int diff =
     * smallInterval.compareTo(bigInterval); if (diff > 0) { DSInterval temp =
     * smallInterval; smallInterval = bigInterval; bigInterval = temp;
     *
     * String tempS = smallS; smallS = bigS; bigS = tempS; }
     *
     * DSInterval[] intervals = DSInterval.values(); int smallIdx =
     * Arrays.binarySearch(intervals, smallInterval); int bigIdx =
     * Arrays.binarySearch(intervals, bigInterval);
     *
     * for (int i = bigIdx - 1; i >= smallIdx; i--) { bigS = bigS + "*" +
     * multipliers[i]; } String res = smallS + " " + oper.mathString() + " " +
     * bigS; return new TermStringOpnd(res, smallInterval); }
     */
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spanner.connection;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import com.google.api.core.SettableApiFuture;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.cloud.spanner.ErrorCode;
import com.google.cloud.spanner.MockSpannerServiceImpl.SimulatedExecutionTime;
import com.google.cloud.spanner.ResultSet;
import com.google.cloud.spanner.SpannerException;
import com.google.cloud.spanner.SpannerExceptionFactory;
import com.google.cloud.spanner.Statement;
import com.google.cloud.spanner.connection.AbstractConnectionImplTest.ConnectionConsumer;
import com.google.cloud.spanner.connection.ITAbstractSpannerTest.ITConnection;
import com.google.common.base.Stopwatch;
import com.google.common.collect.Collections2;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import com.google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata;
import com.google.spanner.admin.database.v1.UpdateDatabaseDdlRequest;
import com.google.spanner.v1.CommitRequest;
import com.google.spanner.v1.ExecuteSqlRequest;
import io.grpc.Status;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.threeten.bp.Duration;
@RunWith(JUnit4.class)
public class StatementTimeoutTest extends AbstractMockServerTest {
  // Statements used in the tests below; the mock server decides how slow each RPC is.
  private static final String SLOW_SELECT = "SELECT foo FROM bar";
  private static final String INVALID_SELECT = "SELECT FROM bar"; // missing columns / *
  private static final String SLOW_DDL = "CREATE TABLE foo";
  private static final String FAST_DDL = "CREATE TABLE fast_table";
  private static final String SLOW_UPDATE = "UPDATE foo SET col1=1 WHERE id=2";
  /** Execution time for statements that have been defined as slow. */
  private static final int EXECUTION_TIME_SLOW_STATEMENT = 10_000;
  /**
   * This timeout should be high enough that it will never be exceeded, even on a slow build
   * environment, but still significantly lower than the expected execution time of the slow
   * statements.
   */
  private static final long TIMEOUT_FOR_FAST_STATEMENTS = 1000L;
  /**
   * This timeout should be low enough that it will not make the test case unnecessarily slow, but
   * still high enough that it would normally not be exceeded for a statement that is executed
   * directly.
   */
  private static final int TIMEOUT_FOR_SLOW_STATEMENTS = 50;
  /**
   * Creates a connection to the mock server, configured with a very fast polling algorithm for
   * DDL operations so that tests for long-running DDL statements finish quickly.
   */
  ITConnection createConnection() {
    ConnectionOptions options =
        ConnectionOptions.newBuilder()
            .setUri(getBaseUrl())
            .setConfigurator(
                optionsConfigurator ->
                    optionsConfigurator
                        .getDatabaseAdminStubSettingsBuilder()
                        .updateDatabaseDdlOperationSettings()
                        .setPollingAlgorithm(
                            OperationTimedPollAlgorithm.create(
                                RetrySettings.newBuilder()
                                    .setInitialRetryDelay(Duration.ofMillis(1L))
                                    .setMaxRetryDelay(Duration.ofMillis(1L))
                                    .setRetryDelayMultiplier(1.0)
                                    .setTotalTimeout(Duration.ofMinutes(10L))
                                    .build())))
            .build();
    return createITConnection(options);
  }
  /** Resets all simulated execution times on the mock server after each test. */
  @After
  public void clearExecutionTimes() {
    mockSpanner.removeAllExecutionTimes();
  }
@Test
public void testTimeoutExceptionReadOnlyAutocommit() {
mockSpanner.setExecuteStreamingSqlExecutionTime(
SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
try (Connection connection = createConnection()) {
connection.setAutocommit(true);
connection.setReadOnly(true);
connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
SpannerException e =
assertThrows(
SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
}
}
  /** Consecutive slow queries each time out; a later fast query on the same connection works. */
  @Test
  public void testTimeoutExceptionReadOnlyAutocommitMultipleStatements() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setReadOnly(true);
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      // assert that multiple statements after each other also time out
      for (int i = 0; i < 2; i++) {
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
      }
      // try to do a new query that is fast.
      mockSpanner.removeAllExecutionTimes();
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
        assertNotNull(rs);
      }
    }
  }
@Test
public void testTimeoutExceptionReadOnlyTransactional() {
mockSpanner.setExecuteStreamingSqlExecutionTime(
SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
try (Connection connection = createConnection()) {
connection.setReadOnly(true);
connection.setAutocommit(false);
connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
SpannerException e =
assertThrows(
SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
}
}
  /**
   * Consecutive slow queries in a read-only transaction each time out; after a rollback the
   * connection is usable again for fast queries.
   */
  @Test
  public void testTimeoutExceptionReadOnlyTransactionMultipleStatements() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setReadOnly(true);
      connection.setAutocommit(false);
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      // assert that multiple statements after each other also time out
      for (int i = 0; i < 2; i++) {
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
      }
      // do a rollback without any chance of a timeout
      connection.clearStatementTimeout();
      connection.rollback();
      // try to do a new query that is fast.
      mockSpanner.removeAllExecutionTimes();
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
        assertNotNull(rs);
      }
    }
  }
@Test
public void testTimeoutExceptionReadWriteAutocommit() {
mockSpanner.setExecuteStreamingSqlExecutionTime(
SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
try (Connection connection = createConnection()) {
connection.setAutocommit(true);
connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
SpannerException e =
assertThrows(
SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
}
}
  /** Consecutive slow queries each time out; a later fast query on the same connection works. */
  @Test
  public void testTimeoutExceptionReadWriteAutocommitMultipleStatements() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      // assert that multiple statements after each other also time out
      for (int i = 0; i < 2; i++) {
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
      }
      // try to do a new query that is fast.
      mockSpanner.removeAllExecutionTimes();
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
        assertNotNull(rs);
      }
    }
  }
@Test
public void testTimeoutExceptionReadWriteAutocommitSlowUpdate() {
mockSpanner.setExecuteSqlExecutionTime(
SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
try (Connection connection = createConnection()) {
connection.setAutocommit(true);
connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
SpannerException e =
assertThrows(SpannerException.class, () -> connection.execute(INSERT_STATEMENT));
assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
}
}
  /** Consecutive slow updates each time out; a later fast update on the same connection works. */
  @Test
  public void testTimeoutExceptionReadWriteAutocommitSlowUpdateMultipleStatements() {
    mockSpanner.setExecuteSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      // assert that multiple statements after each other also time out
      for (int i = 0; i < 2; i++) {
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.execute(Statement.of(SLOW_UPDATE)));
        assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
      }
      // try to do a new update that is fast.
      mockSpanner.removeAllExecutionTimes();
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      assertEquals(UPDATE_COUNT, connection.execute(INSERT_STATEMENT).getUpdateCount().longValue());
    }
  }
  /**
   * A slow Commit RPC does not affect transactional mode (the commit is never sent before
   * rollback), but times out an autocommit update, which commits implicitly.
   */
  @Test
  public void testTimeoutExceptionReadWriteAutocommitSlowCommit() {
    mockSpanner.setCommitExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      // First verify that the fast update does not timeout when in transactional mode (as it is the
      // commit that is slow).
      connection.setAutocommit(false);
      connection.execute(INSERT_STATEMENT);
      connection.rollback();
      // Then verify that the update does timeout when executed in autocommit mode, as the commit
      // gRPC call will be slow.
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      connection.setAutocommit(true);
      SpannerException e =
          assertThrows(SpannerException.class, () -> connection.execute(INSERT_STATEMENT));
      assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
    }
  }
  /** Consecutive autocommit updates time out on the slow commit; a read-only query still works. */
  @Test
  public void testTimeoutExceptionReadWriteAutocommitSlowCommitMultipleStatements() {
    mockSpanner.setCommitExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      // assert that multiple statements after each other also time out
      for (int i = 0; i < 2; i++) {
        SpannerException e =
            assertThrows(SpannerException.class, () -> connection.execute(INSERT_STATEMENT));
        assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
      }
      // try to do a query in autocommit mode. This will use a single-use read-only transaction that
      // does not need to commit, i.e. it should succeed.
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
        assertNotNull(rs);
      }
    }
  }
  /** Partitioned DML: a fast statement succeeds, a slowed-down one times out. */
  @Test
  public void testTimeoutExceptionReadWriteAutocommitPartitioned() {
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setAutocommitDmlMode(AutocommitDmlMode.PARTITIONED_NON_ATOMIC);
      // First verify that the statement will not timeout by default.
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      connection.execute(INSERT_STATEMENT);
      // Now slow down the execution and verify that it times out. PDML uses the ExecuteStreamingSql
      // RPC.
      mockSpanner.setExecuteStreamingSqlExecutionTime(
          SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      SpannerException e =
          assertThrows(SpannerException.class, () -> connection.execute(INSERT_STATEMENT));
      assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
    }
  }
@Test
public void testTimeoutExceptionReadWriteTransactional() {
mockSpanner.setExecuteStreamingSqlExecutionTime(
SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
try (Connection connection = createConnection()) {
connection.setAutocommit(false);
connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
SpannerException e =
assertThrows(
SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
}
}
  /**
   * In a read-write transaction, the first slow query times out; subsequent statements on the
   * now-broken transaction fail with FAILED_PRECONDITION until the transaction is rolled back.
   */
  @Test
  public void testTimeoutExceptionReadWriteTransactionMultipleStatements() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(false);
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      // Assert that multiple statements after each other will timeout the first time, and then
      // throw a SpannerException with code FAILED_PRECONDITION.
      for (int i = 0; i < 2; i++) {
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        if (i == 0) {
          assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
        } else {
          assertEquals(ErrorCode.FAILED_PRECONDITION, e.getErrorCode());
        }
      }
      // do a rollback without any chance of a timeout
      connection.clearStatementTimeout();
      connection.rollback();
      // try to do a new query that is fast.
      mockSpanner.removeAllExecutionTimes();
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
        assertNotNull(rs);
      }
    }
  }
@Test
public void testTimeoutExceptionReadWriteTransactionalSlowCommit() {
mockSpanner.setCommitExecutionTime(
SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
try (Connection connection = createConnection()) {
connection.setAutocommit(false);
connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
assertNotNull(rs);
}
connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
SpannerException e = assertThrows(SpannerException.class, () -> connection.commit());
assertEquals(ErrorCode.DEADLINE_EXCEEDED, e.getErrorCode());
}
}
  /** A slow Rollback RPC must not surface as an exception to the caller. */
  @Test
  public void testTimeoutExceptionReadWriteTransactionalSlowRollback() {
    mockSpanner.setRollbackExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(false);
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
        assertNotNull(rs);
      }
      connection.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
      // Rollback timeouts are not propagated as exceptions, as all errors during a Rollback RPC are
      // ignored by the client library.
      connection.rollback();
    }
  }
private static final class ConnectionReadOnlyAutocommit implements ConnectionConsumer {
@Override
public void accept(Connection t) {
t.setAutocommit(true);
t.setReadOnly(true);
}
}
  /** Interrupting a slow query must cancel it in read-only autocommit mode. */
  @Test
  public void testInterruptedExceptionReadOnlyAutocommit()
      throws InterruptedException, ExecutionException {
    testInterruptedException(new ConnectionReadOnlyAutocommit());
  }
private static final class ConnectionReadOnlyTransactional implements ConnectionConsumer {
@Override
public void accept(Connection t) {
t.setReadOnly(true);
t.setAutocommit(false);
}
}
  /** Interrupting a slow query must cancel it in a read-only transaction. */
  @Test
  public void testInterruptedExceptionReadOnlyTransactional()
      throws InterruptedException, ExecutionException {
    testInterruptedException(new ConnectionReadOnlyTransactional());
  }
private static final class ConnectionReadWriteAutocommit implements ConnectionConsumer {
@Override
public void accept(Connection t) {
t.setAutocommit(true);
t.setReadOnly(false);
}
}
  /** Interrupting a slow query must cancel it in read-write autocommit mode. */
  @Test
  public void testInterruptedExceptionReadWriteAutocommit()
      throws InterruptedException, ExecutionException {
    testInterruptedException(new ConnectionReadWriteAutocommit());
  }
private static final class ConnectionReadWriteTransactional implements ConnectionConsumer {
@Override
public void accept(Connection t) {
t.setAutocommit(false);
t.setReadOnly(false);
}
}
  /** Interrupting a slow query must cancel it in a read-write transaction. */
  @Test
  public void testInterruptedExceptionReadWriteTransactional()
      throws InterruptedException, ExecutionException {
    testInterruptedException(new ConnectionReadWriteTransactional());
  }
private void testInterruptedException(final ConnectionConsumer consumer)
throws InterruptedException, ExecutionException {
mockSpanner.setExecuteStreamingSqlExecutionTime(
SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
CountDownLatch latch = new CountDownLatch(1);
SettableApiFuture<Thread> thread = SettableApiFuture.create();
ExecutorService executor = Executors.newSingleThreadExecutor();
try {
Future<Boolean> future =
executor.submit(
() -> {
try (Connection connection = createConnection()) {
consumer.accept(connection);
connection.setStatementTimeout(10000L, TimeUnit.MILLISECONDS);
thread.set(Thread.currentThread());
latch.countDown();
try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {}
return false;
} catch (SpannerException e) {
return e.getErrorCode() == ErrorCode.CANCELLED;
}
});
latch.await(10L, TimeUnit.SECONDS);
waitForRequestsToContain(ExecuteSqlRequest.class);
thread.get().interrupt();
assertTrue(future.get());
} finally {
executor.shutdownNow();
}
}
@Test
public void testInvalidQueryReadOnlyAutocommit() {
mockSpanner.setExecuteStreamingSqlExecutionTime(
SimulatedExecutionTime.ofException(Status.INVALID_ARGUMENT.asRuntimeException()));
try (Connection connection = createConnection()) {
connection.setAutocommit(true);
connection.setReadOnly(true);
connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
SpannerException e =
assertThrows(
SpannerException.class, () -> connection.executeQuery(Statement.of(INVALID_SELECT)));
assertEquals(ErrorCode.INVALID_ARGUMENT, e.getErrorCode());
}
}
  /** An invalid query returns INVALID_ARGUMENT in a read-only transaction. */
  @Test
  public void testInvalidQueryReadOnlyTransactional() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofException(Status.INVALID_ARGUMENT.asRuntimeException()));
    try (Connection connection = createConnection()) {
      connection.setReadOnly(true);
      connection.setAutocommit(false);
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      SpannerException e =
          assertThrows(
              SpannerException.class, () -> connection.executeQuery(Statement.of(INVALID_SELECT)));
      assertEquals(ErrorCode.INVALID_ARGUMENT, e.getErrorCode());
    }
  }
  /** An invalid query returns INVALID_ARGUMENT in read-write autocommit mode. */
  @Test
  public void testInvalidQueryReadWriteAutocommit() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofException(Status.INVALID_ARGUMENT.asRuntimeException()));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      SpannerException e =
          assertThrows(
              SpannerException.class, () -> connection.executeQuery(Statement.of(INVALID_SELECT)));
      assertEquals(ErrorCode.INVALID_ARGUMENT, e.getErrorCode());
    }
  }
  /** An invalid query returns INVALID_ARGUMENT in a read-write transaction. */
  @Test
  public void testInvalidQueryReadWriteTransactional() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofException(Status.INVALID_ARGUMENT.asRuntimeException()));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(false);
      connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
      SpannerException e =
          assertThrows(
              SpannerException.class, () -> connection.executeQuery(Statement.of(INVALID_SELECT)));
      assertEquals(ErrorCode.INVALID_ARGUMENT, e.getErrorCode());
    }
  }
  /**
   * Blocks until the mock Spanner service has received a request of the given type, converting
   * interrupts and timeouts into SpannerExceptions.
   */
  static void waitForRequestsToContain(Class<? extends AbstractMessage> request) {
    try {
      mockSpanner.waitForRequestsToContain(request, EXECUTION_TIME_SLOW_STATEMENT);
    } catch (InterruptedException e) {
      throw SpannerExceptionFactory.propagateInterrupt(e);
    } catch (TimeoutException e) {
      throw SpannerExceptionFactory.propagateTimeout(e);
    }
  }
private void waitForDdlRequestOnServer() {
try {
Stopwatch watch = Stopwatch.createStarted();
while (Collections2.filter(
mockDatabaseAdmin.getRequests(),
input -> input.getClass().equals(UpdateDatabaseDdlRequest.class))
.size()
== 0) {
Thread.sleep(1L);
if (watch.elapsed(TimeUnit.MILLISECONDS) > EXECUTION_TIME_SLOW_STATEMENT) {
throw new TimeoutException("Timeout while waiting for DDL request");
}
}
} catch (InterruptedException e) {
throw SpannerExceptionFactory.propagateInterrupt(e);
} catch (TimeoutException e) {
throw SpannerExceptionFactory.propagateTimeout(e);
}
}
  /** Cancelling a running slow query from another thread yields CANCELLED. */
  @Test
  public void testCancelReadOnlyAutocommit() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      connection.setReadOnly(true);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel the statement as soon as the server has received it.
        executor.execute(
            () -> {
              waitForRequestsToContain(ExecuteSqlRequest.class);
              connection.cancel();
            });
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
      } finally {
        executor.shutdownNow();
      }
    }
  }
@Test
public void testCancelReadOnlyAutocommitMultipleStatements() {
mockSpanner.setExecuteStreamingSqlExecutionTime(
SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
try (Connection connection = createConnection()) {
connection.setAutocommit(true);
connection.setReadOnly(true);
ExecutorService executor = Executors.newSingleThreadExecutor();
try {
executor.execute(
() -> {
waitForRequestsToContain(ExecuteSqlRequest.class);
connection.cancel();
});
SpannerException e =
assertThrows(
SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
assertThat(e.getErrorCode(), is(equalTo(ErrorCode.CANCELLED)));
mockSpanner.removeAllExecutionTimes();
connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
assertNotNull(rs);
}
} finally {
executor.shutdownNow();
}
}
}
  /** Cancelling a running slow query in a read-only transaction yields CANCELLED. */
  @Test
  public void testCancelReadOnlyTransactional() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setReadOnly(true);
      connection.setAutocommit(false);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel the statement as soon as the server has received it.
        executor.execute(
            () -> {
              waitForRequestsToContain(ExecuteSqlRequest.class);
              connection.cancel();
            });
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
      } finally {
        executor.shutdownNow();
      }
    }
  }
  /**
   * After a cancelled query, the read-only transaction remains usable for fast queries, both
   * before and after a rollback.
   */
  @Test
  public void testCancelReadOnlyTransactionalMultipleStatements() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setReadOnly(true);
      connection.setAutocommit(false);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel the statement as soon as the server has received it.
        executor.execute(
            () -> {
              waitForRequestsToContain(ExecuteSqlRequest.class);
              connection.cancel();
            });
        // NOTE(review): this test uses SLOW_SELECT while the sibling tests use
        // SELECT_RANDOM_STATEMENT — presumably intentional; confirm the mock handles it.
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(Statement.of(SLOW_SELECT)));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
        // try to do a new query that is fast.
        mockSpanner.removeAllExecutionTimes();
        connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
        try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
          assertNotNull(rs);
        }
        // rollback and do another fast query
        connection.rollback();
        try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
          assertNotNull(rs);
        }
      } finally {
        executor.shutdownNow();
      }
    }
  }
  /** Cancelling a running slow query in read-write autocommit mode yields CANCELLED. */
  @Test
  public void testCancelReadWriteAutocommit() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel the statement as soon as the server has received it.
        executor.execute(
            () -> {
              waitForRequestsToContain(ExecuteSqlRequest.class);
              connection.cancel();
            });
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
      } finally {
        executor.shutdownNow();
      }
    }
  }
  /** After a cancelled query in autocommit mode, the connection still runs fast queries. */
  @Test
  public void testCancelReadWriteAutocommitMultipleStatements() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel the statement as soon as the server has received it.
        executor.execute(
            () -> {
              waitForRequestsToContain(ExecuteSqlRequest.class);
              connection.cancel();
            });
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
        // try to do a new query that is fast.
        mockSpanner.removeAllExecutionTimes();
        connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
        try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
          assertNotNull(rs);
        }
      } finally {
        executor.shutdownNow();
      }
    }
  }
  /** Cancelling a running slow DML statement yields CANCELLED. */
  @Test
  public void testCancelReadWriteAutocommitSlowUpdate() {
    mockSpanner.setExecuteSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel the statement as soon as the server has received it.
        executor.execute(
            () -> {
              waitForRequestsToContain(ExecuteSqlRequest.class);
              connection.cancel();
            });
        SpannerException e =
            assertThrows(SpannerException.class, () -> connection.execute(INSERT_STATEMENT));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
      } finally {
        executor.shutdownNow();
      }
    }
  }
  /** Cancelling during a slow implicit commit of an autocommit update yields CANCELLED. */
  @Test
  public void testCancelReadWriteAutocommitSlowCommit() {
    mockSpanner.setCommitExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(true);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel as soon as the Commit RPC has reached the server.
        executor.execute(
            () -> {
              waitForRequestsToContain(CommitRequest.class);
              connection.cancel();
            });
        SpannerException e =
            assertThrows(SpannerException.class, () -> connection.execute(INSERT_STATEMENT));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
      } finally {
        executor.shutdownNow();
      }
    }
  }
  /** Cancelling a running slow query in a read-write transaction yields CANCELLED. */
  @Test
  public void testCancelReadWriteTransactional() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(false);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel the statement as soon as the server has received it.
        executor.execute(
            () -> {
              waitForRequestsToContain(ExecuteSqlRequest.class);
              connection.cancel();
            });
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
      } finally {
        executor.shutdownNow();
      }
    }
  }
  /**
   * After a cancelled query in a read-write transaction, the transaction must be rolled back;
   * afterwards fast queries succeed again.
   */
  @Test
  public void testCancelReadWriteTransactionalMultipleStatements() {
    mockSpanner.setExecuteStreamingSqlExecutionTime(
        SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
    try (Connection connection = createConnection()) {
      connection.setAutocommit(false);
      ExecutorService executor = Executors.newSingleThreadExecutor();
      try {
        // Cancel the statement as soon as the server has received it.
        executor.execute(
            () -> {
              waitForRequestsToContain(ExecuteSqlRequest.class);
              connection.cancel();
            });
        SpannerException e =
            assertThrows(
                SpannerException.class, () -> connection.executeQuery(SELECT_RANDOM_STATEMENT));
        assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
        // Rollback the transaction as it is no longer usable.
        connection.rollback();
        // Try to do a new query that is fast.
        mockSpanner.removeAllExecutionTimes();
        connection.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
        try (ResultSet rs = connection.executeQuery(SELECT_RANDOM_STATEMENT)) {
          assertNotNull(rs);
        }
      } finally {
        executor.shutdownNow();
      }
    }
  }
  /** Registers a single never-finishing DDL operation on the mock admin service. */
  static void addSlowMockDdlOperation() {
    addSlowMockDdlOperations(1);
  }
  /** Registers {@code count} never-finishing DDL operations on the mock admin service. */
  static void addSlowMockDdlOperations(int count) {
    addMockDdlOperations(count, false);
  }
  /** Registers a single already-finished DDL operation on the mock admin service. */
  static void addFastMockDdlOperation() {
    addFastMockDdlOperations(1);
  }
  /** Registers {@code count} already-finished DDL operations on the mock admin service. */
  static void addFastMockDdlOperations(int count) {
    addMockDdlOperations(count, true);
  }
  /**
   * Registers {@code count} mock DDL operation responses. When {@code done} is false the
   * operation never completes, forcing the client to poll until it times out or is cancelled.
   */
  static void addMockDdlOperations(int count, boolean done) {
    for (int i = 0; i < count; i++) {
      mockDatabaseAdmin.addResponse(
          Operation.newBuilder()
              .setMetadata(
                  Any.pack(
                      UpdateDatabaseDdlMetadata.newBuilder()
                          .addStatements(SLOW_DDL)
                          .setDatabase("projects/proj/instances/inst/databases/db")
                          .build()))
              .setName("projects/proj/instances/inst/databases/db/operations/1")
              .setDone(done)
              .setResponse(Any.pack(Empty.getDefaultInstance()))
              .build());
    }
  }
@Test
public void testCancelDdlBatch() {
addSlowMockDdlOperation();
try (Connection connection = createConnection()) {
connection.setAutocommit(false);
connection.startBatchDdl();
connection.execute(Statement.of(SLOW_DDL));
ExecutorService executor = Executors.newSingleThreadExecutor();
try {
executor.execute(
() -> {
waitForDdlRequestOnServer();
connection.cancel();
});
SpannerException e = assertThrows(SpannerException.class, () -> connection.runBatch());
assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
} finally {
executor.shutdownNow();
}
}
}
/**
 * Verifies that cancelling the connection from another thread while a DDL statement is being
 * executed in autocommit mode makes the execution fail with {@link ErrorCode#CANCELLED}.
 */
@Test
public void testCancelDdlAutocommit() {
  // Queue a DDL operation on the mock server that does not finish immediately.
  addSlowMockDdlOperation();
  try (Connection connection = createConnection()) {
    connection.setAutocommit(true);
    ExecutorService executor = Executors.newSingleThreadExecutor();
    try {
      // Cancel from a background thread once the DDL request is on the (mock) server.
      executor.execute(
          () -> {
            waitForDdlRequestOnServer();
            connection.cancel();
          });
      SpannerException e =
          assertThrows(SpannerException.class, () -> connection.execute(Statement.of(SLOW_DDL)));
      assertEquals(ErrorCode.CANCELLED, e.getErrorCode());
    } finally {
      executor.shutdownNow();
    }
  }
}
/** A slow DDL statement executed in autocommit mode must fail with DEADLINE_EXCEEDED. */
@Test
public void testTimeoutExceptionDdlAutocommit() {
  addSlowMockDdlOperations(10);
  try (Connection conn = createConnection()) {
    conn.setAutocommit(true);
    conn.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
    SpannerException timeout =
        assertThrows(SpannerException.class, () -> conn.execute(Statement.of(SLOW_DDL)));
    assertEquals(ErrorCode.DEADLINE_EXCEEDED, timeout.getErrorCode());
  }
}
/**
 * Consecutive slow DDL statements in autocommit mode must each time out, and the connection
 * must remain usable for a subsequent fast DDL statement.
 */
@Test
public void testTimeoutExceptionDdlAutocommitMultipleStatements() {
  addSlowMockDdlOperations(20);
  try (Connection conn = createConnection()) {
    conn.setAutocommit(true);
    conn.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
    // Both attempts should time out one after the other.
    int attempt = 0;
    while (attempt < 2) {
      SpannerException timeout =
          assertThrows(SpannerException.class, () -> conn.execute(Statement.of(SLOW_DDL)));
      assertEquals(ErrorCode.DEADLINE_EXCEEDED, timeout.getErrorCode());
      attempt++;
    }
    // A fast DDL statement should now succeed on the same connection.
    mockDatabaseAdmin.reset();
    addFastMockDdlOperation();
    conn.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
    assertNotNull(conn.execute(Statement.of(FAST_DDL)));
  }
}
/**
 * In a DDL batch the statements are only buffered locally; the statement timeout applies when
 * {@code runBatch()} sends them to the server.
 */
@Test
public void testTimeoutExceptionDdlBatch() {
  addSlowMockDdlOperations(10);
  try (Connection conn = createConnection()) {
    conn.setAutocommit(false);
    conn.startBatchDdl();
    conn.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
    // Buffered locally: this call does not hit the server and cannot time out.
    conn.execute(Statement.of(SLOW_DDL));
    // Sending the batch to the server should exceed the deadline.
    SpannerException timeout = assertThrows(SpannerException.class, conn::runBatch);
    assertEquals(ErrorCode.DEADLINE_EXCEEDED, timeout.getErrorCode());
  }
}
/**
 * Consecutive slow DDL batches must each time out, and the connection must remain usable for a
 * subsequent fast DDL batch.
 */
@Test
public void testTimeoutExceptionDdlBatchMultipleStatements() {
  addSlowMockDdlOperations(20);
  try (Connection conn = createConnection()) {
    conn.setAutocommit(false);
    conn.setStatementTimeout(TIMEOUT_FOR_SLOW_STATEMENTS, TimeUnit.MILLISECONDS);
    // Two batches in a row should both fail on runBatch() with a deadline error.
    int attempt = 0;
    while (attempt < 2) {
      conn.startBatchDdl();
      conn.execute(Statement.of(SLOW_DDL));
      SpannerException timeout = assertThrows(SpannerException.class, conn::runBatch);
      assertEquals(ErrorCode.DEADLINE_EXCEEDED, timeout.getErrorCode());
      attempt++;
    }
    // A fast DDL batch should now succeed on the same connection.
    mockDatabaseAdmin.reset();
    addFastMockDdlOperation();
    conn.setStatementTimeout(TIMEOUT_FOR_FAST_STATEMENTS, TimeUnit.MILLISECONDS);
    conn.startBatchDdl();
    assertNotNull(conn.execute(Statement.of(FAST_DDL)));
    conn.runBatch();
  }
}
/** Setting a statement timeout must be accepted for every supported {@link TimeUnit}. */
@Test
public void testTimeoutDifferentTimeUnits() {
  mockSpanner.setExecuteStreamingSqlExecutionTime(
      SimulatedExecutionTime.ofMinimumAndRandomTime(EXECUTION_TIME_SLOW_STATEMENT, 0));
  try (Connection conn = createConnection()) {
    conn.setAutocommit(true);
    for (TimeUnit supportedUnit : ReadOnlyStalenessUtil.SUPPORTED_UNITS) {
      // Only set the timeout; executing a statement per unit would needlessly slow the build.
      conn.setStatementTimeout(1L, supportedUnit);
    }
  }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.kafka;
import com.google.common.base.Preconditions;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.RetryNTimes;
import org.apache.storm.Config;
import org.apache.storm.kafka.trident.GlobalPartitionInformation;
import org.apache.storm.utils.Utils;
import org.json.simple.JSONValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.SocketTimeoutException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Reads Kafka broker and partition metadata from ZooKeeper for one topic (or a set of topics
 * matching a wildcard pattern).
 */
public class DynamicBrokersReader {

    public static final Logger LOG = LoggerFactory.getLogger(DynamicBrokersReader.class);

    private CuratorFramework _curator;
    private String _zkPath;
    private String _topic;
    private Boolean _isWildcardTopic;

    /**
     * Creates a reader and opens the Curator/ZooKeeper connection immediately.
     *
     * @param conf topology configuration; must contain the Storm ZooKeeper timeout/retry settings
     * @param zkStr ZooKeeper connect string
     * @param zkPath root path under which the Kafka broker data is stored
     * @param topic topic name, or a regex when {@code kafka.topic.wildcard.match} is set
     * @throws RuntimeException when the ZooKeeper connection cannot be established
     */
    public DynamicBrokersReader(Map conf, String zkStr, String zkPath, String topic) {
        // Check required parameters
        Preconditions.checkNotNull(conf, "conf cannot be null");
        validateConfig(conf);
        Preconditions.checkNotNull(zkStr, "zkString cannot be null");
        Preconditions.checkNotNull(zkPath, "zkPath cannot be null");
        Preconditions.checkNotNull(topic, "topic cannot be null");
        _zkPath = zkPath;
        _topic = topic;
        _isWildcardTopic = Utils.getBoolean(conf.get("kafka.topic.wildcard.match"), false);
        try {
            _curator = CuratorFrameworkFactory.newClient(
                    zkStr,
                    Utils.getInt(conf.get(Config.STORM_ZOOKEEPER_SESSION_TIMEOUT)),
                    Utils.getInt(conf.get(Config.STORM_ZOOKEEPER_CONNECTION_TIMEOUT)),
                    new RetryNTimes(Utils.getInt(conf.get(Config.STORM_ZOOKEEPER_RETRY_TIMES)),
                            Utils.getInt(conf.get(Config.STORM_ZOOKEEPER_RETRY_INTERVAL))));
            _curator.start();
        } catch (Exception ex) {
            LOG.error("Couldn't connect to zookeeper", ex);
            throw new RuntimeException(ex);
        }
    }

    /**
     * Get all partitions with their current leaders.
     *
     * @return one {@link GlobalPartitionInformation} per matched topic
     * @throws SocketTimeoutException when ZooKeeper does not answer in time
     */
    public List<GlobalPartitionInformation> getBrokerInfo() throws SocketTimeoutException {
        List<String> topics = getTopics();
        List<GlobalPartitionInformation> partitions = new ArrayList<GlobalPartitionInformation>();

        for (String topic : topics) {
            GlobalPartitionInformation globalPartitionInformation = new GlobalPartitionInformation(topic, this._isWildcardTopic);
            try {
                int numPartitionsForTopic = getNumPartitions(topic);
                String brokerInfoPath = brokerPath();
                for (int partition = 0; partition < numPartitionsForTopic; partition++) {
                    int leader = getLeaderFor(topic, partition);
                    String path = brokerInfoPath + "/" + leader;
                    try {
                        byte[] brokerData = _curator.getData().forPath(path);
                        Broker hp = getBrokerHost(brokerData);
                        globalPartitionInformation.addPartition(partition, hp);
                    } catch (org.apache.zookeeper.KeeperException.NoNodeException e) {
                        // The broker id node may disappear between reading the partition state
                        // and reading the broker host; skip this partition's leader.
                        LOG.error("Node {} does not exist ", path);
                    }
                }
            } catch (SocketTimeoutException e) {
                throw e;
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            // Parameterized logging avoids building the message when INFO is disabled.
            LOG.info("Read partition info from zookeeper: {}", globalPartitionInformation);
            partitions.add(globalPartitionInformation);
        }
        return partitions;
    }

    /**
     * Returns the number of partitions of the given topic by counting the children of its
     * partitions node.
     */
    private int getNumPartitions(String topic) {
        try {
            String topicBrokersPath = partitionPath(topic);
            List<String> children = _curator.getChildren().forPath(topicBrokersPath);
            return children.size();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the topics this reader covers: the configured topic itself, or all topics under
     * {@link #topicsPath()} whose name matches the configured regex in wildcard mode.
     */
    private List<String> getTopics() {
        List<String> topics = new ArrayList<String>();
        if (!_isWildcardTopic) {
            topics.add(_topic);
            return topics;
        } else {
            try {
                List<String> children = _curator.getChildren().forPath(topicsPath());
                for (String t : children) {
                    if (t.matches(_topic)) {
                        LOG.info("Found matching topic {}", t);
                        topics.add(t);
                    }
                }
                return topics;
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    /** ZooKeeper path that contains one child node per topic. */
    public String topicsPath() {
        return _zkPath + "/topics";
    }

    /** ZooKeeper path that contains one child node per partition of the given topic. */
    public String partitionPath(String topic) {
        return topicsPath() + "/" + topic + "/partitions";
    }

    /** ZooKeeper path that contains one child node per broker id. */
    public String brokerPath() {
        return _zkPath + "/ids";
    }

    /**
     * get /brokers/topics/distributedTopic/partitions/1/state
     * { "controller_epoch":4, "isr":[ 1, 0 ], "leader":1, "leader_epoch":1, "version":1 }
     *
     * @param topic the topic to look up
     * @param partition the partition number
     * @return the broker id of the current partition leader
     * @throws RuntimeException when no leader is elected (leader == -1) or the lookup fails
     */
    private int getLeaderFor(String topic, long partition) {
        try {
            String topicBrokersPath = partitionPath(topic);
            byte[] hostPortData = _curator.getData().forPath(topicBrokersPath + "/" + partition + "/state");
            // StandardCharsets.UTF_8 cannot throw UnsupportedEncodingException, unlike the
            // String-based charset name overload.
            Map<Object, Object> value = (Map<Object, Object>) JSONValue.parse(new String(hostPortData, StandardCharsets.UTF_8));
            Integer leader = ((Number) value.get("leader")).intValue();
            if (leader == -1) {
                throw new RuntimeException("No leader found for partition " + partition);
            }
            return leader;
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Closes the underlying Curator/ZooKeeper connection. */
    public void close() {
        _curator.close();
    }

    /**
     * [zk: localhost:2181(CONNECTED) 56] get /brokers/ids/0
     * { "host":"localhost", "jmx_port":9999, "port":9092, "version":1 }
     *
     * @param contents raw JSON bytes of the broker id node
     * @return host and port of the broker
     */
    private Broker getBrokerHost(byte[] contents) {
        // StandardCharsets.UTF_8 removes the impossible UnsupportedEncodingException path that
        // the previous String-named-charset overload forced us to catch.
        Map<Object, Object> value = (Map<Object, Object>) JSONValue.parse(new String(contents, StandardCharsets.UTF_8));
        String host = (String) value.get("host");
        // json-simple parses integers as Long; cast via Number to be robust against other
        // integral representations.
        Integer port = ((Number) value.get("port")).intValue();
        return new Broker(host, port);
    }

    /**
     * Validate required parameters in the input configuration Map.
     *
     * @param conf topology configuration to validate
     * @throws NullPointerException when a required ZooKeeper setting is missing
     */
    private void validateConfig(final Map conf) {
        Preconditions.checkNotNull(conf.get(Config.STORM_ZOOKEEPER_SESSION_TIMEOUT),
                "%s cannot be null", Config.STORM_ZOOKEEPER_SESSION_TIMEOUT);
        Preconditions.checkNotNull(conf.get(Config.STORM_ZOOKEEPER_CONNECTION_TIMEOUT),
                "%s cannot be null", Config.STORM_ZOOKEEPER_CONNECTION_TIMEOUT);
        Preconditions.checkNotNull(conf.get(Config.STORM_ZOOKEEPER_RETRY_TIMES),
                "%s cannot be null", Config.STORM_ZOOKEEPER_RETRY_TIMES);
        Preconditions.checkNotNull(conf.get(Config.STORM_ZOOKEEPER_RETRY_INTERVAL),
                "%s cannot be null", Config.STORM_ZOOKEEPER_RETRY_INTERVAL);
    }
}
| |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo;
import org.drools.core.RuleBaseConfiguration;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalKnowledgeRuntime;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.Memory;
import org.drools.core.common.MemoryFactory;
import org.drools.core.common.WorkingMemoryAction;
import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.marshalling.impl.MarshallerReaderContext;
import org.drools.core.marshalling.impl.MarshallerWriteContext;
import org.drools.core.marshalling.impl.ProtobufMessages;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.spi.PropagationContext;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.drools.core.util.BitMaskUtil.intersect;
/**
* A node that will add the propagation to the working memory actions queue,
* in order to allow multiple threads to concurrently assert objects to multiple
* entry points.
*/
public class PropagationQueuingNode extends ObjectSource
    implements
    ObjectSinkNode,
    MemoryFactory {

    private static final long serialVersionUID = 510l;

    // should we make this one configurable?
    // Upper bound on the number of queued actions flushed per propagateActions() call, so
    // that one node cannot monopolize the working memory action queue; any remainder is
    // re-queued (see the end of propagateActions()).
    private static final int PROPAGATION_SLICE_LIMIT = 1000;

    // Links for the doubly-linked sink list maintained by the owning ObjectSource.
    private ObjectSinkNode previousObjectSinkNode;
    private ObjectSinkNode nextObjectSinkNode;

    // The single flush action for this node; scheduled at most once at a time, guarded by
    // the per-working-memory isQueued flag in PropagationQueueingNodeMemory.
    private PropagateAction action;

    // Public no-arg constructor required for Externalizable.
    public PropagationQueuingNode() {
    }

    /**
     * Construct a <code>PropagationQueuingNode</code> that will queue up
     * propagations until it the engine reaches a safe propagation point,
     * when all the queued facts are propagated.
     *
     * @param id Node's ID
     * @param objectSource Node's object source
     * @param context build context supplying partition id and rulebase configuration
     */
    public PropagationQueuingNode(final int id,
                                  final ObjectSource objectSource,
                                  final BuildContext context) {
        super( id,
               context.getPartitionId(),
               context.getKnowledgeBase().getConfiguration().isMultithreadEvaluation(),
               objectSource,
               context.getKnowledgeBase().getConfiguration().getAlphaNodeHashingThreshold() );
        this.action = new PropagateAction( this );
        initDeclaredMask(context);
    }

    // This node declares an empty property-reactivity mask: it only queues propagations
    // and does not constrain on any property itself.
    @Override
    public long calculateDeclaredMask(List<String> settableProperties) {
        return 0;
    }

    // Externalizable: the super state is read first, then this node's flush action.
    // Read order must mirror writeExternal().
    public void readExternal( ObjectInput in ) throws IOException,
                                                      ClassNotFoundException {
        super.readExternal( in );
        action = (PropagateAction) in.readObject();
    }

    public void writeExternal( ObjectOutput out ) throws IOException {
        super.writeExternal( out );
        out.writeObject( action );
    }

    public short getType() {
        return NodeTypeEnums.PropagationQueuingNode;
    }

    /**
     * Asks the object source to update the given sink directly; this node is only a queue,
     * so it has nothing of its own to replay. Requires the queue to be empty.
     */
    public void updateSink( ObjectSink sink,
                            PropagationContext context,
                            InternalWorkingMemory workingMemory ) {
        final PropagationQueueingNodeMemory memory = (PropagationQueueingNodeMemory) workingMemory.getNodeMemory( this );

        // this is just sanity code. We may remove it in the future, but keeping it for now.
        if ( !memory.isEmpty() ) {
            throw new RuntimeException( "Error updating sink. Not safe to update sink as the PropagatingQueueingNode memory is not empty at node: " + this.toString() );
        }

        // as this node is simply a queue, ask object source to update the child sink directly
        this.source.updateSink( sink,
                                context,
                                workingMemory );
    }

    public void attach( BuildContext context ) {
        this.source.addObjectSink( this );
        // this node does not require update, so nothing else to do.
    }

    public ObjectSinkNode getNextObjectSinkNode() {
        return this.nextObjectSinkNode;
    }

    public ObjectSinkNode getPreviousObjectSinkNode() {
        return this.previousObjectSinkNode;
    }

    public void setNextObjectSinkNode( ObjectSinkNode next ) {
        this.nextObjectSinkNode = next;
    }

    public void setPreviousObjectSinkNode( ObjectSinkNode previous ) {
        this.previousObjectSinkNode = previous;
    }

    public boolean isObjectMemoryEnabled() {
        return true;
    }

    /**
     * Queues an assert for later propagation instead of propagating immediately, and
     * schedules the flush action if it is not already scheduled.
     */
    public void assertObject( InternalFactHandle factHandle,
                              PropagationContext context,
                              InternalWorkingMemory workingMemory ) {
        final PropagationQueueingNodeMemory memory = (PropagationQueueingNodeMemory) workingMemory.getNodeMemory( this );
        memory.addAction( new AssertAction( factHandle,
                                            context ) );

        // if not queued yet, we need to queue it up
        // compareAndSet guarantees only one thread schedules the flush action.
        if ( memory.isQueued().compareAndSet( false,
                                              true ) ) {
            workingMemory.queueWorkingMemoryAction( this.action );
        }
    }

    /**
     * Queues a retract for later propagation; see assertObject() for the scheduling scheme.
     */
    public void retractObject( InternalFactHandle handle,
                               PropagationContext context,
                               InternalWorkingMemory workingMemory ) {
        final PropagationQueueingNodeMemory memory = (PropagationQueueingNodeMemory) workingMemory.getNodeMemory( this );
        memory.addAction( new RetractAction( handle,
                                             context ) );

        // if not queued yet, we need to queue it up
        if ( memory.isQueued().compareAndSet( false,
                                              true ) ) {
            workingMemory.queueWorkingMemoryAction( this.action );
        }
    }

    /**
     * Translates a modify into per-sink queued actions. For each sink (cast to BetaNode
     * below) it matches the handle's existing right tuples by right-input OTN id: skipped
     * tuples are retracted, an existing match becomes a modify, and a missing one becomes
     * an assert — each subject to the property-reactivity mask intersection.
     */
    public void modifyObject(InternalFactHandle factHandle,
                             ModifyPreviousTuples modifyPreviousTuples,
                             PropagationContext context,
                             InternalWorkingMemory workingMemory) {
        final PropagationQueueingNodeMemory memory = (PropagationQueueingNodeMemory) workingMemory.getNodeMemory( this );

        //        for ( ObjectSink s : this.sink.getSinks() ) {
        //            RightTuple rightTuple = modifyPreviousTuples.removeRightTuple( (RightTupleSink) s );
        //            if ( rightTuple != null ) {
        //                rightTuple.reAdd();
        //                // RightTuple previously existed, so continue as modify
        //                memory.addAction( new ModifyToSinkAction( rightTuple,
        //                                                          context,
        //                                                          (RightTupleSink) s ) );
        //            } else {
        //                // RightTuple does not exist, so create and continue as assert
        //                memory.addAction( new AssertToSinkAction( factHandle,
        //                                                          context,
        //                                                          s ) );
        //            }
        //        }

        for ( ObjectSink s : this.sink.getSinks() ) {
            // NOTE(review): every sink is assumed to be a BetaNode here — confirm no other
            // sink type can be attached below a PropagationQueuingNode.
            BetaNode betaNode = (BetaNode) s;

            RightTuple rightTuple = modifyPreviousTuples.peekRightTuple();

            // Right tuples are ordered by right-input OTN id; drain the ones that precede
            // this sink's id.
            while ( rightTuple != null &&
                    rightTuple.getRightTupleSink().getRightInputOtnId().before( betaNode.getRightInputOtnId() ) ) {
                modifyPreviousTuples.removeRightTuple();

                // we skipped this node, due to alpha hashing, so retract now
                rightTuple.getRightTupleSink().retractRightTuple( rightTuple,
                                                                  context,
                                                                  workingMemory );
                rightTuple = modifyPreviousTuples.peekRightTuple();
            }

            if ( rightTuple != null && rightTuple.getRightTupleSink().getRightInputOtnId().equals( betaNode.getRightInputOtnId() ) ) {
                modifyPreviousTuples.removeRightTuple();
                rightTuple.reAdd();
                if ( intersect( context.getModificationMask(), betaNode.getRightInferredMask() ) ) {
                    // RightTuple previously existed, so continue as modify
                    memory.addAction( new ModifyToSinkAction( rightTuple,
                                                              context,
                                                              betaNode ) );
                }
            } else {
                if ( intersect( context.getModificationMask(), betaNode.getRightInferredMask() ) ) {
                    // RightTuple does not exist for this node, so create and continue as assert
                    memory.addAction( new AssertToSinkAction( factHandle,
                                                              context,
                                                              betaNode ) );
                }
            }
        }

        // if not queued yet, we need to queue it up
        if ( memory.isQueued().compareAndSet( false,
                                              true ) ) {
            workingMemory.queueWorkingMemoryAction( this.action );
        }
    }

    // No bypass for this node type: a modify is always routed through the queue.
    public void byPassModifyToBetaNode (final InternalFactHandle factHandle,
                                        final ModifyPreviousTuples modifyPreviousTuples,
                                        final PropagationContext context,
                                        final InternalWorkingMemory workingMemory) {
        modifyObject( factHandle, modifyPreviousTuples, context, workingMemory );
    }

    /**
     * Propagate all queued actions (asserts and retracts).
     * <p/>
     * This method implementation is based on optimistic behavior to avoid the
     * use of locks. There may eventually be a minimum wasted effort, but overall
     * it will be better than paying for the lock's cost.
     *
     * @param workingMemory the working memory whose node memory holds the queue
     */
    public void propagateActions( InternalWorkingMemory workingMemory ) {
        final PropagationQueueingNodeMemory memory = (PropagationQueueingNodeMemory) workingMemory.getNodeMemory( this );

        // first we clear up the action queued flag
        // (cleared BEFORE draining, so a concurrent enqueue can schedule a new flush)
        memory.isQueued().compareAndSet( true,
                                         false );

        // we limit the propagation to avoid a hang when this queue is never empty
        Action next;
        for ( int counter = 0; counter < PROPAGATION_SLICE_LIMIT; counter++ ) {
            next = memory.getNextAction();
            if ( next != null ) {
                next.execute( this.sink,
                              workingMemory );
            } else {
                break;
            }
        }

        // Re-schedule the flush if work is left over (slice limit hit, or actions queued
        // after the drain loop finished) and no one else has scheduled it yet.
        if ( memory.hasNextAction() && memory.isQueued().compareAndSet( false,
                                                                        true ) ) {
            // add action to the queue again.
            workingMemory.queueWorkingMemoryAction( this.action );
        }
    }

    public void setObjectMemoryEnabled( boolean objectMemoryOn ) {
        throw new UnsupportedOperationException( "PropagationQueueingNode must have its node memory enabled." );
    }

    public Memory createMemory(RuleBaseConfiguration config, InternalWorkingMemory wm) {
        return new PropagationQueueingNodeMemory();
    }

    // hashCode/equals delegate to the object source: two queuing nodes over the same
    // source are considered equal.
    public int hashCode() {
        return this.source.hashCode();
    }

    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    public boolean equals(final Object object) {
        if ( this == object ) {
            return true;
        }

        if ( object == null || !(object instanceof PropagationQueuingNode) ) {
            return false;
        }

        final PropagationQueuingNode other = (PropagationQueuingNode) object;
        return this.source.equals( other.source );
    }

    /**
     * Memory implementation for the node: a concurrent FIFO of pending actions plus the
     * flag that tells whether a flush is already scheduled for this node.
     */
    public static class PropagationQueueingNodeMemory
        implements
        Memory {

        private static final long serialVersionUID = 7372028632974484023L;

        private ConcurrentLinkedQueue<Action> queue;

        // "singleton" action - there is one of this for each node in each working memory
        private AtomicBoolean isQueued;

        public PropagationQueueingNodeMemory() {
            super();
            this.queue = new ConcurrentLinkedQueue<Action>();
            this.isQueued = new AtomicBoolean( false );
        }

        public boolean isEmpty() {
            return this.queue.isEmpty();
        }

        public void addAction( Action action ) {
            this.queue.add( action );
        }

        // Removes and returns the head of the queue, or null when empty.
        public Action getNextAction() {
            return this.queue.poll();
        }

        public boolean hasNextAction() {
            return this.queue.peek() != null;
        }

        public AtomicBoolean isQueued() {
            return isQueued;
        }

        public long getSize() {
            return this.queue.size();
        }

        public short getNodeType() {
            return NodeTypeEnums.PropagationQueueingNode;
        }

        // This memory does not participate in segment/linked-memory management; all the
        // related operations below are unsupported by design.
        public Memory getPrevious() {
            throw new UnsupportedOperationException();
        }

        public void setPrevious(Memory previous) {
            throw new UnsupportedOperationException();
        }

        public void setNext(Memory next) {
            throw new UnsupportedOperationException();
        }

        public Memory getNext() {
            throw new UnsupportedOperationException();
        }

        public SegmentMemory getSegmentMemory() {
            throw new UnsupportedOperationException();
        }

        public void setSegmentMemory(SegmentMemory segmentMemory) {
            throw new UnsupportedOperationException();
        }

        public void nullPrevNext() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Base class for queued propagations: captures the fact handle and propagation context,
     * and serializes them for Externalizable subclasses.
     */
    private static abstract class Action
        implements
        Externalizable {

        protected InternalFactHandle handle;
        protected PropagationContext context;

        public Action(InternalFactHandle handle,
                      PropagationContext context) {
            super();
            this.handle = handle;
            this.context = context;
        }

        // Read order must mirror writeExternal(): handle first, then context.
        public void readExternal( ObjectInput in ) throws IOException,
                                                          ClassNotFoundException {
            handle = (InternalFactHandle) in.readObject();
            context = (PropagationContext) in.readObject();
        }

        public void writeExternal( ObjectOutput out ) throws IOException {
            out.writeObject( handle );
            out.writeObject( context );
        }

        // Executes this queued propagation against the node's sink propagator.
        public abstract void execute( final ObjectSinkPropagator sink,
                                      final InternalWorkingMemory workingMemory );
    }

    /** Queued assert that propagates to ALL sinks via the sink propagator. */
    private static class AssertAction extends Action {
        private static final long serialVersionUID = -8478488926430845209L;

        public AssertAction(final InternalFactHandle handle,
                            final PropagationContext context) {
            super( handle,
                   context );
        }

        public void execute( final ObjectSinkPropagator sink,
                             final InternalWorkingMemory workingMemory ) {
            sink.propagateAssertObject( this.handle,
                                        this.context,
                                        workingMemory );
            context.evaluateActionQueue( workingMemory );
        }
    }

    /** Queued assert targeted at ONE specific sink (created during modify handling). */
    private static class AssertToSinkAction extends Action {

        private static final long serialVersionUID = -8478488926430845209L;

        private ObjectSink nodeSink;

        public AssertToSinkAction(final InternalFactHandle handle,
                                  final PropagationContext context,
                                  final ObjectSink sink) {
            super( handle,
                   context );
            nodeSink = sink;
        }

        public void execute( final ObjectSinkPropagator sink,
                             final InternalWorkingMemory workingMemory ) {
            nodeSink.assertObject( this.handle,
                                   this.context,
                                   workingMemory );
            context.evaluateActionQueue( workingMemory );
        }

        @Override
        public void readExternal( ObjectInput in ) throws IOException,
                                                          ClassNotFoundException {
            super.readExternal( in );
            nodeSink = (ObjectSink) in.readObject();
        }

        @Override
        public void writeExternal( ObjectOutput out ) throws IOException {
            super.writeExternal( out );
            out.writeObject( nodeSink );
        }
    }

    /** Queued retract: removes all right and left tuples associated with the handle. */
    private static class RetractAction extends Action {
        private static final long serialVersionUID = -84784886430845209L;

        public RetractAction(final InternalFactHandle handle,
                             final PropagationContext context) {
            super( handle,
                   context );
        }

        public void execute( final ObjectSinkPropagator sink,
                             final InternalWorkingMemory workingMemory ) {

            // Walk and retract the handle's right tuples, then clear the chain.
            for ( RightTuple rightTuple = this.handle.getFirstRightTuple(); rightTuple != null; rightTuple = rightTuple.getHandleNext() ) {
                rightTuple.getRightTupleSink().retractRightTuple( rightTuple,
                                                                  context,
                                                                  workingMemory );
            }
            this.handle.clearRightTuples();

            // Same for the left tuples (iterated from the last one backwards).
            for ( LeftTuple leftTuple = this.handle.getLastLeftTuple(); leftTuple != null; leftTuple = leftTuple.getLeftParentNext() ) {
                leftTuple.getLeftTupleSink().retractLeftTuple( leftTuple,
                                                               context,
                                                               workingMemory );
            }
            this.handle.clearLeftTuples();
            context.evaluateActionQueue( workingMemory );
        }
    }

    /** Queued modify targeted at ONE specific right-tuple sink. */
    private static class ModifyToSinkAction extends Action {
        private static final long serialVersionUID = -8478488926430845209L;
        private RightTupleSink nodeSink;
        private RightTuple rightTuple;

        public ModifyToSinkAction(final RightTuple rightTuple,
                                  final PropagationContext context,
                                  final RightTupleSink nodeSink) {
            super( rightTuple.getFactHandle(),
                   context );
            this.nodeSink = nodeSink;
            this.rightTuple = rightTuple;
        }

        public void execute( final ObjectSinkPropagator sink,
                             final InternalWorkingMemory workingMemory ) {
            nodeSink.modifyRightTuple( rightTuple,
                                       context,
                                       workingMemory );
            context.evaluateActionQueue( workingMemory );
        }

        @Override
        public void readExternal( ObjectInput in ) throws IOException,
                                                          ClassNotFoundException {
            super.readExternal( in );
            nodeSink = (RightTupleSink) in.readObject();
            rightTuple = (RightTuple) in.readObject();
        }

        @Override
        public void writeExternal( ObjectOutput out ) throws IOException {
            super.writeExternal( out );
            out.writeObject( nodeSink );
            out.writeObject( rightTuple );
        }
    }

    /**
     * This is the action that is added to the working memory actions queue, so that
     * this node propagation can be triggered at a safe point
     */
    public static class PropagateAction
        implements
        WorkingMemoryAction {

        private static final long serialVersionUID = 6765029029501617115L;

        private PropagationQueuingNode node;

        // No-arg constructor required for serialization.
        public PropagateAction() {
        }

        public PropagateAction(PropagationQueuingNode node) {
            this.node = node;
        }

        // Marshalling constructor: resolves the node from the reader context's sink map.
        public PropagateAction(MarshallerReaderContext context) throws IOException {
            this.node = (PropagationQueuingNode) context.sinks.get( context.readInt() );
        }

        // Protobuf-based marshalling constructor.
        public PropagateAction(MarshallerReaderContext context,
                               ProtobufMessages.ActionQueue.Action _action) {
            this.node = (PropagationQueuingNode) context.sinks.get( _action.getPropagate().getNodeId() );
        }

        public void write( MarshallerWriteContext context ) throws IOException {
            context.writeShort( WorkingMemoryAction.PropagateAction );
            context.write( node.getId() );
        }

        public ProtobufMessages.ActionQueue.Action serialize( MarshallerWriteContext context ) {
            return ProtobufMessages.ActionQueue.Action.newBuilder()
                    .setType( ProtobufMessages.ActionQueue.ActionType.PROPAGATE )
                    .setPropagate( ProtobufMessages.ActionQueue.Propagate.newBuilder()
                            .setNodeId( node.getId() )
                            .build() )
                    .build();
        }

        public void readExternal( ObjectInput in ) throws IOException,
                                                          ClassNotFoundException {
            node = (PropagationQueuingNode) in.readObject();
        }

        public void writeExternal( ObjectOutput out ) throws IOException {
            out.writeObject( node );
        }

        // Flushes the node's queued propagations at a safe point.
        public void execute( InternalWorkingMemory workingMemory ) {
            this.node.propagateActions( workingMemory );
        }

        public void execute( InternalKnowledgeRuntime kruntime ) {
            execute( ((StatefulKnowledgeSessionImpl) kruntime).getInternalWorkingMemory() );
        }
    }
}
| |
package org.hisp.dhis.startup;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.amplecode.quick.StatementManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hisp.dhis.jdbc.StatementBuilder;
import org.hisp.dhis.system.startup.AbstractStartupRoutine;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
/**
* @author Lars Helge Overland
*/
public class InitTableAlteror
extends AbstractStartupRoutine
{
private static final Log log = LogFactory.getLog( InitTableAlteror.class );
@Autowired
private StatementManager statementManager;
@Autowired
private StatementBuilder statementBuilder;
// -------------------------------------------------------------------------
// Execute
// -------------------------------------------------------------------------
/**
 * Runs the schema-alteration startup routine. The statements are ordered: value
 * normalizations run before NOT NULL constraints and column type changes, and column
 * drops only happen after their data has been migrated. Do not reorder.
 */
@Override
@Transactional
public void execute()
{
    // Normalize dataelement.domaintype to enum names before enforcing NOT NULL below.
    executeSql( "update dataelement set domaintype='AGGREGATE' where domaintype='aggregate' or domaintype is null;" );
    executeSql( "update dataelement set domaintype='TRACKER' where domaintype='patient';" );

    // Back-fill boolean user flags that may be null in older schemas.
    executeSql( "update users set invitation = false where invitation is null" );
    executeSql( "update users set selfregistered = false where selfregistered is null" );
    executeSql( "update users set externalauth = false where externalauth is null" );
    executeSql( "update users set disabled = false where disabled is null" );

    executeSql( "alter table dataelement alter column domaintype set not null;" );

    // Widen the status column, then map legacy numeric codes to enum names.
    executeSql( "alter table programstageinstance alter column status type varchar(25);" );
    executeSql( "UPDATE programstageinstance SET status='ACTIVE' WHERE status='0';" );
    executeSql( "UPDATE programstageinstance SET status='COMPLETED' WHERE status='1';" );
    executeSql( "UPDATE programstageinstance SET status='SKIPPED' WHERE status='5';" );

    executeSql( "ALTER TABLE program DROP COLUMN displayonallorgunit" );

    upgradeProgramStageDataElements();
    updateValueTypes();
    updateAggregationTypes();
    updateFeatureTypes();
    updateValidationRuleEnums();
    updateProgramStatus();
    removeDeprecatedConfigurationColumns();
    updateTimestamps();
    updateCompletedBy();

    // Migrate legacy numeric program types to enum names ("type" is quoted: reserved word).
    executeSql( "ALTER TABLE program ALTER COLUMN \"type\" TYPE varchar(255);" );
    executeSql( "update program set \"type\"='WITH_REGISTRATION' where type='1' or type='2'" );
    executeSql( "update program set \"type\"='WITHOUT_REGISTRATION' where type='3'" );

    // Update userkeyjsonvalue and keyjsonvalue to set new encrypted column to false.
    executeSql( "UPDATE keyjsonvalue SET encrypted = false WHERE encrypted IS NULL" );
    executeSql( "UPDATE userkeyjsonvalue SET encrypted = false WHERE encrypted IS NULL" );
}
/**
 * Migrates the legacy "completeduser" column into "completedby" for program (stage)
 * instances and then drops the legacy column.
 */
private void updateCompletedBy()
{
    String[] statements = {
        "update programinstance set completedby=completeduser where completedby is null",
        "update programstageinstance set completedby=completeduser where completedby is null",
        "alter table programinstance drop column completeduser",
        "alter table programstageinstance drop column completeduser"
    };

    for ( String sql : statements )
    {
        executeSql( sql );
    }
}
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
private void removeDeprecatedConfigurationColumns()
{
try
{
executeSql( "ALTER TABLE configuration DROP COLUMN smptpassword" );
executeSql( "ALTER TABLE configuration DROP COLUMN smtppassword" );
executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverurl" );
executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverusername" );
executeSql( "ALTER TABLE configuration DROP COLUMN remotepassword" );
executeSql( "ALTER TABLE configuration DROP COLUMN remoteserverpassword" );
}
catch ( Exception ex )
{
log.debug( ex );
}
}
    /**
     * Folds the legacy "timestamp" column into the newer created/lastupdated
     * columns on audit and tracker value tables, then drops the legacy column.
     * Statement order matters: values are copied before the source column is
     * dropped.
     */
    private void updateTimestamps()
    {
        executeSql( "update datavalueaudit set created=timestamp where created is null" );
        // Rows whose legacy timestamp was itself null fall back to the current time.
        executeSql( "update datavalueaudit set created=now() where created is null" );
        executeSql( "alter table datavalueaudit drop column timestamp" );
        executeSql( "update trackedentitydatavalue set created=timestamp where created is null" );
        executeSql( "update trackedentitydatavalue set lastupdated=timestamp where lastupdated is null" );
        executeSql( "update trackedentityattributevalue set created=now() where created is null" );
        executeSql( "update trackedentityattributevalue set lastupdated=now() where lastupdated is null" );
        executeSql( "alter table trackedentitydatavalue drop column timestamp" );
    }
private void updateProgramStatus()
{
executeSql( "alter table programinstance alter column status type varchar(50)" );
executeSql( "update programinstance set status='ACTIVE' where status='0'" );
executeSql( "update programinstance set status='COMPLETED' where status='1'" );
executeSql( "update programinstance set status='CANCELLED' where status='2'" );
executeSql( "update programinstance set status='ACTIVE' where status is null" );
}
private void updateValidationRuleEnums()
{
executeSql( "alter table validationrule alter column ruletype type varchar(50)" );
executeSql( "alter table validationrule alter column importance type varchar(50)" );
executeSql( "update validationrule set ruletype='VALIDATION' where ruletype='validation'" );
executeSql( "update validationrule set ruletype='SURVEILLANCE' where ruletype='surveillance'" );
executeSql( "update validationrule set ruletype='VALIDATION' where ruletype='' or ruletype is null" );
executeSql( "update validationrule set importance='HIGH' where importance='high'" );
executeSql( "update validationrule set importance='MEDIUM' where importance='medium'" );
executeSql( "update validationrule set importance='LOW' where importance='low'" );
executeSql( "update validationrule set importance='MEDIUM' where importance='' or importance is null" );
}
private void updateFeatureTypes()
{
executeSql( "update organisationunit set featuretype='NONE' where featuretype='None'" );
executeSql( "update organisationunit set featuretype='MULTI_POLYGON' where featuretype='MultiPolygon'" );
executeSql( "update organisationunit set featuretype='POLYGON' where featuretype='Polygon'" );
executeSql( "update organisationunit set featuretype='POINT' where featuretype='Point'" );
executeSql( "update organisationunit set featuretype='SYMBOL' where featuretype='Symbol'" );
executeSql( "update organisationunit set featuretype='NONE' where featuretype is null" );
}
    /**
     * Converts legacy lower-case data element aggregation type values to the
     * upper-case enum names, widening the column first so the longer names
     * fit. Null values default to SUM.
     */
    private void updateAggregationTypes()
    {
        executeSql( "alter table dataelement alter column aggregationtype type varchar(50)" );
        executeSql( "update dataelement set aggregationtype='SUM' where aggregationtype='sum'" );
        executeSql( "update dataelement set aggregationtype='AVERAGE' where aggregationtype='avg'" );
        executeSql( "update dataelement set aggregationtype='AVERAGE_SUM_ORG_UNIT' where aggregationtype='avg_sum_org_unit'" );
        // NOTE(review): legacy 'average' maps to AVERAGE_SUM_ORG_UNIT while 'avg' maps
        // to AVERAGE — presumably intentional for historical semantics; confirm.
        executeSql( "update dataelement set aggregationtype='AVERAGE_SUM_ORG_UNIT' where aggregationtype='average'" );
        executeSql( "update dataelement set aggregationtype='COUNT' where aggregationtype='count'" );
        executeSql( "update dataelement set aggregationtype='STDDEV' where aggregationtype='stddev'" );
        executeSql( "update dataelement set aggregationtype='VARIANCE' where aggregationtype='variance'" );
        executeSql( "update dataelement set aggregationtype='MIN' where aggregationtype='min'" );
        executeSql( "update dataelement set aggregationtype='MAX' where aggregationtype='max'" );
        executeSql( "update dataelement set aggregationtype='NONE' where aggregationtype='none'" );
        executeSql( "update dataelement set aggregationtype='DEFAULT' where aggregationtype='default'" );
        executeSql( "update dataelement set aggregationtype='CUSTOM' where aggregationtype='custom'" );
        executeSql( "update dataelement set aggregationtype='SUM' where aggregationtype is null" );
    }
    /**
     * Collapses the legacy valuetype/numbertype/texttype column triple on data
     * elements into the single valuetype enum column, and converts legacy
     * value type strings on tracked entity attributes, option sets and
     * attributes. The numbertype and texttype qualifier columns are dropped
     * once their information has been folded into valuetype, so statement
     * order must be preserved.
     */
    private void updateValueTypes()
    {
        executeSql( "alter table dataelement alter column valuetype type varchar(50)" );
        // Fold the numbertype qualifier into valuetype, then drop the column.
        executeSql( "update dataelement set valuetype='NUMBER' where valuetype='int' and numbertype='number'" );
        executeSql( "update dataelement set valuetype='INTEGER' where valuetype='int' and numbertype='int'" );
        executeSql( "update dataelement set valuetype='INTEGER_POSITIVE' where valuetype='int' and numbertype='posInt'" );
        executeSql( "update dataelement set valuetype='INTEGER_POSITIVE' where valuetype='int' and numbertype='positiveNumber'" );
        executeSql( "update dataelement set valuetype='INTEGER_NEGATIVE' where valuetype='int' and numbertype='negInt'" );
        executeSql( "update dataelement set valuetype='INTEGER_NEGATIVE' where valuetype='int' and numbertype='negativeNumber'" );
        executeSql( "update dataelement set valuetype='INTEGER_ZERO_OR_POSITIVE' where valuetype='int' and numbertype='zeroPositiveInt'" );
        executeSql( "update dataelement set valuetype='PERCENTAGE' where valuetype='int' and numbertype='percentage'" );
        executeSql( "update dataelement set valuetype='UNIT_INTERVAL' where valuetype='int' and numbertype='unitInterval'" );
        executeSql( "update dataelement set valuetype='NUMBER' where valuetype='int' and numbertype is null" );
        executeSql( "alter table dataelement drop column numbertype" );
        // Fold the texttype qualifier into valuetype, then drop the column.
        executeSql( "update dataelement set valuetype='TEXT' where valuetype='string' and texttype='text'" );
        executeSql( "update dataelement set valuetype='LONG_TEXT' where valuetype='string' and texttype='longText'" );
        executeSql( "update dataelement set valuetype='TEXT' where valuetype='string' and texttype is null" );
        executeSql( "alter table dataelement drop column texttype" );
        // Remaining simple data element value type conversions.
        executeSql( "update dataelement set valuetype='DATE' where valuetype='date'" );
        executeSql( "update dataelement set valuetype='DATETIME' where valuetype='datetime'" );
        executeSql( "update dataelement set valuetype='BOOLEAN' where valuetype='bool'" );
        executeSql( "update dataelement set valuetype='TRUE_ONLY' where valuetype='trueOnly'" );
        executeSql( "update dataelement set valuetype='USERNAME' where valuetype='username'" );
        executeSql( "update dataelement set valuetype='NUMBER' where valuetype is null" );
        // Tracked entity attribute value type conversions.
        executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='string'" );
        executeSql( "update trackedentityattribute set valuetype='PHONE_NUMBER' where valuetype='phoneNumber'" );
        executeSql( "update trackedentityattribute set valuetype='EMAIL' where valuetype='email'" );
        executeSql( "update trackedentityattribute set valuetype='NUMBER' where valuetype='number'" );
        executeSql( "update trackedentityattribute set valuetype='NUMBER' where valuetype='int'" );
        executeSql( "update trackedentityattribute set valuetype='LETTER' where valuetype='letter'" );
        executeSql( "update trackedentityattribute set valuetype='BOOLEAN' where valuetype='bool'" );
        executeSql( "update trackedentityattribute set valuetype='TRUE_ONLY' where valuetype='trueOnly'" );
        executeSql( "update trackedentityattribute set valuetype='DATE' where valuetype='date'" );
        // Option-set backed attributes degrade to plain TEXT.
        executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='optionSet'" );
        executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype='OPTION_SET'" );
        executeSql( "update trackedentityattribute set valuetype='TRACKER_ASSOCIATE' where valuetype='trackerAssociate'" );
        executeSql( "update trackedentityattribute set valuetype='USERNAME' where valuetype='users'" );
        executeSql( "update trackedentityattribute set valuetype='TEXT' where valuetype is null" );
        executeSql( "update optionset set valuetype='TEXT' where valuetype is null" );
        // Metadata attribute value type conversions.
        executeSql( "update attribute set valuetype='TEXT' where valuetype='string'" );
        executeSql( "update attribute set valuetype='LONG_TEXT' where valuetype='text'" );
        executeSql( "update attribute set valuetype='BOOLEAN' where valuetype='bool'" );
        executeSql( "update attribute set valuetype='DATE' where valuetype='date'" );
        executeSql( "update attribute set valuetype='NUMBER' where valuetype='number'" );
        executeSql( "update attribute set valuetype='INTEGER' where valuetype='integer'" );
        executeSql( "update attribute set valuetype='INTEGER_POSITIVE' where valuetype='positive_integer'" );
        executeSql( "update attribute set valuetype='INTEGER_NEGATIVE' where valuetype='negative_integer'" );
        executeSql( "update attribute set valuetype='TEXT' where valuetype='option_set'" );
        executeSql( "update attribute set valuetype='TEXT' where valuetype is null" );
    }
    /**
     * Migrates rows from the legacy programstage_dataelements link table into
     * the programstagedataelement table, then drops the legacy table. The
     * existence check makes the upgrade idempotent: it does nothing once the
     * legacy table is gone.
     */
    private void upgradeProgramStageDataElements()
    {
        if ( tableExists( "programstage_dataelements" ) )
        {
            // Database-specific expression producing the next primary key value.
            String autoIncr = statementBuilder.getAutoIncrementValue();
            String insertSql =
                "insert into programstagedataelement(programstagedataelementid,programstageid,dataelementid,compulsory,allowprovidedelsewhere," +
                "sort_order,displayinreports,programstagesectionid,allowfuturedate,section_sort_order) " + "select " + autoIncr +
                ",programstageid,dataelementid,compulsory,allowprovidedelsewhere,sort_order,displayinreports,programstagesectionid,allowfuturedate,section_sort_order from programstage_dataelements";
            executeSql( insertSql );
            String dropSql = "drop table programstage_dataelements";
            executeSql( dropSql );
            log.info( "Upgraded program stage data elements" );
        }
    }
    /**
     * Executes the given SQL statement against the current statement holder.
     * Failures are swallowed by design — the upgrade steps are best-effort and
     * many are expected to fail on already-migrated databases — so the
     * exception is logged at debug level and -1 is returned instead.
     *
     * @param sql the SQL statement to execute.
     * @return the number of affected rows, or -1 if the statement failed.
     */
    private int executeSql( String sql )
    {
        try
        {
            return statementManager.getHolder().executeUpdate( sql );
        }
        catch ( Exception ex )
        {
            log.debug( ex );
            return -1;
        }
    }
    /**
     * Checks whether the given table exists by probing it with a trivial
     * select. Any exception (typically "relation does not exist") is taken to
     * mean the table is absent.
     *
     * @param table the table name to probe; must be a trusted, internal name
     *        since it is concatenated into the SQL statement.
     * @return true if the table exists, false otherwise.
     */
    private boolean tableExists( String table )
    {
        try
        {
            statementManager.getHolder().queryForInteger( "select 1 from " + table );
            return true;
        }
        catch ( Exception ex )
        {
            return false;
        }
    }
}
| |
package com.touwolf.mailchimp.api.campaign;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.touwolf.mailchimp.MailchimpException;
import com.touwolf.mailchimp.data.MailchimpResponse;
import com.touwolf.mailchimp.impl.MailchimpBuilder;
import com.touwolf.mailchimp.impl.MailchimpUtils;
import com.touwolf.mailchimp.model.campaign.*;
import org.apache.commons.lang.StringUtils;
import org.bridje.ioc.Component;
import java.util.List;
/**
 * Campaigns are how you send emails to your MailChimp list. Use the Campaigns API calls to manage campaigns in your MailChimp account.
 */
@Component
public class Campaign {

    private final Gson GSON = new GsonBuilder().setPrettyPrinting().setDateFormat("yyyy-MM-dd HH:mm:ss").create();

    private MailchimpBuilder builder;

    /**
     * Attaches the request builder used to issue the API calls.
     *
     * @param builder the configured Mailchimp request builder
     * @return this instance, for chaining
     */
    public Campaign builder(MailchimpBuilder builder) {
        this.builder = builder;
        return this;
    }

    /**
     * Create a new campaign
     *
     * @param request Request body parameters
     * @throws MailchimpException
     */
    public MailchimpResponse<CampaignResponse> create(CampaignRequest request) throws MailchimpException {
        String payload = GSON.toJson(request);
        return builder.post("/campaigns", payload, CampaignResponse.class);
    }

    /**
     * Get all campaigns
     *
     * @param request Query string parameters
     * @throws MailchimpException
     */
    public MailchimpResponse<CampaignReadResponse> read(CampaignReadRequest request) throws MailchimpException {
        String url = "/campaigns";
        url = MailchimpUtils.formatQueryString(url, "fields", request.getFields());
        url = MailchimpUtils.formatQueryString(url, "exclude_fields", request.getExcludeFields());
        url = MailchimpUtils.formatQueryString(url, "count", request.getCount());
        url = MailchimpUtils.formatQueryString(url, "offset", request.getOffset());
        url = MailchimpUtils.formatQueryString(url, "type", request.getType());
        url = MailchimpUtils.formatQueryString(url, "status", request.getStatus());
        url = MailchimpUtils.formatQueryString(url, "before_send_time", request.getBeforeSendTime());
        url = MailchimpUtils.formatQueryString(url, "since_send_time", request.getSinceSendTime());
        url = MailchimpUtils.formatQueryString(url, "before_create_time", request.getBeforeCreateTime());
        url = MailchimpUtils.formatQueryString(url, "since_create_time", request.getSinceCreateTime());
        url = MailchimpUtils.formatQueryString(url, "list_id", request.getListId());
        return builder.get(url, CampaignReadResponse.class);
    }

    /**
     * Get information about a specific campaign
     *
     * @param campaignId The unique id for the campaign.
     * @param request Query string parameters
     * @throws MailchimpException
     */
    public MailchimpResponse<CampaignResponse> read(String campaignId, CampaignReadRequest request) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId;
        url = MailchimpUtils.formatQueryString(url, "fields", request.getFields());
        url = MailchimpUtils.formatQueryString(url, "exclude_fields", request.getExcludeFields());
        return builder.get(url, CampaignResponse.class);
    }

    /**
     * Update the settings for a campaign
     *
     * @param campaignId The unique id for the campaign.
     * @param request Request body parameters
     * @throws MailchimpException
     */
    public MailchimpResponse<CampaignResponse> edit(String campaignId, CampaignRequest request) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId;
        String payload = GSON.toJson(request);
        return builder.patch(url, payload, CampaignResponse.class);
    }

    /**
     * Delete a campaign
     *
     * @param campaignId The unique id for the campaign.
     * @throws MailchimpException
     */
    public MailchimpResponse<Void> delete(String campaignId) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId;
        return builder.delete(url, Void.class);
    }

    /**
     * Cancel a campaign
     *
     * @param campaignId The unique id for the campaign.
     * @throws MailchimpException
     */
    public MailchimpResponse<Void> cancelSend(String campaignId) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId + "/actions/cancel-send";
        return builder.post(url, Void.class);
    }

    /**
     * Pause an RSS-Driven campaign
     *
     * @param campaignId The unique id for the campaign.
     * @throws MailchimpException
     */
    public MailchimpResponse<Void> pause(String campaignId) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId + "/actions/pause";
        return builder.post(url, Void.class);
    }

    /**
     * Replicate a campaign
     *
     * @param campaignId The unique id for the campaign.
     * @throws MailchimpException
     */
    public MailchimpResponse<CampaignResponse> replicate(String campaignId) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId + "/actions/replicate";
        return builder.post(url, CampaignResponse.class);
    }

    /**
     * Resume an RSS-Driven campaign
     *
     * @param campaignId The unique id for the campaign.
     * @throws MailchimpException
     */
    public MailchimpResponse<Void> resume(String campaignId) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId + "/actions/resume";
        return builder.post(url, Void.class);
    }

    /**
     * Schedule a campaign
     *
     * @param campaignId The unique id for the campaign.
     * @param request Request body parameters
     * @throws MailchimpException
     */
    public MailchimpResponse<Void> schedule(String campaignId, CampaignScheduleRequest request) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId + "/actions/schedule";
        String payload = GSON.toJson(request);
        return builder.post(url, payload, Void.class);
    }

    /**
     * Send a campaign
     *
     * @param campaignId The unique id for the campaign.
     * @throws MailchimpException
     */
    public MailchimpResponse<Void> send(String campaignId) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId + "/actions/send";
        return builder.post(url, Void.class);
    }

    /**
     * Send a test email
     *
     * @param campaignId The unique id for the campaign.
     * @param testEmails An array of email addresses to send the test email to.
     * @param sendType Choose the type of test email to send. Possible Values: html, plain_text
     * @throws MailchimpException
     */
    public MailchimpResponse<Void> test(String campaignId, List<String> testEmails, CampaignSendTypeEnum sendType) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId + "/actions/test";
        // Fix: the body must be valid JSON. The previous implementation wrapped the
        // serialized email array in quotes and left the keys unquoted, producing e.g.
        // {test_emails: "["a@b.com"]", ...}, which is not parseable JSON.
        String payload = "{\"test_emails\": " + GSON.toJson(testEmails)
            + ", \"send_type\": \"" + sendType.name() + "\"}";
        return builder.post(url, payload, Void.class);
    }

    /**
     * Unschedule a campaign
     *
     * @param campaignId The unique id for the campaign.
     * @throws MailchimpException
     */
    public MailchimpResponse<Void> unschedule(String campaignId) throws MailchimpException {
        requireCampaignId(campaignId);
        String url = "/campaigns/" + campaignId + "/actions/unschedule";
        return builder.post(url, Void.class);
    }

    /**
     * Validates that a campaign id was supplied; every per-campaign endpoint
     * requires one.
     *
     * @param campaignId the id to validate
     * @throws MailchimpException if the id is null, empty or whitespace-only
     */
    private void requireCampaignId(String campaignId) throws MailchimpException {
        if (StringUtils.isBlank(campaignId)) {
            throw new MailchimpException("The field campaign_id is required");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.font;
import static org.apache.pdfbox.pdmodel.font.UniUtil.getUniNameOfCodePoint;
import java.awt.geom.GeneralPath;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.fontbox.FontBoxFont;
import org.apache.fontbox.afm.AFMParser;
import org.apache.fontbox.afm.FontMetrics;
import org.apache.pdfbox.pdmodel.font.encoding.GlyphList;
import org.apache.pdfbox.pdmodel.font.encoding.SymbolEncoding;
/**
 * The "Standard 14" PDF fonts, also known as the "base 14" fonts.
 * There are 14 font files, but Acrobat uses additional names for compatibility, e.g. Arial.
 *
 * @author John Hewson
 */
public final class Standard14Fonts
{
    /**
     * Contains all base names and alias names for the known fonts.
     * For base fonts both the key and the value will be the base name.
     * For aliases, the key is an alias, and the value is a FontName.
     * We want a single lookup in the map to find the font both by a base name or an alias.
     */
    private static final Map<String, FontName> ALIASES = new HashMap<>(38);

    /**
     * Contains the font metrics for the standard 14 fonts.
     * The key is the font name, value is a FontMetrics instance.
     * Metrics are loaded into this map on demand, only if needed.
     * EnumMap is not thread-safe; all access is guarded by synchronizing on this map.
     *
     * @see #getAFM
     */
    private static final Map<FontName, FontMetrics> FONTS = new EnumMap<>(FontName.class);

    /**
     * Contains the mapped fonts for the standard 14 fonts.
     * The key is the font name, value is a FontBoxFont instance.
     * FontBoxFont are loaded into this map on demand, only if needed.
     * EnumMap is not thread-safe; all access is guarded by synchronizing on this map.
     */
    private static final Map<FontName, FontBoxFont> GENERIC_FONTS = new EnumMap<>(FontName.class);

    static
    {
        // the 14 standard fonts
        mapName(FontName.COURIER);
        mapName(FontName.COURIER_BOLD);
        mapName(FontName.COURIER_BOLD_OBLIQUE);
        mapName(FontName.COURIER_OBLIQUE);
        mapName(FontName.HELVETICA);
        mapName(FontName.HELVETICA_BOLD);
        mapName(FontName.HELVETICA_BOLD_OBLIQUE);
        mapName(FontName.HELVETICA_OBLIQUE);
        mapName(FontName.TIMES_ROMAN);
        mapName(FontName.TIMES_BOLD);
        mapName(FontName.TIMES_BOLD_ITALIC);
        mapName(FontName.TIMES_ITALIC);
        mapName(FontName.SYMBOL);
        mapName(FontName.ZAPF_DINGBATS);

        // alternative names from Adobe Supplement to the ISO 32000
        mapName("CourierCourierNew", FontName.COURIER);
        mapName("CourierNew", FontName.COURIER);
        mapName("CourierNew,Italic", FontName.COURIER_OBLIQUE);
        mapName("CourierNew,Bold", FontName.COURIER_BOLD);
        mapName("CourierNew,BoldItalic", FontName.COURIER_BOLD_OBLIQUE);
        mapName("Arial", FontName.HELVETICA);
        mapName("Arial,Italic", FontName.HELVETICA_OBLIQUE);
        mapName("Arial,Bold", FontName.HELVETICA_BOLD);
        mapName("Arial,BoldItalic", FontName.HELVETICA_BOLD_OBLIQUE);
        mapName("TimesNewRoman", FontName.TIMES_ROMAN);
        mapName("TimesNewRoman,Italic", FontName.TIMES_ITALIC);
        mapName("TimesNewRoman,Bold", FontName.TIMES_BOLD);
        mapName("TimesNewRoman,BoldItalic", FontName.TIMES_BOLD_ITALIC);

        // Acrobat treats these fonts as "standard 14" too (at least Acrobat preflight says so)
        mapName("Symbol,Italic", FontName.SYMBOL);
        mapName("Symbol,Bold", FontName.SYMBOL);
        mapName("Symbol,BoldItalic", FontName.SYMBOL);
        mapName("Times", FontName.TIMES_ROMAN);
        mapName("Times,Italic", FontName.TIMES_ITALIC);
        mapName("Times,Bold", FontName.TIMES_BOLD);
        mapName("Times,BoldItalic", FontName.TIMES_BOLD_ITALIC);

        // PDFBOX-3457: PDF.js file bug864847.pdf
        mapName("ArialMT", FontName.HELVETICA);
        mapName("Arial-ItalicMT", FontName.HELVETICA_OBLIQUE);
        mapName("Arial-BoldMT", FontName.HELVETICA_BOLD);
        mapName("Arial-BoldItalicMT", FontName.HELVETICA_BOLD_OBLIQUE);
    }

    private Standard14Fonts()
    {
    }

    /**
     * Loads the metrics for the base font specified by name. Metric file must exist in the pdfbox jar under
     * /org/apache/pdfbox/resources/afm/
     *
     * @param fontName one of the standard 14 font names for which to load the metrics.
     * @throws IOException if no metrics exist for that font.
     */
    private static void loadMetrics(FontName fontName) throws IOException
    {
        String resourceName = "/org/apache/pdfbox/resources/afm/" + fontName.getName() + ".afm";
        InputStream resourceAsStream = PDType1Font.class.getResourceAsStream(resourceName);
        if (resourceAsStream == null)
        {
            throw new IOException("resource '" + resourceName + "' not found");
        }
        try (InputStream afmStream = new BufferedInputStream(resourceAsStream))
        {
            AFMParser parser = new AFMParser(afmStream);
            FontMetrics metric = parser.parse(true);
            FONTS.put(fontName, metric);
        }
    }

    /**
     * Adds a standard font name to the map of known aliases, to simplify the logic of finding
     * font metrics by name. We want a single lookup in the map to find the font both by a base name or
     * an alias.
     *
     * @see #getAFM
     * @param baseName the font name of the Standard 14 font
     */
    private static void mapName(FontName baseName)
    {
        ALIASES.put(baseName.getName(), baseName);
    }

    /**
     * Adds an alias name for a standard font to the map of known aliases (alias as key, standard
     * name as value). We want a single lookup in the map to find the font both by a base name or an alias.
     *
     * @param alias an alias for the font
     * @param baseName the font name of the Standard 14 font
     */
    private static void mapName(String alias, FontName baseName)
    {
        ALIASES.put(alias, baseName);
    }

    /**
     * Returns the metrics for font specified by fontName. Loads the font metrics if not already
     * loaded.
     *
     * @param fontName name of font; either a base name or alias
     * @return the font metrics or null if the name is not one of the known names
     * @throws IllegalArgumentException if no metrics exist for that font.
     */
    public static FontMetrics getAFM(String fontName)
    {
        FontName baseName = ALIASES.get(fontName);
        if (baseName == null)
        {
            return null;
        }
        // The read must also happen under the lock: FONTS is a plain EnumMap, which is
        // not safe to read while another thread is writing to it in loadMetrics(). The
        // previous unsynchronized first check was a data race.
        synchronized (FONTS)
        {
            FontMetrics metrics = FONTS.get(baseName);
            if (metrics == null)
            {
                try
                {
                    loadMetrics(baseName);
                }
                catch (IOException e)
                {
                    throw new IllegalArgumentException(e);
                }
                metrics = FONTS.get(baseName);
            }
            return metrics;
        }
    }

    /**
     * Returns true if the given font name is one of the known names, including alias.
     *
     * @param fontName the name of font, either a base name or alias
     * @return true if the name is one of the known names
     */
    public static boolean containsName(String fontName)
    {
        return ALIASES.containsKey(fontName);
    }

    /**
     * Returns the set of known font names, including aliases.
     */
    public static Set<String> getNames()
    {
        return Collections.unmodifiableSet(ALIASES.keySet());
    }

    /**
     * Returns the base name of the font which the given font name maps to.
     *
     * @param fontName name of font, either a base name or an alias
     * @return the base name or null if this is not one of the known names
     */
    public static FontName getMappedFontName(String fontName)
    {
        return ALIASES.get(fontName);
    }

    /**
     * Returns the mapped font for the specified Standard 14 font. The mapped font is cached.
     *
     * @param baseName name of the standard 14 font
     * @return the mapped font
     */
    private static FontBoxFont getMappedFont(FontName baseName)
    {
        // Both check and read happen under the lock; GENERIC_FONTS is a plain EnumMap
        // and is not safe for unsynchronized concurrent access.
        synchronized (GENERIC_FONTS)
        {
            if (!GENERIC_FONTS.containsKey(baseName))
            {
                PDType1Font type1Font = new PDType1Font(baseName);
                GENERIC_FONTS.put(baseName, type1Font.getFontBoxFont());
            }
            return GENERIC_FONTS.get(baseName);
        }
    }

    /**
     * Returns the path for the character with the given name for the specified Standard 14 font. The mapped font is
     * cached. The path may differ in different environments as it depends on the mapped font.
     *
     * @param baseName name of the standard 14 font
     * @param glyphName name of glyph
     * @return the glyph path, or an empty path if the glyph is unknown
     */
    public static GeneralPath getGlyphPath(FontName baseName, String glyphName) throws IOException
    {
        // copied and adapted from PDType1Font.getNameInFont(String)
        if (!glyphName.equals(".notdef"))
        {
            FontBoxFont mappedFont = getMappedFont(baseName);
            if (mappedFont != null)
            {
                if (mappedFont.hasGlyph(glyphName))
                {
                    return mappedFont.getPath(glyphName);
                }
                // fall back to the glyph's Unicode name, e.g. "uni0041"
                String unicodes = getGlyphList(baseName).toUnicode(glyphName);
                if (unicodes != null && unicodes.length() == 1)
                {
                    String uniName = getUniNameOfCodePoint(unicodes.codePointAt(0));
                    if (mappedFont.hasGlyph(uniName))
                    {
                        return mappedFont.getPath(uniName);
                    }
                }
                if ("SymbolMT".equals(mappedFont.getName()))
                {
                    // SymbolMT maps the Symbol charset into the F000-F0FF private use area
                    Integer code = SymbolEncoding.INSTANCE.getNameToCodeMap().get(glyphName);
                    if (code != null)
                    {
                        String uniName = getUniNameOfCodePoint(code + 0xF000);
                        if (mappedFont.hasGlyph(uniName))
                        {
                            return mappedFont.getPath(uniName);
                        }
                    }
                }
            }
        }
        return new GeneralPath();
    }

    private static GlyphList getGlyphList(FontName baseName)
    {
        return FontName.ZAPF_DINGBATS == baseName ? GlyphList.getZapfDingbats()
                : GlyphList.getAdobeGlyphList();
    }

    /**
     * Enum for the names of the 14 standard fonts.
     */
    public enum FontName
    {
        TIMES_ROMAN("Times-Roman"), //
        TIMES_BOLD("Times-Bold"), //
        TIMES_ITALIC("Times-Italic"), //
        TIMES_BOLD_ITALIC("Times-BoldItalic"), //
        HELVETICA("Helvetica"), //
        HELVETICA_BOLD("Helvetica-Bold"), //
        HELVETICA_OBLIQUE("Helvetica-Oblique"), //
        HELVETICA_BOLD_OBLIQUE("Helvetica-BoldOblique"), //
        COURIER("Courier"), //
        COURIER_BOLD("Courier-Bold"), //
        COURIER_OBLIQUE("Courier-Oblique"), //
        COURIER_BOLD_OBLIQUE("Courier-BoldOblique"), //
        SYMBOL("Symbol"), //
        ZAPF_DINGBATS("ZapfDingbats");

        private final String name;

        private FontName(String name)
        {
            this.name = name;
        }

        public String getName()
        {
            return name;
        }

        @Override
        public String toString()
        {
            return name;
        }
    }
}
| |
/*
* Copyright 2011-2021 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.index.query;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Lists.newArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import com.b2international.index.ScriptExpression;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
/**
* @since 4.7
*/
public abstract class SortBy {
    /**
     * Sort direction: ascending or descending.
     */
    public static enum Order {
        ASC,
        DESC;
    }
    /**
     * Special field name for sorting based on the document score (relevance).
     */
    public static final String FIELD_SCORE = "_score";
    /**
     * Special field name for sorting by the default sort field.
     */
    public static final String FIELD_DEFAULT = "_default";
    /**
     * Singleton representing document sort based on their default sort field (usually the ID, but in case of scroll we can use _doc to speed things up) in ascending order.
     * Immutable, so safe to share.
     */
    public static final SortByField DEFAULT = SortBy.field(FIELD_DEFAULT, Order.ASC);
    /**
     * Singleton representing document sort based on their score in decreasing order (higher score first).
     * Immutable, so safe to share.
     */
    public static final SortBy SCORE = SortBy.field(FIELD_SCORE, Order.DESC);
/**
* @since 5.0
*/
public static final class SortByField extends SortBy {
private final String field;
private final Order order;
private SortByField(String field, Order order) {
this.field = checkNotNull(field, "field");
this.order = checkNotNull(order, "order");
}
public String getField() {
return field;
}
public Order getOrder() {
return order;
}
@Override
public int hashCode() {
return Objects.hash(field, order);
}
@Override
public boolean equals(Object obj) {
if (this == obj) { return true; }
if (obj == null) { return false; }
if (getClass() != obj.getClass()) { return false; }
SortByField other = (SortByField) obj;
if (!Objects.equals(field, other.field)) { return false; }
if (order != other.order) { return false; }
return true;
}
@Override
public String toString() {
return field + " " + order;
}
}
/**
 * Sort specification that orders matches by the value produced by a script,
 * compared either as a string or as a number (see {@link #getSortType()}).
 *
 * @since 6.3
 */
public static final class SortByScript extends SortBy implements ScriptExpression {
    private final Order order;
    private final String name;
    private final Map<String, Object> params;
    private final ScriptSortType sortType;

    private SortByScript(String name, Map<String, Object> params, Order order, ScriptSortType sortType) {
        this.name = name;
        this.params = params;
        this.order = order;
        this.sortType = sortType;
    }

    /** @return the direction (ascending or descending) of the sort */
    public Order getOrder() {
        return order;
    }

    @Override
    public String getScript() {
        return name;
    }

    @Override
    public Map<String, Object> getParams() {
        return params;
    }

    /** @return whether script values are compared as strings or as numbers */
    public ScriptSortType getSortType() {
        return sortType;
    }

    @Override
    public int hashCode() {
        // BUGFIX: sortType now participates, keeping hashCode() consistent with equals().
        return Objects.hash(name, params, order, sortType);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) { return true; }
        if (obj == null) { return false; }
        if (getClass() != obj.getClass()) { return false; }
        SortByScript other = (SortByScript) obj;
        // BUGFIX: previously two instances differing only in sortType (STRING vs. NUMBER)
        // compared equal; sortType is now part of the equality contract.
        return Objects.equals(name, other.name)
                && Objects.equals(params, other.params)
                && Objects.equals(order, other.order)
                && sortType == other.sortType;
    }

    @Override
    public String toString() {
        return name + " " + params + " " + order;
    }
}
/**
 * Compound sort specification: its items are applied in order, the first
 * item being the most significant criterion.
 *
 * @since 5.0
 */
public static final class MultiSortBy extends SortBy {
    private final List<SortBy> items;

    private MultiSortBy(List<SortBy> items) {
        this.items = ImmutableList.copyOf(checkNotNull(items, "items"));
    }

    /** @return the individual sort criteria, most significant first (immutable) */
    public List<SortBy> getItems() {
        return items;
    }

    @Override
    public int hashCode() {
        return 31 + items.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return items.equals(((MultiSortBy) obj).items);
    }

    @Override
    public String toString() {
        return Joiner.on(", ").join(items);
    }
}
/** Fluent builder that accumulates field and script criteria into a single {@link SortBy}. */
public static final class Builder {
    private final List<SortBy> sorts = newArrayList();

    /** Appends a field-based sort criterion. */
    public Builder sortByField(String field, Order order) {
        sorts.add(field(field, order));
        return this;
    }

    /** Appends a script-based sort criterion comparing script values as strings. */
    public Builder sortByScript(String script, Map<String, Object> arguments, Order order) {
        sorts.add(script(script, arguments, order));
        return this;
    }

    /** Appends a script-based sort criterion comparing script values numerically. */
    public Builder sortByScriptNumeric(String script, Map<String, Object> arguments, Order order) {
        sorts.add(scriptNumeric(script, arguments, order));
        return this;
    }

    /**
     * @return the default sort when no criterion was added, the single criterion when
     *         exactly one was added, and a {@link MultiSortBy} combining all of them otherwise
     */
    public SortBy build() {
        switch (sorts.size()) {
            case 0:
                return DEFAULT;
            case 1:
                return Iterables.getOnlyElement(sorts);
            default:
                return new MultiSortBy(sorts);
        }
    }
}
/**
 * Creates and returns a new {@link SortBy} instance that sorts matches by the given field in the given order.
 * @param field - the field to use for sort
 * @param order - the order to use when sorting matches
 * @return a new field-based sort specification
 */
public static SortByField field(String field, Order order) {
return new SortByField(field, order);
}
/**
 * Creates and returns a new {@link SortBy} instance that sorts matches by the value
 * computed by the given script, comparing the values as strings.
 * @param script - the name of the script to evaluate for each match
 * @param arguments - the arguments passed to the script
 * @param order - the order to use when sorting matches
 * @return a new script-based sort specification comparing script values as strings
 */
public static SortBy script(String script, Map<String, Object> arguments, Order order) {
return new SortByScript(script, arguments, order, ScriptSortType.STRING);
}
/**
 * Creates and returns a new {@link SortBy} instance that sorts matches by the value
 * computed by the given script, comparing the values numerically.
 * @param script - the name of the script to evaluate for each match
 * @param arguments - the arguments passed to the script
 * @param order - the order to use when sorting matches
 * @return a new script-based sort specification comparing script values numerically
 */
public static SortBy scriptNumeric(String script, Map<String, Object> arguments, Order order) {
return new SortByScript(script, arguments, order, ScriptSortType.NUMBER);
}
/** @return a new, empty {@link Builder} for assembling a (possibly compound) sort specification */
public static Builder builder() {
return new Builder();
}
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.gwt.shared;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.unitime.timetable.gwt.resources.StudentSectioningMessages;
import org.unitime.timetable.gwt.shared.ClassAssignmentInterface.CourseAssignment;
import org.unitime.timetable.gwt.shared.CourseRequestInterface.RequestedCourse;
import com.google.gwt.user.client.rpc.IsSerializable;
/**
 * Client-side representation of a student's degree plan: a tree of course groups
 * (unions and choices), individual courses, and free-form place holders.
 *
 * @author Tomas Muller
 */
public class DegreePlanInterface implements IsSerializable, Serializable {
	private static final long serialVersionUID = 1L;
	private Long iStudentId, iSessionId;
	private String iId, iName, iDegree, iSchool, iTrack, iModifiedWho;
	private Date iModified;
	private DegreeGroupInterface iGroup;
	private boolean iLocked = false, iActive = false;

	public DegreePlanInterface() {
	}

	public Long getStudentId() { return iStudentId; }
	public void setStudentId(Long studentId) { iStudentId = studentId; }
	public Long getSessionId() { return iSessionId; }
	public void setSessionId(Long sessionId) { iSessionId = sessionId; }
	public String getId() { return iId; }
	public void setId(String id) { iId = id; }
	public String getName() { return iName; }
	public void setName(String name) { iName = name; }
	public String getDegree() { return iDegree; }
	public void setDegree(String degree) { iDegree = degree; }
	public String getSchool() { return iSchool; }
	public void setSchool(String school) { iSchool = school; }
	public String getTrackingStatus() { return iTrack; }
	public void setTrackingStatus(String track) { iTrack = track; }
	public Date getLastModified() { return iModified; }
	public void setLastModified(Date modified) { iModified = modified; }
	public String getModifiedWho() { return iModifiedWho; }
	public void setModifiedWho(String name) { iModifiedWho = name; }
	public DegreeGroupInterface getGroup() { return iGroup; }
	public void setGroup(DegreeGroupInterface group) { iGroup = group; }
	public boolean isActive() { return iActive; }
	public void setActive(boolean active) { iActive = active; }
	public boolean isLocked() { return iLocked; }
	public void setLocked(boolean locked) { iLocked = locked; }

	/** Lists courses selected in this plan; optionally picks the first course of a choice with no selection. */
	public List<DegreeCourseInterface> listSelected(boolean pickFirstWhenNoneSelected) {
		List<DegreeCourseInterface> ret = new ArrayList<DegreeCourseInterface>();
		if (iGroup != null) iGroup.listSelected(ret, pickFirstWhenNoneSelected);
		return ret;
	}

	/** Lists alternatives of the given course, i.e., other courses of a choice group that contains it. */
	public List<DegreeCourseInterface> listAlternatives(DegreeCourseInterface course) {
		List<DegreeCourseInterface> ret = new ArrayList<DegreeCourseInterface>();
		if (iGroup != null) iGroup.listAlternatives(ret, course);
		return ret;
	}

	/** True when the plan contains a course of the given name (case-insensitive). */
	public boolean hasCourse(String course) {
		if (iGroup != null) return iGroup.hasCourse(course);
		return false;
	}

	/** True when a course of the given name is selected in this plan. */
	public boolean isCourseSelected(String course) {
		if (iGroup != null) return iGroup.isCourseSelected(course);
		return false;
	}

	/** True when the plan contains the given requested course. */
	public boolean hasCourse(RequestedCourse course) {
		if (iGroup != null) return iGroup.hasCourse(course);
		return false;
	}

	/** True when the given requested course is selected in this plan. */
	public boolean isCourseSelected(RequestedCourse course) {
		if (iGroup != null) return iGroup.isCourseSelected(course);
		return false;
	}

	@Override
	public String toString() { return iName + ": " + iGroup; }

	/** Common base class of all degree plan items, carrying the item id. */
	public static abstract class DegreeItemInterface implements IsSerializable, Serializable {
		private static final long serialVersionUID = 1L;
		private String iId = null;

		public String getId() { return iId; }
		public void setId(String id) { iId = id; }
	}

	/** A group of courses, sub-groups, and place holders; either a union (all apply) or a choice (pick one). */
	public static class DegreeGroupInterface extends DegreeItemInterface {
		private static final long serialVersionUID = 1L;
		private boolean iChoice = false;
		List<DegreeCourseInterface> iCourses = null;
		List<DegreeGroupInterface> iGroups = null;
		List<DegreePlaceHolderInterface> iPlaceHolders = null;
		private Boolean iSelected = null;
		private String iDescription = null;

		public DegreeGroupInterface() {}

		public boolean isChoice() { return iChoice; }
		public void setChoice(boolean choice) { iChoice = choice; }
		public boolean hasCourses() { return iCourses != null && !iCourses.isEmpty(); }

		/** True when any course of this group has more than one matching course offering. */
		public boolean hasMultipleCourses() {
			if (iCourses == null) return false;
			for (DegreeCourseInterface course: iCourses)
				if (course.hasMultipleCourses()) return true;
			return false;
		}

		public List<DegreeCourseInterface> getCourses() { return iCourses; }
		public void addCourse(DegreeCourseInterface course) {
			if (iCourses == null) iCourses = new ArrayList<DegreeCourseInterface>();
			iCourses.add(course);
		}
		public boolean hasGroups() { return iGroups != null && !iGroups.isEmpty(); }
		public List<DegreeGroupInterface> getGroups() { return iGroups; }
		public void addGroup(DegreeGroupInterface group) {
			if (iGroups == null) iGroups = new ArrayList<DegreeGroupInterface>();
			iGroups.add(group);
		}

		/** Merges the given group into this one; sub-groups of the same kind (choice vs. union) are merged recursively. */
		public void merge(DegreeGroupInterface group) {
			if (group.hasCourses())
				for (DegreeCourseInterface course: group.getCourses())
					addCourse(course);
			if (group.hasPlaceHolders())
				for (DegreePlaceHolderInterface ph: group.getPlaceHolders())
					addPlaceHolder(ph);
			if (group.hasGroups())
				for (DegreeGroupInterface g: group.getGroups()) {
					if (isChoice() == g.isChoice())
						merge(g);
					else
						addGroup(g);
				}
		}

		public boolean hasPlaceHolders() { return iPlaceHolders != null && !iPlaceHolders.isEmpty(); }
		public List<DegreePlaceHolderInterface> getPlaceHolders() { return iPlaceHolders; }
		public void addPlaceHolder(DegreePlaceHolderInterface placeHolder) {
			if (iPlaceHolders == null) iPlaceHolders = new ArrayList<DegreePlaceHolderInterface>();
			iPlaceHolders.add(placeHolder);
		}
		public boolean hasSelected() { return iSelected != null; }
		// No explicit selection defaults to selected.
		public boolean isSelected() { return iSelected == null || iSelected.booleanValue(); }
		public void setSelected(boolean selected) { iSelected = selected; }
		public boolean hasDescription() { return iDescription != null && !iDescription.isEmpty(); }
		public String getDescription() { return hasDescription() ? iDescription : toString(); }
		public void setDescription(String description) { iDescription = description; }

		/** Number of direct children (place holders + sub-groups + courses). */
		public int countItems() {
			return (hasPlaceHolders() ? getPlaceHolders().size() : 0) + (hasGroups() ? getGroups().size() : 0) + (hasCourses() ? getCourses().size() : 0);
		}

		/** True when this is a union group with exactly one child item. */
		public boolean isUnionGroupWithOneChoice() {
			if (isChoice()) return false;
			int nrChoices = (hasPlaceHolders() ? getPlaceHolders().size() : 0) +
					(hasGroups() ? getGroups().size() : 0) +
					(hasCourses() ? getCourses().size() : 0);
			return nrChoices == 1;
		}

		/** Maximal depth of the group tree rooted at this group. */
		public int getMaxDepth() {
			if (iGroups == null || iGroups.isEmpty()) return (!isChoice() && hasMultipleCourses() ? 2 : 1);
			int ret = 0;
			for (DegreeGroupInterface g: iGroups)
				if (ret < g.getMaxDepth())
					ret = g.getMaxDepth();
			return 1 + ret;
		}

		@Override
		public String toString() {
			String ret = "";
			if (iCourses != null)
				for (DegreeCourseInterface course: iCourses)
					ret += (ret.isEmpty() ? "" : iChoice ? " or " : " and ") + course;
			if (iGroups != null)
				for (DegreeGroupInterface group: iGroups)
					ret += (ret.isEmpty() ? "" : iChoice ? " or " : " and ") + "(" + group + ")";
			if (iPlaceHolders != null)
				for (DegreePlaceHolderInterface ph: iPlaceHolders)
					ret += (ret.isEmpty() ? "" : iChoice ? " or " : " and ") + ph;
			return ret;
		}

		/** Localized variant of {@link #toString()} using the provided messages for separators. */
		public String toString(StudentSectioningMessages MESSAGES) {
			List<String> items = new ArrayList<String>();
			if (iCourses != null)
				for (DegreeCourseInterface course: iCourses)
					items.add(MESSAGES.course(course.getSubject(), course.getCourse()));
			if (iGroups != null)
				for (DegreeGroupInterface group: iGroups)
					items.add(MESSAGES.surroundWithBrackets(group.toString(MESSAGES)));
			if (iPlaceHolders != null)
				for (DegreePlaceHolderInterface ph: iPlaceHolders)
					items.add(ph.getType());
			switch (items.size()) {
			case 0:
				return "";
			case 1:
				return items.get(0);
			case 2:
				return (isChoice() ? MESSAGES.choiceSeparatorPair(items.get(0), items.get(1)) : MESSAGES.courseSeparatorPair(items.get(0), items.get(1)));
			default:
				String ret = null;
				for (Iterator<String> i = items.iterator(); i.hasNext(); ) {
					String item = i.next();
					if (ret == null)
						ret = item;
					else {
						if (i.hasNext()) {
							ret = (isChoice() ? MESSAGES.choiceSeparatorMiddle(ret, item) : MESSAGES.courseSeparatorMiddle(ret, item));
						} else {
							ret = (isChoice() ? MESSAGES.choiceSeparatorLast(ret, item) : MESSAGES.courseSeparatorLast(ret, item));
						}
					}
				}
				return ret;
			}
		}

		/** Collects selected courses into the given list, recursing into selected sub-groups. */
		protected void listSelected(List<DegreeCourseInterface> requested, boolean pickFirstWhenNoneSelected) {
			boolean hasSelection = false;
			if (hasCourses())
				for (DegreeCourseInterface course: getCourses())
					if (!isChoice() || course.isSelected()) {
						requested.add(course); hasSelection = true;
					}
			if (hasGroups())
				for (DegreeGroupInterface g: getGroups())
					if (!isChoice() || g.isSelected()) {
						g.listSelected(requested, pickFirstWhenNoneSelected); hasSelection = true;
					}
			if (isChoice() && !hasSelection && pickFirstWhenNoneSelected) {
				if (hasCourses())
					requested.add(getCourses().get(0));
				else if (hasGroups())
					getGroups().get(0).listSelected(requested, pickFirstWhenNoneSelected);
			}
		}

		/** True when this group (or a sub-group) contains a course with the same id. */
		public boolean hasCourse(DegreeCourseInterface course) {
			if (hasCourses())
				for (DegreeCourseInterface c: getCourses())
					if (c.getId().equals(course.getId())) return true;
			if (hasGroups())
				for (DegreeGroupInterface g: getGroups())
					if (g.hasCourse(course)) return true;
			return false;
		}

		/** Collects the sibling courses of the given course from the choice group(s) that contain it. */
		protected void listAlternatives(List<DegreeCourseInterface> alternatives, DegreeCourseInterface course) {
			if (isChoice() && hasCourse(course)) {
				if (hasCourses())
					for (DegreeCourseInterface c: getCourses())
						if (!c.getId().equals(course.getId())) alternatives.add(c);
				if (hasGroups())
					for (DegreeGroupInterface g: getGroups())
						if (g.hasCourses())
							for (DegreeCourseInterface c: g.getCourses())
								if (!c.getId().equals(course.getId())) alternatives.add(c);
			} else if (hasGroups()) {
				for (DegreeGroupInterface g: getGroups())
					g.listAlternatives(alternatives, course);
			}
		}

		/** True when at least one child course or sub-group counts as selected. */
		public boolean hasSelection() {
			if (hasCourses())
				for (DegreeCourseInterface course: getCourses())
					if (!isChoice() || course.isSelected()) return true;
			if (hasGroups())
				for (DegreeGroupInterface g: getGroups())
					if (!isChoice() || g.isSelected()) return true;
			return false;
		}

		/** True when this group (or a sub-group) contains a course of the given name (case-insensitive). */
		protected boolean hasCourse(String name) {
			if (iCourses != null)
				for (DegreeCourseInterface course: iCourses) {
					if (course.hasCourses()) {
						for (CourseAssignment ca: course.getCourses())
							if (name.equalsIgnoreCase(ca.getCourseName()) || name.equalsIgnoreCase(ca.getCourseNameWithTitle())) return true;
					}
					// BUGFIX: course name check was case-sensitive (equals) while every other
					// name comparison here uses equalsIgnoreCase -- now consistent.
					if (name.equalsIgnoreCase(course.getCourseName()) || name.equalsIgnoreCase(course.getCourseNameWithTitle())) return true;
				}
			if (iGroups != null)
				for (DegreeGroupInterface g: iGroups)
					if (g.hasCourse(name)) return true;
			return false;
		}

		/** True when a course of the given name is selected within this group. */
		protected boolean isCourseSelected(String name) {
			if (iCourses != null)
				for (DegreeCourseInterface course: iCourses) {
					if (isChoice() && !course.isSelected()) continue;
					if (course.hasCourses() && course.getCourseId() != null) {
						for (CourseAssignment ca: course.getCourses())
							if (course.getCourseId().equals(ca.getCourseId()) && (name.equalsIgnoreCase(ca.getCourseName()) || name.equalsIgnoreCase(ca.getCourseNameWithTitle()))) return true;
					} else {
						// BUGFIX: made case-insensitive, consistent with the CourseAssignment checks above.
						if (name.equalsIgnoreCase(course.getCourseName()) || name.equalsIgnoreCase(course.getCourseNameWithTitle())) return true;
					}
				}
			if (iGroups != null)
				for (DegreeGroupInterface g: iGroups) {
					if ((!isChoice() || g.isSelected()) && g.hasCourse(name)) return true;
				}
			return false;
		}

		/** True when this group (or a sub-group) contains the given requested course. */
		protected boolean hasCourse(RequestedCourse rc) {
			if (iCourses != null)
				for (DegreeCourseInterface course: iCourses) {
					if (course.hasCourses()) {
						for (CourseAssignment ca: course.getCourses())
							if (rc.equals(ca)) return true;
					}
					if (rc.equals(course)) return true;
				}
			if (iGroups != null)
				for (DegreeGroupInterface g: iGroups)
					if (g.hasCourse(rc)) return true;
			return false;
		}

		/** True when the given requested course is selected within this group. */
		protected boolean isCourseSelected(RequestedCourse rc) {
			if (iCourses != null)
				for (DegreeCourseInterface course: iCourses) {
					if (isChoice() && !course.isSelected()) continue;
					if (course.hasCourses() && course.getCourseId() != null) {
						for (CourseAssignment ca: course.getCourses())
							if (course.getCourseId().equals(ca.getCourseId()) && rc.equals(ca)) return true;
					} else {
						if (rc.equals(course)) return true;
					}
				}
			if (iGroups != null)
				for (DegreeGroupInterface g: iGroups) {
					if ((!isChoice() || g.isSelected()) && g.hasCourse(rc)) return true;
				}
			return false;
		}
	}

	/** A course of a degree plan, possibly matched to one or more course offerings. */
	public static class DegreeCourseInterface extends DegreeItemInterface {
		private static final long serialVersionUID = 1L;
		private Long iCourseId = null;
		private String iSubject, iCourse, iTitle;
		private Boolean iSelected = null;
		private List<CourseAssignment> iCourses;

		public DegreeCourseInterface() {}

		public String getSubject() { return iSubject; }
		public void setSubject(String subject) { iSubject = subject; }
		public String getCourse() { return iCourse; }
		public void setCourse(String course) { iCourse = course; }
		public boolean hasTitle() { return iTitle != null && !iTitle.isEmpty(); }
		public String getTitle() { return iTitle; }
		public void setTitle(String title) { iTitle = title; }
		public boolean hasSelected() { return iSelected != null; }
		// No explicit selection defaults to selected.
		public boolean isSelected() { return iSelected == null || iSelected.booleanValue(); }
		public void setSelected(boolean selected) { iSelected = selected; }
		public Long getCourseId() { return iCourseId; }
		public void setCourseId(Long courseId) { iCourseId = courseId; }

		/** @return course name in the "SUBJECT COURSE" format */
		public String getCourseName() {
			return getSubject() + " " + getCourse();
		}

		/** @return course name, including the title when available */
		public String getCourseNameWithTitle() {
			return hasTitle() ? getSubject() + " " + getCourse() + " - " + getTitle() : getSubject() + " " + getCourse();
		}

		public boolean hasCourses() { return iCourses != null && !iCourses.isEmpty(); }
		public boolean hasMultipleCourses() { return iCourses != null && iCourses.size() > 1; }
		public List<CourseAssignment> getCourses() { return iCourses; }
		public void addCourse(CourseAssignment course) {
			if (iCourses == null) iCourses = new ArrayList<CourseAssignment>();
			iCourses.add(course);
		}

		/** Returns the matched course offering selected by id; optionally falls back to a name match or the first offering. */
		public CourseAssignment getSelectedCourse(boolean pickOneWhenNoneSelected) {
			if (iCourses != null && iCourseId != null)
				for (CourseAssignment course: iCourses)
					if (iCourseId.equals(course.getCourseId())) return course;
			if (pickOneWhenNoneSelected && iCourses != null && !iCourses.isEmpty()) {
				for (CourseAssignment course: iCourses)
					if (getCourseName().equals(course.getCourseName())) return course;
				return iCourses.get(0);
			}
			return null;
		}

		@Override
		public String toString() {
			CourseAssignment ca = getSelectedCourse(false);
			if (ca != null) return ca.getCourseName();
			return getCourseName();
		}
	}

	/** A free-form placeholder item of a degree plan (e.g., "free elective"). */
	public static class DegreePlaceHolderInterface extends DegreeItemInterface {
		private static final long serialVersionUID = 1L;
		private String iType;
		private String iName;

		public DegreePlaceHolderInterface() {}

		public String getName() { return iName; }
		public void setName(String name) { iName = name; }
		public String getType() { return iType; }
		public void setType(String type) { iType = type; }

		@Override
		public String toString() { return iType; }
	}
}
| |
package org.apache.maven.plugins.shade;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.shade.filter.Filter;
import com.google.common.base.Joiner;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import org.apache.maven.plugins.shade.relocation.Relocator;
import org.apache.maven.plugins.shade.resource.ManifestResourceTransformer;
import org.apache.maven.plugins.shade.resource.ResourceTransformer;
import org.codehaus.plexus.component.annotations.Component;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.IOUtil;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.commons.Remapper;
import org.objectweb.asm.commons.RemappingClassAdapter;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipException;
/**
 * Default {@link Shader} implementation: builds the uber JAR by copying, filtering,
 * relocating and transforming the entries of the input JARs, and reports classes
 * that overlap between inputs.
 *
 * @author Jason van Zyl
 */
@Component( role = Shader.class, hint = "default" )
public class DefaultShader
    extends AbstractLogEnabled
    implements Shader
{
    public void shade( ShadeRequest shadeRequest )
        throws IOException, MojoExecutionException
    {
        Set<String> resources = new HashSet<String>();

        // The manifest transformer (if any) is handled first, separately from the others.
        ResourceTransformer manifestTransformer = null;
        List<ResourceTransformer> transformers =
            new ArrayList<ResourceTransformer>( shadeRequest.getResourceTransformers() );
        for ( Iterator<ResourceTransformer> it = transformers.iterator(); it.hasNext(); )
        {
            ResourceTransformer transformer = it.next();
            if ( transformer instanceof ManifestResourceTransformer )
            {
                manifestTransformer = transformer;
                it.remove();
            }
        }

        RelocatorRemapper remapper = new RelocatorRemapper( shadeRequest.getRelocators() );

        //noinspection ResultOfMethodCallIgnored
        shadeRequest.getUberJar().getParentFile().mkdirs();
        FileOutputStream fileOutputStream = new FileOutputStream( shadeRequest.getUberJar() );
        JarOutputStream jos = new JarOutputStream( new BufferedOutputStream( fileOutputStream ) );

        if ( manifestTransformer != null )
        {
            for ( File jar : shadeRequest.getJars() )
            {
                JarFile jarFile = newJarFile( jar );
                try
                {
                    for ( Enumeration<JarEntry> en = jarFile.entries(); en.hasMoreElements(); )
                    {
                        JarEntry entry = en.nextElement();
                        String resource = entry.getName();
                        if ( manifestTransformer.canTransformResource( resource ) )
                        {
                            resources.add( resource );
                            manifestTransformer.processResource( resource, jarFile.getInputStream( entry ),
                                                                 shadeRequest.getRelocators() );
                            break;
                        }
                    }
                }
                finally
                {
                    // BUGFIX: these JarFiles were previously never closed (file handle leak).
                    jarFile.close();
                }
            }
            if ( manifestTransformer.hasTransformedResource() )
            {
                manifestTransformer.modifyOutputStream( jos );
            }
        }

        // Records which .class resources appear in which input JARs so overlaps can be reported.
        Multimap<String, File> duplicates = HashMultimap.create( 10000, 3 );

        for ( File jar : shadeRequest.getJars() )
        {
            getLogger().debug( "Processing JAR " + jar );

            List<Filter> jarFilters = getFilters( jar, shadeRequest.getFilters() );

            JarFile jarFile = newJarFile( jar );
            try
            {
                for ( Enumeration<JarEntry> j = jarFile.entries(); j.hasMoreElements(); )
                {
                    JarEntry entry = j.nextElement();

                    String name = entry.getName();

                    if ( "META-INF/INDEX.LIST".equals( name ) )
                    {
                        // we cannot allow the jar indexes to be copied over or the
                        // jar is useless. Ideally, we could create a new one
                        // later
                        continue;
                    }

                    if ( !entry.isDirectory() && !isFiltered( jarFilters, name ) )
                    {
                        InputStream is = jarFile.getInputStream( entry );

                        String mappedName = remapper.map( name );

                        int idx = mappedName.lastIndexOf( '/' );
                        if ( idx != -1 )
                        {
                            // make sure dirs are created
                            String dir = mappedName.substring( 0, idx );
                            if ( !resources.contains( dir ) )
                            {
                                addDirectory( resources, jos, dir );
                            }
                        }

                        if ( name.endsWith( ".class" ) )
                        {
                            duplicates.put( name, jar );
                            addRemappedClass( remapper, jos, jar, name, is );
                        }
                        else if ( shadeRequest.isShadeSourcesContent() && name.endsWith( ".java" ) )
                        {
                            // Avoid duplicates
                            if ( resources.contains( mappedName ) )
                            {
                                continue;
                            }

                            addJavaSource( resources, jos, mappedName, is, shadeRequest.getRelocators() );
                        }
                        else
                        {
                            if ( !resourceTransformed( transformers, mappedName, is, shadeRequest.getRelocators() ) )
                            {
                                // Avoid duplicates that aren't accounted for by the resource transformers
                                if ( resources.contains( mappedName ) )
                                {
                                    continue;
                                }

                                addResource( resources, jos, mappedName, is );
                            }
                        }

                        IOUtil.close( is );
                    }
                }
            }
            finally
            {
                // BUGFIX: close the input JAR even when copying an entry fails.
                jarFile.close();
            }
        }

        // Group the duplicated classes by the set of JARs that contain them.
        Multimap<Collection<File>, String> overlapping = HashMultimap.create( 20, 15 );
        for ( String clazz : duplicates.keySet() )
        {
            Collection<File> jarz = duplicates.get( clazz );
            if ( jarz.size() > 1 )
            {
                overlapping.put( jarz, clazz );
            }
        }

        // Log a summary of duplicates
        for ( Collection<File> jarz : overlapping.keySet() )
        {
            List<String> jarzS = new LinkedList<String>();
            for ( File jjar : jarz )
                jarzS.add( jjar.getName() );

            List<String> classes = new LinkedList<String>();
            for ( String clazz : overlapping.get( jarz ) )
                classes.add( clazz.replace( ".class", "" ).replace( "/", "." ) );

            // BUGFIX: "overlappping" -> "overlapping"
            getLogger().warn( Joiner.on( ", " ).join( jarzS ) + " define " + classes.size()
                + " overlapping classes: " );

            int max = 10;
            for ( int i = 0; i < Math.min( max, classes.size() ); i++ )
                getLogger().warn( "  - " + classes.get( i ) );
            if ( classes.size() > max )
                getLogger().warn( "  - " + ( classes.size() - max ) + " more..." );
        }

        if ( overlapping.keySet().size() > 0 )
        {
            getLogger().warn( "maven-shade-plugin has detected that some .class files" );
            getLogger().warn( "are present in two or more JARs. When this happens, only" );
            getLogger().warn( "one single version of the class is copied in the uberjar." );
            // BUGFIX: "skeep" -> "skip"
            getLogger().warn( "Usually this is not harmful and you can skip these" );
            getLogger().warn( "warnings, otherwise try to manually exclude artifacts" );
            getLogger().warn( "based on mvn dependency:tree -Ddetail=true and the above" );
            getLogger().warn( "output" );
            getLogger().warn( "See http://docs.codehaus.org/display/MAVENUSER/Shade+Plugin" );
        }

        for ( ResourceTransformer transformer : transformers )
        {
            if ( transformer.hasTransformedResource() )
            {
                transformer.modifyOutputStream( jos );
            }
        }

        IOUtil.close( jos );

        for ( Filter filter : shadeRequest.getFilters() )
        {
            filter.finished();
        }
    }

    /** Opens the given JAR, rethrowing ZipException with the file name included for easier diagnosis. */
    private JarFile newJarFile( File jar )
        throws IOException
    {
        try
        {
            return new JarFile( jar );
        }
        catch ( ZipException zex )
        {
            // JarFile is not very verbose and doesn't tell the user which file it was
            // so we will create a new Exception instead
            throw new ZipException( "error in opening zip file " + jar );
        }
    }

    /** Returns the subset of filters that apply to the given JAR. */
    private List<Filter> getFilters( File jar, List<Filter> filters )
    {
        List<Filter> list = new ArrayList<Filter>();

        for ( Filter filter : filters )
        {
            if ( filter.canFilter( jar ) )
            {
                list.add( filter );
            }
        }

        return list;
    }

    /** Writes a directory entry (and, recursively, its missing parents) to the output JAR. */
    private void addDirectory( Set<String> resources, JarOutputStream jos, String name )
        throws IOException
    {
        if ( name.lastIndexOf( '/' ) > 0 )
        {
            String parent = name.substring( 0, name.lastIndexOf( '/' ) );
            if ( !resources.contains( parent ) )
            {
                addDirectory( resources, jos, parent );
            }
        }

        // directory entries must end in "/"
        JarEntry entry = new JarEntry( name + "/" );
        jos.putNextEntry( entry );

        resources.add( name );
    }

    /** Copies a class entry into the output JAR, rewriting its bytecode through the relocators when any are configured. */
    private void addRemappedClass( RelocatorRemapper remapper, JarOutputStream jos, File jar, String name,
                                   InputStream is )
        throws IOException, MojoExecutionException
    {
        if ( !remapper.hasRelocators() )
        {
            try
            {
                jos.putNextEntry( new JarEntry( name ) );
                IOUtil.copy( is, jos );
            }
            catch ( ZipException e )
            {
                getLogger().debug( "We have a duplicate " + name + " in " + jar );
            }

            return;
        }

        ClassReader cr = new ClassReader( is );

        // We don't pass the ClassReader here. This forces the ClassWriter to rebuild the constant pool.
        // Copying the original constant pool should be avoided because it would keep references
        // to the original class names. This is not a problem at runtime (because these entries in the
        // constant pool are never used), but confuses some tools such as Felix' maven-bundle-plugin
        // that use the constant pool to determine the dependencies of a class.
        ClassWriter cw = new ClassWriter( 0 );

        ClassVisitor cv = new RemappingClassAdapter( cw, remapper );

        try
        {
            cr.accept( cv, ClassReader.EXPAND_FRAMES );
        }
        catch ( Throwable ise )
        {
            throw new MojoExecutionException( "Error in ASM processing class " + name, ise );
        }

        byte[] renamedClass = cw.toByteArray();

        // Need to take the .class off for remapping evaluation
        // BUGFIX: use lastIndexOf so a dot in a directory name does not truncate the entry name.
        String mappedName = remapper.map( name.substring( 0, name.lastIndexOf( '.' ) ) );

        try
        {
            // Now we put it back on so the class file is written out with the right extension.
            jos.putNextEntry( new JarEntry( mappedName + ".class" ) );

            IOUtil.copy( renamedClass, jos );
        }
        catch ( ZipException e )
        {
            getLogger().debug( "We have a duplicate " + mappedName + " in " + jar );
        }
    }

    /** True when any of the given filters excludes the named entry. */
    private boolean isFiltered( List<Filter> filters, String name )
    {
        for ( Filter filter : filters )
        {
            if ( filter.isFiltered( name ) )
            {
                return true;
            }
        }

        return false;
    }

    /** Feeds the resource to the first transformer that can handle it; returns whether one did. */
    private boolean resourceTransformed( List<ResourceTransformer> resourceTransformers, String name, InputStream is,
                                         List<Relocator> relocators )
        throws IOException
    {
        boolean resourceTransformed = false;

        for ( ResourceTransformer transformer : resourceTransformers )
        {
            if ( transformer.canTransformResource( name ) )
            {
                getLogger().debug( "Transforming " + name + " using " + transformer.getClass().getName() );

                transformer.processResource( name, is, relocators );

                resourceTransformed = true;

                break;
            }
        }

        return resourceTransformed;
    }

    /** Copies a .java source entry into the output JAR, applying source-level relocations. */
    private void addJavaSource( Set<String> resources, JarOutputStream jos, String name, InputStream is,
                                List<Relocator> relocators )
        throws IOException
    {
        jos.putNextEntry( new JarEntry( name ) );

        String sourceContent = IOUtil.toString( new InputStreamReader( is, "UTF-8" ) );

        for ( Relocator relocator : relocators )
        {
            sourceContent = relocator.applyToSourceContent( sourceContent );
        }

        OutputStreamWriter writer = new OutputStreamWriter( jos, "UTF-8" );
        IOUtil.copy( sourceContent, writer );
        writer.flush();

        resources.add( name );
    }

    /** Copies a plain resource entry into the output JAR as-is. */
    private void addResource( Set<String> resources, JarOutputStream jos, String name, InputStream is )
        throws IOException
    {
        jos.putNextEntry( new JarEntry( name ) );

        IOUtil.copy( is, jos );

        resources.add( name );
    }

    /** ASM {@link Remapper} that applies the configured {@link Relocator}s to class and path names. */
    class RelocatorRemapper
        extends Remapper
    {
        // Matches JVM type descriptors like "Ljava/lang/String;" (optionally array-prefixed).
        private final Pattern classPattern = Pattern.compile( "(\\[*)?L(.+);" );

        List<Relocator> relocators;

        public RelocatorRemapper( List<Relocator> relocators )
        {
            this.relocators = relocators;
        }

        public boolean hasRelocators()
        {
            return !relocators.isEmpty();
        }

        public Object mapValue( Object object )
        {
            if ( object instanceof String )
            {
                String name = (String) object;
                String value = name;

                String prefix = "";
                String suffix = "";

                Matcher m = classPattern.matcher( name );
                if ( m.matches() )
                {
                    prefix = m.group( 1 ) + "L";
                    suffix = ";";
                    name = m.group( 2 );
                }

                for ( Relocator r : relocators )
                {
                    if ( r.canRelocateClass( name ) )
                    {
                        value = prefix + r.relocateClass( name ) + suffix;
                        break;
                    }
                    else if ( r.canRelocatePath( name ) )
                    {
                        value = prefix + r.relocatePath( name ) + suffix;
                        break;
                    }
                }

                return value;
            }

            return super.mapValue( object );
        }

        public String map( String name )
        {
            String value = name;

            String prefix = "";
            String suffix = "";

            Matcher m = classPattern.matcher( name );
            if ( m.matches() )
            {
                prefix = m.group( 1 ) + "L";
                suffix = ";";
                name = m.group( 2 );
            }

            for ( Relocator r : relocators )
            {
                if ( r.canRelocatePath( name ) )
                {
                    value = prefix + r.relocatePath( name ) + suffix;
                    break;
                }
            }

            return value;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.spark.translation;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.collect.AbstractIterator;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import org.apache.beam.runners.spark.aggregators.NamedAggregators;
import org.apache.beam.runners.spark.util.BroadcastHelper;
import org.apache.beam.runners.spark.util.SparkSideInputReader;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.Aggregator;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.OldDoFn;
import org.apache.beam.sdk.transforms.OldDoFn.RequiresWindowAccess;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.transforms.windowing.WindowFn;
import org.apache.beam.sdk.util.SideInputReader;
import org.apache.beam.sdk.util.SystemDoFnInternal;
import org.apache.beam.sdk.util.TimerInternals;
import org.apache.beam.sdk.util.UserCodeException;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.util.WindowingInternals;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.util.state.InMemoryStateInternals;
import org.apache.beam.sdk.util.state.StateInternals;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.spark.Accumulator;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Spark runner process context.
*/
/**
 * Spark runner process context: bridges a Spark partition iterator to the Beam
 * {@link OldDoFn} lifecycle (setup, startBundle, one processElement per input element,
 * finishBundle, teardown), producing the DoFn's output lazily as the partition is consumed.
 *
 * @param <InputT>  input element type of the wrapped {@link OldDoFn}
 * @param <OutputT> output element type of the wrapped {@link OldDoFn}
 * @param <ValueT>  element type exposed by the output iterator
 */
public abstract class SparkProcessContext<InputT, OutputT, ValueT>
    extends OldDoFn<InputT, OutputT>.ProcessContext {

  private static final Logger LOG = LoggerFactory.getLogger(SparkProcessContext.class);

  private final OldDoFn<InputT, OutputT> fn;
  private final SparkRuntimeContext mRuntimeContext;
  private final SideInputReader sideInputReader;
  private final WindowFn<Object, ?> windowFn;

  /** The element currently being processed; null while inside start/finishBundle. */
  WindowedValue<InputT> windowedValue;

  SparkProcessContext(OldDoFn<InputT, OutputT> fn,
      SparkRuntimeContext runtime,
      Map<TupleTag<?>, KV<WindowingStrategy<?, ?>, BroadcastHelper<?>>> sideInputs,
      WindowFn<Object, ?> windowFn) {
    fn.super();
    this.fn = fn;
    this.mRuntimeContext = runtime;
    this.sideInputReader = new SparkSideInputReader(sideInputs);
    this.windowFn = windowFn;
  }

  void setup() {
    setupDelegateAggregators();
  }

  /**
   * Runs the DoFn over the given bundle of windowed input elements.
   *
   * <p>setup() and startBundle() happen eagerly here; per-element processing, finishBundle()
   * and teardown() happen lazily inside the returned iterable.
   *
   * @param iter the bundle's input elements
   * @return a lazy iterable over the DoFn's output; empty when the bundle is empty
   * @throws Exception if setup() or startBundle() fails (wrapped unless this is a system DoFn)
   */
  Iterable<ValueT> callWithCtxt(Iterator<WindowedValue<InputT>> iter) throws Exception {
    this.setup();
    // skip if bundle is empty.
    if (!iter.hasNext()) {
      return Lists.newArrayList();
    }
    try {
      fn.setup();
      fn.startBundle(this);
      return this.getOutputIterable(iter, fn);
    } catch (Exception e) {
      try {
        // this teardown handles exceptions encountered in setup() and startBundle(). teardown
        // after execution or due to exceptions in process element is called in the iterator
        // produced by ctxt.getOutputIterable returned from this method.
        fn.teardown();
      } catch (Exception teardownException) {
        LOG.error(
            "Suppressing exception while tearing down Function {}", fn, teardownException);
        e.addSuppressed(teardownException);
      }
      throw wrapUserCodeException(e);
    }
  }

  @Override
  public PipelineOptions getPipelineOptions() {
    return mRuntimeContext.getPipelineOptions();
  }

  @Override
  public <T> T sideInput(PCollectionView<T> view) {
    //validate element window.
    final Collection<? extends BoundedWindow> elementWindows = windowedValue.getWindows();
    checkState(elementWindows.size() == 1, "sideInput can only be called when the main "
        + "input element is in exactly one window");
    return sideInputReader.get(view, elementWindows.iterator().next());
  }

  @Override
  public <AggregatorInputT, AggregatorOutputT>
      Aggregator<AggregatorInputT, AggregatorOutputT> createAggregatorInternal(
          String named,
          Combine.CombineFn<AggregatorInputT, ?, AggregatorOutputT> combineFn) {
    return mRuntimeContext.createAggregator(getAccumulator(), named, combineFn);
  }

  /** @return the Spark accumulator backing Beam aggregators for this context. */
  public abstract Accumulator<NamedAggregators> getAccumulator();

  @Override
  public InputT element() {
    return windowedValue.getValue();
  }

  @Override
  public void output(OutputT output) {
    // windowedValue is null during start/finishBundle; a null timestamp is handled below.
    outputWithTimestamp(output, windowedValue != null ? windowedValue.getTimestamp() : null);
  }

  @Override
  public void outputWithTimestamp(OutputT output, Instant timestamp) {
    if (windowedValue == null) {
      // this is start/finishBundle.
      outputWindowedValue(noElementWindowedValue(output, timestamp, windowFn));
    } else {
      outputWindowedValue(WindowedValue.of(output, timestamp, windowedValue.getWindows(),
          windowedValue.getPane()));
    }
  }

  @Override
  public <T> void sideOutput(TupleTag<T> tag, T output) {
    sideOutputWithTimestamp(
        tag, output, windowedValue != null ? windowedValue.getTimestamp() : null);
  }

  @Override
  public <T> void sideOutputWithTimestamp(TupleTag<T> tag, T output, Instant timestamp) {
    if (windowedValue == null) {
      // this is start/finishBundle.
      sideOutputWindowedValue(tag, noElementWindowedValue(output, timestamp, windowFn));
    } else {
      sideOutputWindowedValue(tag, WindowedValue.of(output, timestamp, windowedValue.getWindows(),
          windowedValue.getPane()));
    }
  }

  /** Emits a main-output value; implementations accumulate it for {@link #getOutputIterator()}. */
  protected abstract void outputWindowedValue(WindowedValue<OutputT> output);

  /** Emits a side-output value for the given tag. */
  protected abstract <T> void sideOutputWindowedValue(TupleTag<T> tag, WindowedValue<T> output);

  /**
   * Builds a WindowedValue for output produced while there is no current element
   * (i.e. from start/finishBundle), assigning windows via the WindowFn. A null timestamp
   * falls back to {@link BoundedWindow#TIMESTAMP_MIN_VALUE}, but WindowFns that actually
   * read the timestamp will reject a null one.
   */
  static <T, W extends BoundedWindow> WindowedValue<T> noElementWindowedValue(
      final T output, final Instant timestamp, WindowFn<Object, W> windowFn) {
    WindowFn<Object, W>.AssignContext assignContext =
        windowFn.new AssignContext() {
          @Override
          public Object element() {
            return output;
          }

          @Override
          public Instant timestamp() {
            if (timestamp != null) {
              return timestamp;
            }
            throw new UnsupportedOperationException(
                "outputWithTimestamp was called with " + "null timestamp.");
          }

          @Override
          public BoundedWindow window() {
            throw new UnsupportedOperationException(
                "Window not available for " + "start/finishBundle output.");
          }
        };
    try {
      @SuppressWarnings("unchecked")
      Collection<? extends BoundedWindow> windows = windowFn.assignWindows(assignContext);
      Instant outputTimestamp = timestamp != null ? timestamp : BoundedWindow.TIMESTAMP_MIN_VALUE;
      return WindowedValue.of(output, outputTimestamp, windows, PaneInfo.NO_FIRING);
    } catch (Exception e) {
      throw new RuntimeException("Failed to assign windows at start/finishBundle.", e);
    }
  }

  @Override
  public Instant timestamp() {
    return windowedValue.getTimestamp();
  }

  @Override
  public BoundedWindow window() {
    if (!(fn instanceof OldDoFn.RequiresWindowAccess)) {
      throw new UnsupportedOperationException(
          "window() is only available in the context of a OldDoFn marked as RequiresWindowAccess.");
    }
    return Iterables.getOnlyElement(windowedValue.getWindows());
  }

  @Override
  public PaneInfo pane() {
    return windowedValue.getPane();
  }

  @Override
  public WindowingInternals<InputT, OutputT> windowingInternals() {
    return new WindowingInternals<InputT, OutputT>() {
      @Override
      public Collection<? extends BoundedWindow> windows() {
        return windowedValue.getWindows();
      }

      @Override
      public void outputWindowedValue(
          OutputT output,
          Instant timestamp,
          Collection<? extends BoundedWindow> windows,
          PaneInfo paneInfo) {
        SparkProcessContext.this.outputWindowedValue(
            WindowedValue.of(output, timestamp, windows, paneInfo));
      }

      @Override
      public <SideOutputT> void sideOutputWindowedValue(
          TupleTag<SideOutputT> tag,
          SideOutputT output,
          Instant timestamp,
          Collection<? extends BoundedWindow> windows,
          PaneInfo paneInfo) {
        SparkProcessContext.this.sideOutputWindowedValue(
            tag, WindowedValue.of(output, timestamp, windows, paneInfo));
      }

      @Override
      public StateInternals stateInternals() {
        //TODO: implement state internals.
        // This is a temporary placeholder to get the TfIdfTest
        // working for the initial Beam code drop.
        return InMemoryStateInternals.forKey("DUMMY");
      }

      @Override
      public TimerInternals timerInternals() {
        throw new UnsupportedOperationException(
            "WindowingInternals#timerInternals() is not yet supported.");
      }

      @Override
      public PaneInfo pane() {
        return windowedValue.getPane();
      }

      @Override
      public <T> T sideInput(PCollectionView<T> view, BoundedWindow sideInputWindow) {
        throw new UnsupportedOperationException(
            "WindowingInternals#sideInput() is not yet supported.");
      }
    };
  }

  /** Resets the per-element output buffer before the next processElement/finishBundle call. */
  protected abstract void clearOutput();

  /** @return an iterator over the output accumulated since the last {@link #clearOutput()}. */
  protected abstract Iterator<ValueT> getOutputIterator();

  protected Iterable<ValueT> getOutputIterable(final Iterator<WindowedValue<InputT>> iter,
      final OldDoFn<InputT, OutputT> doFn) {
    return new Iterable<ValueT>() {
      @Override
      public Iterator<ValueT> iterator() {
        return new ProcCtxtIterator(iter, doFn);
      }
    };
  }

  /**
   * Lazily drives the DoFn over the input iterator: each input element is processed on demand,
   * finishBundle runs once the input is exhausted, and teardown runs after all output is drained.
   */
  private class ProcCtxtIterator extends AbstractIterator<ValueT> {

    private final Iterator<WindowedValue<InputT>> inputIterator;
    private final OldDoFn<InputT, OutputT> doFn;
    private Iterator<ValueT> outputIterator;
    private boolean calledFinish;

    ProcCtxtIterator(Iterator<WindowedValue<InputT>> iterator, OldDoFn<InputT, OutputT> doFn) {
      this.inputIterator = iterator;
      this.doFn = doFn;
      this.outputIterator = getOutputIterator();
    }

    @Override
    protected ValueT computeNext() {
      // Process each element from the (input) iterator, which produces, zero, one or more
      // output elements (of type V) in the output iterator. Note that the output
      // collection (and iterator) is reset between each call to processElement, so the
      // collection only holds the output values for each call to processElement, rather
      // than for the whole partition (which would use too much memory).
      while (true) {
        if (outputIterator.hasNext()) {
          return outputIterator.next();
        } else if (inputIterator.hasNext()) {
          clearOutput();
          // grab the next element and process it.
          windowedValue = inputIterator.next();
          if (windowedValue.getWindows().size() <= 1
              || (!RequiresWindowAccess.class.isAssignableFrom(doFn.getClass())
                  && sideInputReader.isEmpty())) {
            // if there's no reason to explode, process compacted.
            invokeProcessElement();
          } else {
            // explode and process the element in each of its assigned windows.
            for (WindowedValue<InputT> wv: windowedValue.explodeWindows()) {
              windowedValue = wv;
              invokeProcessElement();
            }
          }
          outputIterator = getOutputIterator();
        } else {
          // no more input to consume, but finishBundle can produce more output
          if (!calledFinish) {
            windowedValue = null; // clear the last element processed
            clearOutput();
            try {
              calledFinish = true;
              doFn.finishBundle(SparkProcessContext.this);
            } catch (Exception e) {
              handleProcessingException(e);
              throw wrapUserCodeException(e);
            }
            outputIterator = getOutputIterator();
            continue; // try to consume outputIterator from start of loop
          }
          try {
            doFn.teardown();
          } catch (Exception e) {
            LOG.error(
                "Suppressing teardown exception that occurred after processing entire input", e);
          }
          return endOfData();
        }
      }
    }

    private void invokeProcessElement() {
      try {
        doFn.processElement(SparkProcessContext.this);
      } catch (Exception e) {
        handleProcessingException(e);
        throw wrapUserCodeException(e);
      }
    }

    // Best-effort teardown on a processing failure; the teardown exception is suppressed
    // onto the original so the primary failure is what propagates.
    private void handleProcessingException(Exception e) {
      try {
        doFn.teardown();
      } catch (Exception e1) {
        LOG.error("Exception while cleaning up DoFn", e1);
        e.addSuppressed(e1);
      }
    }
  }

  /**
   * Wraps {@code t} as a {@link UserCodeException} unless the DoFn is annotated
   * {@link SystemDoFnInternal}.
   *
   * <p>Fixed to {@code return} instead of {@code throw}: the method declares a
   * {@code RuntimeException} return and every caller writes
   * {@code throw wrapUserCodeException(e)}, so throwing here made the callers' throw
   * statements unreachable and the declared contract misleading. Exception propagation
   * is unchanged.
   */
  private RuntimeException wrapUserCodeException(Throwable t) {
    return UserCodeException.wrapIf(!isSystemDoFn(), t);
  }

  private boolean isSystemDoFn() {
    return fn.getClass().isAnnotationPresent(SystemDoFnInternal.class);
  }
}
| |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: yole
* Date: 05.12.2006
* Time: 19:39:22
*/
package com.intellij.openapi.vcs.changes.committed;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import consulo.disposer.Disposable;
import consulo.logging.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import consulo.disposer.Disposer;
import consulo.util.dataholder.Key;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.*;
import com.intellij.openapi.vcs.changes.BackgroundFromStartOption;
import com.intellij.openapi.vcs.versionBrowser.ChangeBrowserSettings;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.FilterComponent;
import com.intellij.util.AsynchConsumer;
import com.intellij.util.BufferedListConsumer;
import com.intellij.util.Consumer;
import com.intellij.util.WaitForProgressToShow;
import com.intellij.util.containers.ContainerUtil;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
 * Panel that shows a tree of committed change lists, loaded either directly from a
 * {@link RepositoryLocation} (background task, streamed in batches) or from the
 * project-wide committed-changes cache, with a toolbar and a free-text filter.
 */
public class CommittedChangesPanel extends JPanel implements TypeSafeDataProvider, Disposable {
private static final Logger LOG = Logger.getInstance(CommittedChangesPanel.class);
private final CommittedChangesTreeBrowser myBrowser;
private final Project myProject;
private CommittedChangesProvider myProvider;
private ChangeBrowserSettings mySettings;
// Non-null when the panel is bound to a concrete repository location; null means "use the cache".
private final RepositoryLocation myLocation;
// Maximum number of change lists to load.
private int myMaxCount = 0;
private final MyFilterComponent myFilterComponent = new MyFilterComponent();
// Cleanup callbacks supplied by the VCS-specific view auxiliary; run from dispose().
private final List<Runnable> myShouldBeCalledOnDispose;
private volatile boolean myDisposed;
private volatile boolean myInLoad;
// Triggers a full (non-cache-only) reload; exposed via the REMOTE_HISTORY_CHANGED_LISTENER key.
private Consumer<String> myIfNotCachedReloader;
/**
 * Builds the panel: the tree browser in the center, toolbar (standard group plus optional
 * extra and auxiliary actions) and the filter field across the top.
 */
public CommittedChangesPanel(Project project, final CommittedChangesProvider provider, final ChangeBrowserSettings settings,
@Nullable final RepositoryLocation location, @javax.annotation.Nullable ActionGroup extraActions) {
super(new BorderLayout());
mySettings = settings;
myProject = project;
myProvider = provider;
myLocation = location;
myShouldBeCalledOnDispose = new ArrayList<Runnable>();
myBrowser = new CommittedChangesTreeBrowser(project, new ArrayList<CommittedChangeList>());
Disposer.register(this, myBrowser);
add(myBrowser, BorderLayout.CENTER);
final VcsCommittedViewAuxiliary auxiliary = provider.createActions(myBrowser, location);
JPanel toolbarPanel = new JPanel();
toolbarPanel.setLayout(new BoxLayout(toolbarPanel, BoxLayout.X_AXIS));
ActionGroup group = (ActionGroup) ActionManager.getInstance().getAction("CommittedChangesToolbar");
ActionToolbar toolBar = myBrowser.createGroupFilterToolbar(project, group, extraActions,
auxiliary != null ? auxiliary.getToolbarActions() : Collections.<AnAction>emptyList());
toolbarPanel.add(toolBar.getComponent());
toolbarPanel.add(Box.createHorizontalGlue());
toolbarPanel.add(myFilterComponent);
// Pin the filter field at its preferred size so the horizontal glue absorbs resizes.
myFilterComponent.setMinimumSize(myFilterComponent.getPreferredSize());
myFilterComponent.setMaximumSize(myFilterComponent.getPreferredSize());
myBrowser.setToolBar(toolbarPanel);
if (auxiliary != null) {
myShouldBeCalledOnDispose.add(auxiliary.getCalledOnViewDispose());
myBrowser.setTableContextMenu(group, (auxiliary.getPopupActions() == null) ? Collections.<AnAction>emptyList() : auxiliary.getPopupActions());
} else {
myBrowser.setTableContextMenu(group, Collections.<AnAction>emptyList());
}
final AnAction anAction = ActionManager.getInstance().getAction("CommittedChanges.Refresh");
anAction.registerCustomShortcutSet(CommonShortcuts.getRerun(), this);
myBrowser.addFilter(myFilterComponent);
// Only location-bound panels expose a reloader; cache-backed panels refresh through the cache.
myIfNotCachedReloader = myLocation == null ? null : new Consumer<String>() {
@Override
public void consume(String s) {
refreshChanges(false);
}
};
}
public RepositoryLocation getRepositoryLocation() {
return myLocation;
}
public void setMaxCount(final int maxCount) {
myMaxCount = maxCount;
}
/** Switches to another provider, resetting the filter settings to that provider's defaults. */
public void setProvider(final CommittedChangesProvider provider) {
if (myProvider != provider) {
myProvider = provider;
mySettings = provider.createDefaultSettings();
}
}
/**
 * Reloads the view: directly from the repository when bound to a location, otherwise
 * from the committed-changes cache.
 *
 * @param cacheOnly when loading from the cache, do not contact the VCS server
 */
public void refreshChanges(final boolean cacheOnly) {
if (myLocation != null) {
refreshChangesFromLocation();
}
else {
refreshChangesFromCache(cacheOnly);
}
}
// Loads change lists from the repository on a background task, appending them to the
// browser in buffered batches marshalled onto the EDT.
private void refreshChangesFromLocation() {
myBrowser.reset();
myInLoad = true;
myBrowser.setLoading(true);
ProgressManager.getInstance().run(new Task.Backgroundable(myProject, "Loading changes", true, BackgroundFromStartOption.getInstance()) {
@Override
public void run(@Nonnull final ProgressIndicator indicator) {
try {
final AsynchConsumer<List<CommittedChangeList>> appender = new AsynchConsumer<List<CommittedChangeList>>() {
@Override
public void finished() {
}
@Override
public void consume(final List<CommittedChangeList> list) {
// Hop to the EDT before touching the tree component.
new AbstractCalledLater(myProject, ModalityState.stateForComponent(myBrowser)) {
@Override
public void run() {
myBrowser.append(list);
}
}.callMe();
}
};
// Buffer incoming lists in groups of 30 so the EDT is not flooded per-item.
final BufferedListConsumer<CommittedChangeList> bufferedListConsumer = new BufferedListConsumer<CommittedChangeList>(30, appender,-1);
myProvider.loadCommittedChanges(mySettings, myLocation, myMaxCount, new AsynchConsumer<CommittedChangeList>() {
@Override
public void finished() {
bufferedListConsumer.flush();
}
@Override
public void consume(CommittedChangeList committedChangeList) {
// Abort the load promptly once the panel has been disposed.
if (myDisposed) {
indicator.cancel();
}
ProgressManager.checkCanceled();
bufferedListConsumer.consumeOne(committedChangeList);
}
});
}
catch (final VcsException e) {
LOG.info(e);
WaitForProgressToShow.runOrInvokeLaterAboveProgress(new Runnable() {
@Override
public void run() {
Messages.showErrorDialog(myProject, "Error refreshing view: " + StringUtil.join(e.getMessages(), "\n"), "Committed Changes");
}
}, null, myProject);
} finally {
myInLoad = false;
myBrowser.setLoading(false);
}
}
});
}
/** Clears the committed-changes caches, then empties the view on the EDT. */
public void clearCaches() {
final CommittedChangesCache cache = CommittedChangesCache.getInstance(myProject);
cache.clearCaches(new Runnable() {
@Override
public void run() {
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
updateFilteredModel(Collections.<CommittedChangeList>emptyList(), true);
}
}, ModalityState.NON_MODAL, myProject.getDisposed());
}
});
}
// Loads project change lists through the cache. If nothing is cached yet and a full load
// was requested, first lets the user configure caching via the settings dialog.
private void refreshChangesFromCache(final boolean cacheOnly) {
final CommittedChangesCache cache = CommittedChangesCache.getInstance(myProject);
cache.hasCachesForAnyRoot(new Consumer<Boolean>() {
@Override
public void consume(final Boolean notEmpty) {
if (! notEmpty) {
if (cacheOnly) {
myBrowser.getEmptyText().setText(VcsBundle.message("committed.changes.not.loaded.message"));
return;
}
if (!CacheSettingsDialog.showSettingsDialog(myProject)) return;
}
cache.getProjectChangesAsync(mySettings, myMaxCount, cacheOnly,
new Consumer<List<CommittedChangeList>>() {
@Override
public void consume(final List<CommittedChangeList> committedChangeLists) {
updateFilteredModel(committedChangeLists, false);
}
},
new Consumer<List<VcsException>>() {
@Override
public void consume(final List<VcsException> vcsExceptions) {
AbstractVcsHelper.getInstance(myProject).showErrors(vcsExceptions, "Error refreshing VCS history");
}
});
}
});
}
/** Case-insensitive text filter over a change list's comment, committer name and number. */
private static class FilterHelper {
// Filter words: the user's filter string split on spaces, lower-cased once up front.
private final String[] myParts;
FilterHelper(final String filterString) {
myParts = filterString.split(" ");
for(int i = 0; i < myParts.length; ++ i) {
myParts [i] = myParts [i].toLowerCase();
}
}
public boolean filter(@Nonnull final CommittedChangeList cl) {
return changeListMatches(cl, myParts);
}
// True when at least one filter word occurs in the comment, the committer name,
// or the decimal representation of the change list number.
private static boolean changeListMatches(@Nonnull final CommittedChangeList changeList, final String[] filterWords) {
for(String word: filterWords) {
final String comment = changeList.getComment();
final String committer = changeList.getCommitterName();
if ((comment != null && comment.toLowerCase().indexOf(word) >= 0) ||
(committer != null && committer.toLowerCase().indexOf(word) >= 0) ||
Long.toString(changeList.getNumber()).indexOf(word) >= 0) {
return true;
}
}
return false;
}
}
// Replaces the browser content; reset==true shows the "not loaded" empty text instead of
// the plain "empty" text. A null list is ignored (keeps whatever is currently shown).
private void updateFilteredModel(List<CommittedChangeList> committedChangeLists, final boolean reset) {
if (committedChangeLists == null) {
return;
}
final String emptyText;
if (reset) {
emptyText = VcsBundle.message("committed.changes.not.loaded.message");
} else {
emptyText = VcsBundle.message("committed.changes.empty.message");
}
myBrowser.getEmptyText().setText(emptyText);
myBrowser.setItems(committedChangeLists, CommittedChangesBrowserUseCase.COMMITTED);
}
/** Shows the provider-specific filter dialog and reloads when the user accepts it. */
public void setChangesFilter() {
CommittedChangesFilterDialog filterDialog = new CommittedChangesFilterDialog(myProject, myProvider.createFilterUI(true), mySettings);
filterDialog.show();
if (filterDialog.isOK()) {
mySettings = filterDialog.getSettings();
refreshChanges(false);
}
}
@Override
public void calcData(Key<?> key, DataSink sink) {
if (VcsDataKeys.REMOTE_HISTORY_CHANGED_LISTENER == key) {
sink.put(VcsDataKeys.REMOTE_HISTORY_CHANGED_LISTENER, myIfNotCachedReloader);
} else if (VcsDataKeys.REMOTE_HISTORY_LOCATION.equals(key)) {
sink.put(VcsDataKeys.REMOTE_HISTORY_LOCATION, myLocation);
}
// The browser always gets a chance to contribute its own data (changes, change lists, ...).
//if (key.equals(VcsDataKeys.CHANGES) || key.equals(VcsDataKeys.CHANGE_LISTS)) {
myBrowser.calcData(key, sink);
//}
}
@Override
public void dispose() {
for (Runnable runnable : myShouldBeCalledOnDispose) {
runnable.run();
}
// Signals any in-flight background load (see refreshChangesFromLocation) to cancel.
myDisposed = true;
}
/** Filter text field that also acts as the browser's {@link ChangeListFilteringStrategy}. */
private class MyFilterComponent extends FilterComponent implements ChangeListFilteringStrategy {
private final List<ChangeListener> myList = ContainerUtil.createLockFreeCopyOnWriteList();
public MyFilterComponent() {
super("COMMITTED_CHANGES_FILTER_HISTORY", 20);
}
@Override
public CommittedChangesFilterKey getKey() {
return new CommittedChangesFilterKey("text", CommittedChangesFilterPriority.TEXT);
}
// Invoked when the filter text changes; notifies listeners so the browser re-filters.
@Override
public void filter() {
for (ChangeListener changeListener : myList) {
changeListener.stateChanged(new ChangeEvent(this));
}
}
@Override
public JComponent getFilterUI() {
return null;
}
@Override
public void setFilterBase(List<CommittedChangeList> changeLists) {
}
@Override
public void addChangeListener(ChangeListener listener) {
myList.add(listener);
}
@Override
public void removeChangeListener(ChangeListener listener) {
myList.remove(listener);
}
@Override
public void resetFilterBase() {
}
@Override
public void appendFilterBase(List<CommittedChangeList> changeLists) {
}
@Override
@Nonnull
public List<CommittedChangeList> filterChangeLists(List<CommittedChangeList> changeLists) {
final FilterHelper filterHelper = new FilterHelper(myFilterComponent.getFilter());
final List<CommittedChangeList> result = new ArrayList<CommittedChangeList>();
for (CommittedChangeList list : changeLists) {
if (filterHelper.filter(list)) {
result.add(list);
}
}
return result;
}
}
/**
 * Collects every change list currently loaded in the browser and, if there are any,
 * delivers them to the given notification as a single batch.
 */
public void passCachedListsToListener(final VcsConfigurationChangeListener.DetailedNotification notification,
final Project project, final VirtualFile root) {
final LinkedList<CommittedChangeList> resultList = new LinkedList<CommittedChangeList>();
myBrowser.reportLoadedLists(new CommittedChangeListsListener() {
@Override
public void onBeforeStartReport() {
}
@Override
public boolean report(CommittedChangeList list) {
resultList.add(list);
return false;
}
@Override
public void onAfterEndReport() {
if (! resultList.isEmpty()) {
notification.execute(project, root, resultList);
}
}
});
}
public boolean isInLoad() {
return myInLoad;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.metadata.utils;
import java.util.List;
import java.util.Map;
import org.apache.asterix.common.config.DatasetConfig.DatasetType;
import org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.common.exceptions.ErrorCode;
import org.apache.asterix.external.indexing.IndexingConstants;
import org.apache.asterix.metadata.api.IResourceFactoryProvider;
import org.apache.asterix.metadata.declared.MetadataProvider;
import org.apache.asterix.metadata.entities.Dataset;
import org.apache.asterix.metadata.entities.Index;
import org.apache.asterix.metadata.entities.InternalDatasetDetails;
import org.apache.asterix.om.types.ARecordType;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.utils.NonTaggedFormatUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
import org.apache.hyracks.algebricks.data.ITypeTraitProvider;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.storage.am.common.api.IMetadataPageManagerFactory;
import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallbackFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationSchedulerProvider;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTrackerFactory;
import org.apache.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeLocalResourceFactory;
import org.apache.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeWithAntiMatterLocalResourceFactory;
import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
import org.apache.hyracks.storage.common.IResourceFactory;
import org.apache.hyracks.storage.common.IStorageManager;
public class RTreeResourceFactoryProvider implements IResourceFactoryProvider {
private static final RTreePolicyType rTreePolicyType = RTreePolicyType.RTREE;
public static final RTreeResourceFactoryProvider INSTANCE = new RTreeResourceFactoryProvider();
// Private: stateless singleton, use the shared INSTANCE instead.
private RTreeResourceFactoryProvider() {
}
/**
 * Builds the local resource factory for an R-tree secondary index: an
 * LSM R-tree with anti-matter tuples for internal datasets, or an external
 * R-tree (with a buddy B-tree) for external datasets.
 *
 * @throws AlgebricksException if the index does not have exactly one key field
 *         or the key field cannot be resolved to a spatial type
 */
@Override
public IResourceFactory getResourceFactory(MetadataProvider mdProvider, Dataset dataset, Index index,
        ARecordType recordType, ARecordType metaType, ILSMMergePolicyFactory mergePolicyFactory,
        Map<String, String> mergePolicyProperties, ITypeTraits[] filterTypeTraits,
        IBinaryComparatorFactory[] filterCmpFactories) throws AlgebricksException {
    // R-tree indexes are defined on exactly one (spatial) field.
    if (index.getKeyFieldNames().size() != 1) {
        throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_INDEX_NUM_OF_FIELD,
                index.getKeyFieldNames().size(), index.getIndexType(), 1);
    }
    IAType spatialType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
            index.getKeyFieldNames().get(0), recordType).first;
    if (spatialType == null) {
        throw new CompilationException(ErrorCode.COMPILATION_FIELD_NOT_FOUND,
                StringUtils.join(index.getKeyFieldNames().get(0), '.'));
    }
    List<List<String>> primaryKeyFields = dataset.getPrimaryKeys();
    int numPrimaryKeys = primaryKeyFields.size();
    ITypeTraits[] primaryTypeTraits = null;
    IBinaryComparatorFactory[] primaryComparatorFactories = null;
    IStorageComponentProvider storageComponentProvider = mdProvider.getStorageComponentProvider();
    if (dataset.getDatasetType() == DatasetType.INTERNAL) {
        // Layout: primary keys, then the record, then (optionally) the meta record.
        primaryTypeTraits = new ITypeTraits[numPrimaryKeys + 1 + (dataset.hasMetaPart() ? 1 : 0)];
        primaryComparatorFactories = new IBinaryComparatorFactory[numPrimaryKeys];
        List<Integer> indicators = null;
        if (dataset.hasMetaPart()) {
            indicators = ((InternalDatasetDetails) dataset.getDatasetDetails()).getKeySourceIndicator();
        }
        for (int i = 0; i < numPrimaryKeys; i++) {
            // Indicator 0 means the key comes from the record, otherwise from the meta part.
            IAType keyType = (indicators == null || indicators.get(i) == 0)
                    ? recordType.getSubFieldType(primaryKeyFields.get(i))
                    : metaType.getSubFieldType(primaryKeyFields.get(i));
            primaryComparatorFactories[i] = storageComponentProvider.getComparatorFactoryProvider()
                    .getBinaryComparatorFactory(keyType, true);
            primaryTypeTraits[i] = storageComponentProvider.getTypeTraitProvider().getTypeTrait(keyType);
        }
        primaryTypeTraits[numPrimaryKeys] =
                storageComponentProvider.getTypeTraitProvider().getTypeTrait(recordType);
        if (dataset.hasMetaPart()) {
            // Bug fix: the meta slot must carry the meta record's type trait; it previously
            // (and incorrectly) reused recordType here.
            primaryTypeTraits[numPrimaryKeys + 1] =
                    storageComponentProvider.getTypeTraitProvider().getTypeTrait(metaType);
        }
    }
    boolean isPointMBR = spatialType.getTypeTag() == ATypeTag.POINT || spatialType.getTypeTag() == ATypeTag.POINT3D;
    // Each dimension contributes a lower and an upper MBR coordinate.
    int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
    int numNestedSecondaryKeyFields = numDimensions * 2;
    IBinaryComparatorFactory[] secondaryComparatorFactories =
            new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
    IPrimitiveValueProviderFactory[] valueProviderFactories =
            new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
    ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
    ATypeTag keyType = nestedKeyType.getTypeTag();
    for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
        secondaryComparatorFactories[i] = storageComponentProvider.getComparatorFactoryProvider()
                .getBinaryComparatorFactory(nestedKeyType, true);
        secondaryTypeTraits[i] = storageComponentProvider.getTypeTraitProvider().getTypeTrait(nestedKeyType);
        valueProviderFactories[i] = storageComponentProvider.getPrimitiveValueProviderFactory();
    }
    for (int i = 0; i < numPrimaryKeys; i++) {
        secondaryTypeTraits[numNestedSecondaryKeyFields + i] = (dataset.getDatasetType() == DatasetType.INTERNAL)
                ? primaryTypeTraits[i] : IndexingConstants.getTypeTraits(i);
    }
    int[] rtreeFields = null;
    if (filterTypeTraits != null && filterTypeTraits.length > 0) {
        rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys];
        for (int i = 0; i < rtreeFields.length; i++) {
            rtreeFields[i] = i;
        }
    }
    IStorageManager storageManager = storageComponentProvider.getStorageManager();
    ILSMOperationTrackerFactory opTrackerFactory = dataset.getIndexOperationTrackerFactory(index);
    ILSMIOOperationCallbackFactory ioOpCallbackFactory = dataset.getIoOperationCallbackFactory(index);
    IMetadataPageManagerFactory metadataPageManagerFactory =
            storageComponentProvider.getMetadataPageManagerFactory();
    ILSMIOOperationSchedulerProvider ioSchedulerProvider =
            storageComponentProvider.getIoOperationSchedulerProvider();
    ILinearizeComparatorFactory linearizeCmpFactory =
            MetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length);
    ITypeTraits[] typeTraits = getTypeTraits(mdProvider, dataset, index, recordType, metaType);
    IBinaryComparatorFactory[] rtreeCmpFactories = getCmpFactories(mdProvider, index, recordType, metaType);
    int[] secondaryFilterFields = (filterTypeTraits != null && filterTypeTraits.length > 0)
            ? new int[] { numNestedSecondaryKeyFields + numPrimaryKeys } : null;
    IBinaryComparatorFactory[] btreeCompFactories =
            dataset.getDatasetType() == DatasetType.EXTERNAL ? IndexingConstants.getBuddyBtreeComparatorFactories()
                    : getComparatorFactoriesForDeletedKeyBTree(secondaryTypeTraits, primaryComparatorFactories,
                            secondaryComparatorFactories);
    if (dataset.getDatasetType() == DatasetType.INTERNAL) {
        AsterixVirtualBufferCacheProvider vbcProvider =
                new AsterixVirtualBufferCacheProvider(dataset.getDatasetId());
        return new LSMRTreeWithAntiMatterLocalResourceFactory(storageManager, typeTraits, rtreeCmpFactories,
                filterTypeTraits, filterCmpFactories, secondaryFilterFields, opTrackerFactory, ioOpCallbackFactory,
                metadataPageManagerFactory, vbcProvider, ioSchedulerProvider, mergePolicyFactory,
                mergePolicyProperties, true, valueProviderFactories, rTreePolicyType, linearizeCmpFactory,
                rtreeFields, isPointMBR, btreeCompFactories);
    } else {
        return new ExternalRTreeLocalResourceFactory(storageManager, typeTraits, rtreeCmpFactories,
                filterTypeTraits, filterCmpFactories, secondaryFilterFields, opTrackerFactory, ioOpCallbackFactory,
                metadataPageManagerFactory, ioSchedulerProvider, mergePolicyFactory, mergePolicyProperties, true,
                btreeCompFactories, valueProviderFactories, rTreePolicyType, linearizeCmpFactory, rtreeFields,
                new int[] { numNestedSecondaryKeyFields }, isPointMBR,
                mdProvider.getStorageProperties().getBloomFilterFalsePositiveRate());
    }
}
/**
 * Builds the comparator factories for the deleted-key BTree component of an
 * R-tree index: the secondary-key comparators first, followed by the
 * primary-key comparators.
 *
 * @param secondaryTypeTraits traits of all deleted-key BTree fields; its
 *            length defines the total comparator count (secondary + primary)
 * @param primaryComparatorFactories comparators for the primary keys
 * @param secondaryComparatorFactories comparators for the secondary keys
 * @return comparator factories ordered as [secondary..., primary...]
 */
private static IBinaryComparatorFactory[] getComparatorFactoriesForDeletedKeyBTree(
        ITypeTraits[] secondaryTypeTraits, IBinaryComparatorFactory[] primaryComparatorFactories,
        IBinaryComparatorFactory[] secondaryComparatorFactories) {
    IBinaryComparatorFactory[] btreeCompFactories = new IBinaryComparatorFactory[secondaryTypeTraits.length];
    // Secondary-key comparators occupy the leading slots.
    System.arraycopy(secondaryComparatorFactories, 0, btreeCompFactories, 0, secondaryComparatorFactories.length);
    // Primary-key comparators fill the remaining slots.
    System.arraycopy(primaryComparatorFactories, 0, btreeCompFactories, secondaryComparatorFactories.length,
            secondaryTypeTraits.length - secondaryComparatorFactories.length);
    return btreeCompFactories;
}
/**
 * Computes the type traits for an R-tree secondary index: one trait per
 * nested spatial coordinate (2 * number of dimensions), followed by the
 * primary-key traits.
 *
 * @throws AlgebricksException if the index does not have exactly one key
 *             field or the key field cannot be resolved in the schema
 */
private static ITypeTraits[] getTypeTraits(MetadataProvider metadataProvider, Dataset dataset, Index index,
ARecordType recordType, ARecordType metaType) throws AlgebricksException {
ITypeTraitProvider ttProvider = metadataProvider.getStorageComponentProvider().getTypeTraitProvider();
List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
int numSecondaryKeys = secondaryKeyFields.size();
int numPrimaryKeys = dataset.getPrimaryKeys().size();
ITypeTraits[] primaryTypeTraits = dataset.getPrimaryTypeTraits(metadataProvider, recordType, metaType);
// R-tree indexes are restricted to exactly one (spatial) key field.
if (numSecondaryKeys != 1) {
throw new AsterixException("Cannot use " + numSecondaryKeys + " fields as a key for the R-tree index. "
+ "There can be only one field as a key for the R-tree index.");
}
// The key may come from the record itself (indicator 0) or the meta record.
ARecordType sourceType;
List<Integer> keySourceIndicators = index.getKeyFieldSourceIndicators();
if (keySourceIndicators == null || keySourceIndicators.get(0) == 0) {
sourceType = recordType;
} else {
sourceType = metaType;
}
Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
secondaryKeyFields.get(0), sourceType);
IAType spatialType = spatialTypePair.first;
if (spatialType == null) {
throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
}
// Each dimension contributes two nested key fields (MBR min/max coordinates).
int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
int numNestedSecondaryKeyFields = numDimensions * 2;
ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
// All nested spatial key fields share the same coordinate type trait.
for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
secondaryTypeTraits[i] = ttProvider.getTypeTrait(nestedKeyType);
}
// Primary-key traits follow the spatial coordinates.
for (int i = 0; i < numPrimaryKeys; i++) {
secondaryTypeTraits[numNestedSecondaryKeyFields + i] = primaryTypeTraits[i];
}
return secondaryTypeTraits;
}
/**
 * Builds the binary comparator factories for the nested spatial key fields
 * of an R-tree secondary index (one ascending comparator per coordinate,
 * i.e. 2 * number of dimensions).
 *
 * @throws AlgebricksException if the index does not have exactly one key
 *             field or the key field cannot be resolved in the schema
 */
private static IBinaryComparatorFactory[] getCmpFactories(MetadataProvider metadataProvider, Index index,
ARecordType recordType, ARecordType metaType) throws AlgebricksException {
IBinaryComparatorFactoryProvider cmpFactoryProvider =
metadataProvider.getStorageComponentProvider().getComparatorFactoryProvider();
List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
int numSecondaryKeys = secondaryKeyFields.size();
// R-tree indexes are restricted to exactly one (spatial) key field.
if (numSecondaryKeys != 1) {
throw new AsterixException("Cannot use " + numSecondaryKeys + " fields as a key for the R-tree index. "
+ "There can be only one field as a key for the R-tree index.");
}
// The key may come from the record itself (indicator 0) or the meta record.
List<Integer> keySourceIndicators = index.getKeyFieldSourceIndicators();
ARecordType sourceType;
if (keySourceIndicators == null || keySourceIndicators.get(0) == 0) {
sourceType = recordType;
} else {
sourceType = metaType;
}
Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
secondaryKeyFields.get(0), sourceType);
IAType spatialType = spatialTypePair.first;
if (spatialType == null) {
throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
}
IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
// Each dimension contributes two comparator slots (MBR min/max coordinates).
int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
int numNestedSecondaryKeyFields = numDimensions * 2;
IBinaryComparatorFactory[] secondaryComparatorFactories =
new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
// All coordinates use the same ascending comparator for the nested type.
for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
secondaryComparatorFactories[i] = cmpFactoryProvider.getBinaryComparatorFactory(nestedKeyType, true);
}
return secondaryComparatorFactories;
}
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
* Copyright (C) 2014 Recruit Marketing Partners Co.,Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fly.bmark2.utils;
import android.content.Context;
import android.database.DataSetObservable;
import android.database.DataSetObserver;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.Filter;
import android.widget.Filterable;
import android.widget.FrameLayout;
import android.widget.ListAdapter;
import android.widget.WrapperListAdapter;
import java.util.ArrayList;
/**
* A {@link com.flyfirefly.app.utils.GridView} that supports adding header rows and footer rows in a very
* similar way to {@link android.widget.ListView}. See
* {@link HeaderFooterGridView#addHeaderView(android.view.View, Object, boolean)} and
* {@link HeaderFooterGridView#addFooterView(android.view.View, Object, boolean)}.
* <p/>
* This source code is based from
* http://grepcode.com/file/repository.grepcode.com
* /java/ext/com.google.android/android
* -apps/4.4_r1/com/android/photos/views/HeaderGridView.java
*/
public class GridView extends android.widget.GridView
{
private static final String TAG = "HeaderFooterGridView";
private ArrayList<FixedViewInfo> mHeaderViewInfos = new ArrayList<FixedViewInfo>();
private ArrayList<FixedViewInfo> mFooterViewInfos = new ArrayList<FixedViewInfo>();
private int mRequestedNumColumns;
private int mNumColmuns = 1;
/** Creates a grid view programmatically. */
public GridView(Context context)
{
super(context);
initHeaderGridView();
}
/** Creates a grid view from XML. */
public GridView(Context context, AttributeSet attrs)
{
super(context, attrs);
initHeaderGridView();
}
/** Creates a grid view from XML with a style. */
public GridView(Context context, AttributeSet attrs, int defStyle)
{
super(context, attrs, defStyle);
initHeaderGridView();
}
// Shared constructor logic: full-width header/footer rows must be allowed
// to draw outside their cell, so child clipping is disabled up front.
private void initHeaderGridView()
{
super.setClipChildren(false);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
{
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
// Mirror the requested column count so getNumColumns() also works on
// pre-Honeycomb devices, where super.getNumColumns() is unavailable.
if (mRequestedNumColumns != AUTO_FIT)
{
mNumColmuns = mRequestedNumColumns;
}
if (mNumColmuns <= 0)
{
mNumColmuns = 1;
}
// Keep the wrapper adapter's placeholder math in sync with the actual
// column count resolved during measurement.
ListAdapter adapter = getAdapter();
if (adapter != null && adapter instanceof HeaderFooterViewGridAdapter)
{
((HeaderFooterViewGridAdapter) adapter).setNumColumns(getNumColumns());
}
}
@Override
public void setClipChildren(boolean clipChildren)
{
// Ignore, since the header rows depend on not being clipped
}
/**
 * Add a fixed view to appear at the top of the grid. If addHeaderView is
 * called more than once, the views will appear in the order they were
 * added. Views added using this call can take focus if they want.
 * <p/>
 * NOTE: Call this before calling setAdapter. This is so
 * HeaderFooterGridView can wrap the supplied cursor with one that will also
 * account for header views.
 *
 * @param v The view to add.
 * @param data Data to associate with this view
 * @param isSelectable whether the item is selectable
 */
public void addHeaderView(View v, Object data, boolean isSelectable)
{
ListAdapter adapter = getAdapter();
// Headers can only be registered before setAdapter(), or while the
// installed adapter is already our header/footer wrapper.
if (adapter != null && !(adapter instanceof HeaderFooterViewGridAdapter))
{
throw new IllegalStateException(
"Cannot add header view to grid -- setAdapter has already been called.");
}
FixedViewInfo info = new FixedViewInfo();
// Wrap the header in a full-width container so it spans every column.
FrameLayout fl = new FullWidthFixedViewLayout(getContext());
fl.addView(v);
info.view = v;
info.viewContainer = fl;
info.data = data;
info.isSelectable = isSelectable;
mHeaderViewInfos.add(info);
// in the case of re-adding a header view, or adding one later on,
// we need to notify the observer
if (adapter != null)
{
((HeaderFooterViewGridAdapter) adapter).notifyDataSetChanged();
}
}
/**
 * Add a fixed view to appear at the top of the grid. If addHeaderView is
 * called more than once, the views will appear in the order they were
 * added. Views added using this call can take focus if they want.
 * <p/>
 * NOTE: Call this before calling setAdapter. This is so
 * HeaderFooterGridView can wrap the supplied cursor with one that will also
 * account for header views.
 *
 * @param v The view to add.
 */
public void addHeaderView(View v)
{
addHeaderView(v, null, true);
}
/**
 * Add a fixed view to appear at the bottom of the grid. If addFooterView is
 * called more than once, the views will appear in the order they were
 * added. Views added using this call can take focus if they want.
 * <p/>
 * NOTE: Call this before calling setAdapter. This is so
 * HeaderFooterGridView can wrap the supplied cursor with one that will also
 * account for header views.
 *
 * @param v The view to add.
 * @param data Data to associate with this view
 * @param isSelectable whether the item is selectable
 */
public void addFooterView(View v, Object data, boolean isSelectable)
{
ListAdapter adapter = getAdapter();
// Footers can only be registered before setAdapter(), or while the
// installed adapter is already our header/footer wrapper.
if (adapter != null && !(adapter instanceof HeaderFooterViewGridAdapter))
{
throw new IllegalStateException(
"Cannot add footer view to grid -- setAdapter has already been called.");
}
FixedViewInfo info = new FixedViewInfo();
// Wrap the footer in a full-width container so it spans every column.
FrameLayout fl = new FullWidthFixedViewLayout(getContext());
fl.addView(v);
info.view = v;
info.viewContainer = fl;
info.data = data;
info.isSelectable = isSelectable;
mFooterViewInfos.add(info);
// in the case of re-adding a header view, or adding one later on,
// we need to notify the observer
if (adapter != null)
{
((HeaderFooterViewGridAdapter) adapter).notifyDataSetChanged();
}
}
/**
 * Add a fixed view to appear at the bottom of the grid. If addFooterView is
 * called more than once, the views will appear in the order they were
 * added. Views added using this call can take focus if they want.
 * <p/>
 * NOTE: Call this before calling setAdapter. This is so
 * HeaderFooterGridView can wrap the supplied cursor with one that will also
 * account for header views.
 *
 * @param v The view to add.
 */
public void addFooterView(View v)
{
addFooterView(v, null, true);
}
/** @return the number of header views registered on this grid */
public int getHeaderViewCount()
{
return mHeaderViewInfos.size();
}
/** @return the number of footer views registered on this grid */
public int getFooterViewCount()
{
return mFooterViewInfos.size();
}
/**
 * Removes a previously-added header view.
 *
 * @param v The view to remove
 * @return true if the view was removed, false if the view was not a header
 * view
 */
public boolean removeHeaderView(View v)
{
if (mHeaderViewInfos.size() > 0)
{
boolean result = false;
ListAdapter adapter = getAdapter();
// Ask the wrapper adapter (if installed) to drop the header; the local
// bookkeeping list is updated regardless so both stay consistent.
if (adapter != null && ((HeaderFooterViewGridAdapter) adapter).removeHeader(v))
{
result = true;
}
removeFixedViewInfo(v, mHeaderViewInfos);
return result;
}
return false;
}
/**
 * Removes a previously-added footer view.
 *
 * @param v The view to remove
 * @return true if the view was removed, false if the view was not a footer
 * view
 */
public boolean removeFooterView(View v)
{
if (mFooterViewInfos.size() > 0)
{
boolean result = false;
ListAdapter adapter = getAdapter();
// Mirror of removeHeaderView for the footer list.
if (adapter != null && ((HeaderFooterViewGridAdapter) adapter).removeFooter(v))
{
result = true;
}
removeFixedViewInfo(v, mFooterViewInfos);
return result;
}
return false;
}
// Removes the first bookkeeping entry whose view matches v, if any.
private void removeFixedViewInfo(View v, ArrayList<FixedViewInfo> where)
{
int len = where.size();
for (int i = 0; i < len; ++i)
{
FixedViewInfo info = where.get(i);
if (info.view == v)
{
where.remove(i);
break;
}
}
}
@Override
public void setAdapter(ListAdapter adapter)
{
// Only wrap the adapter when fixed views exist at this point; headers or
// footers added after a plain setAdapter() call will throw in
// addHeaderView/addFooterView.
if (mHeaderViewInfos.size() > 0 || mFooterViewInfos.size() > 0)
{
HeaderFooterViewGridAdapter hadapter = new HeaderFooterViewGridAdapter(mHeaderViewInfos, mFooterViewInfos, adapter);
int numColumns = getNumColumns();
if (numColumns > 1)
{
hadapter.setNumColumns(numColumns);
}
super.setAdapter(hadapter);
}
else
{
super.setAdapter(adapter);
}
}
@Override
public int getNumColumns()
{
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
{
return super.getNumColumns();
}
// Return value for less than Honeycomb.
return mNumColmuns;
}
@Override
public void setNumColumns(int numColumns)
{
super.setNumColumns(numColumns);
// Store specified value for less than Honeycomb.
mRequestedNumColumns = numColumns;
}
/**
 * A class that represents a fixed view in a list, for example a header at
 * the top or a footer at the bottom.
 */
private static class FixedViewInfo
{
/**
 * The view to add to the grid
 */
public View view;
// Full-width FrameLayout wrapping {@link #view} so it spans every column.
public ViewGroup viewContainer;
/**
 * The data backing the view. This is returned from
 * {@link android.widget.ListAdapter#getItem(int)}.
 */
public Object data;
/**
 * <code>true</code> if the fixed view should be selectable in the grid
 */
public boolean isSelectable;
}
/**
* ListAdapter used when a HeaderFooterGridView has header views. This
* ListAdapter wraps another one and also keeps track of the header views
* and their associated data objects.
* <p/>
* This is intended as a base class; you will probably not need to use this
* class directly in your own code.
*/
private static class HeaderFooterViewGridAdapter implements WrapperListAdapter, Filterable
{
// This is used to notify the container of updates relating to number of
// columns
// or headers changing, which changes the number of placeholders needed
private final DataSetObservable mDataSetObservable = new DataSetObservable();
private final ListAdapter mAdapter;
private final boolean mIsFilterable;
// This ArrayList is assumed to NOT be null.
ArrayList<FixedViewInfo> mHeaderViewInfos;
ArrayList<FixedViewInfo> mFooterViewInfos;
boolean mAreAllFixedViewsSelectable;
private int mNumColumns = 1;
/**
 * Wraps the given adapter, exposing the supplied header/footer lists as
 * extra rows. The lists are shared (not copied) with the outer GridView.
 *
 * @throws IllegalArgumentException if either fixed-view list is null
 */
public HeaderFooterViewGridAdapter(ArrayList<FixedViewInfo> headerViewInfos, ArrayList<FixedViewInfo> footerViewInfos, ListAdapter adapter)
{
mAdapter = adapter;
mIsFilterable = adapter instanceof Filterable;
if (headerViewInfos == null)
{
throw new IllegalArgumentException("headerViewInfos cannot be null");
}
if (footerViewInfos == null)
{
throw new IllegalArgumentException("footerViewInfos cannot be null");
}
mHeaderViewInfos = headerViewInfos;
mFooterViewInfos = footerViewInfos;
mAreAllFixedViewsSelectable = (areAllListInfosSelectable(mHeaderViewInfos) && areAllListInfosSelectable(mFooterViewInfos));
}
/** @return the number of header rows (before column expansion) */
public int getHeadersCount()
{
return mHeaderViewInfos.size();
}
/** @return the number of footer rows (before column expansion) */
public int getFootersCount()
{
return mFooterViewInfos.size();
}
@Override
public boolean isEmpty()
{
return (mAdapter == null || mAdapter.isEmpty()) && getHeadersCount() == 0 && getFootersCount() == 0;
}
/**
 * Updates the column count used for placeholder math; notifies observers
 * only on an actual change.
 */
public void setNumColumns(int numColumns)
{
if (numColumns < 1)
{
throw new IllegalArgumentException("Number of columns must be 1 or more");
}
if (mNumColumns != numColumns)
{
mNumColumns = numColumns;
notifyDataSetChanged();
}
}
// Returns true when every fixed view in the list is selectable (a null
// list counts as all-selectable).
private boolean areAllListInfosSelectable(ArrayList<FixedViewInfo> infos)
{
if (infos != null)
{
for (FixedViewInfo info : infos)
{
if (!info.isSelectable)
{
return false;
}
}
}
return true;
}
/**
 * Removes the header entry whose view is {@code v}.
 *
 * @return true if a matching header was found and removed
 */
public boolean removeHeader(View v)
{
for (int i = 0; i < mHeaderViewInfos.size(); i++)
{
FixedViewInfo info = mHeaderViewInfos.get(i);
if (info.view == v)
{
mHeaderViewInfos.remove(i);
// Recompute the cached all-selectable flag and tell observers the
// row layout changed.
mAreAllFixedViewsSelectable = (areAllListInfosSelectable(mHeaderViewInfos) && areAllListInfosSelectable(mFooterViewInfos));
mDataSetObservable.notifyChanged();
return true;
}
}
return false;
}
/**
 * Removes the footer entry whose view is {@code v}.
 *
 * @return true if a matching footer was found and removed
 */
public boolean removeFooter(View v)
{
for (int i = 0; i < mFooterViewInfos.size(); i++)
{
FixedViewInfo info = mFooterViewInfos.get(i);
if (info.view == v)
{
mFooterViewInfos.remove(i);
mAreAllFixedViewsSelectable = (areAllListInfosSelectable(mHeaderViewInfos) && areAllListInfosSelectable(mFooterViewInfos));
mDataSetObservable.notifyChanged();
return true;
}
}
return false;
}
@Override
public int getCount()
{
if (mAdapter != null)
{
// Total = header cells + adapter items + invisible fillers that pad
// the adapter's last row to a full row + footer cells. Headers and
// footers each occupy a whole row (mNumColumns cells).
final int lastRowItemCount = (mAdapter.getCount() % mNumColumns);
final int emptyItemCount = ((lastRowItemCount == 0) ? 0 : mNumColumns - lastRowItemCount);
return (getHeadersCount() * mNumColumns) + mAdapter.getCount() + emptyItemCount + (getFootersCount() * mNumColumns);
}
else
{
return (getHeadersCount() * mNumColumns) + (getFootersCount() * mNumColumns);
}
}
@Override
public boolean areAllItemsEnabled()
{
if (mAdapter != null)
{
return mAreAllFixedViewsSelectable && mAdapter.areAllItemsEnabled();
}
else
{
return true;
}
}
@Override
public boolean isEnabled(int position)
{
    // Header cells (and their row placeholders) come first. A negative
    // position falls through to the ArrayIndexOutOfBoundsException below.
    int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
    if (position < numHeadersAndPlaceholders)
    {
        // Only the real header view (first column of the row) is selectable.
        return (position % mNumColumns == 0)
                && mHeaderViewInfos.get(position / mNumColumns).isSelectable;
    }
    // Fix: the original dereferenced mAdapter unconditionally here, which
    // threw a NullPointerException for footer positions when no wrapped
    // adapter was set (getCount() still reports footer cells in that case).
    final int adapterCount = (mAdapter == null) ? 0 : mAdapter.getCount();
    // Wrapped adapter items.
    if (position < numHeadersAndPlaceholders + adapterCount)
    {
        return mAdapter.isEnabled(position - numHeadersAndPlaceholders);
    }
    // Invisible fillers that pad the adapter's last row to a full row.
    final int lastRowItemCount = adapterCount % mNumColumns;
    final int emptyItemCount = (lastRowItemCount == 0) ? 0 : mNumColumns - lastRowItemCount;
    if (position < numHeadersAndPlaceholders + adapterCount + emptyItemCount)
    {
        return false;
    }
    // Footer cells (and their row placeholders).
    int numFootersAndPlaceholders = getFootersCount() * mNumColumns;
    if (position < numHeadersAndPlaceholders + adapterCount + emptyItemCount + numFootersAndPlaceholders)
    {
        return (position % mNumColumns == 0)
                && mFooterViewInfos.get((position - numHeadersAndPlaceholders - adapterCount - emptyItemCount) / mNumColumns).isSelectable;
    }
    throw new ArrayIndexOutOfBoundsException(position);
}
@Override
public Object getItem(int position)
{
    // Header cells come first. A negative position falls through to the
    // ArrayIndexOutOfBoundsException below.
    int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
    if (position < numHeadersAndPlaceholders)
    {
        if (position % mNumColumns == 0)
        {
            return mHeaderViewInfos.get(position / mNumColumns).data;
        }
        // Header row placeholders carry no data.
        return null;
    }
    // Fix: the original dereferenced mAdapter unconditionally, throwing a
    // NullPointerException for footer positions when no adapter was set.
    final int adapterCount = (mAdapter == null) ? 0 : mAdapter.getCount();
    // Wrapped adapter items.
    if (position < numHeadersAndPlaceholders + adapterCount)
    {
        return mAdapter.getItem(position - numHeadersAndPlaceholders);
    }
    // Invisible fillers padding the adapter's last row.
    final int lastRowItemCount = adapterCount % mNumColumns;
    final int emptyItemCount = (lastRowItemCount == 0) ? 0 : mNumColumns - lastRowItemCount;
    if (position < numHeadersAndPlaceholders + adapterCount + emptyItemCount)
    {
        return null;
    }
    // Footer cells.
    int numFootersAndPlaceholders = getFootersCount() * mNumColumns;
    if (position < numHeadersAndPlaceholders + adapterCount + emptyItemCount + numFootersAndPlaceholders)
    {
        if (position % mNumColumns == 0)
        {
            return mFooterViewInfos.get((position - numHeadersAndPlaceholders - adapterCount - emptyItemCount) / mNumColumns).data;
        }
        // Fix: footer row placeholders now return null like header
        // placeholders do; the original fell through and threw an
        // ArrayIndexOutOfBoundsException for in-range footer placeholders.
        return null;
    }
    throw new ArrayIndexOutOfBoundsException(position);
}
@Override
public long getItemId(int position)
{
// Only positions that map to wrapped-adapter items have real IDs; headers,
// footers, and placeholders all report -1.
int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
if (mAdapter != null)
{
if (position >= numHeadersAndPlaceholders && position < numHeadersAndPlaceholders + mAdapter.getCount())
{
int adjPosition = position - numHeadersAndPlaceholders;
int adapterCount = mAdapter.getCount();
if (adjPosition < adapterCount)
{
return mAdapter.getItemId(adjPosition);
}
}
}
return -1;
}
@Override
public boolean hasStableIds()
{
// Delegated to the wrapped adapter; fixed views have no IDs anyway.
if (mAdapter != null)
{
return mAdapter.hasStableIds();
}
return false;
}
@Override
public View getView(int position, View convertView, ViewGroup parent)
{
    // Header cells come first. A negative position falls through to the
    // ArrayIndexOutOfBoundsException below.
    int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
    if (position < numHeadersAndPlaceholders)
    {
        View headerViewContainer = mHeaderViewInfos
                .get(position / mNumColumns).viewContainer;
        if (position % mNumColumns == 0)
        {
            return headerViewContainer;
        }
        // Remaining cells of the header row are invisible placeholders.
        return makeRowHeightPlaceholder(parent, headerViewContainer.getHeight());
    }
    // Fix: the original dereferenced mAdapter unconditionally below, which
    // threw a NullPointerException for footer positions when no wrapped
    // adapter was set (getCount() still reports footer cells in that case).
    final int adapterCount = (mAdapter == null) ? 0 : mAdapter.getCount();
    // Wrapped adapter items.
    if (position < numHeadersAndPlaceholders + adapterCount)
    {
        return mAdapter.getView(position - numHeadersAndPlaceholders, convertView, parent);
    }
    // Invisible fillers padding the adapter's last row. GridView uses the
    // height of the last item in a row for the whole row, so render the
    // adapter's last real item invisibly to keep the row height.
    final int lastRowItemCount = adapterCount % mNumColumns;
    final int emptyItemCount = (lastRowItemCount == 0) ? 0 : mNumColumns - lastRowItemCount;
    if (position < numHeadersAndPlaceholders + adapterCount + emptyItemCount)
    {
        // TODO Current implementation may not be enough in the case of
        // 3 or more column. May need to be careful on the INVISIBLE
        // View height.
        convertView = mAdapter.getView(adapterCount - 1, convertView, parent);
        convertView.setVisibility(View.INVISIBLE);
        return convertView;
    }
    // Footer cells.
    int numFootersAndPlaceholders = getFootersCount() * mNumColumns;
    if (position < numHeadersAndPlaceholders + adapterCount + emptyItemCount + numFootersAndPlaceholders)
    {
        View footerViewContainer = mFooterViewInfos
                .get((position - numHeadersAndPlaceholders - adapterCount - emptyItemCount) / mNumColumns).viewContainer;
        if (position % mNumColumns == 0)
        {
            // NOTE(review): hard-coded 130px top padding; presumably spacing
            // above the footer, but it is not density-scaled — confirm and
            // move to a dp-based dimension resource.
            footerViewContainer.setPadding(0, 130, 0, 0);
            return footerViewContainer;
        }
        return makeRowHeightPlaceholder(parent, footerViewContainer.getHeight());
    }
    throw new ArrayIndexOutOfBoundsException(position);
}

// Creates an invisible spacer matching the given height. GridView uses the
// height of the last view in a row to size the whole row, so placeholders
// must mirror the fixed view's height.
private View makeRowHeightPlaceholder(ViewGroup parent, int height)
{
    View placeholder = new View(parent.getContext());
    placeholder.setVisibility(View.INVISIBLE);
    placeholder.setMinimumHeight(height);
    return placeholder;
}
@Override
public int getItemViewType(int position)
{
int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
if (position < numHeadersAndPlaceholders && (position % mNumColumns != 0))
{
// Placeholders get the last view type number
return mAdapter != null ? mAdapter.getViewTypeCount() : 1;
}
if (mAdapter != null && position >= numHeadersAndPlaceholders && position < numHeadersAndPlaceholders + mAdapter.getCount())
{
int adjPosition = position - numHeadersAndPlaceholders;
int adapterCount = mAdapter.getCount();
if (adjPosition < adapterCount)
{
return mAdapter.getItemViewType(adjPosition);
}
}
// NOTE(review): unlike getCount()/getView(), this range check does not
// include the empty-row filler count, so footer placeholder typing looks
// shifted by emptyItemCount — confirm against recycling behavior.
int numFootersAndPlaceholders = getFootersCount() * mNumColumns;
if (mAdapter != null && position < numHeadersAndPlaceholders + mAdapter.getCount() + numFootersAndPlaceholders)
{
return mAdapter != null ? mAdapter.getViewTypeCount() : 1;
}
return AdapterView.ITEM_VIEW_TYPE_HEADER_OR_FOOTER;
}
@Override
public int getViewTypeCount()
{
// One extra type is reserved for the invisible placeholders.
if (mAdapter != null)
{
return mAdapter.getViewTypeCount() + 1;
}
return 2;
}
@Override
public void registerDataSetObserver(DataSetObserver observer)
{
// Observers track both wrapper-level changes (columns, fixed views) and
// changes from the wrapped adapter.
mDataSetObservable.registerObserver(observer);
if (mAdapter != null)
{
mAdapter.registerDataSetObserver(observer);
}
}
@Override
public void unregisterDataSetObserver(DataSetObserver observer)
{
mDataSetObservable.unregisterObserver(observer);
if (mAdapter != null)
{
mAdapter.unregisterDataSetObserver(observer);
}
}
@Override
public Filter getFilter()
{
// Delegates filtering to the wrapped adapter when it supports it.
if (mIsFilterable)
{
return ((Filterable) mAdapter).getFilter();
}
return null;
}
@Override
public ListAdapter getWrappedAdapter()
{
return mAdapter;
}
// Notifies wrapper-level observers (e.g. after column or fixed-view
// changes) without touching the wrapped adapter.
public void notifyDataSetChanged()
{
mDataSetObservable.notifyChanged();
}
}
// Container for header/footer views that forces its width to span the full
// usable width of the enclosing GridView (all columns), regardless of the
// single-cell width the grid would otherwise measure it with.
private class FullWidthFixedViewLayout extends FrameLayout
{
public FullWidthFixedViewLayout(Context context)
{
super(context);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
{
// Stretch to the grid's content width (minus horizontal padding) while
// preserving the original measure mode.
int targetWidth = GridView.this.getMeasuredWidth()
- GridView.this.getPaddingLeft()
- GridView.this.getPaddingRight();
widthMeasureSpec = MeasureSpec.makeMeasureSpec(targetWidth,
MeasureSpec.getMode(widthMeasureSpec));
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
}
}
| |
/*
* NOTE: This copyright does *not* cover user programs that use HQ
* program services by normal system calls through the application
* program interfaces provided as part of the Hyperic Plug-in Development
* Kit or the Hyperic Client Development Kit - this is merely considered
* normal use of the program, and does *not* fall under the heading of
* "derived work".
*
* Copyright (C) [2004, 2005, 2006], Hyperic, Inc.
* This file is part of HQ.
*
* HQ is free software; you can redistribute it and/or modify
* it under the terms version 2 of the GNU General Public License as
* published by the Free Software Foundation. This program is distributed
* in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA.
*/
package org.hyperic.hq.plugin.openldap;
import org.hyperic.hq.product.MeasurementPlugin;
import org.hyperic.hq.product.Metric;
import org.hyperic.hq.product.MetricUnreachableException;
import org.hyperic.hq.product.MetricInvalidException;
import org.hyperic.hq.product.MetricNotFoundException;
import org.hyperic.hq.product.MetricValue;
import org.hyperic.hq.product.PluginException;
import javax.naming.Context;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import java.util.Collection;
import java.util.Hashtable;
import java.util.Properties;
import java.util.TreeSet;
import javax.naming.CommunicationException;
import org.apache.commons.logging.Log;
import org.hyperic.hq.product.PluginManager;
public class OpenLDAPMeasurementPlugin
extends MeasurementPlugin {
private DirContext ctx = null;
private boolean hasMonitoringEnabled = false;
private final Log log = getLog();
public DirContext getDirContext(Properties props) throws NamingException {
if (this.ctx == null) {
synchronized (this) {
if (this.ctx == null) {
log.debug("[getDirContext] creating new connection");
Collection rtn = new TreeSet();
Hashtable ldapEnv = new Hashtable();
String ldapDriver = props.getProperty("ldapDriver"),
ldapHostURL = props.getProperty("ldapHostURL"),
ldapAuthType = props.getProperty("ldapAuthType"),
ldapPasswd = props.getProperty("ldapPasswd"),
ldapTreePathToDN = props.getProperty("ldapTreePathToDN");
ldapTreePathToDN = (ldapTreePathToDN == null)
? "" : ldapTreePathToDN;
ldapPasswd = (ldapPasswd == null) ? "" : ldapPasswd;
ldapPasswd = (ldapPasswd.matches("^\\s*$")) ? "" : ldapPasswd;
ldapEnv.put(Context.INITIAL_CONTEXT_FACTORY, ldapDriver);
ldapEnv.put(Context.PROVIDER_URL, ldapHostURL);
ldapEnv.put(Context.SECURITY_AUTHENTICATION, ldapAuthType);
ldapEnv.put(Context.SECURITY_PRINCIPAL, ldapTreePathToDN);
ldapEnv.put(Context.SECURITY_CREDENTIALS, ldapPasswd);
this.ctx = new InitialDirContext(ldapEnv);
}
}
}
return this.ctx;
}
@Override
public MetricValue getValue(Metric metric)
throws PluginException,
MetricUnreachableException,
MetricInvalidException,
MetricNotFoundException {
// will look like "generic:Type=GenericService,option1=option1,option2=option2"
String objectName = metric.getObjectName();
// will look like "Availability"
// -OR-
// "cn=PDU,cn=Statistics,cn=Monitor:monitorCounter"
String alias = metric.getAttributeName();
MetricValue res;
if (metric.isAvail()) {
try {
hasMonitoringEnabled = hasMonitoringEnabled(metric);
res = new MetricValue(Metric.AVAIL_UP, System.currentTimeMillis());
} catch (NamingException ex) {
res = new MetricValue(Metric.AVAIL_DOWN, System.currentTimeMillis());
hasMonitoringEnabled = false;
this.ctx = null; // reset connection [HHQ-4986]
log.debug("[getValue] error:" + ex, ex);
}
} else {
try {
if (alias.equalsIgnoreCase("connectiontimems")) {
res = getConnTimeMetric(metric);
} else {
if (hasMonitoringEnabled) {
String[] attrs = alias.split(":");
if (attrs[0] == null || attrs[1] == null) {
throw new MetricNotFoundException("bad template format");
}
res = getMetric(metric, attrs[0], attrs[1]);
} else {
res = new MetricValue(MetricValue.NONE, System.currentTimeMillis());
}
}
} catch (CommunicationException ex) {
log.debug("[getValue] error:" + ex, ex);
this.ctx = null; // reset connection [HHQ-4986]
throw new MetricNotFoundException(ex.getMessage(), ex);
} catch (NamingException ex) {
log.debug("[getValue] error:" + ex, ex);
throw new MetricNotFoundException("Service " + objectName + ", " + alias + " not found", ex);
}
}
return res;
}
private MetricValue getConnTimeMetric(Metric metric)
throws NamingException {
long start = System.currentTimeMillis();
hasMonitoringEnabled(metric);
long now = System.currentTimeMillis();
return new MetricValue((now - start), now);
}
private MetricValue getMetric(Metric metric, String tree, String attr)
throws MetricNotFoundException, NamingException {
NamingEnumeration enumer = null;
try {
String[] a = {attr};
SearchControls cons = new SearchControls();
cons.setSearchScope(SearchControls.OBJECT_SCOPE);
cons.setReturningAttributes(a);
enumer = getDirContext(metric.getProperties()).search(tree,
"(&(objectClass=*))", cons);
while (enumer.hasMore()) {
SearchResult searchresult = (SearchResult) enumer.next();
Attributes attrs = searchresult.getAttributes();
Attribute val;
if (null != (val = attrs.get(attr))) {
return new MetricValue(new Double(val.get().toString()),
System.currentTimeMillis());
}
}
throw new MetricNotFoundException("");
} finally {
if (enumer != null) {
enumer.close();
}
}
}
/**
* @return true = monitoring is enabled
* @return false = monitoring is not enabled
* @exception NamingException no conection
*/
private boolean hasMonitoringEnabled(Metric metric)
throws NamingException {
NamingEnumeration enumer = null,
enumerx = null,
enumery = null;
boolean res = false;
try {
String[] a = {"monitorContext"};
SearchControls cons = new SearchControls();
cons.setSearchScope(SearchControls.OBJECT_SCOPE);
cons.setReturningAttributes(a);
enumer = getDirContext(metric.getProperties()).search("", "(&(objectClass=*))", cons);
while (enumer.hasMore() && !res) {
SearchResult searchresult = (SearchResult) enumer.next();
Attributes attrs = searchresult.getAttributes();
enumerx = attrs.getIDs();
while (enumerx.hasMore()) {
String id = (String) enumerx.next();
Attribute attr = attrs.get(id);
res = true;
}
}
} finally {
if (enumer != null) {
enumer.close();
}
if (enumerx != null) {
enumerx.close();
}
if (enumery != null) {
enumery.close();
}
}
log.debug("[hasMonitoringEnabled] res=" + res + " metric:" + metric);
return res;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.node;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.*;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.transport.TransportAddressSerializers;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.net.InetAddress;
import java.util.Map;
import static org.elasticsearch.common.transport.TransportAddressSerializers.addressToStream;
/**
 * A discovery node represents a node that is part of the cluster.
 * <p>
 * Node identity (equality/hash) is defined solely by the node id; name, host,
 * address, attributes and version are descriptive payload carried along with it.
 * Instances are serialized over the wire via {@link #readFrom}/{@link #writeTo},
 * whose field order defines the wire format and must stay in sync.
 */
public class DiscoveryNode implements Streamable, ToXContent {

    /**
     * Minimum version of a node to communicate with. This version corresponds to the minimum compatibility version
     * of the current elasticsearch major version.
     */
    public static final Version MINIMUM_DISCOVERY_NODE_VERSION = Version.CURRENT.minimumCompatibilityVersion();

    /**
     * Whether the node described by {@code settings} uses local (in-JVM) transport.
     * An explicit {@code node.local} setting wins; otherwise {@code node.mode} is
     * consulted ("local"/"network"); if neither is set, network mode is assumed.
     *
     * @throws IllegalArgumentException if {@code node.mode} has an unsupported value
     */
    public static boolean localNode(Settings settings) {
        if (settings.get("node.local") != null) {
            return settings.getAsBoolean("node.local", false);
        }
        if (settings.get("node.mode") != null) {
            String nodeMode = settings.get("node.mode");
            if ("local".equals(nodeMode)) {
                return true;
            } else if ("network".equals(nodeMode)) {
                return false;
            } else {
                throw new IllegalArgumentException("unsupported node.mode [" + nodeMode + "]. Should be one of [local, network].");
            }
        }
        return false;
    }

    /**
     * A node requires local storage unless it is a client node, or it is neither
     * a data node nor master-eligible.
     */
    public static boolean nodeRequiresLocalStorage(Settings settings) {
        return !(settings.getAsBoolean("node.client", false) || (!settings.getAsBoolean("node.data", true) && !settings.getAsBoolean("node.master", true)));
    }

    /** Whether {@code node.client} is explicitly set to a true value. */
    public static boolean clientNode(Settings settings) {
        String client = settings.get("node.client");
        return Booleans.isExplicitTrue(client);
    }

    /**
     * Whether this node is master-eligible per settings. Defaults to the
     * negation of {@link #clientNode(Settings)} when {@code node.master} is unset.
     */
    public static boolean masterNode(Settings settings) {
        String master = settings.get("node.master");
        if (master == null) {
            return !clientNode(settings);
        }
        return Booleans.isExplicitTrue(master);
    }

    /**
     * Whether this node holds data per settings. Defaults to the negation of
     * {@link #clientNode(Settings)} when {@code node.data} is unset.
     */
    public static boolean dataNode(Settings settings) {
        String data = settings.get("node.data");
        if (data == null) {
            return !clientNode(settings);
        }
        return Booleans.isExplicitTrue(data);
    }

    public static final ImmutableList<DiscoveryNode> EMPTY_LIST = ImmutableList.of();

    // NOTE(review): string fields are interned throughout this class, presumably to
    // deduplicate values repeated across many DiscoveryNode instances in cluster
    // state — confirm before removing.
    private String nodeName = "";
    private String nodeId;
    private String hostName;
    private String hostAddress;
    private TransportAddress address;
    private ImmutableMap<String, String> attributes;
    private Version version = Version.CURRENT;

    // No-arg constructor used only for deserialization via readNode/readFrom.
    DiscoveryNode() {
    }

    /**
     * Creates a new {@link DiscoveryNode}
     * <p>
     * <b>Note:</b> if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used.
     * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used
     * the node might not be able to communicate with the remove node. After initial handshakes node versions will be discovered
     * and updated.
     * </p>
     *
     * @param nodeId  the nodes unique id.
     * @param address the nodes transport address
     * @param version the version of the node.
     */
    public DiscoveryNode(String nodeId, TransportAddress address, Version version) {
        this("", nodeId, address, ImmutableMap.<String, String>of(), version);
    }

    /**
     * Creates a new {@link DiscoveryNode}
     * <p>
     * <b>Note:</b> if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used.
     * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used
     * the node might not be able to communicate with the remove node. After initial handshakes node versions will be discovered
     * and updated.
     * </p>
     *
     * @param nodeName   the nodes name
     * @param nodeId     the nodes unique id.
     * @param address    the nodes transport address
     * @param attributes node attributes
     * @param version    the version of the node.
     */
    public DiscoveryNode(String nodeName, String nodeId, TransportAddress address, Map<String, String> attributes, Version version) {
        // Host name/address default to those of the local host.
        this(nodeName, nodeId, NetworkUtils.getLocalHost().getHostName(), NetworkUtils.getLocalHost().getHostAddress(), address, attributes, version);
    }

    /**
     * Creates a new {@link DiscoveryNode}
     * <p>
     * <b>Note:</b> if the version of the node is unknown {@link #MINIMUM_DISCOVERY_NODE_VERSION} should be used.
     * it corresponds to the minimum version this elasticsearch version can communicate with. If a higher version is used
     * the node might not be able to communicate with the remove node. After initial handshakes node versions will be discovered
     * and updated.
     * </p>
     *
     * @param nodeName    the nodes name
     * @param nodeId      the nodes unique id.
     * @param hostName    the nodes hostname
     * @param hostAddress the nodes host address
     * @param address     the nodes transport address
     * @param attributes  node attributes
     * @param version     the version of the node.
     */
    public DiscoveryNode(String nodeName, String nodeId, String hostName, String hostAddress, TransportAddress address, Map<String, String> attributes, Version version) {
        if (nodeName != null) {
            this.nodeName = nodeName.intern();
        }
        ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
        for (Map.Entry<String, String> entry : attributes.entrySet()) {
            builder.put(entry.getKey().intern(), entry.getValue().intern());
        }
        this.attributes = builder.build();
        this.nodeId = nodeId.intern();
        this.hostName = hostName.intern();
        this.hostAddress = hostAddress.intern();
        this.address = address;
        this.version = version;
    }

    /**
     * Should this node form a connection to the provided node.
     */
    public boolean shouldConnectTo(DiscoveryNode otherNode) {
        // Two client nodes never connect to each other; every other pairing does.
        if (clientNode() && otherNode.clientNode()) {
            return false;
        }
        return true;
    }

    /**
     * The address that the node can be communicated with.
     */
    public TransportAddress address() {
        return address;
    }

    /**
     * The address that the node can be communicated with.
     */
    public TransportAddress getAddress() {
        return address();
    }

    /**
     * The unique id of the node.
     */
    public String id() {
        return nodeId;
    }

    /**
     * The unique id of the node.
     */
    public String getId() {
        return id();
    }

    /**
     * The name of the node.
     */
    public String name() {
        return this.nodeName;
    }

    /**
     * The name of the node.
     */
    public String getName() {
        return name();
    }

    /**
     * The node attributes.
     */
    public ImmutableMap<String, String> attributes() {
        return this.attributes;
    }

    /**
     * The node attributes.
     */
    public ImmutableMap<String, String> getAttributes() {
        return attributes();
    }

    /**
     * Should this node hold data (shards) or not.
     */
    public boolean dataNode() {
        // Derived from the "data" attribute; defaults to !clientNode() when absent.
        String data = attributes.get("data");
        if (data == null) {
            return !clientNode();
        }
        return Booleans.parseBooleanExact(data);
    }

    /**
     * Should this node hold data (shards) or not.
     */
    public boolean isDataNode() {
        return dataNode();
    }

    /**
     * Is the node a client node or not.
     */
    public boolean clientNode() {
        String client = attributes.get("client");
        return client != null && Booleans.parseBooleanExact(client);
    }

    public boolean isClientNode() {
        return clientNode();
    }

    /**
     * Can this node become master or not.
     */
    public boolean masterNode() {
        // Derived from the "master" attribute; defaults to !clientNode() when absent.
        String master = attributes.get("master");
        if (master == null) {
            return !clientNode();
        }
        return Booleans.parseBooleanExact(master);
    }

    /**
     * Can this node become master or not.
     */
    public boolean isMasterNode() {
        return masterNode();
    }

    public Version version() {
        return this.version;
    }

    public String getHostName() {
        return this.hostName;
    }

    public String getHostAddress() {
        return this.hostAddress;
    }

    public Version getVersion() {
        return this.version;
    }

    /** Deserializes a node from the stream; counterpart of {@link #writeTo}. */
    public static DiscoveryNode readNode(StreamInput in) throws IOException {
        DiscoveryNode node = new DiscoveryNode();
        node.readFrom(in);
        return node;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        // Field order defines the wire format and must mirror writeTo exactly.
        nodeName = in.readString().intern();
        nodeId = in.readString().intern();
        hostName = in.readString().intern();
        hostAddress = in.readString().intern();
        address = TransportAddressSerializers.addressFromStream(in);
        int size = in.readVInt();
        ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
        for (int i = 0; i < size; i++) {
            builder.put(in.readString().intern(), in.readString().intern());
        }
        attributes = builder.build();
        version = Version.readVersion(in);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Field order defines the wire format and must mirror readFrom exactly.
        out.writeString(nodeName);
        out.writeString(nodeId);
        out.writeString(hostName);
        out.writeString(hostAddress);
        addressToStream(out, address);
        out.writeVInt(attributes.size());
        for (Map.Entry<String, String> entry : attributes.entrySet()) {
            out.writeString(entry.getKey());
            out.writeString(entry.getValue());
        }
        Version.writeVersion(version, out);
    }

    @Override
    public boolean equals(Object obj) {
        // Node identity is the node id alone; all other fields are ignored.
        if (!(obj instanceof DiscoveryNode)) {
            return false;
        }
        DiscoveryNode other = (DiscoveryNode) obj;
        return this.nodeId.equals(other.nodeId);
    }

    @Override
    public int hashCode() {
        // Consistent with equals: identity is the node id alone.
        return nodeId.hashCode();
    }

    @Override
    public String toString() {
        // Renders as [name][id][host][address]{attributes}, omitting empty parts.
        StringBuilder sb = new StringBuilder();
        if (nodeName.length() > 0) {
            sb.append('[').append(nodeName).append(']');
        }
        if (nodeId != null) {
            sb.append('[').append(nodeId).append(']');
        }
        if (Strings.hasLength(hostName)) {
            sb.append('[').append(hostName).append(']');
        }
        if (address != null) {
            sb.append('[').append(address).append(']');
        }
        if (!attributes.isEmpty()) {
            sb.append(attributes);
        }
        return sb.toString();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // Keyed by node id; field-name case is preserved as-is.
        builder.startObject(id(), XContentBuilder.FieldCaseConversion.NONE);
        builder.field("name", name());
        builder.field("transport_address", address().toString());
        builder.startObject("attributes");
        for (Map.Entry<String, String> attr : attributes().entrySet()) {
            builder.field(attr.getKey(), attr.getValue());
        }
        builder.endObject();
        builder.endObject();
        return builder;
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.core.ml.dataframe.analyses;
import org.elasticsearch.Version;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.common.xcontent.ParseField;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.NestedObjectMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.xpack.core.ml.inference.preprocessing.LenientlyParsedPreProcessor;
import org.elasticsearch.xpack.core.ml.inference.preprocessing.PreProcessor;
import org.elasticsearch.xpack.core.ml.inference.preprocessing.StrictlyParsedPreProcessor;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig;
import org.elasticsearch.xpack.core.ml.inference.trainedmodel.PredictionFieldType;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.core.ml.utils.NamedXContentObjectHelper;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
 * Data frame analysis configuration for classification.
 * <p>
 * Holds the dependent variable, boosted-tree hyperparameters, and assorted
 * training options. Instances are parsed from X-Content (leniently or strictly),
 * serialized over the wire with version-gated fields, and translated into the
 * parameter map handed to the C++ analytics process via {@link #getParams}.
 */
public class Classification implements DataFrameAnalysis {

    public static final ParseField NAME = new ParseField("classification");
    public static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable");
    public static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name");
    public static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective");
    public static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes");
    public static final ParseField TRAINING_PERCENT = new ParseField("training_percent");
    public static final ParseField RANDOMIZE_SEED = new ParseField("randomize_seed");
    public static final ParseField FEATURE_PROCESSORS = new ParseField("feature_processors");
    public static final ParseField EARLY_STOPPING_ENABLED = new ParseField("early_stopping_enabled");

    // Joins the job id and a suffix to form state document ids; see getStateDocIdPrefix/extractJobIdFromStateDoc.
    private static final String STATE_DOC_ID_INFIX = "_classification_state#";

    // Name of the cardinality parameter passed down to the C++ process.
    private static final String NUM_CLASSES = "num_classes";

    private static final ConstructingObjectParser<Classification, Void> LENIENT_PARSER = createParser(true);
    private static final ConstructingObjectParser<Classification, Void> STRICT_PARSER = createParser(false);

    /**
     * The max number of classes classification supports
     */
    public static final int MAX_DEPENDENT_VARIABLE_CARDINALITY = 30;

    @SuppressWarnings("unchecked")
    private static ConstructingObjectParser<Classification, Void> createParser(boolean lenient) {
        // Constructor-arg indices: a[0] dependent variable; a[1]..a[12] are consumed by
        // BoostedTreeParams and must stay in sync with BoostedTreeParams.declareFields;
        // a[13]..a[19] are the remaining optional fields in declaration order below.
        ConstructingObjectParser<Classification, Void> parser = new ConstructingObjectParser<>(
            NAME.getPreferredName(),
            lenient,
            a -> new Classification(
                (String) a[0],
                new BoostedTreeParams((Double) a[1], (Double) a[2], (Double) a[3], (Integer) a[4], (Double) a[5], (Integer) a[6],
                    (Double) a[7], (Double) a[8], (Double) a[9], (Double) a[10], (Double) a[11], (Integer) a[12]),
                (String) a[13],
                (ClassAssignmentObjective) a[14],
                (Integer) a[15],
                (Double) a[16],
                (Long) a[17],
                (List<PreProcessor>) a[18],
                (Boolean) a[19]));
        parser.declareString(constructorArg(), DEPENDENT_VARIABLE);
        BoostedTreeParams.declareFields(parser);
        parser.declareString(optionalConstructorArg(), PREDICTION_FIELD_NAME);
        parser.declareString(optionalConstructorArg(), ClassAssignmentObjective::fromString, CLASS_ASSIGNMENT_OBJECTIVE);
        parser.declareInt(optionalConstructorArg(), NUM_TOP_CLASSES);
        parser.declareDouble(optionalConstructorArg(), TRAINING_PERCENT);
        parser.declareLong(optionalConstructorArg(), RANDOMIZE_SEED);
        parser.declareNamedObjects(optionalConstructorArg(),
            (p, c, n) -> lenient ?
                p.namedObject(LenientlyParsedPreProcessor.class, n, new PreProcessor.PreProcessorParseContext(true)) :
                p.namedObject(StrictlyParsedPreProcessor.class, n, new PreProcessor.PreProcessorParseContext(true)),
            (classification) -> {/*TODO should we throw if this is not set?*/},
            FEATURE_PROCESSORS);
        parser.declareBoolean(optionalConstructorArg(), EARLY_STOPPING_ENABLED);
        return parser;
    }

    /** Parses a {@code Classification} from X-Content, leniently or strictly. */
    public static Classification fromXContent(XContentParser parser, boolean ignoreUnknownFields) {
        return ignoreUnknownFields ? LENIENT_PARSER.apply(parser, null) : STRICT_PARSER.apply(parser, null);
    }

    // Field types a dependent variable may have: categorical, discrete-numerical, or boolean.
    private static final Set<String> ALLOWED_DEPENDENT_VARIABLE_TYPES =
        Stream.of(Types.categorical(), Types.discreteNumerical(), Types.bool())
            .flatMap(Set::stream)
            .collect(Collectors.toUnmodifiableSet());

    /**
     * Name of the parameter passed down to C++.
     * This parameter is used to decide which JSON data type from {string, int, bool} to use when writing the prediction.
     */
    private static final String PREDICTION_FIELD_TYPE = "prediction_field_type";

    /**
     * As long as we only support binary classification it makes sense to always report both classes with their probabilities.
     * This way the user can see if the prediction was made with confidence they need.
     */
    private static final int DEFAULT_NUM_TOP_CLASSES = 2;

    // Ordered list of analysis phases used for progress reporting.
    private static final List<String> PROGRESS_PHASES = Collections.unmodifiableList(
        Arrays.asList(
            "feature_selection",
            "coarse_parameter_search",
            "fine_tuning_parameters",
            "final_training"
        )
    );

    // Index mapping for the per-document feature_importance result: a nested object
    // of {feature_name, classes: nested {class_name, importance}}.
    static final Map<String, Object> FEATURE_IMPORTANCE_MAPPING;
    static {
        Map<String, Object> classesProperties = new HashMap<>();
        classesProperties.put("class_name", Collections.singletonMap("type", KeywordFieldMapper.CONTENT_TYPE));
        classesProperties.put("importance", Collections.singletonMap("type", NumberFieldMapper.NumberType.DOUBLE.typeName()));
        Map<String, Object> classesMapping = new HashMap<>();
        classesMapping.put("dynamic", false);
        classesMapping.put("type", NestedObjectMapper.CONTENT_TYPE);
        classesMapping.put("properties", classesProperties);
        Map<String, Object> properties = new HashMap<>();
        properties.put("feature_name", Collections.singletonMap("type", KeywordFieldMapper.CONTENT_TYPE));
        properties.put("classes", classesMapping);
        Map<String, Object> mapping = new HashMap<>();
        mapping.put("dynamic", false);
        mapping.put("type", NestedObjectMapper.CONTENT_TYPE);
        mapping.put("properties", properties);
        FEATURE_IMPORTANCE_MAPPING = Collections.unmodifiableMap(mapping);
    }

    private final String dependentVariable;
    private final BoostedTreeParams boostedTreeParams;
    private final String predictionFieldName;
    private final ClassAssignmentObjective classAssignmentObjective;
    private final int numTopClasses;
    private final double trainingPercent;
    private final long randomizeSeed;
    private final List<PreProcessor> featureProcessors;
    private final boolean earlyStoppingEnabled;

    /**
     * Creates a classification analysis.
     *
     * @param dependentVariable        required name of the field to predict
     * @param boostedTreeParams        required boosted-tree hyperparameters
     * @param predictionFieldName      defaults to {@code <dependentVariable>_prediction}
     * @param classAssignmentObjective defaults to {@code MAXIMIZE_MINIMUM_RECALL}
     * @param numTopClasses            in [0, 1000] or -1 (meaning all classes); defaults to 2
     * @param trainingPercent          in (0, 100]; defaults to 100
     * @param randomizeSeed            defaults to a random value
     * @param featureProcessors        defaults to an empty list
     * @param earlyStoppingEnabled     defaults to {@code true}
     * @throws org.elasticsearch.ElasticsearchStatusException (bad request) on invalid
     *         {@code numTopClasses} or {@code trainingPercent}
     */
    public Classification(String dependentVariable,
                          BoostedTreeParams boostedTreeParams,
                          @Nullable String predictionFieldName,
                          @Nullable ClassAssignmentObjective classAssignmentObjective,
                          @Nullable Integer numTopClasses,
                          @Nullable Double trainingPercent,
                          @Nullable Long randomizeSeed,
                          @Nullable List<PreProcessor> featureProcessors,
                          @Nullable Boolean earlyStoppingEnabled) {
        // -1 is a special value meaning "all classes"; otherwise 0..1000.
        if (numTopClasses != null && (numTopClasses < -1 || numTopClasses > 1000)) {
            throw ExceptionsHelper.badRequestException(
                "[{}] must be an integer in [0, 1000] or a special value -1", NUM_TOP_CLASSES.getPreferredName());
        }
        if (trainingPercent != null && (trainingPercent <= 0.0 || trainingPercent > 100.0)) {
            throw ExceptionsHelper.badRequestException("[{}] must be a positive double in (0, 100]", TRAINING_PERCENT.getPreferredName());
        }
        this.dependentVariable = ExceptionsHelper.requireNonNull(dependentVariable, DEPENDENT_VARIABLE);
        this.boostedTreeParams = ExceptionsHelper.requireNonNull(boostedTreeParams, BoostedTreeParams.NAME);
        this.predictionFieldName = predictionFieldName == null ? dependentVariable + "_prediction" : predictionFieldName;
        this.classAssignmentObjective = classAssignmentObjective == null ?
            ClassAssignmentObjective.MAXIMIZE_MINIMUM_RECALL : classAssignmentObjective;
        this.numTopClasses = numTopClasses == null ? DEFAULT_NUM_TOP_CLASSES : numTopClasses;
        this.trainingPercent = trainingPercent == null ? 100.0 : trainingPercent;
        this.randomizeSeed = randomizeSeed == null ? Randomness.get().nextLong() : randomizeSeed;
        this.featureProcessors = featureProcessors == null ? Collections.emptyList() : Collections.unmodifiableList(featureProcessors);
        // Early stopping is true by default
        this.earlyStoppingEnabled = earlyStoppingEnabled == null ? true : earlyStoppingEnabled;
    }

    /** Convenience constructor using defaults for everything but the dependent variable. */
    public Classification(String dependentVariable) {
        this(dependentVariable, BoostedTreeParams.builder().build(), null, null, null, null, null, null, null);
    }

    /**
     * Wire deserialization. Field order and version gates must mirror {@link #writeTo};
     * fields introduced later (7.6 seed, 7.7 objective, 7.10 processors) fall back to
     * defaults when reading from an older node.
     */
    public Classification(StreamInput in) throws IOException {
        dependentVariable = in.readString();
        boostedTreeParams = new BoostedTreeParams(in);
        predictionFieldName = in.readOptionalString();
        if (in.getVersion().onOrAfter(Version.V_7_7_0)) {
            classAssignmentObjective = in.readEnum(ClassAssignmentObjective.class);
        } else {
            classAssignmentObjective = ClassAssignmentObjective.MAXIMIZE_MINIMUM_RECALL;
        }
        numTopClasses = in.readOptionalVInt();
        trainingPercent = in.readDouble();
        if (in.getVersion().onOrAfter(Version.V_7_6_0)) {
            randomizeSeed = in.readOptionalLong();
        } else {
            randomizeSeed = Randomness.get().nextLong();
        }
        if (in.getVersion().onOrAfter(Version.V_7_10_0)) {
            featureProcessors = Collections.unmodifiableList(in.readNamedWriteableList(PreProcessor.class));
        } else {
            featureProcessors = Collections.emptyList();
        }
        earlyStoppingEnabled = in.readBoolean();
    }

    public String getDependentVariable() {
        return dependentVariable;
    }

    public BoostedTreeParams getBoostedTreeParams() {
        return boostedTreeParams;
    }

    public String getPredictionFieldName() {
        return predictionFieldName;
    }

    public ClassAssignmentObjective getClassAssignmentObjective() {
        return classAssignmentObjective;
    }

    public int getNumTopClasses() {
        return numTopClasses;
    }

    @Override
    public double getTrainingPercent() {
        return trainingPercent;
    }

    public long getRandomizeSeed() {
        return randomizeSeed;
    }

    public List<PreProcessor> getFeatureProcessors() {
        return featureProcessors;
    }

    public Boolean getEarlyStoppingEnabled() {
        return earlyStoppingEnabled;
    }

    @Override
    public String getWriteableName() {
        return NAME.getPreferredName();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Field order and version gates must mirror the StreamInput constructor.
        out.writeString(dependentVariable);
        boostedTreeParams.writeTo(out);
        out.writeOptionalString(predictionFieldName);
        if (out.getVersion().onOrAfter(Version.V_7_7_0)) {
            out.writeEnum(classAssignmentObjective);
        }
        out.writeOptionalVInt(numTopClasses);
        out.writeDouble(trainingPercent);
        if (out.getVersion().onOrAfter(Version.V_7_6_0)) {
            out.writeOptionalLong(randomizeSeed);
        }
        if (out.getVersion().onOrAfter(Version.V_7_10_0)) {
            out.writeNamedWriteableList(featureProcessors);
        }
        out.writeBoolean(earlyStoppingEnabled);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // The "version" param controls which fields are rendered, so output can be
        // targeted at an older reader; defaults to the current version.
        Version version = Version.fromString(params.param("version", Version.CURRENT.toString()));
        builder.startObject();
        builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable);
        boostedTreeParams.toXContent(builder, params);
        builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective);
        builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses);
        if (predictionFieldName != null) {
            builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName);
        }
        builder.field(TRAINING_PERCENT.getPreferredName(), trainingPercent);
        if (version.onOrAfter(Version.V_7_6_0)) {
            builder.field(RANDOMIZE_SEED.getPreferredName(), randomizeSeed);
        }
        if (featureProcessors.isEmpty() == false) {
            NamedXContentObjectHelper.writeNamedObjects(builder, params, true, FEATURE_PROCESSORS.getPreferredName(), featureProcessors);
        }
        builder.field(EARLY_STOPPING_ENABLED.getPreferredName(), earlyStoppingEnabled);
        builder.endObject();
        return builder;
    }

    @Override
    public Map<String, Object> getParams(FieldInfo fieldInfo) {
        // Builds the parameter map handed to the C++ analytics process, including the
        // resolved prediction field type and the dependent variable's cardinality.
        Map<String, Object> params = new HashMap<>();
        params.put(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable);
        params.putAll(boostedTreeParams.getParams());
        params.put(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective);
        params.put(NUM_TOP_CLASSES.getPreferredName(), numTopClasses);
        if (predictionFieldName != null) {
            params.put(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName);
        }
        String predictionFieldType = getPredictionFieldTypeParamString(getPredictionFieldType(fieldInfo.getTypes(dependentVariable)));
        if (predictionFieldType != null) {
            params.put(PREDICTION_FIELD_TYPE, predictionFieldType);
        }
        params.put(NUM_CLASSES, fieldInfo.getCardinality(dependentVariable));
        params.put(TRAINING_PERCENT.getPreferredName(), trainingPercent);
        if (featureProcessors.isEmpty() == false) {
            params.put(FEATURE_PROCESSORS.getPreferredName(),
                featureProcessors.stream().map(p -> Collections.singletonMap(p.getName(), p)).collect(Collectors.toList()));
        }
        params.put(EARLY_STOPPING_ENABLED.getPreferredName(), earlyStoppingEnabled);
        return params;
    }

    /** Maps a {@link PredictionFieldType} to the string understood by the C++ process, or null. */
    private static String getPredictionFieldTypeParamString(PredictionFieldType predictionFieldType) {
        if (predictionFieldType == null) {
            return null;
        }
        switch(predictionFieldType)
        {
            case NUMBER:
                // C++ process uses int64_t type, so it is safe for the dependent variable to use long numbers.
                return "int";
            case STRING:
                return "string";
            case BOOLEAN:
                return "bool";
            default:
                return null;
        }
    }

    /**
     * Resolves the prediction field type from the set of mapped types of the dependent
     * variable, or null if the types are mixed or unknown.
     */
    public static PredictionFieldType getPredictionFieldType(Set<String> dependentVariableTypes) {
        if (dependentVariableTypes == null) {
            return null;
        }
        if (Types.categorical().containsAll(dependentVariableTypes)) {
            return PredictionFieldType.STRING;
        }
        if (Types.bool().containsAll(dependentVariableTypes)) {
            return PredictionFieldType.BOOLEAN;
        }
        if (Types.discreteNumerical().containsAll(dependentVariableTypes)) {
            return PredictionFieldType.NUMBER;
        }
        return null;
    }

    @Override
    public boolean supportsCategoricalFields() {
        return true;
    }

    @Override
    public Set<String> getAllowedCategoricalTypes(String fieldName) {
        // The dependent variable additionally allows discrete-numerical and boolean types.
        if (dependentVariable.equals(fieldName)) {
            return ALLOWED_DEPENDENT_VARIABLE_TYPES;
        }
        return Types.categorical();
    }

    @Override
    public List<RequiredField> getRequiredFields() {
        return Collections.singletonList(new RequiredField(dependentVariable, ALLOWED_DEPENDENT_VARIABLE_TYPES));
    }

    @Override
    public List<FieldCardinalityConstraint> getFieldCardinalityConstraints() {
        // This restriction is due to the fact that currently the C++ backend only supports binomial classification.
        return Collections.singletonList(FieldCardinalityConstraint.between(dependentVariable, 2, MAX_DEPENDENT_VARIABLE_CARDINALITY));
    }

    @SuppressWarnings("unchecked")
    @Override
    public Map<String, Object> getResultMappings(String resultsFieldName, FieldCapabilitiesResponse fieldCapabilitiesResponse) {
        // Builds index mappings for all result fields written under resultsFieldName;
        // the prediction and class_name fields reuse the dependent variable's mapped type.
        Map<String, Object> additionalProperties = new HashMap<>();
        additionalProperties.put(resultsFieldName + ".is_training", Collections.singletonMap("type", BooleanFieldMapper.CONTENT_TYPE));
        additionalProperties.put(resultsFieldName + ".prediction_probability",
            Collections.singletonMap("type", NumberFieldMapper.NumberType.DOUBLE.typeName()));
        additionalProperties.put(resultsFieldName + ".prediction_score",
            Collections.singletonMap("type", NumberFieldMapper.NumberType.DOUBLE.typeName()));
        additionalProperties.put(resultsFieldName + ".feature_importance", FEATURE_IMPORTANCE_MAPPING);
        Map<String, FieldCapabilities> dependentVariableFieldCaps = fieldCapabilitiesResponse.getField(dependentVariable);
        if (dependentVariableFieldCaps == null || dependentVariableFieldCaps.isEmpty()) {
            throw ExceptionsHelper.badRequestException("no mappings could be found for required field [{}]", DEPENDENT_VARIABLE);
        }
        Object dependentVariableMappingType = dependentVariableFieldCaps.values().iterator().next().getType();
        additionalProperties.put(
            resultsFieldName + "." + predictionFieldName, Collections.singletonMap("type", dependentVariableMappingType));
        Map<String, Object> topClassesProperties = new HashMap<>();
        topClassesProperties.put("class_name", Collections.singletonMap("type", dependentVariableMappingType));
        topClassesProperties.put("class_probability", Collections.singletonMap("type", NumberFieldMapper.NumberType.DOUBLE.typeName()));
        topClassesProperties.put("class_score", Collections.singletonMap("type", NumberFieldMapper.NumberType.DOUBLE.typeName()));
        Map<String, Object> topClassesMapping = new HashMap<>();
        topClassesMapping.put("type", NestedObjectMapper.CONTENT_TYPE);
        topClassesMapping.put("properties", topClassesProperties);
        additionalProperties.put(resultsFieldName + ".top_classes", topClassesMapping);
        return additionalProperties;
    }

    @Override
    public boolean supportsMissingValues() {
        return true;
    }

    @Override
    public boolean persistsState() {
        return true;
    }

    @Override
    public String getStateDocIdPrefix(String jobId) {
        return jobId + STATE_DOC_ID_INFIX;
    }

    @Override
    public List<String> getProgressPhases() {
        return PROGRESS_PHASES;
    }

    @Override
    public InferenceConfig inferenceConfig(FieldInfo fieldInfo) {
        // Translates this analysis into the inference-time classification config.
        PredictionFieldType predictionFieldType = getPredictionFieldType(fieldInfo.getTypes(dependentVariable));
        return ClassificationConfig.builder()
            .setResultsField(predictionFieldName)
            .setNumTopClasses(numTopClasses)
            .setNumTopFeatureImportanceValues(getBoostedTreeParams().getNumTopFeatureImportanceValues())
            .setPredictionFieldType(predictionFieldType)
            .build();
    }

    @Override
    public boolean supportsInference() {
        return true;
    }

    /** Inverse of {@link #getStateDocIdPrefix}: recovers the job id, or null if the infix is absent. */
    public static String extractJobIdFromStateDoc(String stateDocId) {
        int suffixIndex = stateDocId.lastIndexOf(STATE_DOC_ID_INFIX);
        return suffixIndex <= 0 ? null : stateDocId.substring(0, suffixIndex);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Classification that = (Classification) o;
        return Objects.equals(dependentVariable, that.dependentVariable)
            && Objects.equals(boostedTreeParams, that.boostedTreeParams)
            && Objects.equals(predictionFieldName, that.predictionFieldName)
            && Objects.equals(classAssignmentObjective, that.classAssignmentObjective)
            && Objects.equals(numTopClasses, that.numTopClasses)
            && Objects.equals(featureProcessors, that.featureProcessors)
            && Objects.equals(earlyStoppingEnabled, that.earlyStoppingEnabled)
            && trainingPercent == that.trainingPercent
            && randomizeSeed == that.randomizeSeed;
    }

    @Override
    public int hashCode() {
        return Objects.hash(dependentVariable, boostedTreeParams, predictionFieldName, classAssignmentObjective,
            numTopClasses, trainingPercent, randomizeSeed, featureProcessors,
            earlyStoppingEnabled);
    }

    /** How the decision threshold between classes is chosen. */
    public enum ClassAssignmentObjective {
        MAXIMIZE_ACCURACY, MAXIMIZE_MINIMUM_RECALL;

        public static ClassAssignmentObjective fromString(String value) {
            return ClassAssignmentObjective.valueOf(value.toUpperCase(Locale.ROOT));
        }

        @Override
        public String toString() {
            return name().toLowerCase(Locale.ROOT);
        }
    }
}
| |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.net;
import org.bitcoinj.core.Message;
import org.bitcoinj.utils.Threading;
import com.google.common.base.Throwables;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
/**
* A simple NIO MessageWriteTarget which handles all the business logic of a connection (reading+writing bytes).
* Used only by the NioClient and NioServer classes
*/
class ConnectionHandler implements MessageWriteTarget {
private static final org.slf4j.Logger log = LoggerFactory.getLogger(ConnectionHandler.class);
// The read buffer is sized to the parser's max message size, clamped to this range.
private static final int BUFFER_SIZE_LOWER_BOUND = 4096;
private static final int BUFFER_SIZE_UPPER_BOUND = 65536;
private static final int OUTBOUND_BUFFER_BYTE_COUNT = Message.MAX_SIZE + 24; // 24 byte message header
// We lock when touching local flags and when writing data, but NEVER when calling any methods which leave this
// class into non-Java classes.
private final ReentrantLock lock = Threading.lock("nioConnectionHandler");
@GuardedBy("lock") private final ByteBuffer readBuff;
@GuardedBy("lock") private final SocketChannel channel;
@GuardedBy("lock") private final SelectionKey key;
@GuardedBy("lock") StreamParser parser;
// Set once a close has been requested; guards against double-close and late registration.
@GuardedBy("lock") private boolean closeCalled = false;
// Running total of queued-but-unwritten bytes across all buffers in bytesToWrite.
@GuardedBy("lock") private long bytesToWriteRemaining = 0;
@GuardedBy("lock") private final LinkedList<ByteBuffer> bytesToWrite = new LinkedList<ByteBuffer>();
// Server-side registry of live handlers; null for client connections (see constructors).
private Set<ConnectionHandler> connectedHandlers;
public ConnectionHandler(StreamParserFactory parserFactory, SelectionKey key) throws IOException {
this(parserFactory.getNewParser(((SocketChannel)key.channel()).socket().getInetAddress(), ((SocketChannel)key.channel()).socket().getPort()), key);
if (parser == null)
throw new IOException("Parser factory.getNewParser returned null");
}
private ConnectionHandler(@Nullable StreamParser parser, SelectionKey key) {
this.key = key;
this.channel = checkNotNull(((SocketChannel)key.channel()));
if (parser == null) {
readBuff = null;
return;
}
this.parser = parser;
readBuff = ByteBuffer.allocateDirect(Math.min(Math.max(parser.getMaxMessageSize(), BUFFER_SIZE_LOWER_BOUND), BUFFER_SIZE_UPPER_BOUND));
parser.setWriteTarget(this); // May callback into us (eg closeConnection() now)
connectedHandlers = null;
}
public ConnectionHandler(StreamParser parser, SelectionKey key, Set<ConnectionHandler> connectedHandlers) {
this(checkNotNull(parser), key);
// closeConnection() may have already happened because we invoked the other c'tor above, which called
// parser.setWriteTarget which might have re-entered already. In this case we shouldn't add ourselves
// to the connectedHandlers set.
lock.lock();
boolean alreadyClosed = false;
try {
alreadyClosed = closeCalled;
this.connectedHandlers = connectedHandlers;
if (!alreadyClosed)
checkState(connectedHandlers.add(this));
} finally {
lock.unlock();
}
}
@GuardedBy("lock")
private void setWriteOps() {
// Make sure we are registered to get updated when writing is available again
key.interestOps(key.interestOps() | SelectionKey.OP_WRITE);
// Refresh the selector to make sure it gets the new interestOps
key.selector().wakeup();
}
// Tries to write any outstanding write bytes, runs in any thread (possibly unlocked)
private void tryWriteBytes() throws IOException {
lock.lock();
try {
// Iterate through the outbound ByteBuff queue, pushing as much as possible into the OS' network buffer.
Iterator<ByteBuffer> bytesIterator = bytesToWrite.iterator();
while (bytesIterator.hasNext()) {
ByteBuffer buff = bytesIterator.next();
bytesToWriteRemaining -= channel.write(buff);
if (!buff.hasRemaining())
bytesIterator.remove();
else {
setWriteOps();
break;
}
}
// If we are done writing, clear the OP_WRITE interestOps
if (bytesToWrite.isEmpty())
key.interestOps(key.interestOps() & ~SelectionKey.OP_WRITE);
// Don't bother waking up the selector here, since we're just removing an op, not adding
} finally {
lock.unlock();
}
}
@Override
public void writeBytes(byte[] message) throws IOException {
lock.lock();
try {
// Network buffers are not unlimited (and are often smaller than some messages we may wish to send), and
// thus we have to buffer outbound messages sometimes. To do this, we use a queue of ByteBuffers and just
// append to it when we want to send a message. We then let tryWriteBytes() either send the message or
// register our SelectionKey to wakeup when we have free outbound buffer space available.
if (bytesToWriteRemaining + message.length > OUTBOUND_BUFFER_BYTE_COUNT)
throw new IOException("Outbound buffer overflowed");
// Just dump the message onto the write buffer and call tryWriteBytes
// TODO: Kill the needless message duplication when the write completes right away
bytesToWrite.offer(ByteBuffer.wrap(Arrays.copyOf(message, message.length)));
bytesToWriteRemaining += message.length;
setWriteOps();
} catch (IOException e) {
lock.unlock();
log.error("Error writing message to connection, closing connection", e);
closeConnection();
throw e;
} catch (CancelledKeyException e) {
lock.unlock();
log.error("Error writing message to connection, closing connection", e);
closeConnection();
throw new IOException(e);
}
lock.unlock();
}
@Override
// May NOT be called with lock held
public void closeConnection() {
try {
channel.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
connectionClosed();
}
private void connectionClosed() {
boolean callClosed = false;
lock.lock();
try {
callClosed = !closeCalled;
closeCalled = true;
} finally {
lock.unlock();
}
if (callClosed) {
checkState(connectedHandlers == null || connectedHandlers.remove(this));
parser.connectionClosed();
}
}
// Handle a SelectionKey which was selected
// Runs unlocked as the caller is single-threaded (or if not, should enforce that handleKey is only called
// atomically for a given ConnectionHandler)
public static void handleKey(SelectionKey key) {
ConnectionHandler handler = ((ConnectionHandler)key.attachment());
try {
if (handler == null)
return;
if (!key.isValid()) {
handler.closeConnection(); // Key has been cancelled, make sure the socket gets closed
return;
}
if (key.isReadable()) {
// Do a socket read and invoke the parser's receiveBytes message
int read = handler.channel.read(handler.readBuff);
if (read == 0)
return; // Was probably waiting on a write
else if (read == -1) { // Socket was closed
key.cancel();
handler.closeConnection();
return;
}
// "flip" the buffer - setting the limit to the current position and setting position to 0
handler.readBuff.flip();
// Use parser.receiveBytes's return value as a check that it stopped reading at the right location
int bytesConsumed = checkNotNull(handler.parser).receiveBytes(handler.readBuff);
checkState(handler.readBuff.position() == bytesConsumed);
// Now drop the bytes which were read by compacting readBuff (resetting limit and keeping relative
// position)
handler.readBuff.compact();
}
if (key.isWritable())
handler.tryWriteBytes();
} catch (Exception e) {
// This can happen eg if the channel closes while the thread is about to get killed
// (ClosedByInterruptException), or if handler.parser.receiveBytes throws something
Throwable t = Throwables.getRootCause(e);
log.error("Error handling SelectionKey: {}", t.getMessage() != null ? t.getMessage() : t.getClass().getName());
handler.closeConnection();
}
}
}
| |
/*
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
/*
* Copyright (c) 2009 by Oracle Corporation. All Rights Reserved.
*/
package javax.xml.stream;
import javax.xml.transform.Source;
import javax.xml.stream.util.XMLEventAllocator;
/**
* Defines an abstract implementation of a factory for getting streams.
*
* The following table defines the standard properties of this specification.
* Each property varies in the level of support required by each implementation.
* The level of support required is described in the 'Required' column.
*
* <table border="2" rules="all" cellpadding="4">
* <thead>
* <tr>
* <th align="center" colspan="5">
* Configuration parameters
* </th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <th>Property Name</th>
* <th>Behavior</th>
* <th>Return type</th>
* <th>Default Value</th>
* <th>Required</th>
* </tr>
* <tr><td>javax.xml.stream.isValidating</td><td>Turns on/off implementation specific DTD validation</td><td>Boolean</td><td>False</td><td>No</td></tr>
* <tr><td>javax.xml.stream.isNamespaceAware</td><td>Turns on/off namespace processing for XML 1.0 support</td><td>Boolean</td><td>True</td><td>True (required) / False (optional)</td></tr>
* <tr><td>javax.xml.stream.isCoalescing</td><td>Requires the processor to coalesce adjacent character data</td><td>Boolean</td><td>False</td><td>Yes</td></tr>
* <tr><td>javax.xml.stream.isReplacingEntityReferences</td><td>replace internal entity references with their replacement text and report them as characters</td><td>Boolean</td><td>True</td><td>Yes</td></tr>
*<tr><td>javax.xml.stream.isSupportingExternalEntities</td><td>Resolve external parsed entities</td><td>Boolean</td><td>Unspecified</td><td>Yes</td></tr>
*<tr><td>javax.xml.stream.supportDTD</td><td>Use this property to request processors that do not support DTDs</td><td>Boolean</td><td>True</td><td>Yes</td></tr>
*<tr><td>javax.xml.stream.reporter</td><td>sets/gets the impl of the XMLReporter </td><td>javax.xml.stream.XMLReporter</td><td>Null</td><td>Yes</td></tr>
*<tr><td>javax.xml.stream.resolver</td><td>sets/gets the impl of the XMLResolver interface</td><td>javax.xml.stream.XMLResolver</td><td>Null</td><td>Yes</td></tr>
*<tr><td>javax.xml.stream.allocator</td><td>sets/gets the impl of the XMLEventAllocator interface</td><td>javax.xml.stream.util.XMLEventAllocator</td><td>Null</td><td>Yes</td></tr>
* </tbody>
* </table>
*
*
* @version 1.2
* @author Copyright (c) 2009 by Oracle Corporation. All Rights Reserved.
* @see XMLOutputFactory
* @see XMLEventReader
* @see XMLStreamReader
* @see EventFilter
* @see XMLReporter
* @see XMLResolver
* @see XMLEventAllocator
* @since 1.6
*/
public abstract class XMLInputFactory {
    /**
     * The property used to turn on/off namespace support,
     * this is to support XML 1.0 documents,
     * only the true setting must be supported
     */
    public static final String IS_NAMESPACE_AWARE=
        "javax.xml.stream.isNamespaceAware";

    /**
     * The property used to turn on/off implementation specific validation
     */
    public static final String IS_VALIDATING=
        "javax.xml.stream.isValidating";

    /**
     * The property that requires the parser to coalesce adjacent character data sections
     */
    public static final String IS_COALESCING=
        "javax.xml.stream.isCoalescing";

    /**
     * Requires the parser to replace internal
     * entity references with their replacement
     * text and report them as characters
     */
    public static final String IS_REPLACING_ENTITY_REFERENCES=
        "javax.xml.stream.isReplacingEntityReferences";

    /**
     * The property that requires the parser to resolve external parsed entities
     */
    public static final String IS_SUPPORTING_EXTERNAL_ENTITIES=
        "javax.xml.stream.isSupportingExternalEntities";

    /**
     * The property that requires the parser to support DTDs
     */
    public static final String SUPPORT_DTD=
        "javax.xml.stream.supportDTD";

    /**
     * The property used to
     * set/get the implementation of the XMLReporter interface
     */
    public static final String REPORTER=
        "javax.xml.stream.reporter";

    /**
     * The property used to set/get the implementation of the XMLResolver
     */
    public static final String RESOLVER=
        "javax.xml.stream.resolver";

    /**
     * The property used to set/get the implementation of the allocator
     */
    public static final String ALLOCATOR=
        "javax.xml.stream.allocator";

    // System property / services key used by FactoryFinder to locate an implementation.
    static final String JAXPFACTORYID = "javax.xml.stream.XMLInputFactory";

    // Fallback implementation class. The historic misspelling (DEFAULIMPL) is kept because the
    // field is package-visible and renaming it could break other classes in this package.
    static final String DEFAULIMPL = "com.sun.xml.stream.ZephyrParserFactory";

    /** Protected constructor: only concrete factory implementations may instantiate. */
    protected XMLInputFactory(){}

    /**
     * Create a new instance of the factory.
     * @throws FactoryConfigurationError if an instance of this factory cannot be loaded
     */
    public static XMLInputFactory newInstance()
        throws FactoryConfigurationError
    {
        return (XMLInputFactory) FactoryFinder.find(JAXPFACTORYID, DEFAULIMPL, true);
    }

    /**
     * Create a new instance of the factory.
     * This static method creates a new factory instance.
     * This method uses the following ordered lookup procedure to determine
     * the XMLInputFactory implementation class to load:
     * Use the javax.xml.stream.XMLInputFactory system property.
     * Use the properties file "lib/stax.properties" in the JRE directory.
     * This configuration file is in standard java.util.Properties format
     * and contains the fully qualified name of the implementation class
     * with the key being the system property defined above.
     * Use the Services API (as detailed in the JAR specification), if available,
     * to determine the classname. The Services API will look for a classname
     * in the file META-INF/services/javax.xml.stream.XMLInputFactory in jars
     * available to the runtime.
     * Platform default XMLInputFactory instance.
     *
     * Once an application has obtained a reference to a XMLInputFactory it
     * can use the factory to configure and obtain stream instances.
     *
     * Note that this is a new method that replaces the deprecated newInstance() method.
     * No changes in behavior are defined by this replacement method relative to
     * the deprecated method.
     *
     * @throws FactoryConfigurationError if an instance of this factory cannot be loaded
     */
    public static XMLInputFactory newFactory()
        throws FactoryConfigurationError
    {
        return (XMLInputFactory) FactoryFinder.find(JAXPFACTORYID, DEFAULIMPL, true);
    }

    /**
     * Create a new instance of the factory
     *
     * @param factoryId Name of the factory to find, same as
     * a property name
     * @param classLoader classLoader to use
     * @return the factory implementation
     * @throws FactoryConfigurationError if an instance of this factory cannot be loaded
     *
     * @deprecated This method has been deprecated to maintain API consistency.
     * All newInstance methods have been replaced with corresponding
     * newFactory methods. The replacement {@link
     * #newFactory(String, ClassLoader)} method
     * defines no changes in behavior.
     */
    public static XMLInputFactory newInstance(String factoryId,
            ClassLoader classLoader)
        throws FactoryConfigurationError {
        try {
            //do not fallback if given classloader can't find the class, throw exception
            // (fallback is only allowed when looking up the standard JAXP factory id)
            return (XMLInputFactory) FactoryFinder.find(factoryId, classLoader,
                    null, factoryId.equals(JAXPFACTORYID));
        } catch (FactoryFinder.ConfigurationError e) {
            // Preserve the underlying cause rather than only its message.
            throw new FactoryConfigurationError(e.getException(),
                    e.getMessage());
        }
    }

    /**
     * Create a new instance of the factory.
     * If the classLoader argument is null, then the ContextClassLoader is used.
     *
     * Note that this is a new method that replaces the deprecated
     * newInstance(String factoryId, ClassLoader classLoader) method.
     * No changes in behavior are defined by this replacement method relative
     * to the deprecated method.
     *
     * @param factoryId Name of the factory to find, same as
     * a property name
     * @param classLoader classLoader to use
     * @return the factory implementation
     * @throws FactoryConfigurationError if an instance of this factory cannot be loaded
     */
    public static XMLInputFactory newFactory(String factoryId,
            ClassLoader classLoader)
        throws FactoryConfigurationError {
        try {
            //do not fallback if given classloader can't find the class, throw exception
            // (fallback is only allowed when looking up the standard JAXP factory id)
            return (XMLInputFactory) FactoryFinder.find(factoryId, classLoader,
                    null, factoryId.equals(JAXPFACTORYID));
        } catch (FactoryFinder.ConfigurationError e) {
            // Preserve the underlying cause rather than only its message.
            throw new FactoryConfigurationError(e.getException(),
                    e.getMessage());
        }
    }

    /**
     * Create a new XMLStreamReader from a reader
     * @param reader the XML data to read from
     * @throws XMLStreamException
     */
    public abstract XMLStreamReader createXMLStreamReader(java.io.Reader reader)
        throws XMLStreamException;

    /**
     * Create a new XMLStreamReader from a JAXP source. This method is optional.
     * @param source the source to read from
     * @throws UnsupportedOperationException if this method is not
     * supported by this XMLInputFactory
     * @throws XMLStreamException
     */
    public abstract XMLStreamReader createXMLStreamReader(Source source)
        throws XMLStreamException;

    /**
     * Create a new XMLStreamReader from a java.io.InputStream
     * @param stream the InputStream to read from
     * @throws XMLStreamException
     */
    public abstract XMLStreamReader createXMLStreamReader(java.io.InputStream stream)
        throws XMLStreamException;

    /**
     * Create a new XMLStreamReader from a java.io.InputStream
     * @param stream the InputStream to read from
     * @param encoding the character encoding of the stream
     * @throws XMLStreamException
     */
    public abstract XMLStreamReader createXMLStreamReader(java.io.InputStream stream, String encoding)
        throws XMLStreamException;

    /**
     * Create a new XMLStreamReader from a java.io.InputStream
     * @param systemId the system ID of the stream
     * @param stream the InputStream to read from
     * @throws XMLStreamException
     */
    public abstract XMLStreamReader createXMLStreamReader(String systemId, java.io.InputStream stream)
        throws XMLStreamException;

    /**
     * Create a new XMLStreamReader from a java.io.Reader
     * @param systemId the system ID of the input
     * @param reader the Reader to read from
     * @throws XMLStreamException
     */
    public abstract XMLStreamReader createXMLStreamReader(String systemId, java.io.Reader reader)
        throws XMLStreamException;

    /**
     * Create a new XMLEventReader from a reader
     * @param reader the XML data to read from
     * @throws XMLStreamException
     */
    public abstract XMLEventReader createXMLEventReader(java.io.Reader reader)
        throws XMLStreamException;

    /**
     * Create a new XMLEventReader from a reader
     * @param systemId the system ID of the input
     * @param reader the XML data to read from
     * @throws XMLStreamException
     */
    public abstract XMLEventReader createXMLEventReader(String systemId, java.io.Reader reader)
        throws XMLStreamException;

    /**
     * Create a new XMLEventReader from an XMLStreamReader. After being used
     * to construct the XMLEventReader instance returned from this method
     * the XMLStreamReader must not be used.
     * @param reader the XMLStreamReader to read from (may not be modified)
     * @return a new XMLEventReader
     * @throws XMLStreamException
     */
    public abstract XMLEventReader createXMLEventReader(XMLStreamReader reader)
        throws XMLStreamException;

    /**
     * Create a new XMLEventReader from a JAXP source.
     * Support of this method is optional.
     * @param source the source to read from
     * @throws UnsupportedOperationException if this method is not
     * supported by this XMLInputFactory
     */
    public abstract XMLEventReader createXMLEventReader(Source source)
        throws XMLStreamException;

    /**
     * Create a new XMLEventReader from a java.io.InputStream
     * @param stream the InputStream to read from
     * @throws XMLStreamException
     */
    public abstract XMLEventReader createXMLEventReader(java.io.InputStream stream)
        throws XMLStreamException;

    /**
     * Create a new XMLEventReader from a java.io.InputStream
     * @param stream the InputStream to read from
     * @param encoding the character encoding of the stream
     * @throws XMLStreamException
     */
    public abstract XMLEventReader createXMLEventReader(java.io.InputStream stream, String encoding)
        throws XMLStreamException;

    /**
     * Create a new XMLEventReader from a java.io.InputStream
     * @param systemId the system ID of the stream
     * @param stream the InputStream to read from
     * @throws XMLStreamException
     */
    public abstract XMLEventReader createXMLEventReader(String systemId, java.io.InputStream stream)
        throws XMLStreamException;

    /**
     * Create a filtered reader that wraps the filter around the reader
     * @param reader the reader to filter
     * @param filter the filter to apply to the reader
     * @throws XMLStreamException
     */
    public abstract XMLStreamReader createFilteredReader(XMLStreamReader reader, StreamFilter filter)
        throws XMLStreamException;

    /**
     * Create a filtered event reader that wraps the filter around the event reader
     * @param reader the event reader to wrap
     * @param filter the filter to apply to the event reader
     * @throws XMLStreamException
     */
    public abstract XMLEventReader createFilteredReader(XMLEventReader reader, EventFilter filter)
        throws XMLStreamException;

    /**
     * The resolver that will be set on any XMLStreamReader or XMLEventReader created
     * by this factory instance.
     */
    public abstract XMLResolver getXMLResolver();

    /**
     * The resolver that will be set on any XMLStreamReader or XMLEventReader created
     * by this factory instance.
     * @param resolver the resolver to use to resolve references
     */
    public abstract void setXMLResolver(XMLResolver resolver);

    /**
     * The reporter that will be set on any XMLStreamReader or XMLEventReader created
     * by this factory instance.
     */
    public abstract XMLReporter getXMLReporter();

    /**
     * The reporter that will be set on any XMLStreamReader or XMLEventReader created
     * by this factory instance.
     * @param reporter the resolver to use to report non fatal errors
     */
    public abstract void setXMLReporter(XMLReporter reporter);

    /**
     * Allows the user to set specific feature/property on the underlying
     * implementation. The underlying implementation is not required to support
     * every setting of every property in the specification and may use
     * IllegalArgumentException to signal that an unsupported property may not be
     * set with the specified value.
     * <p>
     * All implementations that implement JAXP 1.5 or newer are required to
     * support the {@link javax.xml.XMLConstants#ACCESS_EXTERNAL_DTD} property.
     * </p>
     * <ul>
     * <li>
     * <p>
     * Access to external DTDs, external Entity References is restricted to the
     * protocols specified by the property. If access is denied during parsing
     * due to the restriction of this property, {@link XMLStreamException}
     * will be thrown by the {@link XMLStreamReader#next()} or
     * {@link XMLEventReader#nextEvent()} method.
     * </p>
     * </li>
     * </ul>
     * @param name The name of the property (may not be null)
     * @param value The value of the property
     * @throws IllegalArgumentException if the property is not supported
     */
    public abstract void setProperty(String name, Object value)
        throws IllegalArgumentException;

    /**
     * Get the value of a feature/property from the underlying implementation
     * @param name The name of the property (may not be null)
     * @return The value of the property
     * @throws IllegalArgumentException if the property is not supported
     */
    public abstract Object getProperty(String name)
        throws IllegalArgumentException;

    /**
     * Query the set of properties that this factory supports.
     *
     * @param name The name of the property (may not be null)
     * @return true if the property is supported and false otherwise
     */
    public abstract boolean isPropertySupported(String name);

    /**
     * Set a user defined event allocator for events
     * @param allocator the user defined allocator
     */
    public abstract void setEventAllocator(XMLEventAllocator allocator);

    /**
     * Gets the allocator used by streams created with this factory
     */
    public abstract XMLEventAllocator getEventAllocator();
}
| |
package net.sourceforge.mayfly.acceptance;
public class IndexTest extends SqlTestCase {

    public void testMysqlSyntax() throws Exception {
        // MySQL allows an index declaration inline in CREATE TABLE.
        String inlineIndex = "create table foo(a integer, index(a))";
        if (!dialect.createTableCanContainIndex()) {
            expectExecuteFailure(inlineIndex, "expected data type but got '('");
        }
        else {
            execute(inlineIndex);
            execute("create table bar(b integer, index named_index(b))");
        }
    }

    public void testMysqlSyntaxTwoColumns() throws Exception {
        // Same inline form, but with a composite index over two columns.
        String inlineTwoColumnIndex = "create table foo(a integer, b integer, index(b, a))";
        if (!dialect.createTableCanContainIndex()) {
            expectExecuteFailure(inlineTwoColumnIndex, "expected data type but got '('");
        }
        else {
            execute(inlineTwoColumnIndex);
        }
    }

    /* TODO: Might want to insist that the index be on a NOT NULL
       column the way that MySQL 5.1 does
       (apparently these tests just pass with MySQL 5.0). */
    public void testCreateIndexSyntax() throws Exception {
        execute("create table foo(a integer)");
        // CREATE INDEX is a DDL statement, so the update count should be zero.
        assertEquals(0,
            execute("create index an_index_name on foo(a)"));
    }

    public void testTwoColumns() throws Exception {
        // A composite index created via CREATE INDEX, column order differing from table order.
        execute("create table foo(a integer, b integer, c integer)");
        execute("create index an_index_name on foo(c, b)");
    }

    public void testNotUniqueByDefault() throws Exception {
        // A plain index imposes no uniqueness: duplicate values of a must both survive.
        execute("create table foo(a integer, b varchar(80))");
        execute("create index an_index_name on foo(a)");
        execute("insert into foo(a, b) values(4, 'one')");
        execute("insert into foo(a, b) values(4, 'two')");
        assertResultSet(new String[] { " 4, 'one' ", " 4, 'two' "},
            query("select a, b from foo"));
    }

    public void testUnique() throws Exception {
        execute("create table foo(a integer, b varchar(80))");
        execute("create unique index an_index_name on foo(a)");
        // Also acts as a constraint
        execute("insert into foo(a, b) values(4, 'one')");
        expectExecuteFailure("insert into foo(a, b) values(4, 'two')",
            "unique constraint in table foo, column a: duplicate value 4");
    }

    public void testUniqueAndExistingRows() throws Exception {
        // A unique index must be rejected when existing rows already violate it;
        // a non-unique index over the same data is still fine.
        execute("create table foo(a integer, b varchar(80))");
        execute("insert into foo(a, b) values(4, 'one')");
        execute("insert into foo(a, b) values(4, 'two')");
        expectExecuteFailure(
            "create unique index an_index_name on foo(a)",
            "unique constraint in table foo, column a: duplicate value 4");
        execute("create index an_index_name on foo(a)");
    }

    public void testDuplicateName() throws Exception {
        execute("create table foo(f integer)");
        execute("create table bar(b integer)");
        execute("create index an_index_name on foo(f)");
        // Reusing the name on a different table: legal iff index names are scoped per table.
        String reuseNameOnOtherTable = "create index an_index_name on bar(b)";
        if (!dialect.indexNamesArePerTable()) {
            expectExecuteFailure(reuseNameOnOtherTable,
                "table foo already has an index an_index_name");
        }
        else {
            execute(reuseNameOnOtherTable);
        }
    }

    public void testIndexNamesAreCaseInsensitive() throws Exception {
        execute("create table foo(a integer, b integer)");
        execute("create index an_index_name on foo(a)");
        // Same name in different case, on a different column of the same table.
        String differentlyCasedName = "create index an_iNdEx_name on foo(b)";
        // NOTE(review): dead branch kept as-is — no dialect capability method exists yet
        // for case-sensitive index names, so the failure branch always runs.
        if (false) {
            execute(differentlyCasedName);
        }
        else {
            expectExecuteFailure(differentlyCasedName,
                "table foo already has an index an_iNdEx_name");
        }
    }

    public void testIndexOnPartOfColumn() throws Exception {
        execute("create table foo(a varchar(255))");
        // MySQL-style prefix index: only the first 10 characters are indexed.
        String prefixIndex = "create index my_index on foo(a(10))";
        if (!dialect.canIndexPartOfColumn()) {
            expectExecuteFailure(prefixIndex, "expected ')' but got '('");
        }
        else {
            execute(prefixIndex);
        }
    }

    public void testDropIndex() throws Exception {
        execute("create table foo(a integer, b integer)");
        execute("create index an_index on foo(a)");
        String recreateWithSameName = "create index an_index on foo(b)";
        expectExecuteFailure(recreateWithSameName,
            "table foo already has an index an_index");
        String dropWithoutTable = "drop index an_index";
        if (dialect.indexNamesArePerTable()) {
            // Per-table names make DROP INDEX ambiguous without ON <table>.
            expectExecuteFailure(dropWithoutTable,
                "expected ON but got end of file");
            execute("drop index an_index on foo");
        }
        else {
            execute(dropWithoutTable);
        }
        // The name should be free again once the index is gone.
        execute(recreateWithSameName);
    }

    public void testDropIndexBadName() throws Exception {
        execute("create table foo(a integer)");
        expectExecuteFailure(
            dropIndexCommand("no_such", "foo"),
            "no index no_such"
        );
    }

    public void testDropIndexWithWrongTable() throws Exception {
        execute("create table foo(a integer, b integer)");
        execute("create table bar(a integer)");
        execute("create index an_index on foo(a)");
        // Name the wrong table in the ON clause.
        String dropNamingWrongTable = "drop index an_index on bar";
        if (!dialect.canDropIndexGivingWrongTable()) {
            /* Could be syntax error, or something like "no index an_index",
               or the Mayfly expectation of telling exactly what is happening.
             */
            expectExecuteFailure(dropNamingWrongTable,
                "attempt to drop index an_index from table bar " +
                "although the index is on table foo");
        }
        else {
            execute(dropNamingWrongTable);
            // check it is really gone
            execute("create index an_index on foo(b)");
        }
    }

    public void testDropIndexWithCorrectTable() throws Exception {
        execute("create table foo(a integer, b integer)");
        execute("create table bar(a integer)");
        execute("create index an_index on foo(a)");
        String dropNamingTable = "drop index an_index on foo";
        if (!dialect.haveDropIndexOn()) {
            expectExecuteFailure(dropNamingTable, "expected end of file but got ON");
        }
        else {
            execute(dropNamingTable);
            // Check that it is really gone
            execute("create index an_index on foo(b)");
        }
    }

    public void testDroppingUniqueIndexDropsConstraint() throws Exception {
        // Dropping a UNIQUE index also removes the constraint it implied:
        // duplicates must be insertable afterwards, and the dump shows no constraint.
        execute("create table foo(a integer, b varchar(20))");
        execute("create unique index an_index on foo(a)");
        dropIndex("an_index", "foo");
        dialect.checkDump(
            "CREATE TABLE foo(\n" +
            "  a INTEGER,\n" +
            "  b VARCHAR(20)\n" +
            ");\n\n");
        execute("insert into foo(a, b) values(6, 'first')");
        execute("insert into foo(a, b) values(6, 'second')");
    }

    public void testDroppingIndexDoesNotAffectConstraint() throws Exception {
        // A UNIQUE constraint declared in CREATE TABLE must survive dropping a
        // separately-created plain index on the same column.
        String tableWithUnique = "create table foo(a integer, b varchar(20), unique(a))";
        if (!dialect.uniqueColumnMayBeNullable()) {
            expectExecuteFailure(tableWithUnique,
                "cannot combine nullable column and unique constraint: table foo, column a");
        }
        else {
            execute(tableWithUnique);
        }
        if (dialect instanceof DerbyDialect) {
            // I couldn't get the rest of this test to work in Derby.
            // Dropping the index is failing, saying no such index, and
            // I'm not sure sure why.
            return;
        }
        execute("create index an_index on foo(a)");
        dropIndex("an_index", "foo");
        dialect.checkDump(
            "CREATE TABLE foo(\n" +
            "  a INTEGER,\n" +
            "  b VARCHAR(20),\n" +
            "  UNIQUE(a)\n" +
            ");\n\n");
        execute("insert into foo(a, b) values(6, 'first')");
        expectExecuteFailure("insert into foo(a, b) values(6, 'second')",
            "unique constraint in table foo, column a: duplicate value 6");
    }

    public void mysql_only_testAlterTableDropIndex() throws Exception {
        // another syntax, as an alternative to the DROP INDEX one
        execute("create table foo(a integer, b integer)");
        execute("create index an_index on foo(a)");
        String recreateWithSameName = "create index an_index on foo(b)";
        expectExecuteFailure(recreateWithSameName,
            "duplicate index an_index");
        execute("alter table foo drop index an_index");
        execute(recreateWithSameName);
    }

    /* Another case for duplicates is:
         execute("create index an_index on foo(a)");
         execute("create index an_index on foo(a)");
       Derby seems to like that one, but hypersonic and postgres don't.
     */
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.base.Objects;
import com.google.common.collect.Iterators;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.io.IVersionedSerializer;
/**
* A combination of a top-level (or row) tombstone and range tombstones describing the deletions
* within a {@link ColumnFamily} (or row).
*/
public class DeletionInfo
{
// Shared, stateless serializer instance exposed via serializer().
private static final Serializer serializer = new Serializer();
/**
 * This represents a deletion of the entire row. We can't represent this within the RangeTombstoneList, so it's
 * kept separately. This also slightly optimizes the common case of a full row deletion.
 */
private DeletionTime topLevel;
/**
 * A list of range tombstones within the row. This is left as null if there are no range tombstones,
 * to save an allocation, since that is a common case.
 */
private RangeTombstoneList ranges;
/**
 * Creates a DeletionInfo with only a top-level (row) tombstone and no range tombstones.
 * @param markedForDeleteAt the time after which the entire row should be considered deleted
 * @param localDeletionTime what time the deletion write was applied locally (for purposes of
 *                          purging the tombstone after gc_grace_seconds).
 */
public DeletionInfo(long markedForDeleteAt, int localDeletionTime)
{
    // Pre-1.1 node may return MIN_VALUE for non-deleted container, but the new default is MAX_VALUE
    // (see CASSANDRA-3872), so normalize MIN_VALUE to MAX_VALUE here.
    this(new DeletionTime(markedForDeleteAt, localDeletionTime == Integer.MIN_VALUE ? Integer.MAX_VALUE : localDeletionTime));
}
/**
 * Creates a DeletionInfo with the given top-level tombstone and no range tombstones.
 * @param topLevel the tombstone covering the entire row
 */
public DeletionInfo(DeletionTime topLevel)
{
    this(topLevel, null);
}
/**
 * Creates a DeletionInfo with a live top-level tombstone and a single range tombstone
 * covering the column names from {@code start} to {@code end}.
 * @param comparator orders column names within the range tombstone list
 */
public DeletionInfo(ByteBuffer start, ByteBuffer end, Comparator<ByteBuffer> comparator, long markedForDeleteAt, int localDeletionTime)
{
    this(DeletionTime.LIVE, new RangeTombstoneList(comparator, 1));
    ranges.add(start, end, markedForDeleteAt, localDeletionTime);
}
/**
 * Creates a DeletionInfo holding only the given range tombstone (its bounds and deletion
 * times are unpacked into the range tombstone list).
 */
public DeletionInfo(RangeTombstone rangeTombstone, Comparator<ByteBuffer> comparator)
{
    this(rangeTombstone.min, rangeTombstone.max, comparator, rangeTombstone.data.markedForDeleteAt, rangeTombstone.data.localDeletionTime);
}
// Canonical constructor; ranges may be null to mean "no range tombstones".
private DeletionInfo(DeletionTime topLevel, RangeTombstoneList ranges)
{
    this.topLevel = topLevel;
    this.ranges = ranges;
}
/**
 * Returns a new DeletionInfo that has no top-level tombstone or any range tombstones,
 * i.e. one for which {@link #isLive()} holds.
 */
public static DeletionInfo live()
{
    return new DeletionInfo(DeletionTime.LIVE);
}
public static Serializer serializer()
{
return serializer;
}
public DeletionInfo copy()
{
return new DeletionInfo(topLevel, ranges == null ? null : ranges.copy());
}
/**
* Returns whether this DeletionInfo is live, that is deletes no columns.
*/
public boolean isLive()
{
return topLevel.markedForDeleteAt == Long.MIN_VALUE
&& topLevel.localDeletionTime == Integer.MAX_VALUE
&& (ranges == null || ranges.isEmpty());
}
/**
* Return whether a given column is deleted by the container having this deletion info.
*
* @param column the column to check.
* @return true if the column is deleted, false otherwise
*/
public boolean isDeleted(Column column)
{
return isDeleted(column.name(), column.timestamp());
}
public boolean isDeleted(ByteBuffer name, long timestamp)
{
// We do rely on this test: if topLevel.markedForDeleteAt is MIN_VALUE, we should not
// consider the column deleted even if timestamp=MIN_VALUE, otherwise this break QueryFilter.isRelevant
if (isLive())
return false;
if (timestamp <= topLevel.markedForDeleteAt)
return true;
return ranges != null && ranges.isDeleted(name, timestamp);
}
/**
* Returns a new {@link InOrderTester} in forward order.
*/
InOrderTester inOrderTester()
{
return inOrderTester(false);
}
/**
* Returns a new {@link InOrderTester} given the order in which
* columns will be passed to it.
*/
public InOrderTester inOrderTester(boolean reversed)
{
return new InOrderTester(reversed);
}
/**
* Purge every tombstones that are older than {@code gcbefore}.
*
* @param gcBefore timestamp (in seconds) before which tombstones should be purged
*/
public void purge(int gcBefore)
{
topLevel = topLevel.localDeletionTime < gcBefore ? DeletionTime.LIVE : topLevel;
if (ranges != null)
{
ranges.purge(gcBefore);
if (ranges.isEmpty())
ranges = null;
}
}
/**
* Returns true if {@code purge} would remove the top-level tombstone or any of the range
* tombstones, false otherwise.
* @param gcBefore timestamp (in seconds) before which tombstones should be purged
*/
public boolean hasPurgeableTombstones(int gcBefore)
{
if (topLevel.localDeletionTime < gcBefore)
return true;
return ranges != null && ranges.hasPurgeableTombstones(gcBefore);
}
/**
* Potentially replaces the top-level tombstone with another, keeping whichever has the higher markedForDeleteAt
* timestamp.
* @param newInfo
*/
public void add(DeletionTime newInfo)
{
if (topLevel.markedForDeleteAt < newInfo.markedForDeleteAt)
topLevel = newInfo;
}
public void add(RangeTombstone tombstone, Comparator<ByteBuffer> comparator)
{
if (ranges == null)
ranges = new RangeTombstoneList(comparator, 1);
ranges.add(tombstone);
}
/**
* Combines another DeletionInfo with this one and returns the result. Whichever top-level tombstone
* has the higher markedForDeleteAt timestamp will be kept, along with its localDeletionTime. The
* range tombstones will be combined.
*
* @return this object.
*/
public DeletionInfo add(DeletionInfo newInfo)
{
add(newInfo.topLevel);
if (ranges == null)
ranges = newInfo.ranges == null ? null : newInfo.ranges.copy();
else if (newInfo.ranges != null)
ranges.addAll(newInfo.ranges);
return this;
}
/**
* Returns the minimum timestamp in any of the range tombstones or the top-level tombstone.
*/
public long minTimestamp()
{
return ranges == null
? topLevel.markedForDeleteAt
: Math.min(topLevel.markedForDeleteAt, ranges.minMarkedAt());
}
/**
* Returns the maximum timestamp in any of the range tombstones or the top-level tombstone.
*/
public long maxTimestamp()
{
return ranges == null
? topLevel.markedForDeleteAt
: Math.max(topLevel.markedForDeleteAt, ranges.maxMarkedAt());
}
/**
* Returns the top-level (or "row") tombstone.
*/
public DeletionTime getTopLevelDeletion()
{
return topLevel;
}
// Use sparingly, not the most efficient thing
public Iterator<RangeTombstone> rangeIterator()
{
return ranges == null ? Iterators.<RangeTombstone>emptyIterator() : ranges.iterator();
}
public DeletionTime rangeCovering(ByteBuffer name)
{
return ranges == null ? null : ranges.search(name);
}
public int dataSize()
{
int size = TypeSizes.NATIVE.sizeof(topLevel.markedForDeleteAt);
return size + (ranges == null ? 0 : ranges.dataSize());
}
public boolean hasRanges()
{
return ranges != null && !ranges.isEmpty();
}
public int rangeCount()
{
return hasRanges() ? ranges.size() : 0;
}
/**
* Whether this deletion info may modify the provided one if added to it.
*/
public boolean mayModify(DeletionInfo delInfo)
{
return topLevel.markedForDeleteAt > delInfo.topLevel.markedForDeleteAt
|| hasRanges();
}
@Override
public String toString()
{
if (ranges == null || ranges.isEmpty())
return String.format("{%s}", topLevel);
else
return String.format("{%s, ranges=%s}", topLevel, rangesAsString());
}
private String rangesAsString()
{
assert !ranges.isEmpty();
StringBuilder sb = new StringBuilder();
AbstractType at = (AbstractType)ranges.comparator();
assert at != null;
Iterator<RangeTombstone> iter = rangeIterator();
while (iter.hasNext())
{
RangeTombstone i = iter.next();
sb.append("[");
sb.append(at.getString(i.min)).append("-");
sb.append(at.getString(i.max)).append(", ");
sb.append(i.data);
sb.append("]");
}
return sb.toString();
}
// Updates all the timestamp of the deletion contained in this DeletionInfo to be {@code timestamp}.
public void updateAllTimestamp(long timestamp)
{
if (topLevel.markedForDeleteAt != Long.MIN_VALUE)
topLevel = new DeletionTime(timestamp, topLevel.localDeletionTime);
if (ranges != null)
ranges.updateAllTimestamp(timestamp);
}
@Override
public boolean equals(Object o)
{
if(!(o instanceof DeletionInfo))
return false;
DeletionInfo that = (DeletionInfo)o;
return topLevel.equals(that.topLevel) && Objects.equal(ranges, that.ranges);
}
@Override
public final int hashCode()
{
return Objects.hashCode(topLevel, ranges);
}
public static class Serializer implements IVersionedSerializer<DeletionInfo>
{
public void serialize(DeletionInfo info, DataOutput out, int version) throws IOException
{
DeletionTime.serializer.serialize(info.topLevel, out);
RangeTombstoneList.serializer.serialize(info.ranges, out, version);
}
/*
* Range tombstones internally depend on the column family serializer, but it is not serialized.
* Thus deserialize(DataInput, int, Comparator<ByteBuffer>) should be used instead of this method.
*/
public DeletionInfo deserialize(DataInput in, int version) throws IOException
{
throw new UnsupportedOperationException();
}
public DeletionInfo deserialize(DataInput in, int version, Comparator<ByteBuffer> comparator) throws IOException
{
DeletionTime topLevel = DeletionTime.serializer.deserialize(in);
RangeTombstoneList ranges = RangeTombstoneList.serializer.deserialize(in, version, comparator);
return new DeletionInfo(topLevel, ranges);
}
public long serializedSize(DeletionInfo info, TypeSizes typeSizes, int version)
{
long size = DeletionTime.serializer.serializedSize(info.topLevel, typeSizes);
return size + RangeTombstoneList.serializer.serializedSize(info.ranges, typeSizes, version);
}
public long serializedSize(DeletionInfo info, int version)
{
return serializedSize(info, TypeSizes.NATIVE, version);
}
}
/**
* This object allow testing whether a given column (name/timestamp) is deleted
* or not by this DeletionInfo, assuming that the columns given to this
* object are passed in forward or reversed comparator sorted order.
*
* This is more efficient that calling DeletionInfo.isDeleted() repeatedly
* in that case.
*/
public class InOrderTester
{
/*
* Note that because because range tombstone are added to this DeletionInfo while we iterate,
* `ranges` may be null initially and we need to wait for the first range to create the tester (once
* created the test will pick up new tombstones however). We are guaranteed that a range tombstone
* will be added *before* we test any column that it may delete, so this is ok.
*/
private RangeTombstoneList.InOrderTester tester;
private final boolean reversed;
private InOrderTester(boolean reversed)
{
this.reversed = reversed;
}
public boolean isDeleted(Column column)
{
return isDeleted(column.name(), column.timestamp());
}
public boolean isDeleted(ByteBuffer name, long timestamp)
{
if (timestamp <= topLevel.markedForDeleteAt)
return true;
/*
* We don't optimize the reversed case for now because RangeTombstoneList
* is always in forward sorted order.
*/
if (reversed)
return DeletionInfo.this.isDeleted(name, timestamp);
// Maybe create the tester if we hadn't yet and we now have some ranges (see above).
if (tester == null && ranges != null)
tester = ranges.inOrderTester();
return tester != null && tester.isDeleted(name, timestamp);
}
}
}
| |
// ----------------------------------------------------------------------------
// Copyright 2006-2010, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
// 2008/12/01 Martin D. Flynn
// -Initial release
// 2009/01/28 Martin D. Flynn
// -Added "Logged-In" list column
// 2009/08/23 Martin D. Flynn
// -Added ability to log-in to selected account (this feature controlled by
// property "sysAdminAccounts.allowAccountLogin" - default is "false").
// -Convert new entered IDs to lowercase
// 2009/09/23 Martin D. Flynn
// -Added "TemporaryProperties" field.
// 2010/09/09 Martin D. Flynn
// -Moved to "org.opengts.war.track.page"
// ----------------------------------------------------------------------------
package org.opengts.war.track.page;
import java.util.*;
import java.io.*;
import javax.servlet.*;
import javax.servlet.http.*;
import org.opengts.util.*;
import org.opengts.dbtools.*;
import org.opengts.db.*;
import org.opengts.db.AclEntry.AccessLevel;
import org.opengts.db.tables.*;
import org.opengts.war.tools.*;
import org.opengts.war.track.*;
public class SysAdminAccounts
extends WebPageAdaptor
implements Constants
{
    // ------------------------------------------------------------------------
    // Parameters

    // forms -- HTML form names used on this page
    public static final String FORM_ACCOUNT_SELECT = "SysAdminSelect";
    public static final String FORM_ACCOUNT_EDIT = "SysAdminEdit";
    public static final String FORM_ACCOUNT_NEW = "SysAdminNew";

    // commands -- page command names (see writePage command dispatch)
    public static final String COMMAND_INFO_UPDATE = "update";
    public static final String COMMAND_INFO_SELECT = "select";
    public static final String COMMAND_INFO_NEW = "new";

    // submit -- request parameter names for the submit buttons
    public static final String PARM_SUBMIT_EDIT = "a_subedit";
    public static final String PARM_SUBMIT_VIEW = "a_subview";
    public static final String PARM_SUBMIT_CHG = "a_subchg";
    public static final String PARM_SUBMIT_DEL = "a_subdel";
    public static final String PARM_SUBMIT_NEW = "a_subnew";
    public static final String PARM_SUBMIT_LOGIN = "a_sublogin";

    // buttons -- request parameter names for the cancel/back buttons
    public static final String PARM_BUTTON_CANCEL = "d_btncan";
    public static final String PARM_BUTTON_BACK = "d_btnbak";

    // parameters -- request parameter names for account field values
    public static final String PARM_NEW_NAME = "s_newname";
    public static final String PARM_ACCOUNT_SELECT = "s_account";
    public static final String PARM_ACCT_ID = "a_id";
    public static final String PARM_ACCT_DESC = "a_desc";
    public static final String PARM_ACCT_PASSWORD = "a_pass";
    public static final String PARM_ACCT_ACTIVE = "a_active";
    public static final String PARM_ACCT_PRIVLABEL = "a_privlbl";
    public static final String PARM_ACCT_EXPIRE = "a_expire";
    public static final String PARM_ACCT_TEMP_PROPS = "a_tmpProps";

    // ------------------------------------------------------------------------
    public static final String PROP_privateLabelAdminAccounts = "privateLabelAdminAccounts";

    // ------------------------------------------------------------------------
    // password holder/indicator
    // PASSWORD_HOLDER is the masked value displayed in the password field; if the
    // submitted value equals it, the password is treated as "unchanged".
    private static final String PASSWORD_HOLDER = "**********";
    private static final char PASSWORD_INVALID_CHAR = '*'; // password can't have all '*'
// ------------------------------------------------------------------------
// WebPage interface
    /**
     * Page constructor: registers this page's base URI, page name, navigation
     * trail, and login requirement with the WebPageAdaptor framework.
     */
    public SysAdminAccounts()
    {
        this.setBaseURI(Track.BASE_URI());
        this.setPageName(PAGE_SYSADMIN_ACCOUNTS);
        this.setPageNavigation(new String[] { PAGE_LOGIN, PAGE_MENU_TOP });
        this.setLoginRequired(true);
        //this.setCssDirectory("extra/css");
    }
// ------------------------------------------------------------------------
//public void setCssDirectory(String cssDir)
//{
// super.setCssDirectory(cssDir);
// Print.logStackTrace("CSS Dir: " + cssDir);
//}
// ------------------------------------------------------------------------
    /**
     * Returns the menu group under which this page appears (the "Admin" menu).
     * @param reqState the current request state (unused here)
     */
    public String getMenuName(RequestProperties reqState)
    {
        return MenuBar.MENU_ADMIN;
    }
public String getMenuDescription(RequestProperties reqState, String parentMenuName)
{
PrivateLabel privLabel = reqState.getPrivateLabel();
I18N i18n = privLabel.getI18N(SysAdminAccounts.class);
return super._getMenuDescription(reqState,i18n.getString("SysAdminAccounts.editMenuDesc","System Accounts"));
}
public String getMenuHelp(RequestProperties reqState, String parentMenuName)
{
PrivateLabel privLabel = reqState.getPrivateLabel();
I18N i18n = privLabel.getI18N(SysAdminAccounts.class);
return super._getMenuHelp(reqState,i18n.getString("SysAdminAccounts.editMenuHelp","Create/Delete/Edit/View System Accounts"));
}
// ------------------------------------------------------------------------
public String getNavigationDescription(RequestProperties reqState)
{
PrivateLabel privLabel = reqState.getPrivateLabel();
I18N i18n = privLabel.getI18N(SysAdminAccounts.class);
return super._getNavigationDescription(reqState,i18n.getString("SysAdminAccounts.navDesc","System Accounts"));
}
public String getNavigationTab(RequestProperties reqState)
{
PrivateLabel privLabel = reqState.getPrivateLabel();
I18N i18n = privLabel.getI18N(SysAdminAccounts.class);
return i18n.getString("SysAdminAccounts.navTab","System Accounts");
}
// ------------------------------------------------------------------------
    /* true if this page is for the system admin only */
    public boolean systemAdminOnly()
    {
        return true;
    }
// ------------------------------------------------------------------------
private static String filter(String s)
{
return StringTools.isBlank(s)? " " : StringTools.htmlFilterText(s);
}
private boolean isValidPassword(String pwd)
{
if (StringTools.isBlank(pwd)) {
return true; // user is not allowed to log-in
} else
if (pwd.equals(PASSWORD_HOLDER)) {
return false;
} else {
for (int i = 0; i < pwd.length(); i++) {
if (pwd.charAt(i) != PASSWORD_INVALID_CHAR) {
return true;
}
}
return false; // all '*'
}
}
    /**
     * Scans all active HTTP sessions in this servlet context and returns a map of
     * accountID to the list of userIDs currently logged in under that account.
     * The map is populated as a side effect of the session-counting callback.
     * @param reqState the current request state (used to obtain the servlet context)
     * @return map of accountID to logged-in userIDs; empty if no session is available
     */
    private Map<String,java.util.List<String>> getLoggedInAccounts(RequestProperties reqState)
    {
        final Map<String,java.util.List<String>> acctLoginMap = new HashMap<String,java.util.List<String>>();
        HttpSession session = AttributeTools.getSession(reqState.getHttpServletRequest());
        if (session != null) {
            // GetSessionCount invokes the filter for every tracked session; we use the
            // callback to accumulate accountID/userID pairs into acctLoginMap.
            int count = RTConfigContextListener.GetSessionCount(session.getServletContext(),
                new RTConfigContextListener.HttpSessionFilter() {
                    public boolean countSession(HttpSession session) {
                        String acctID = (String)AttributeTools.getSessionAttribute(session,Constants.PARM_ACCOUNT,null);
                        if (!StringTools.isBlank(acctID)) {
                            // lazily create the per-account user list
                            java.util.List<String> userList = acctLoginMap.get(acctID);
                            if (userList == null) {
                                userList = new Vector<String>();
                                acctLoginMap.put(acctID,userList);
                            }
                            String userID = (String)AttributeTools.getSessionAttribute(session,Constants.PARM_USER,null);
                            if (!StringTools.isBlank(userID)) {
                                userList.add(userID);
                            } else {
                                // blank userID is NOT added to the list; "?" is used only for the log line below
                                userID = "?";
                            }
                            Print.logInfo("Logged-in User: %s,%s", acctID, userID);
                            return true; // count this session
                        }
                        return false; // session has no logged-in account
                    }
                }
            );
        }
        return acctLoginMap;
    }
// ------------------------------------------------------------------------
public void writePage(
final RequestProperties reqState,
String pageMsg)
throws IOException
{
final HttpServletRequest request = reqState.getHttpServletRequest();
final PrivateLabel privLabel = reqState.getPrivateLabel(); // never null
final String dtFormat = privLabel.getDateFormat() + " " + privLabel.getTimeFormat();
final I18N i18n = privLabel.getI18N(SysAdminAccounts.class);
final Locale locale = reqState.getLocale();
final Account currAcct = reqState.getCurrentAccount(); // never null
final String currAcctID = reqState.getCurrentAccountID();
final String currAcctTZID = currAcct.getTimeZone();
final TimeZone currAcctTZ = currAcct.getTimeZone(null);
final boolean isSysAdmin = Account.isSystemAdmin(currAcct); // all access
final String pageName = this.getPageName();
final boolean accountProps = privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_accountProperties,false);
String m = pageMsg;
boolean error = false;
/* "PrivateLabel Admin" handling */
final String currPrivateLabel = currAcct.getPrivateLabelName();
BasicPrivateLabel adminBPL = !isSysAdmin? BasicPrivateLabelLoader.getPrivateLabelForSysAdminAccountID(currAcctID) : null;
final boolean isPrivateAdmin = (adminBPL != null) && adminBPL.getDomainName().equals(currPrivateLabel);
final boolean isAdmin = isSysAdmin || isPrivateAdmin; // must be 'true', otherwise no access
/* invalid user? */
if (!isAdmin) {
Print.logWarn("Current user is neither a SysAdmin, nor PrivateLabelAdmin! ==> " + currAcctID);
// access will be restricted below
}
/* list of accounts */
Collection<String> accountList = null;
if (isAdmin) {
try {
accountList = Account.getAuthorizedAccounts(currAcct);
} catch (DBException dbe) {
Print.logError("Error reading authorized Accounts");
}
}
if (accountList == null) {
accountList = new Vector<String>();
accountList.add(currAcctID);
}
// 'accountList' has at least one element in it.
/* selected account-id */
String selAccountID = AttributeTools.getRequestString(reqState.getHttpServletRequest(), PARM_ACCOUNT_SELECT, "");
if (StringTools.isBlank(selAccountID)) {
selAccountID = ListTools.itemAt(accountList, 0, "");
}
if (!isAdmin && !ListTools.contains(accountList,selAccountID)) {
// not an admin account, and selected Account was not found in the list of allowed accounts
selAccountID = currAcctID;
}
final boolean isCurrentAccountSelected = selAccountID.equals(currAcctID);
/* account db */
Account selAccount = null;
try {
selAccount = !StringTools.isBlank(selAccountID)? Account.getAccount(selAccountID) : null; // may still be null
} catch (DBException dbe) {
// ignore
}
/* command */
String accountCmd = reqState.getCommandName();
boolean listAccounts = false;
boolean updateAccount = accountCmd.equals(COMMAND_INFO_UPDATE);
boolean selectAccount = accountCmd.equals(COMMAND_INFO_SELECT);
boolean newAccount = accountCmd.equals(COMMAND_INFO_NEW);
boolean deleteAccount = false;
boolean editAccount = false;
boolean viewAccount = false;
boolean loginAccount = false;
/* submit buttons */
String submitEdit = AttributeTools.getRequestString(request, PARM_SUBMIT_EDIT , "");
String submitView = AttributeTools.getRequestString(request, PARM_SUBMIT_VIEW , "");
String submitChange = AttributeTools.getRequestString(request, PARM_SUBMIT_CHG , "");
String submitNew = AttributeTools.getRequestString(request, PARM_SUBMIT_NEW , "");
String submitDelete = AttributeTools.getRequestString(request, PARM_SUBMIT_DEL , "");
String submitLogin = AttributeTools.getRequestString(request, PARM_SUBMIT_LOGIN, "");
/* CACHE_ACL: ACL allow edit/view */
boolean allowNew = isAdmin;
boolean allowDelete = allowNew; // 'delete' allowed if 'new' allowed
boolean allowEdit = isAdmin;
boolean allowView = true;
boolean allowLogin = allowEdit && privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_allowAccountLogin,false);
/* sub-command */
String newAccountID = null;
if (newAccount) {
if (!allowNew) {
newAccount = false; // not authorized
} else {
HttpServletRequest httpReq = reqState.getHttpServletRequest();
newAccountID = AttributeTools.getRequestString(httpReq,PARM_NEW_NAME,"").trim();
newAccountID = newAccountID.toLowerCase();
if (StringTools.isBlank(newAccountID)) {
m = i18n.getString("SysAdminAccounts.enterNewAccount","Please enter a new Account name.");
error = true;
newAccount = false;
} else
if (!WebPageAdaptor.isValidID(reqState, PrivateLabel.PROP_SysAdminAccounts_validateNewIDs, newAccountID)) {
m = i18n.getString("SysAdminAccounts.invalidIDChar","ID contains invalid characters");
error = true;
newAccount = false;
}
}
} else
if (updateAccount) {
if (!allowEdit) {
// not authorized to update users
updateAccount = false;
} else
if (!SubmitMatch(submitChange,i18n.getString("SysAdminAccounts.change","Change"))) {
updateAccount = false;
}
} else
if (selectAccount) {
if (SubmitMatch(submitLogin,i18n.getString("SysAdminAccounts.login","Login"))) {
if (allowLogin) {
if (selAccount == null) {
m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account");
error = true;
listAccounts = true;
} else
if (isCurrentAccountSelected) {
m = i18n.getString("SysAdminAccounts.alreadyLoggedInToAccount","Already Logged-In to this Account");
error = true;
listAccounts = true;
} else {
loginAccount = true;
}
}
} else
if (SubmitMatch(submitDelete,i18n.getString("SysAdminAccounts.delete","Delete"))) {
if (allowDelete) {
if (selAccount == null) {
m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account");
error = true;
listAccounts = true;
} else
if (isCurrentAccountSelected) {
m = i18n.getString("SysAdminAccounts.cannotDeleteCurrentAccount","Cannot delete current logged-in Account");
error = true;
listAccounts = true;
} else {
deleteAccount = true;
}
}
} else
if (SubmitMatch(submitEdit,i18n.getString("SysAdminAccounts.edit","Edit"))) {
if (allowEdit) {
if (selAccount == null) {
m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account");
error = true;
listAccounts = true;
} else {
editAccount = !isCurrentAccountSelected;
viewAccount = true;
}
}
} else
if (SubmitMatch(submitView,i18n.getString("SysAdminAccounts.view","View"))) {
if (allowView) {
if (selAccount == null) {
m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account");
error = true;
listAccounts = true;
} else {
viewAccount = true;
}
}
} else {
listAccounts = true;
}
} else {
listAccounts = true;
}
/* login to account? */
if (loginAccount) {
if (selAccount == null) {
m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account");
error = true;
} else {
try {
String loginAcctID = selAccount.getAccountID();
String loginUserID = User.getAdminUserID();
User loginUser = User.getUser(selAccount, loginUserID);
String loginPasswd = Account.decodePassword((loginUser != null)? loginUser.getPassword() : selAccount.getPassword());
String reloginKey = privLabel.getStringProperty(PrivateLabel.PROP_SysAdminAccounts_reloginPasscode,"");
URIArg url = new URIArg(reqState.getBaseURI());
url.addArg(Constants.PARM_ACCOUNT , loginAcctID);
url.addArg(Constants.PARM_USER , loginUserID);
url.addArg(Constants.PARM_PASSWORD, loginPasswd);
url.addArg(CommonServlet.PARM_PAGE, Constants.PAGE_MENU_TOP);
url.addArg(Constants.PARM_SYSADMIN_RELOGIN, reloginKey);
Print.logInfo("ReLogin URL: " + url);
AttributeTools.clearSessionAttributes(request); // invalidate/logout
HttpServletResponse response = reqState.getHttpServletResponse();
RequestDispatcher rd = request.getRequestDispatcher(url.toString());
rd.forward(request, response);
return;
} catch (Throwable th) {
m = i18n.getString("SysAdminAccounts.errorDuringLoginDispatch","Error ocurred during dispatch to login");
error = true;
}
}
listAccounts = true;
}
/* delete account? */
if (deleteAccount) {
if (selAccount == null) {
m = i18n.getString("SysAdminAccounts.pleaseSelectAccount","Please select an Account");
error = true;
} else {
try {
Account.Key accountKey = (Account.Key)selAccount.getRecordKey();
Print.logWarn("Deleting Account: " + accountKey);
accountKey.delete(true); // will also delete dependencies
accountList = Account.getAuthorizedAccounts(currAcct);
selAccountID = ListTools.itemAt(accountList, 0, "");
try {
selAccount = !selAccountID.equals("")? Account.getAccount(selAccountID) : null; // may still be null
} catch (DBException dbe) {
selAccount = null;
}
} catch (DBException dbe) {
m = i18n.getString("SysAdminAccounts.errorDelete","Internal error deleting Account");
error = true;
}
}
listAccounts = true;
}
/* new account? */
if (newAccount) {
boolean createAccountOK = true;
try {
if (Account.exists(newAccountID)) {
m = i18n.getString("SysAdminAccounts.alreadyExists","This Account already exists");
error = true;
createAccountOK = false;
}
} catch (DBException dbe) {
m = i18n.getString("SysAdminAccounts.accountError","Error checking account");
error = true;
createAccountOK = false;
}
if (createAccountOK) {
try {
String password = null;
String pLblName = isPrivateAdmin? currPrivateLabel : null;
Account account = Account.createNewAccount(newAccountID, password, pLblName); // already saved
accountList = Account.getAuthorizedAccounts(currAcct);
selAccount = account;
selAccountID = account.getAccountID();
m = i18n.getString("SysAdminAccounts.createdAccount","New Account has been created");
} catch (DBAlreadyExistsException dbaee) {
m = i18n.getString("SysAdminAccounts.alreadyExists","This Account already exists");
error = true;
} catch (DBException dbe) {
m = i18n.getString("SysAdminAccounts.errorCreate","Internal error creating Account");
error = true;
}
}
listAccounts = true;
}
/* change/update the account info? */
if (updateAccount) {
if (selAccount == null) {
m = i18n.getString("SysAdminAccounts.noAccounts","There are currently no defined Accounts.");
} else {
String acctDesc = AttributeTools.getRequestString(request, PARM_ACCT_DESC , "");
String acctActive = AttributeTools.getRequestString(request, PARM_ACCT_ACTIVE , "");
String acctPassword = AttributeTools.getRequestString(request, PARM_ACCT_PASSWORD , "");
String acctPrivLabel = AttributeTools.getRequestString(request, PARM_ACCT_PRIVLABEL , "<n/a>");
User adminUser = null;
listAccounts = true;
// update
try {
boolean saveOK = true;
// active
if (isCurrentAccountSelected) {
if (!selAccount.getIsActive()) {
selAccount.setIsActive(true);
}
} else {
boolean acctActv = ComboOption.parseYesNoText(locale, acctActive, true);
if (selAccount.getIsActive() != acctActv) {
selAccount.setIsActive(acctActv);
}
}
// password
if (!isCurrentAccountSelected) {
if (acctPassword.equals(PASSWORD_HOLDER)) {
// password not entered
} else
if (this.isValidPassword(acctPassword)) {
selAccount.setPassword(acctPassword);
try {
adminUser = User.getUser(selAccount, User.getAdminUserID());
if (adminUser != null) {
adminUser.setPassword(acctPassword);
}
} catch (DBException dbe) {
// ignore
}
} else {
m = i18n.getString("SysAdminAccounts.pleaseEnterValidPassword","Please enter a valid password");
error = true;
saveOK = false;
editAccount = true;
listAccounts = false;
}
}
// description
if (!acctDesc.equals("")) {
selAccount.setDescription(acctDesc);
}
// private label name
if (!acctPrivLabel.equals("<n/a>")) {
selAccount.setPrivateLabelName(acctPrivLabel);
}
// save
if (saveOK) {
if (adminUser != null) {
try {
adminUser.update(User.FLD_password);
} catch (DBException dbe) {
Print.logError("Error saving 'admin' User password", dbe);
}
}
selAccount.save();
if (accountProps) {
String acctTempProps = AttributeTools.getRequestString(request, PARM_ACCT_TEMP_PROPS, "");
try {
acctTempProps = (new RTProperties(acctTempProps.replace('\n',' '))).toString();
Resource resource = Resource.getResource(selAccount, Resource.RESID_TemporaryProperties);
if (StringTools.isBlank(acctTempProps)) {
if ((resource != null) && !StringTools.isBlank(resource.getProperties())) {
resource.setProperties("");
resource.update(Resource.FLD_properties);
} else {
// no change
}
} else {
if (resource != null) {
if (!acctTempProps.equals(resource.getProperties())) {
resource.setProperties(acctTempProps);
resource.update(Resource.FLD_properties);
} else {
// no change
}
} else {
resource = Resource.getResource(selAccount, Resource.RESID_TemporaryProperties, true);
resource.setType(Resource.TYPE_RTPROPS);
resource.setProperties(acctTempProps);
resource.save();
}
}
} catch (DBException dbe) {
Print.logException("Unable to save Resource: " + selAccount.getAccountID(), dbe);
}
} // accountProps
m = i18n.getString("SysAdminAccounts.accountUpdated","Account information updated");
} else {
// should stay on this page
editAccount = !isCurrentAccountSelected;
listAccounts = false;
}
} catch (Throwable t) {
m = i18n.getString("SysAdminAccounts.errorUpdate","Internal error updating Account");
error = true;
}
}
}
/* Style */
HTMLOutput HTML_CSS = new HTMLOutput() {
public void write(PrintWriter out) throws IOException {
String cssDir = SysAdminAccounts.this.getCssDirectory();
WebPageAdaptor.writeCssLink(out, reqState, "SysAdminAccounts.css", cssDir);
}
};
/* JavaScript */
HTMLOutput HTML_JS = new HTMLOutput() {
public void write(PrintWriter out) throws IOException {
MenuBar.writeJavaScript(out, pageName, reqState);
JavaScriptTools.writeJSInclude(out, JavaScriptTools.qualifyJSFileRef(SORTTABLE_JS));
}
};
/* Content */
final Collection<String> _accountList = accountList;
final String _selAccountID = selAccountID;
final Account _selAccount = selAccount;
final boolean _allowEdit = allowEdit;
final boolean _allowView = allowView;
final boolean _allowNew = allowNew;
final boolean _allowDelete = allowDelete;
final boolean _allowLogin = allowLogin;
final boolean _editAccount = _allowEdit && editAccount;
final boolean _viewAccount = _editAccount || viewAccount;
final boolean _listAccounts = listAccounts;
// Renders the main content frame.  Two modes (chosen by _listAccounts):
//  - account-selection table with View/Edit/Login/Delete buttons and an
//    optional "create new account" form; or
//  - a single-account view/edit form for the selected account.
HTMLOutput HTML_CONTENT = new HTMLOutput(CommonServlet.CSS_CONTENT_FRAME, m) {
    public void write(PrintWriter out) throws IOException {
        String pageName = SysAdminAccounts.this.getPageName();
        // frame header
        //String menuURL = EncodeMakeURL(reqState,Track.BASE_URI(),PAGE_MENU_TOP);
        String menuURL = privLabel.getWebPageURL(reqState, PAGE_MENU_TOP);
        String editURL = SysAdminAccounts.this.encodePageURL(reqState);//,Track.BASE_URI());
        String selectURL = SysAdminAccounts.this.encodePageURL(reqState);//,Track.BASE_URI());
        String newURL = SysAdminAccounts.this.encodePageURL(reqState);//,Track.BASE_URI());
        String frameTitle = _allowNew?
            i18n.getString("SysAdminAccounts.createDeleteAccounts","Create/Delete/Edit Accounts") :
            i18n.getString("SysAdminAccounts.viewEditAccounts","View/Edit Accounts");
        out.write("<span class='"+CommonServlet.CSS_MENU_TITLE+"'>"+frameTitle+"</span><br/>\n");
        out.write("<hr>\n");
        // account selection table (Select, Account ID, Account Description)
        if (_listAccounts) {
            // account selection table (Select, Account ID, Account Description)
            out.write("<h1 class='"+CommonServlet.CSS_ADMIN_SELECT_TITLE+"'>"+i18n.getString("SysAdminAccounts.selectAccount","Select an Account")+":</h1>\n");
            out.write("<div style='margin-left:25px;'>\n");
            out.write("<form name='"+FORM_ACCOUNT_SELECT+"' method='post' action='"+selectURL+"' target='_top'>");
            out.write("<input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_SELECT+"'/>");
            out.write("<table class='"+CommonServlet.CSS_ADMIN_SELECT_TABLE+"' cellspacing=0 cellpadding=0 border=0>\n");
            out.write(" <thead>\n");
            out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_ROW+"'>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL_SEL+"' nowrap>"+filter(i18n.getString("SysAdminAccounts.select","Select"))+"</th>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.accountID","Account ID"))+"</th>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.accountName","Account Description"))+"</th>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.active","Active"))+"</th>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.deviceCount","Device\nCount"))+"</th>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.privateLabel","PrivateLabel\nName"))+"</th>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.creationDate","Created\n{0}",currAcctTZID))+"</th>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.lastLogin","Last Login\n{0}",currAcctTZID))+"</th>\n");
            out.write(" <th class='"+CommonServlet.CSS_ADMIN_TABLE_HEADER_COL +"' nowrap>"+filter(i18n.getString("SysAdminAccounts.loggedIn","Logged\nIn Now"))+"</th>\n");
            out.write(" </tr>\n");
            out.write(" </thead>\n");
            out.write(" <tbody>\n");
            Map<String,java.util.List<String>> loggedInAccounts = SysAdminAccounts.this.getLoggedInAccounts(reqState);
            // one table row per account in the (pre-filtered) account list
            for (int u = 0; u < ListTools.size(_accountList); u++) {
                // get Account
                Account acct = null;
                try {
                    acct = Account.getAccount(ListTools.itemAt(_accountList,u,""));
                } catch (DBException dbe) {
                    // ignored: account is skipped below when null
                }
                if (acct == null) {
                    continue;
                }
                String acctID = acct.getAccountID();
                String acctDesc = acct.getDescription();
                String prvLabelName = acct.getPrivateLabelName();
                //if (!prvLabelName.equals("*")) { continue; } // <-- debug/testing
                // odd/even row
                boolean oddRow = ((u & 1) == 0); // odd row index starts at '0'
                if (oddRow) {
                    out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_ROW_ODD+"'>\n");
                } else {
                    out.write(" <tr class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_ROW_EVEN+"'>\n");
                }
                // display account info
                String active = ComboOption.getYesNoText(locale,acct.isActive());
                long creationTime = acct.getCreationTime();
                String creationStr = (creationTime > 0L)? new DateTime(creationTime,currAcctTZ).format(dtFormat) : i18n.getString("SysAdminAccounts.unknown","unknown");
                long lastLoginTime = acct.getLastLoginTime();
                long deltaTimeSec = DateTime.getCurrentTimeSec() - lastLoginTime;
                String lastLoginStr = (lastLoginTime > 0L)? new DateTime(lastLoginTime,currAcctTZ).format(dtFormat) : i18n.getString("SysAdminAccounts.never","never");
                String lastLoginCls = oddRow? "normalLoginDate_odd" : "normalLoginDate_even";
                // CSS class encodes login recency (color-coded in SysAdminAccounts.css)
                if (deltaTimeSec <= DateTime.DaySeconds(1)) {
                    // has logged in within the last 24 hours (green)
                    lastLoginCls = oddRow? "recentLoginDate_odd" : "recentLoginDate_even";
                } else
                if (deltaTimeSec <= DateTime.DaySeconds(7)) {
                    // has logged in within the last week (black)
                    lastLoginCls = oddRow? "normalLoginDate_odd" : "normalLoginDate_even";
                } else
                if (deltaTimeSec <= DateTime.DaySeconds(21)) {
                    // has logged in within the last 3 weeks (yellow)
                    lastLoginCls = oddRow? "oldLoginDate_odd" : "oldLoginDate_even";
                } else {
                    // logged in more than 3 weeks ago (red)
                    lastLoginCls = oddRow? "veryOldLoginDate_odd" : "veryOldLoginDate_even"; // (196, 54, 54)
                }
                String deviceCountS = String.valueOf(acct.getDeviceCount());
                int loginCount = 0;
                String loginCountS = "--"; // ComboOption.getYesNoText(locale,false);
                if (loggedInAccounts.containsKey(acctID)) {
                    java.util.List<String> userList = loggedInAccounts.get(acctID);
                    loginCount = userList.size();
                    loginCountS = "(" + loginCount + ")";
                }
                //if (prvLabelName.equals("*")) { prvLabelName = "default"; }
                String checked = _selAccountID.equals(acctID)? " checked" : "";
                String viewStyle = currAcctID.equals(acctID)? "background-color:#E5E5E5;" : "background-color:#FFFFFF;";
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL_SEL+"' "+SORTTABLE_SORTKEY+"='"+u+"' style='"+viewStyle+"'><input type='radio' name='"+PARM_ACCOUNT_SELECT+"' id='"+acctID+"' value='"+acctID+"' "+checked+"></td>\n");
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap><label for='"+acctID+"'>"+filter(acctID)+"</label></td>\n");
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+filter(acctDesc)+"</td>\n");
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+filter(active)+"</td>\n");
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+filter(deviceCountS)+"</td>\n");
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' nowrap>"+filter(prvLabelName)+"</td>\n");
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' "+SORTTABLE_SORTKEY+"='"+creationTime +"' nowrap>"+filter(creationStr)+"</td>\n");
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' "+SORTTABLE_SORTKEY+"='"+lastLoginTime+"' nowrap><span class='"+lastLoginCls+"'>"+filter(lastLoginStr)+"</span></td>\n");
                out.write(" <td class='"+CommonServlet.CSS_ADMIN_TABLE_BODY_COL +"' "+SORTTABLE_SORTKEY+"='"+loginCount +"' nowrap>"+filter(loginCountS)+"</td>\n");
                // end of table row
                out.write(" </tr>\n");
            }
            out.write(" </tbody>\n");
            out.write("</table>\n");
            // action buttons beneath the selection table
            out.write("<table cellpadding='0' cellspacing='0' border='0' style='width:95%; margin-top:5px; margin-left:5px; margin-bottom:5px;'>\n");
            out.write("<tr>\n");
            if (_allowView) {
                out.write("<td style='padding-left:5px;'>");
                out.write("<input type='submit' name='"+PARM_SUBMIT_VIEW+"' value='"+i18n.getString("SysAdminAccounts.view","View")+"'>");
                out.write("</td>\n");
            }
            if (_allowEdit) {
                out.write("<td style='padding-left:5px;'>");
                out.write("<input type='submit' name='"+PARM_SUBMIT_EDIT+"' value='"+i18n.getString("SysAdminAccounts.edit","Edit")+"'>");
                out.write("</td>\n");
            }
            if (_allowLogin) {
                out.write("<td style='padding-left:30px;'>");
                out.write("<input type='submit' name='"+PARM_SUBMIT_LOGIN+"' value='"+i18n.getString("SysAdminAccounts.login","Login")+"' "+Onclick_ConfirmLogin(locale)+">");
                out.write("</td>\n");
            }
            out.write("<td style='width:100%; text-align:right; padding-right:10px;'>");
            if (_allowDelete) {
                out.write("<input type='submit' name='"+PARM_SUBMIT_DEL+"' value='"+i18n.getString("SysAdminAccounts.delete","Delete")+"' "+Onclick_ConfirmDelete(locale)+">");
            } else {
                out.write(" ");
            }
            out.write("</td>\n");
            out.write("</tr>\n");
            out.write("</table>\n");
            out.write("</form>\n");
            out.write("</div>\n");
            out.write("<hr>\n");
            /* new Account */
            if (_allowNew) {
                out.write("<h1 class='"+CommonServlet.CSS_ADMIN_SELECT_TITLE+"'>"+i18n.getString("SysAdminAccounts.createNewAccount","Create a new Account")+":</h1>\n");
                out.write("<div style='margin-top:5px; margin-left:5px; margin-bottom:5px;'>\n");
                out.write("<form name='"+FORM_ACCOUNT_NEW+"' method='post' action='"+newURL+"' target='_top'>");
                out.write(" <input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_NEW+"'/>");
                out.write(i18n.getString("SysAdminAccounts.accountID","Account ID")+": <input type='text' name='"+PARM_NEW_NAME+"' value='' size='32' maxlength='32'><br>\n");
                out.write(" <input type='submit' name='"+PARM_SUBMIT_NEW+"' value='"+i18n.getString("SysAdminAccounts.new","New")+"' style='margin-top:5px; margin-left:10px;'>\n");
                out.write("</form>\n");
                out.write("</div>\n");
                out.write("<hr>\n");
            }
        } else {
            // user view/edit form
            /* start of form */
            out.write("<form name='"+FORM_ACCOUNT_EDIT+"' method='post' action='"+editURL+"' target='_top'>\n");
            out.write(" <input type='hidden' name='"+PARM_COMMAND+"' value='"+COMMAND_INFO_UPDATE+"'/>\n");
            /* password */
            // show the real password only when explicitly enabled via PrivateLabel property;
            // otherwise a placeholder is displayed
            String password = PASSWORD_HOLDER;
            boolean showPass = privLabel.getBooleanProperty(PrivateLabel.PROP_SysAdminAccounts_showPasswords,false);
            if (showPass && (_selAccount != null)) {
                try {
                    User adminUser = User.getUser(_selAccount, User.getAdminUserID());
                    if (adminUser != null) {
                        password = Account.decodePassword(adminUser.getPassword());
                    } else {
                        password = Account.decodePassword(_selAccount.getPassword());
                    }
                } catch (DBException dbe) {
                    // fall back to the account-level password on DB error
                    password = Account.decodePassword(_selAccount.getPassword());
                }
            }
            password = StringTools.htmlFilterValue(password);
            /* Account fields */
            ComboOption acctActive = ComboOption.getYesNoOption(locale, ((_selAccount != null) && _selAccount.isActive()));
            String acctDesc = (_selAccount!=null)?_selAccount.getDescription() :"";
            String acctPrivLbl = (_selAccount!=null)?_selAccount.getPrivateLabelName() : "";
            boolean editPrvLabel = _editAccount && isSysAdmin; // only editable if sys-admin
            ComboMap privLblList = null;
            if (editPrvLabel) {
                // editable ComboMap (isPrivateAdmin vs. isSysAdmin)
                privLblList = isSysAdmin?
                    new ComboMap(BasicPrivateLabelLoader.getPrivateLabelNames(true)) :
                    new ComboMap();
                if (!ListTools.containsKey(privLblList, acctPrivLbl)) {
                    privLblList.insert(acctPrivLbl);
                }
                if (isSysAdmin) {
                    // sys-admin may also select "" (unassigned) or "*" (wildcard/default)
                    if (!ListTools.containsKey(privLblList, "")) {
                        privLblList.insert("");
                    }
                    if (!ListTools.containsKey(privLblList, "*")) {
                        privLblList.insert("*");
                    }
                }
            } else {
                // non-editable ComboMap
                privLblList = new ComboMap();
                privLblList.insert(acctPrivLbl);
            }
            out.println("<table class='"+CommonServlet.CSS_ADMIN_VIEW_TABLE+"' cellspacing='0' callpadding='0' border='0'>");
            out.println(FormRow_TextField(PARM_ACCOUNT_SELECT , false , i18n.getString("SysAdminAccounts.accountID","Account ID")+":" , _selAccountID, 40, 40));
            out.println(FormRow_TextField(PARM_ACCT_DESC , _editAccount , i18n.getString("SysAdminAccounts.accountDesc","Account Description")+":" , acctDesc, 40, 40));
            out.println(FormRow_ComboBox (PARM_ACCT_ACTIVE , _editAccount , i18n.getString("SysAdminAccounts.active","Active")+":" , acctActive, ComboMap.getYesNoMap(locale), "", -1));
            out.println(FormRow_TextField(PARM_ACCT_PASSWORD , _editAccount , i18n.getString("SysAdminAccounts.password","Password")+":" , password, 20, 20));
            out.println(FormRow_ComboBox (PARM_ACCT_PRIVLABEL , editPrvLabel , i18n.getString("SysAdminAccounts.privateLabelName","PrivateLabel Name")+":" , acctPrivLbl, privLblList, "", -1));
            if (accountProps) {
                // optional free-form "temporary properties" resource attached to the account
                try {
                    String acctTempProps = "";
                    Resource resource = (_selAccount != null)? Resource.getResource(_selAccount, Resource.RESID_TemporaryProperties) : null;
                    if (resource != null) {
                        RTProperties resRtp = resource.getRTProperties();
                        acctTempProps = resRtp.toString(null, null, "");
                    }
                    out.println(FormRow_TextArea(PARM_ACCT_TEMP_PROPS, _editAccount , i18n.getString("SysAdminAccounts.accountProperties" ,"Account Properties")+":", acctTempProps, 7, 75));
                } catch (DBException dbe) {
                    Print.logError("Unable to read Account Resource: " + dbe);
                }
            }
            out.println("</table>");
            /* end of form */
            out.write("<hr>\n");
            out.write("<span style='padding-left:10px'> </span>\n");
            if (_editAccount) {
                out.write("<input type='submit' name='"+PARM_SUBMIT_CHG+"' value='"+i18n.getString("SysAdminAccounts.change","Change")+"'>\n");
                out.write("<span style='padding-left:10px'> </span>\n");
                out.write("<input type='button' name='"+PARM_BUTTON_CANCEL+"' value='"+i18n.getString("SysAdminAccounts.cancel","Cancel")+"' onclick=\"javascript:openURL('"+editURL+"','_top');\">\n");
            } else {
                out.write("<input type='button' name='"+PARM_BUTTON_BACK+"' value='"+i18n.getString("SysAdminAccounts.back","Back")+"' onclick=\"javascript:openURL('"+editURL+"','_top');\">\n");
            }
            out.write("</form>\n");
        }
    }
};
/* write frame */
String onload = error? JS_alert(true,m) : null;
CommonServlet.writePageFrame(
reqState,
onload,null, // onLoad/onUnload
HTML_CSS, // Style sheets
HTML_JS, // Javascript
null, // Navigation
HTML_CONTENT); // Content
}
/**
 * Returns an "onclick" attribute that asks the user to confirm before
 * logging in to the selected account.
 * @param locale the locale used to localize the confirmation message
 * @return an {@code onclick="return confirm('...');"} HTML attribute fragment
 */
protected String Onclick_ConfirmLogin(Locale locale)
{
    I18N i18n = I18N.getI18N(SysAdminAccounts.class, locale);
    String confirmLogin = i18n.getString("SysAdminAccounts.confirmLogin",
        "Are you sure you want to login to the selected Account?");
    // Escape backslashes/quotes so a localized message containing an
    // apostrophe (eg. French "l'utilisateur") or a double-quote does not
    // break the inline JavaScript string or the surrounding HTML attribute.
    String jsText = confirmLogin
        .replace("\\", "\\\\")
        .replace("'" , "\\'")
        .replace("\"", "&quot;");
    return "onclick=\"return confirm('"+jsText+"');\"";
}
// ------------------------------------------------------------------------
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.richcopy;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInsight.daemon.impl.HighlightInfoType;
import com.intellij.codeInsight.editorActions.CopyPastePostProcessor;
import com.intellij.codeInsight.editorActions.CopyPastePreProcessor;
import com.intellij.ide.highlighter.HighlighterFactory;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.FontPreferences;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.ex.MarkupIterator;
import com.intellij.openapi.editor.ex.MarkupModelEx;
import com.intellij.openapi.editor.ex.RangeHighlighterEx;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.HighlighterIterator;
import com.intellij.openapi.editor.impl.ComplementaryFontsRegistry;
import com.intellij.openapi.editor.impl.DocumentMarkupModel;
import com.intellij.openapi.editor.impl.FontInfo;
import com.intellij.openapi.editor.markup.HighlighterLayer;
import com.intellij.openapi.editor.markup.MarkupModel;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.editor.richcopy.model.SyntaxInfo;
import com.intellij.openapi.editor.richcopy.settings.RichCopySettings;
import com.intellij.openapi.editor.richcopy.view.HtmlTransferableData;
import com.intellij.openapi.editor.richcopy.view.RawTextWithMarkup;
import com.intellij.openapi.editor.richcopy.view.RtfTransferableData;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.psi.PsiFile;
import com.intellij.psi.TokenType;
import com.intellij.util.ObjectUtils;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.awt.*;
import java.util.*;
import java.util.List;
/**
* Generates text with markup (in RTF and HTML formats) for interaction via clipboard with third-party applications.
*
* Interoperability with the following applications was tested:
* MS Office 2010 (Word, PowerPoint, Outlook), OpenOffice (Writer, Impress), Gmail, Mac TextEdit, Mac Mail.
*/
public class TextWithMarkupProcessor extends CopyPastePostProcessor<RawTextWithMarkup> {
private static final Logger LOG = Logger.getInstance(TextWithMarkupProcessor.class);
private List<RawTextWithMarkup> myResult;
@NotNull
@Override
public List<RawTextWithMarkup> collectTransferableData(PsiFile file, Editor editor, int[] startOffsets, int[] endOffsets) {
  // Builds rich-text (HTML + RTF) transferable data for the current selection(s).
  // Returns an empty list when rich copy is disabled or on any internal failure,
  // so plain-text copy still works.
  if (!RichCopySettings.getInstance().isEnabled()) {
    return Collections.emptyList();
  }
  try {
    RichCopySettings settings = RichCopySettings.getInstance();
    List<Caret> carets = editor.getCaretModel().getAllCarets();
    Caret firstCaret = carets.get(0);
    final int indentSymbolsToStrip;
    final int firstLineStartOffset;
    // Common-indent stripping only applies to a single-caret selection.
    if (Registry.is("editor.richcopy.strip.indents") && carets.size() == 1) {
      Pair<Integer, Integer> p = calcIndentSymbolsToStrip(editor.getDocument(), firstCaret.getSelectionStart(), firstCaret.getSelectionEnd());
      firstLineStartOffset = p.first;
      indentSymbolsToStrip = p.second;
    }
    else {
      firstLineStartOffset = firstCaret.getSelectionStart();
      indentSymbolsToStrip = 0;
    }
    logInitial(editor, startOffsets, endOffsets, indentSymbolsToStrip, firstLineStartOffset);
    CharSequence text = editor.getDocument().getCharsSequence();
    EditorColorsScheme schemeToUse = settings.getColorsScheme(editor.getColorsScheme());
    EditorHighlighter highlighter = HighlighterFactory.createHighlighter(file.getViewProvider().getVirtualFile(),
                                                                        schemeToUse, file.getProject());
    highlighter.setText(text);
    MarkupModel markupModel = DocumentMarkupModel.forDocument(editor.getDocument(), file.getProject(), false);
    Context context = new Context(text, schemeToUse, indentSymbolsToStrip);
    int endOffset = 0;
    Caret prevCaret = null;
    // Process each caret's selection; Context keeps a running offset shift so the
    // concatenated output reads as one contiguous text.
    for (Caret caret : carets) {
      int caretSelectionStart = caret.getSelectionStart();
      int caretSelectionEnd = caret.getSelectionEnd();
      int startOffsetToUse;
      int additionalShift = 0;
      if (caret == firstCaret) {
        startOffsetToUse = firstLineStartOffset;
      }
      else {
        startOffsetToUse = caretSelectionStart;
        assert prevCaret != null;
        String prevCaretSelectedText = prevCaret.getSelectedText();
        // Block selection fills short lines by white spaces
        int fillStringLength = prevCaretSelectedText == null ? 0 : prevCaretSelectedText.length() - (prevCaret.getSelectionEnd() - prevCaret.getSelectionStart());
        context.addCharacter(endOffset + fillStringLength);
        additionalShift = fillStringLength + 1;
      }
      context.reset(endOffset - caretSelectionStart + additionalShift);
      endOffset = caretSelectionEnd;
      prevCaret = caret;
      if (endOffset <= startOffsetToUse) {
        // empty selection for this caret — nothing to iterate
        continue;
      }
      // Merge syntax-highlighter ranges with document markup (inspections etc.)
      MyMarkupIterator markupIterator = new MyMarkupIterator(text,
                                                             new CompositeRangeIterator(schemeToUse,
                                                                                        new HighlighterRangeIterator(highlighter, startOffsetToUse, endOffset),
                                                                                        new MarkupModelRangeIterator(markupModel, schemeToUse, startOffsetToUse, endOffset)),
                                                             schemeToUse);
      try {
        context.iterate(markupIterator, endOffset);
      }
      finally {
        markupIterator.dispose();
      }
    }
    SyntaxInfo syntaxInfo = context.finish();
    logSyntaxInfo(syntaxInfo);
    createResult(syntaxInfo, editor);
    return ObjectUtils.notNull(myResult, Collections.<RawTextWithMarkup>emptyList());
  }
  catch (Exception e) {
    // catching the exception so that the rest of copy/paste functionality can still work fine
    LOG.error(e);
  }
  return Collections.emptyList();
}
@Override
public void processTransferableData(Project project,
                                    Editor editor,
                                    RangeMarker bounds,
                                    int caretOffset,
                                    Ref<Boolean> indented,
                                    List<RawTextWithMarkup> values) {
  // Intentionally empty: this processor only produces data on copy;
  // nothing needs to happen when the data is pasted back.
}
/** Exposes the collected markup as both HTML and RTF transferable data. */
void createResult(SyntaxInfo syntaxInfo, Editor editor) {
  List<RawTextWithMarkup> result = new ArrayList<>(2);
  result.add(new HtmlTransferableData(syntaxInfo, EditorUtil.getTabSize(editor)));
  result.add(new RtfTransferableData(syntaxInfo));
  myResult = result;
}
/** Propagates the plain-text fallback into every collected transferable, then releases them. */
private void setRawText(String rawText) {
  List<RawTextWithMarkup> result = myResult;
  if (result == null) {
    return;
  }
  result.forEach(data -> data.setRawText(rawText));
  myResult = null;
}
private static void logInitial(@NotNull Editor editor,
                               @NotNull int[] startOffsets,
                               @NotNull int[] endOffsets,
                               int indentSymbolsToStrip,
                               int firstLineStartOffset)
{
  // Debug-only: dumps every selected region (expanded to full first/last lines)
  // before syntax-aware processing starts.  No-op unless debug logging is on.
  if (!LOG.isDebugEnabled()) {
    return;
  }
  StringBuilder buffer = new StringBuilder();
  Document document = editor.getDocument();
  CharSequence text = document.getCharsSequence();
  for (int i = 0; i < startOffsets.length; i++) {
    int start = startOffsets[i];
    int lineStart = document.getLineStartOffset(document.getLineNumber(start));
    int end = endOffsets[i];
    int lineEnd = document.getLineEndOffset(document.getLineNumber(end));
    buffer.append(" region #").append(i).append(": ").append(start).append('-').append(end).append(", text at range ")
        .append(lineStart).append('-').append(lineEnd).append(": \n'").append(text.subSequence(lineStart, lineEnd)).append("'\n");
  }
  // drop the trailing newline appended by the last region
  if (buffer.length() > 0) {
    buffer.setLength(buffer.length() - 1);
  }
  LOG.debug(String.format(
    "Preparing syntax-aware text. Given: %s selection, indent symbols to strip=%d, first line start offset=%d, selected text:%n%s",
    startOffsets.length > 1 ? "block" : "regular", indentSymbolsToStrip, firstLineStartOffset, buffer
  ));
}
/** Debug-only dump of the final syntax model. */
private static void logSyntaxInfo(@NotNull SyntaxInfo info) {
  if (!LOG.isDebugEnabled()) {
    return;
  }
  LOG.debug("Constructed syntax info: " + info);
}
// Computes the largest common leading-whitespace prefix (spaces/tabs) over all
// selected lines, so it can be stripped from the copied text, and the adjusted
// start offset for the first line.
// NOTE(review): when every line in the range is blank, maximumCommonIndent stays
// Integer.MAX_VALUE; firstLineStart + MAX_VALUE then overflows, which happens to
// collapse startOffsetToUse back to startOffset, and MAX_VALUE is returned as the
// strip count — presumably harmless for all-blank selections, but confirm.
private static Pair<Integer/* start offset to use */, Integer /* indent symbols to strip */> calcIndentSymbolsToStrip(
  @NotNull Document document, int startOffset, int endOffset)
{
  int startLine = document.getLineNumber(startOffset);
  int endLine = document.getLineNumber(endOffset);
  CharSequence text = document.getCharsSequence();
  int maximumCommonIndent = Integer.MAX_VALUE;
  int firstLineStart = startOffset;
  int firstLineEnd = startOffset;
  for (int line = startLine; line <= endLine; line++) {
    int lineStartOffset = document.getLineStartOffset(line);
    int lineEndOffset = document.getLineEndOffset(line);
    if (line == startLine) {
      firstLineStart = lineStartOffset;
      firstLineEnd = lineEndOffset;
    }
    // find the first non-whitespace character; scanning is capped by the current
    // common indent and by the selection end
    int nonWsOffset = lineEndOffset;
    for (int i = lineStartOffset; i < lineEndOffset && (i - lineStartOffset) < maximumCommonIndent && i < endOffset; i++) {
      char c = text.charAt(i);
      if (c != ' ' && c != '\t') {
        nonWsOffset = i;
        break;
      }
    }
    if (nonWsOffset >= lineEndOffset) {
      continue; // Blank line
    }
    int indent = nonWsOffset - lineStartOffset;
    maximumCommonIndent = Math.min(maximumCommonIndent, indent);
    if (maximumCommonIndent == 0) {
      // no common indent — nothing to strip, stop early
      break;
    }
  }
  int startOffsetToUse = Math.min(firstLineEnd, Math.max(startOffset, firstLineStart + maximumCommonIndent));
  return Pair.create(startOffsetToUse, maximumCommonIndent);
}
/**
 * Accumulates text segments and their style transitions (font, foreground,
 * background) into a {@link SyntaxInfo.Builder} while translating editor
 * offsets to output offsets via a running shift (multi-caret concatenation,
 * CRLF collapsing, indent stripping all adjust the shift).
 */
private static class Context {
  private final SyntaxInfo.Builder builder;
  @NotNull private final CharSequence myText;
  @NotNull private final Color myDefaultForeground;
  @NotNull private final Color myDefaultBackground;
  @Nullable private Color myBackground;       // currently active background, null = default
  @Nullable private Color myForeground;       // currently active foreground, null = default
  @Nullable private String myFontFamilyName;  // currently active physical font family
  private final int myIndentSymbolsToStrip;   // common indent to drop at each line start
  private int myFontStyle = -1;               // currently active font style, -1 = unset
  private int myStartOffset = -1;             // start of the pending (not yet emitted) text run
  private int myOffsetShift = 0;              // editor offset -> output offset delta
  private int myIndentSymbolsToStripAtCurrentLine;
  Context(@NotNull CharSequence charSequence, @NotNull EditorColorsScheme scheme, int indentSymbolsToStrip) {
    myText = charSequence;
    myDefaultForeground = scheme.getDefaultForeground();
    myDefaultBackground = scheme.getDefaultBackground();
    // Java assumes screen resolution of 72dpi when calculating font size in pixels. External applications are supposedly using correct
    // resolution, so we need to adjust font size for copied text to look the same in them.
    // (See https://docs.oracle.com/javase/7/docs/webnotes/tsg/TSG-Desktop/html/java2d.html#gdlwn)
    // Java on Mac is not affected by this issue.
    int javaFontSize = scheme.getEditorFontSize();
    float fontSize = SystemInfo.isMac || ApplicationManager.getApplication().isHeadlessEnvironment() ?
                     javaFontSize :
                     javaFontSize * 72f / Toolkit.getDefaultToolkit().getScreenResolution();
    builder = new SyntaxInfo.Builder(myDefaultForeground, myDefaultBackground, fontSize);
    myIndentSymbolsToStrip = indentSymbolsToStrip;
  }
  // Prepares for the next caret's selection; offsetShiftDelta re-bases editor
  // offsets onto the end of the output produced so far.
  public void reset(int offsetShiftDelta) {
    myStartOffset = -1;
    myOffsetShift += offsetShiftDelta;
    myIndentSymbolsToStripAtCurrentLine = 0;
  }
  // Walks styled segments up to endOffset, recording style changes and text runs.
  public void iterate(MyMarkupIterator iterator, int endOffset) {
    while (!iterator.atEnd()) {
      iterator.advance();
      int startOffset = iterator.getStartOffset();
      if (startOffset >= endOffset) {
        break;
      }
      if (myStartOffset < 0) {
        myStartOffset = startOffset;
      }
      // foreground/font changes are irrelevant for whitespace-only segments
      boolean whiteSpacesOnly = CharArrayUtil.isEmptyOrSpaces(myText, startOffset, iterator.getEndOffset());
      processBackground(startOffset, iterator.getBackgroundColor());
      if (!whiteSpacesOnly) {
        processForeground(startOffset, iterator.getForegroundColor());
        processFontFamilyName(startOffset, iterator.getFontFamilyName());
        processFontStyle(startOffset, iterator.getFontStyle());
      }
    }
    addTextIfPossible(endOffset);
  }
  private void processFontStyle(int startOffset, int fontStyle) {
    if (fontStyle != myFontStyle) {
      addTextIfPossible(startOffset);
      builder.addFontStyle(fontStyle);
      myFontStyle = fontStyle;
    }
  }
  private void processFontFamilyName(int startOffset, String fontName) {
    String fontFamilyName = FontMapper.getPhysicalFontName(fontName);
    if (!fontFamilyName.equals(myFontFamilyName)) {
      addTextIfPossible(startOffset);
      builder.addFontFamilyName(fontFamilyName);
      myFontFamilyName = fontFamilyName;
    }
  }
  private void processForeground(int startOffset, Color foreground) {
    if (myForeground == null && foreground != null) {
      addTextIfPossible(startOffset);
      myForeground = foreground;
      builder.addForeground(foreground);
    }
    else if (myForeground != null) {
      // null means "revert to the scheme default"
      Color c = foreground == null ? myDefaultForeground : foreground;
      if (!myForeground.equals(c)) {
        addTextIfPossible(startOffset);
        builder.addForeground(c);
        myForeground = c;
      }
    }
  }
  private void processBackground(int startOffset, Color background) {
    if (myBackground == null && background != null && !myDefaultBackground.equals(background)) {
      addTextIfPossible(startOffset);
      myBackground = background;
      builder.addBackground(background);
    }
    else if (myBackground != null) {
      // null means "revert to the scheme default"
      Color c = background == null ? myDefaultBackground : background;
      if (!myBackground.equals(c)) {
        addTextIfPossible(startOffset);
        builder.addBackground(c);
        myBackground = c;
      }
    }
  }
  // Emits the pending text run [myStartOffset, endOffset), splitting at line
  // breaks so indent stripping can be applied at the start of each line, and
  // collapsing CRLF into a single output character (hence myOffsetShift--).
  private void addTextIfPossible(int endOffset) {
    if (endOffset <= myStartOffset) {
      return;
    }
    for (int i = myStartOffset; i < endOffset; i++) {
      char c = myText.charAt(i);
      switch (c) {
        case '\r':
          if (i + 1 < myText.length() && myText.charAt(i + 1) == '\n') {
            myIndentSymbolsToStripAtCurrentLine = myIndentSymbolsToStrip;
            builder.addText(myStartOffset + myOffsetShift, i + myOffsetShift + 1);
            myStartOffset = i + 2;
            myOffsetShift--;
            //noinspection AssignmentToForLoopParameter
            i++;
            break;
          }
          // Intended fall-through.
        case '\n':
          myIndentSymbolsToStripAtCurrentLine = myIndentSymbolsToStrip;
          builder.addText(myStartOffset + myOffsetShift, i + myOffsetShift + 1);
          myStartOffset = i + 1;
          break;
        case ' ':
        case '\t':
          if (myIndentSymbolsToStripAtCurrentLine > 0) {
            // skip leading whitespace being stripped; 'continue' proceeds with
            // the enclosing for-loop, bypassing the default reset below
            myIndentSymbolsToStripAtCurrentLine--;
            myStartOffset++;
            continue;
          }
        default: myIndentSymbolsToStripAtCurrentLine = 0;
      }
    }
    if (myStartOffset < endOffset) {
      builder.addText(myStartOffset + myOffsetShift, endOffset + myOffsetShift);
      myStartOffset = endOffset;
    }
  }
  // Emits a single synthetic character at the given editor position (used for
  // the separator between consecutive caret selections).
  private void addCharacter(int position) {
    builder.addText(position + myOffsetShift, position + myOffsetShift + 1);
  }
  @NotNull
  public SyntaxInfo finish() {
    return builder.build();
  }
}
/**
 * Iterates styled segments: the underlying {@link RangeIterator} supplies
 * attribute ranges, and a {@link SegmentIterator} further splits each range
 * by the physical font able to render its characters.
 */
private static class MyMarkupIterator {
  private final SegmentIterator mySegmentIterator;
  private final RangeIterator myRangeIterator;
  private int myCurrentFontStyle;
  private Color myCurrentForegroundColor;
  private Color myCurrentBackgroundColor;
  private MyMarkupIterator(@NotNull CharSequence charSequence, @NotNull RangeIterator rangeIterator, @NotNull EditorColorsScheme colorsScheme) {
    myRangeIterator = rangeIterator;
    mySegmentIterator = new SegmentIterator(charSequence, colorsScheme.getFontPreferences());
  }
  public boolean atEnd() {
    return myRangeIterator.atEnd() && mySegmentIterator.atEnd();
  }
  public void advance() {
    // When the current range's segments are exhausted, pull the next attribute
    // range and re-seed the segment iterator with it.
    if (mySegmentIterator.atEnd()) {
      myRangeIterator.advance();
      TextAttributes textAttributes = myRangeIterator.getTextAttributes();
      myCurrentFontStyle = textAttributes == null ? Font.PLAIN : textAttributes.getFontType();
      myCurrentForegroundColor = textAttributes == null ? null : textAttributes.getForegroundColor();
      myCurrentBackgroundColor = textAttributes == null ? null : textAttributes.getBackgroundColor();
      mySegmentIterator.reset(myRangeIterator.getRangeStart(), myRangeIterator.getRangeEnd(), myCurrentFontStyle);
    }
    mySegmentIterator.advance();
  }
  public int getStartOffset() {
    return mySegmentIterator.getCurrentStartOffset();
  }
  public int getEndOffset() {
    return mySegmentIterator.getCurrentEndOffset();
  }
  public int getFontStyle() {
    return myCurrentFontStyle;
  }
  @NotNull
  public String getFontFamilyName() {
    return mySegmentIterator.getCurrentFontFamilyName();
  }
  @Nullable
  public Color getForegroundColor() {
    return myCurrentForegroundColor;
  }
  @Nullable
  public Color getBackgroundColor() {
    return myCurrentBackgroundColor;
  }
  public void dispose() {
    myRangeIterator.dispose();
  }
}
private static class CompositeRangeIterator implements RangeIterator {
private final @NotNull Color myDefaultForeground;
private final @NotNull Color myDefaultBackground;
private final IteratorWrapper[] myIterators;
private final TextAttributes myMergedAttributes = new TextAttributes();
private int overlappingRangesCount;
private int myCurrentStart;
private int myCurrentEnd;
// iterators have priority corresponding to their order in the parameter list - rightmost having the largest priority
public CompositeRangeIterator(@NotNull EditorColorsScheme colorsScheme, RangeIterator... iterators) {
myDefaultForeground = colorsScheme.getDefaultForeground();
myDefaultBackground = colorsScheme.getDefaultBackground();
myIterators = new IteratorWrapper[iterators.length];
for (int i = 0; i < iterators.length; i++) {
myIterators[i] = new IteratorWrapper(iterators[i], i);
}
}
@Override
public boolean atEnd() {
boolean validIteratorExists = false;
for (int i = 0; i < myIterators.length; i++) {
IteratorWrapper wrapper = myIterators[i];
if (wrapper == null) {
continue;
}
RangeIterator iterator = wrapper.iterator;
if (!iterator.atEnd() || overlappingRangesCount > 0 && (i >= overlappingRangesCount || iterator.getRangeEnd() > myCurrentEnd)) {
validIteratorExists = true;
}
}
return !validIteratorExists;
}
@Override
public void advance() {
int max = overlappingRangesCount == 0 ? myIterators.length : overlappingRangesCount;
for (int i = 0; i < max; i++) {
IteratorWrapper wrapper = myIterators[i];
if (wrapper == null) {
continue;
}
RangeIterator iterator = wrapper.iterator;
if (overlappingRangesCount > 0 && iterator.getRangeEnd() > myCurrentEnd) {
continue;
}
if (iterator.atEnd()) {
iterator.dispose();
myIterators[i] = null;
}
else {
iterator.advance();
}
}
Arrays.sort(myIterators, RANGE_SORTER);
myCurrentStart = Math.max(myIterators[0].iterator.getRangeStart(), myCurrentEnd);
myCurrentEnd = Integer.MAX_VALUE;
//noinspection ForLoopReplaceableByForEach
for (int i = 0; i < myIterators.length; i++) {
IteratorWrapper wrapper = myIterators[i];
if (wrapper == null) {
break;
}
RangeIterator iterator = wrapper.iterator;
int nearestBound;
if (iterator.getRangeStart() > myCurrentStart) {
nearestBound = iterator.getRangeStart();
}
else {
nearestBound = iterator.getRangeEnd();
}
myCurrentEnd = Math.min(myCurrentEnd, nearestBound);
}
for (overlappingRangesCount = 1; overlappingRangesCount < myIterators.length; overlappingRangesCount++) {
IteratorWrapper wrapper = myIterators[overlappingRangesCount];
if (wrapper == null || wrapper.iterator.getRangeStart() > myCurrentStart) {
break;
}
}
}
/**
 * Orders wrappers by the start of the remaining (not yet consumed) part of their range,
 * i.e. by {@code max(rangeStart, myCurrentEnd)}; exhausted (null) wrappers sort last.
 * Ties are broken by descending registration order.
 */
private final Comparator<IteratorWrapper> RANGE_SORTER = new Comparator<IteratorWrapper>() {
    @Override
    public int compare(IteratorWrapper o1, IteratorWrapper o2) {
        // Fix: treat two nulls as equal. The previous code returned 1 for (null, null),
        // making the comparator asymmetric — a contract violation that TimSort
        // (Arrays.sort) may reject with "Comparison method violates its general contract!".
        if (o1 == null) {
            return o2 == null ? 0 : 1;
        }
        if (o2 == null) {
            return -1;
        }
        // Integer.compare instead of plain subtraction: same ordering, no overflow concern.
        int startDiff = Integer.compare(Math.max(o1.iterator.getRangeStart(), myCurrentEnd),
                                        Math.max(o2.iterator.getRangeStart(), myCurrentEnd));
        if (startDiff != 0) {
            return startDiff;
        }
        // Higher registration order sorts first (wins ties).
        return Integer.compare(o2.order, o1.order);
    }
};
@Override
public int getRangeStart() {
    // Start of the merged span computed by the last advance() call.
    return myCurrentStart;
}
@Override
public int getRangeEnd() {
    // End of the merged span computed by the last advance() call.
    return myCurrentEnd;
}
@Override
public TextAttributes getTextAttributes() {
    // Seed the reusable merged-attributes holder from the first (highest-priority after
    // sorting) overlapping iterator, then fold in the remaining overlapping ranges.
    TextAttributes ta = myIterators[0].iterator.getTextAttributes();
    myMergedAttributes.setAttributes(ta.getForegroundColor(), ta.getBackgroundColor(), null, null, null, ta.getFontType());
    for (int i = 1; i < overlappingRangesCount; i++) {
        merge(myIterators[i].iterator.getTextAttributes());
    }
    // Note: the same mutable instance is returned on every call.
    return myMergedAttributes;
}
/**
 * Folds {@code attributes} into {@code myMergedAttributes}. A color already present in the
 * merged result wins unless it is unset (null) or equal to the scheme default; the font
 * style wins unless it is still plain.
 */
private void merge(TextAttributes attributes) {
    Color background = myMergedAttributes.getBackgroundColor();
    if (background == null || myDefaultBackground.equals(background)) {
        myMergedAttributes.setBackgroundColor(attributes.getBackgroundColor());
    }
    Color foreground = myMergedAttributes.getForegroundColor();
    if (foreground == null || myDefaultForeground.equals(foreground)) {
        myMergedAttributes.setForegroundColor(attributes.getForegroundColor());
    }
    if (myMergedAttributes.getFontType() == Font.PLAIN) {
        myMergedAttributes.setFontType(attributes.getFontType());
    }
}
@Override
public void dispose() {
    // Release every sub-iterator that is still alive; exhausted slots are null.
    for (int i = 0; i < myIterators.length; i++) {
        IteratorWrapper wrapper = myIterators[i];
        if (wrapper != null) {
            wrapper.iterator.dispose();
        }
    }
}
/**
 * Pairs a {@link RangeIterator} with its registration index. The index is used as a
 * priority tie-breaker when sorting overlapping ranges (see RANGE_SORTER, where a larger
 * order sorts first).
 */
private static class IteratorWrapper {
    private final RangeIterator iterator;
    // Position of this iterator in the original iterators array.
    private final int order;
    private IteratorWrapper(RangeIterator iterator, int order) {
        this.iterator = iterator;
        this.order = order;
    }
}
}
/**
 * Iterates the {@link RangeHighlighterEx} ranges of a document markup model that fall
 * inside [startOffset, endOffset) and carry attributes differing from the color-scheme
 * defaults. A null or non-{@link MarkupModelEx} model yields an empty iteration.
 *
 * Uses one step of look-ahead: the myNext* fields describe the range that the following
 * advance() will publish through the getters.
 */
private static class MarkupModelRangeIterator implements RangeIterator {
    // True when the supplied model is unusable; the iterator is then permanently atEnd().
    private final boolean myUnsupportedModel;
    private final int myStartOffset;
    private final int myEndOffset;
    private final EditorColorsScheme myColorsScheme;
    private final Color myDefaultForeground;
    private final Color myDefaultBackground;
    private final MarkupIterator<RangeHighlighterEx> myIterator;
    // Range currently published via getRangeStart()/getRangeEnd()/getTextAttributes().
    private int myCurrentStart;
    private int myCurrentEnd;
    private TextAttributes myCurrentAttributes;
    // Look-ahead range; myNextAttributes == null signals "no more ranges".
    private int myNextStart;
    private int myNextEnd;
    private TextAttributes myNextAttributes;
    private MarkupModelRangeIterator(@Nullable MarkupModel markupModel,
                                     @NotNull EditorColorsScheme colorsScheme,
                                     int startOffset,
                                     int endOffset) {
        myStartOffset = startOffset;
        myEndOffset = endOffset;
        myColorsScheme = colorsScheme;
        myDefaultForeground = colorsScheme.getDefaultForeground();
        myDefaultBackground = colorsScheme.getDefaultBackground();
        myUnsupportedModel = !(markupModel instanceof MarkupModelEx);
        if (myUnsupportedModel) {
            myIterator = null;
            return;
        }
        myIterator = ((MarkupModelEx)markupModel).overlappingIterator(startOffset, endOffset);
        try {
            findNextSuitableRange();
        }
        catch (RuntimeException | Error e) {
            // Don't leak the underlying iterator if the initial look-ahead fails.
            myIterator.dispose();
            throw e;
        }
    }
    @Override
    public boolean atEnd() {
        return myUnsupportedModel || myNextAttributes == null;
    }
    @Override
    public void advance() {
        // Publish the look-ahead range, then search for the next one.
        myCurrentStart = myNextStart;
        myCurrentEnd = myNextEnd;
        myCurrentAttributes = myNextAttributes;
        findNextSuitableRange();
    }
    /**
     * Advances the underlying iterator until it finds a highlighter that is valid, lies in
     * an interesting layer, intersects the requested window, does not overlap the
     * previously published range, and has attributes differing from the scheme defaults.
     * Leaves myNextAttributes == null when exhausted.
     */
    private void findNextSuitableRange() {
        myNextAttributes = null;
        while(myIterator.hasNext()) {
            RangeHighlighterEx highlighter = myIterator.next();
            if (highlighter == null || !highlighter.isValid() || !isInterestedInLayer(highlighter.getLayer())) {
                continue;
            }
            // LINES_IN_RANGE highlighters are not supported currently
            myNextStart = Math.max(highlighter.getStartOffset(), myStartOffset);
            myNextEnd = Math.min(highlighter.getEndOffset(), myEndOffset);
            if (myNextStart >= myEndOffset) {
                break;
            }
            if (myNextStart < myCurrentEnd) {
                continue; // overlapping ranges within document markup model are not supported currently
            }
            // Resolve the highlighter's attributes through its HighlightInfo tooltip, if any.
            TextAttributes attributes = null;
            Object tooltip = highlighter.getErrorStripeTooltip();
            if (tooltip instanceof HighlightInfo) {
                HighlightInfo info = (HighlightInfo)tooltip;
                TextAttributesKey key = info.forcedTextAttributesKey;
                if (key == null) {
                    HighlightInfoType type = info.type;
                    key = type.getAttributesKey();
                }
                if (key != null) {
                    attributes = myColorsScheme.getAttributes(key);
                }
            }
            if (attributes == null) {
                continue;
            }
            Color foreground = attributes.getForegroundColor();
            Color background = attributes.getBackgroundColor();
            // Skip ranges that would render identically to plain default-colored text.
            if ((foreground == null || myDefaultForeground.equals(foreground))
                && (background == null || myDefaultBackground.equals(background))
                && attributes.getFontType() == Font.PLAIN) {
                continue;
            }
            myNextAttributes = attributes;
            break;
        }
    }
    // Layers reflecting transient editor state are excluded from the iteration.
    private static boolean isInterestedInLayer(int layer) {
        return layer != HighlighterLayer.CARET_ROW
               && layer != HighlighterLayer.SELECTION
               && layer != HighlighterLayer.ERROR
               && layer != HighlighterLayer.WARNING
               && layer != HighlighterLayer.ELEMENT_UNDER_CARET;
    }
    @Override
    public int getRangeStart() {
        return myCurrentStart;
    }
    @Override
    public int getRangeEnd() {
        return myCurrentEnd;
    }
    @Override
    public TextAttributes getTextAttributes() {
        return myCurrentAttributes;
    }
    @Override
    public void dispose() {
        // myIterator is null for unsupported models.
        if (myIterator != null) {
            myIterator.dispose();
        }
    }
}
/**
 * Adapts an {@link EditorHighlighter}'s {@link HighlighterIterator} to the
 * {@link RangeIterator} contract, clipping every token range to the requested
 * [startOffset, endOffset) window. Bad-character tokens are reported with empty attributes.
 */
private static class HighlighterRangeIterator implements RangeIterator {
    private static final TextAttributes EMPTY_ATTRIBUTES = new TextAttributes();

    private final HighlighterIterator myIterator;
    private final int myStartOffset;
    private final int myEndOffset;
    private int myCurrentStart;
    private int myCurrentEnd;
    private TextAttributes myCurrentAttributes;

    public HighlighterRangeIterator(@NotNull EditorHighlighter highlighter, int startOffset, int endOffset) {
        myStartOffset = startOffset;
        myEndOffset = endOffset;
        myIterator = highlighter.createIterator(startOffset);
    }

    // Token start, clipped so it never precedes the requested window.
    private int clippedStart() {
        return Math.max(myIterator.getStart(), myStartOffset);
    }

    // Token end, clipped so it never exceeds the requested window.
    private int clippedEnd() {
        return Math.min(myIterator.getEnd(), myEndOffset);
    }

    @Override
    public boolean atEnd() {
        return myIterator.atEnd() || clippedStart() >= myEndOffset;
    }

    @Override
    public void advance() {
        myCurrentStart = clippedStart();
        myCurrentEnd = clippedEnd();
        if (myIterator.getTokenType() == TokenType.BAD_CHARACTER) {
            myCurrentAttributes = EMPTY_ATTRIBUTES;
        }
        else {
            myCurrentAttributes = myIterator.getTextAttributes();
        }
        myIterator.advance();
    }

    @Override
    public int getRangeStart() {
        return myCurrentStart;
    }

    @Override
    public int getRangeEnd() {
        return myCurrentEnd;
    }

    @Override
    public TextAttributes getTextAttributes() {
        return myCurrentAttributes;
    }

    @Override
    public void dispose() {
        // Nothing to release: the highlighter iterator owns no external resources here.
    }
}
/**
 * Minimal iterator over a sequence of non-overlapping attributed text ranges.
 * Protocol: while {@link #atEnd()} is false, call {@link #advance()} and then read the
 * current range via the getters; call {@link #dispose()} exactly once when finished.
 */
private interface RangeIterator {
    /** @return true when no further ranges are available. */
    boolean atEnd();
    /** Moves to the next range, making it the current one. */
    void advance();
    /** @return start offset of the current range. */
    int getRangeStart();
    /** @return end offset of the current range. */
    int getRangeEnd();
    /** @return attributes of the current range. */
    TextAttributes getTextAttributes();
    /** Releases any underlying resources. */
    void dispose();
}
/**
 * Splits a text segment into runs rendered with a single font family.
 * ComplementaryFontsRegistry may pick a different physical font per character (font
 * fallback), so one logical segment can map to several family runs.
 * Usage: {@link #reset} once per segment, then {@link #advance()} until {@link #atEnd()}.
 */
private static class SegmentIterator {
    private final CharSequence myCharSequence;
    private final FontPreferences myFontPreferences;
    // Boundaries of the run produced by the latest advance(): [myCurrentStartOffset, myCurrentOffset).
    private int myCurrentStartOffset;
    private int myCurrentOffset;
    private int myEndOffset;
    private int myFontStyle;
    // Family of the run just produced by advance().
    private String myCurrentFontFamilyName;
    // Look-ahead: family of the first character of the next run, recorded when the current
    // run was terminated by a family change.
    private String myNextFontFamilyName;

    private SegmentIterator(CharSequence charSequence, FontPreferences fontPreferences) {
        myCharSequence = charSequence;
        myFontPreferences = fontPreferences;
    }

    /**
     * Re-targets the iterator at a new [startOffset, endOffset) segment with the given
     * font style.
     */
    public void reset(int startOffset, int endOffset, int fontStyle) {
        myCurrentOffset = startOffset;
        myEndOffset = endOffset;
        myFontStyle = fontStyle;
        // Fix: clear the family state left over from the previous segment. Without this,
        // the first advance() after a reset started from a stale myNextFontFamilyName,
        // mis-attributing the first run's family — and, when the stale family differed
        // from the actual one at the new offset, producing an empty run.
        myCurrentFontFamilyName = null;
        myNextFontFamilyName = null;
    }

    public boolean atEnd() {
        return myCurrentOffset >= myEndOffset;
    }

    /**
     * Scans forward from the current offset, accumulating characters that resolve to the
     * same font family; stops at the segment end or at the first family change (the new
     * family is remembered for the next run).
     */
    public void advance() {
        myCurrentFontFamilyName = myNextFontFamilyName;
        myCurrentStartOffset = myCurrentOffset;
        for (; myCurrentOffset < myEndOffset; myCurrentOffset++) {
            FontInfo fontInfo = ComplementaryFontsRegistry.getFontAbleToDisplay(myCharSequence.charAt(myCurrentOffset),
                                                                                myFontStyle,
                                                                                myFontPreferences, null);
            String fontFamilyName = fontInfo.getFont().getFamily();
            if (myCurrentFontFamilyName == null) {
                // First character of the run fixes the run's family.
                myCurrentFontFamilyName = fontFamilyName;
            }
            else if (!myCurrentFontFamilyName.equals(fontFamilyName)) {
                // Family changed: end the run here and remember the new family.
                myNextFontFamilyName = fontFamilyName;
                break;
            }
        }
    }

    public int getCurrentStartOffset() {
        return myCurrentStartOffset;
    }

    public int getCurrentEndOffset() {
        return myCurrentOffset;
    }

    public String getCurrentFontFamilyName() {
        return myCurrentFontFamilyName;
    }
}
/**
 * Copy-paste pre-processor whose only job is to hand the raw copied text to the owning
 * {@code TextWithMarkupProcessor}; it never alters the copied or pasted text itself.
 */
public static class RawTextSetter implements CopyPastePreProcessor {
    private final TextWithMarkupProcessor myProcessor;
    public RawTextSetter(TextWithMarkupProcessor processor) {
        myProcessor = processor;
    }
    @Nullable
    @Override
    public String preprocessOnCopy(PsiFile file, int[] startOffsets, int[] endOffsets, String text) {
        // Record the raw text; returning null means "do not replace the copied text".
        myProcessor.setRawText(text);
        return null;
    }
    @NotNull
    @Override
    public String preprocessOnPaste(Project project, PsiFile file, Editor editor, String text, RawText rawText) {
        // Paste content is passed through untouched.
        return text;
    }
}
}
| |
package timeout.slang.com.ui.loading;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentTransaction;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.view.animation.Animation;
import android.view.animation.TranslateAnimation;
import timeout.slang.com.R;
import timeout.slang.com.ui.FragmentBase;
/**
 * Splash/loading fragment. Shows a two-part logo, guarantees a minimum on-screen time,
 * then slides both halves off screen and removes itself from the fragment manager.
 *
 * A simple {@link Fragment} subclass.
 */
public class FragmentSplash extends Fragment implements Animation.AnimationListener {
    /* ------------------------------------------------------------------------------------------
     * Constants
     * ------------------------------------------------------------------------------------------ */
    /**
     * Should be used when pushing this fragment onto the stack
     */
    public static final String TAG = "FragmentSplash";
    /**
     * Minimum showtime of 2 seconds. (Fixed: declared static final — these are true
     * constants and were previously mutable instance fields.)
     */
    private static final long MINIMUM_SHOWTIME = 2000;
    /**
     * 1s long animation
     */
    private static final long ANIMATION_DURATION = 1000;
    /* ------------------------------------------------------------------------------------------
     * Private Members
     * ------------------------------------------------------------------------------------------ */
    /**
     * The top (TimeOut) part of our loading view
     */
    private View mTop;
    /**
     * The bottom (London) part of our loading view
     */
    private View mBottom;
    /**
     * Top animation
     */
    private Animation mTopAnimation;
    /**
     * Bottom animation
     */
    private Animation mBottomAnimation;
    /**
     * Earliest time this view can finish; -1 until the view has first been drawn
     */
    private long mMinimumEndTime = -1;
    /**
     * Set if its been requested that this view finish before it was first drawn
     */
    private boolean mRequestFinish;
    /* ------------------------------------------------------------------------------------------
     * Construction & From FragmentBase
     * ------------------------------------------------------------------------------------------ */
    public FragmentSplash() {
        // Required empty public constructor
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.fragment_loading, container, false);
        mTop = v.findViewById(R.id.top);
        mBottom = v.findViewById(R.id.bottom);
        // Add on predraw listener so we know when the user first sees the fragment
        mBottom.getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() {
            @Override
            public boolean onPreDraw() {
                mBottom.getViewTreeObserver().removeOnPreDrawListener(this);
                setMinimumEndTime();
                return true;
            }
        });
        // Stop touch events getting to underlying screen
        v.setOnTouchListener(new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent event) {
                return true;
            }
        });
        return v;
    }
    /* ------------------------------------------------------------------------------------------
     * Public Methods
     * ------------------------------------------------------------------------------------------ */
    /**
     * Ask the splash screen to dismiss itself; dismissal is deferred until the minimum
     * showtime has elapsed. Safe to call before the fragment has been drawn.
     */
    public synchronized void requestLoadingFragmentFinish() {
        // Data has returned before we've even shown the fragment
        if(mMinimumEndTime == -1) {
            mRequestFinish = true;
        } else {
            startAnimationAndPop();
        }
    }
    /* ------------------------------------------------------------------------------------------
     * From AnimationListener
     * ------------------------------------------------------------------------------------------ */
    @Override
    public void onAnimationStart(Animation animation) { }
    @Override
    public void onAnimationRepeat(Animation animation) { }
    @Override
    public void onAnimationEnd(Animation animation) {
        if(animation == mTopAnimation) {
            mTop.setVisibility(View.INVISIBLE);
        } else if(animation == mBottomAnimation) {
            mBottom.setVisibility(View.INVISIBLE);
        }
        // If both animations have finished then remove this fragment
        if(mTop.getVisibility() == View.INVISIBLE && mBottom.getVisibility() == View.INVISIBLE) {
            // Fixed: the fragment may have been detached while the animations ran, in which
            // case getFragmentManager() returns null and beginTransaction() would NPE.
            if (getFragmentManager() != null) {
                FragmentTransaction ft = getFragmentManager().beginTransaction();
                ft.remove(this);
                // NOTE(review): commit() throws if the activity state was already saved
                // (e.g. app backgrounded mid-animation); consider commitAllowingStateLoss().
                ft.commit();
            }
        }
    }
    /* ------------------------------------------------------------------------------------------
     * Private Methods
     * ------------------------------------------------------------------------------------------ */
    /**
     * Sets the minimum time at which this fragment can finish. Synchronized in case
     * requestLoadingFragmentFinish is called off Main thread
     */
    private synchronized void setMinimumEndTime() {
        mMinimumEndTime = System.currentTimeMillis() + MINIMUM_SHOWTIME;
        if (mRequestFinish) { // fixed: redundant '== true' comparison
            startAnimationAndPop();
        }
    }
    /**
     * Waits out the remaining minimum showtime off the main thread, then starts the exit
     * animations.
     */
    private void startAnimationAndPop() {
        (new AsyncStartAnimation()).execute();
    }
    private class AsyncStartAnimation extends AsyncTask<Void, Void, Void> {
        @Override
        protected Void doInBackground(Void... params) {
            // Sleep for required time
            long sleepTime = mMinimumEndTime - System.currentTimeMillis();
            if(sleepTime > 0) {
                try {
                    Thread.sleep(sleepTime);
                } catch(InterruptedException e) {
                    // Fixed: don't silently swallow the interrupt — restore the flag so the
                    // AsyncTask machinery can observe the cancellation.
                    Thread.currentThread().interrupt();
                }
            }
            return null;
        }
        @Override
        protected void onPostExecute(Void aVoid) {
            // Create top sliding animation
            int offscreenTop = -1*(mTop.getMeasuredHeight());
            mTopAnimation = createAndSetYAnimation( mTop, offscreenTop, FragmentSplash.this );
            // Create bottom sliding animation
            int offscreenBottom = mBottom.getMeasuredHeight();
            mBottomAnimation = createAndSetYAnimation(mBottom, offscreenBottom, FragmentSplash.this );
            // Start both the animations
            mTop.startAnimation(mTopAnimation);
            mBottom.startAnimation(mBottomAnimation);
        }
        /**
         * Builds a vertical translate animation for the given view, attaches the supplied
         * listener, and associates it with the view.
         */
        private Animation createAndSetYAnimation(View v, int newY, Animation.AnimationListener listener) {
            TranslateAnimation anim = new TranslateAnimation(0, 0, 0, newY );
            anim.setDuration(ANIMATION_DURATION);
            v.setAnimation(anim);
            anim.setAnimationListener(listener);
            return anim;
        }
    }
}
| |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.reviewdb.client;
import com.google.gwtorm.client.Column;
import com.google.gwtorm.client.IntKey;
import com.google.gwtorm.client.StringKey;
/** Named group of one or more accounts, typically used for access controls. */
public final class AccountGroup {
    /** Group name key */
    public static class NameKey extends
        StringKey<com.google.gwtorm.client.Key<?>> {
        private static final long serialVersionUID = 1L;
        @Column(id = 1)
        protected String name;
        // No-arg constructor required by gwtorm for reflective instantiation.
        protected NameKey() {
        }
        public NameKey(final String n) {
            name = n;
        }
        @Override
        public String get() {
            return name;
        }
        @Override
        protected void set(String newValue) {
            name = newValue;
        }
    }
    /** Globally unique identifier. */
    public static class UUID extends
        StringKey<com.google.gwtorm.client.Key<?>> {
        private static final long serialVersionUID = 1L;
        @Column(id = 1)
        protected String uuid;
        // No-arg constructor required by gwtorm for reflective instantiation.
        protected UUID() {
        }
        public UUID(final String n) {
            uuid = n;
        }
        @Override
        public String get() {
            return uuid;
        }
        @Override
        protected void set(String newValue) {
            uuid = newValue;
        }
        /** Parse an AccountGroup.UUID out of a string representation. */
        public static UUID parse(final String str) {
            final UUID r = new UUID();
            r.fromString(str);
            return r;
        }
    }
    /** @return true if the UUID is for a group managed within Gerrit. */
    public static boolean isInternalGroup(AccountGroup.UUID uuid) {
        // Internal groups use a 40-character lowercase hex UUID; other formats indicate
        // groups backed by an external system.
        return uuid.get().matches("^[0-9a-f]{40}$");
    }
    /** Synthetic key to link to within the database */
    public static class Id extends IntKey<com.google.gwtorm.client.Key<?>> {
        private static final long serialVersionUID = 1L;
        @Column(id = 1)
        protected int id;
        // No-arg constructor required by gwtorm for reflective instantiation.
        protected Id() {
        }
        public Id(final int id) {
            this.id = id;
        }
        @Override
        public int get() {
            return id;
        }
        @Override
        protected void set(int newValue) {
            id = newValue;
        }
        /** Parse an AccountGroup.Id out of a string representation. */
        public static Id parse(final String str) {
            final Id r = new Id();
            r.fromString(str);
            return r;
        }
    }
    // NOTE(review): column ids 3, 5, 6 and 8 are absent — presumably columns deleted in
    // earlier schema versions; gwtorm column ids must not be reused, so the gaps look
    // intentional. Confirm against the schema history before renumbering.
    /** Unique name of this group within the system. */
    @Column(id = 1)
    protected NameKey name;
    /** Unique identity, to link entities as {@link #name} can change. */
    @Column(id = 2)
    protected Id groupId;
    /** A textual description of the group's purpose. */
    @Column(id = 4, length = Integer.MAX_VALUE, notNull = false)
    protected String description;
    /** Whether the group membership is visible to all users. */
    @Column(id = 7)
    protected boolean visibleToAll;
    /** Globally unique identifier name for this group. */
    @Column(id = 9)
    protected UUID groupUUID;
    /**
     * Identity of the group whose members can manage this group.
     * <p>
     * This can be a self-reference to indicate the group's members manage itself.
     */
    @Column(id = 10)
    protected UUID ownerGroupUUID;
    // No-arg constructor required by gwtorm for reflective instantiation.
    protected AccountGroup() {
    }
    /** Creates a group that is initially invisible and owned by itself. */
    public AccountGroup(final AccountGroup.NameKey newName,
        final AccountGroup.Id newId, final AccountGroup.UUID uuid) {
        name = newName;
        groupId = newId;
        visibleToAll = false;
        groupUUID = uuid;
        // By default the group manages itself.
        ownerGroupUUID = groupUUID;
    }
    public AccountGroup.Id getId() {
        return groupId;
    }
    public String getName() {
        return name.get();
    }
    public AccountGroup.NameKey getNameKey() {
        return name;
    }
    public void setNameKey(final AccountGroup.NameKey nameKey) {
        name = nameKey;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(final String d) {
        description = d;
    }
    public AccountGroup.UUID getOwnerGroupUUID() {
        return ownerGroupUUID;
    }
    public void setOwnerGroupUUID(final AccountGroup.UUID uuid) {
        ownerGroupUUID = uuid;
    }
    public void setVisibleToAll(final boolean visibleToAll) {
        this.visibleToAll = visibleToAll;
    }
    public boolean isVisibleToAll() {
        return visibleToAll;
    }
    public AccountGroup.UUID getGroupUUID() {
        return groupUUID;
    }
    public void setGroupUUID(AccountGroup.UUID uuid) {
        groupUUID = uuid;
    }
}
| |
package nachos.threads;
import nachos.machine.*;
import nachos.ag.BoatGrader;
import java.util.LinkedList;
import java.util.List;
import java.util.ArrayList;
import java.io.File;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.FileNotFoundException;
public class Boat {
static BoatGrader bg;
/**
 * Reads one integer per line from the file "args.txt" in the working directory.
 * I/O problems are printed and an empty (or partial) list is returned; a malformed
 * line propagates a NumberFormatException, exactly as before.
 */
public static List<Integer> gg() {
    List<Integer> result = new ArrayList<Integer>();
    File inputFile = new File("args.txt");
    BufferedReader in = null;
    try {
        in = new BufferedReader(new FileReader(inputFile));
        for (String line = in.readLine(); line != null; line = in.readLine()) {
            result.add(Integer.parseInt(line));
        }
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                // best-effort close; nothing useful to do here
            }
        }
    }
    return result;
}
/**
 * Stress test: runs the full simulation for every combination of 0..25 adults and
 * 2..25 children. (The child count starts at 2 — with a single child on Oahu,
 * onChildOahu() only waits and the crossing cannot complete.)
 */
public static void selfTest() {
    BoatGrader b = new BoatGrader();
    // System.out.println("\n ***Testing Boats with only 2 children***");
    // begin(0, 2, b);
    // System.out.println("\n ***Testing Boats with only 3 children***");
    // begin(0, 3, b);
    // System.out.println("\n ***Testing Boats with 2 children, 1 adult***");
    // begin(1, 2, b);
    for (int i = 0; i <= 25; ++i) {
        for (int j = 2; j <= 25; ++j) {
            begin(i, j, b);
        }
    }
}
/**
 * Runs one complete simulation: spawns a thread per person, waits for all of them,
 * and asserts that the entire population ended up on Molokai.
 */
public static void begin(int adults, int children, BoatGrader b) {
    System.err.println("***Testing Boats with " + children + " children, " + adults + " adults***");
    // Store the externally generated autograder in a class
    // variable to be accessible by children.
    bg = b;
    World world = new World(bg);
    // Create all person threads up front; names are only for debugging output.
    LinkedList<KThread> threads = new LinkedList<KThread>();
    for (int a = 0; a < adults; ++a) {
        KThread thread = new KThread(new Adult(world));
        thread.setName("Adult #" + a);
        threads.add(thread);
    }
    for (int c = 0; c < children; ++c) {
        KThread thread = new KThread(new Child(world));
        thread.setName("Child #" + c);
        threads.add(thread);
    }
    // Start everyone, then wait for the whole population to finish.
    for (KThread thread : threads) {
        thread.fork();
    }
    for (KThread thread : threads) {
        thread.join();
    }
    // Everybody must have ended up on Molokai.
    Lib.assertTrue(world.molokai.getNumAdult() == adults);
    Lib.assertTrue(world.molokai.getNumChild() == children);
    System.err.println("***Test passed: Boats with " + children + " children, " + adults + " adults***");
}
/**
 * Legacy autograder entry point for an adult thread; unused by the World/Person
 * implementation below, which drives the grader from Adult.run() instead.
 */
static void AdultItinerary() {
    bg.initializeAdult(); // Required for autograder interface. Must be the
                          // first thing called.
    // DO NOT PUT ANYTHING ABOVE THIS LINE.
    /*
     * This is where you should put your solutions. Make calls to the
     * BoatGrader to show that it is synchronized. For example:
     * bg.AdultRowToMolokai(); indicates that an adult has rowed the boat
     * across to Molokai
     */
}
/**
 * Legacy autograder entry point for a child thread; unused by the World/Person
 * implementation below, which drives the grader from Child.run() instead.
 */
static void ChildItinerary() {
    bg.initializeChild(); // Required for autograder interface. Must be the
                          // first thing called.
    // DO NOT PUT ANYTHING ABOVE THIS LINE.
}
/** Demonstration-only sequence of grader calls; not a valid solution (see comment). */
static void SampleItinerary() {
    // Please note that this isn't a valid solution (you can't fit
    // all of them on the boat). Please also note that you may not
    // have a single thread calculate a solution and then just play
    // it back at the autograder -- you will be caught.
    System.out.println("\n ***Everyone piles on the boat and goes to Molokai***");
    bg.AdultRowToMolokai();
    bg.ChildRideToMolokai();
    bg.AdultRideToMolokai();
    bg.ChildRideToMolokai();
}
/**
 * Shared simulation state: the two islands, the boat, the single global lock, and the
 * announcement channel used by arrivals on Molokai to tell waiting children whether
 * someone still needs to row the boat back to Oahu.
 */
static class World {
    public World(BoatGrader grader) {
        this.grader = grader;
        this.boat = new TheBoat(this);
    }
    public BoatGrader grader;
    public Island oahu = new Island();
    public Island molokai = new Island();
    public TheBoat boat;
    // Single global lock guarding the boat state and the condition variable below.
    public Lock mutex = new Lock();
    // Signalled on every arrival announcement; the payload is the needBack flag.
    private Condition announceNewComerCond = new Condition(mutex);
    private boolean needBack = false;
    /**
     * Announce an arrival. needBack == true wakes exactly one waiting child (to row the
     * boat back); needBack == false wakes everyone (the migration is complete).
     * Caller is expected to hold {@link #mutex} — Condition requires its lock.
     */
    public void announceNewComer(boolean needBack) {
        this.needBack = needBack;
        if (needBack) {
            announceNewComerCond.wake();
        } else {
            announceNewComerCond.wakeAll();
        }
    }
    /**
     * Block until the next arrival announcement and return the announced needBack flag.
     * Caller is expected to hold {@link #mutex}.
     */
    public boolean waitNewComer() {
        announceNewComerCond.sleep();
        boolean needBack = this.needBack;
        return needBack;
    }
}
/**
 * Population counters for one island. enter/exit keep the counts consistent under a
 * private lock; the getters are unsynchronized reads, as in the original.
 */
static class Island {
    // Guards the two population counters below.
    private final Lock lock = new Lock();
    private int adultCount = 0;
    private int childCount = 0;

    public int getNumAdult() {
        return adultCount;
    }

    public int getNumChild() {
        return childCount;
    }

    /** Registers p as present on this island. */
    public void enter(Person p) {
        lock.acquire();
        if (p.isAdult()) {
            adultCount++;
        } else {
            childCount++;
        }
        lock.release();
    }

    /** Removes p from this island; counts must never go negative. */
    public void exit(Person p) {
        lock.acquire();
        if (p.isAdult()) {
            adultCount--;
        } else {
            childCount--;
        }
        Lib.assertTrue(adultCount >= 0 && childCount >= 0);
        lock.release();
    }
}
/**
 * The shared boat. All state is guarded by the world's single lock ({@code mutex});
 * the condition variables choreograph pilot/passenger boarding and arrival waits.
 * The boat carries one pilot and, when the pilot is a child, optionally one child
 * passenger.
 */
static class TheBoat {
    public TheBoat(World world) {
        this.world = world;
        // The boat (like everyone) starts on Oahu.
        this.island = world.oahu;
        mutex = world.mutex;
        this.waitPilot = new Condition(mutex);
        this.waitPassenger = new Condition(mutex);
        this.waitPassengerDown = new Condition(mutex);
        this.waitOahuArriveCond = new Condition(mutex);
        this.waitMolokaiArriveCond = new Condition(mutex);
        this.waitOahuLeaveCond = new Condition(mutex);
    }
    /** Takes the global lock; every other method assumes it is held. */
    public void acquire() {
        mutex.acquire();
    }
    public void release() {
        mutex.release();
    }
    /**
     * Attempt to board from the caller's island. Returns 1 if the caller became the
     * pilot, 2 if it became the passenger, 0 otherwise. A passenger seat is only
     * granted when pilotOnly is false and both pilot and candidate are children.
     */
    public int tryGetOn(Person other, boolean pilotOnly) {
        int rc = 0;
        if (other.getIsland() == island) {
            if (pilot == null) {
                pilot = other;
                rc = 1;
            } else if (!pilotOnly && passenger == null && pilot.isChild() && other.isChild()) {
                passenger = other;
                rc = 2;
            }
        }
        return rc;
    }
    /**
     * Rows to the opposite island; only the current pilot may call this. Reports the
     * crossing(s) to the grader, moves pilot (and passenger, if any), wakes waiters on
     * both shores, and empties the boat for the next trip.
     */
    public void go(Person me) {
        Lib.assertTrue(pilot == me);
        // Report the pilot's crossing, picking the grader call by role and direction.
        if (pilot.isAdult()) {
            if (island == world.oahu) {
                world.grader.AdultRowToMolokai();
            } else {
                world.grader.AdultRowToOahu();
            }
        } else {
            if (island == world.oahu) {
                world.grader.ChildRowToMolokai();
            } else {
                world.grader.ChildRowToOahu();
            }
        }
        // Report the passenger's crossing, if there is one.
        if (passenger != null) {
            if (passenger.isAdult()) {
                if (island == world.oahu) {
                    world.grader.AdultRideToMolokai();
                } else {
                    world.grader.AdultRideToOahu();
                }
            } else {
                if (island == world.oahu) {
                    world.grader.ChildRideToMolokai();
                } else {
                    world.grader.ChildRideToOahu();
                }
            }
        }
        Island nextIsland = island == world.oahu ? world.molokai : world.oahu;
        Condition nextCondition = island == world.oahu ? waitMolokaiArriveCond: waitOahuArriveCond;
        if (island == world.oahu) {
            // Let everyone blocked in waitOahuLeave() re-evaluate their options.
            waitOahuLeaveCond.wakeAll();
        }
        this.island = nextIsland;
        pilot.setIsland(nextIsland);
        if (passenger != null) {
            passenger.setIsland(nextIsland);
            // Hand-shake with the passenger: wake them, then wait until they have
            // stepped off (getOffPassenger) before clearing the seats.
            waitPassenger.wake();
            waitPassengerDown.sleep();
        }
        this.pilot = null;
        this.passenger = null;
        // Announce arrival on the destination shore.
        nextCondition.wakeAll();
    }
    /** Passenger side of the boarding hand-shake: wake the pilot, sleep until arrival. */
    public void getOnPassenger(Person me) {
        Lib.assertTrue(passenger == me);
        waitPilot.wake();
        waitPassenger.sleep();
    }
    /** Passenger signals it has disembarked, letting go() finish. */
    public void getOffPassenger(Person me) {
        Lib.assertTrue(passenger == me);
        waitPassengerDown.wake();
    }
    /** Pilot waits until a passenger boards (woken from getOnPassenger). */
    public void waitGoPilot(Person me) {
        Lib.assertTrue(pilot == me);
        waitPilot.sleep();
    }
    /** Blocks an Oahu resident until the boat has left Oahu (no-op if already gone). */
    public void waitOahuLeave(Person me) {
        Lib.assertTrue(me.getIsland() == world.oahu);
        if (island == world.oahu) {
            waitOahuLeaveCond.sleep();
        }
    }
    /** Blocks an Oahu resident until the boat is back at Oahu (no-op if already here). */
    public void waitOahuArrive(Person me) {
        Lib.assertTrue(me.getIsland() == world.oahu);
        if (island != world.oahu) {
            waitOahuArriveCond.sleep();
        }
    }
    /** Blocks a Molokai resident until the boat reaches Molokai (no-op if already here). */
    public void waitMolokaiArrive(Person me) {
        Lib.assertTrue(me.getIsland() == world.molokai);
        if (island != world.molokai) {
            waitMolokaiArriveCond.sleep();
        }
    }
    private World world;
    // Island the boat is currently moored at.
    private Island island;
    private Lock mutex;
    private Condition waitPilot;
    private Condition waitPassenger;
    private Condition waitPassengerDown;
    private Condition waitOahuArriveCond;
    private Condition waitMolokaiArriveCond;
    private Condition waitOahuLeaveCond;
    // Current occupants; null when the seat is free.
    private Person pilot, passenger;
}
/**
 * Base class for simulation participants (type 0 = adult, 1 = child). Tracks which
 * island the person currently occupies, keeping the islands' headcounts in sync.
 */
static class Person implements Runnable {
    // 0 = adult, 1 = child; validated in the constructor.
    private final int myType;
    private Island myIsland;
    private final World myWorld;

    public Person(int type, World world) {
        Lib.assertTrue(type >= 0 && type <= 1);
        this.myType = type;
        this.myWorld = world;
        // Everyone starts on Oahu.
        this.setIsland(world.oahu);
    }

    public boolean isAdult() {
        return myType == 0;
    }

    public boolean isChild() {
        return myType == 1;
    }

    public World getWorld() {
        return myWorld;
    }

    public TheBoat getBoat() {
        return myWorld.boat;
    }

    public Island getIsland() {
        Lib.assertTrue(myIsland != null);
        return myIsland;
    }

    /** Moves this person to newIsland, updating both islands' headcounts. */
    public void setIsland(Island newIsland) {
        if (myIsland != null) {
            myIsland.exit(this);
        }
        newIsland.enter(this);
        myIsland = newIsland;
    }

    /** Overridden by Adult and Child with the actual behavior. */
    public void run() {
    }
}
/**
 * An adult: waits on Oahu until fewer than two children remain there, rows across
 * alone (adults never take passengers), then announces the arrival so a child can
 * return the boat if anyone is left behind.
 */
static class Adult extends Person {
    public Adult(World world) {
        super(0, world);
    }
    public void run() {
        getWorld().grader.initializeAdult();
        TheBoat boat = getBoat();
        boat.acquire();
        // I'm on oahu
        while (true) {
            // Only try to row while children aren't pairing up for their own crossing.
            if (getIsland().getNumChild() < 2) {
                int rc = boat.tryGetOn(this, true);
                if (rc == 1) { // I have become the pilot
                    break;
                }
            }
            // Boat unavailable: wait out one full departure/return cycle and retry.
            boat.waitOahuLeave(this);
            boat.waitOahuArrive(this);
        }
        // A child must bring the boat back unless this adult is the last person on Oahu.
        boolean needBack = true;
        if (getIsland().getNumAdult() == 1 && getIsland().getNumChild() == 0) {
            needBack = false;
        }
        boat.go(this);
        getWorld().announceNewComer(needBack);
        boat.release();
    }
}
/**
 * A child: children do the shuttling. On Oahu they pair up (pilot + passenger) to row
 * to Molokai; on Molokai a child rows back whenever an arrival announcement says more
 * people remain on Oahu. Termination is decided by the needBack flag computed while
 * still on Oahu (true when the departing boat does NOT empty the island).
 */
static class Child extends Person {
    public Child(World world) {
        super(1, world);
    }
    public void run() {
        getWorld().grader.initializeChild();
        TheBoat boat = getBoat();
        boat.acquire();
        // Simple state machine keyed on the island we currently occupy.
        while (true) {
            boolean exit;
            if (getIsland() == getWorld().oahu) {
                exit = onChildOahu();
            } else {
                exit = onChildMolokai();
            }
            if (exit) {
                break;
            }
        }
        boat.release();
    }
    /** One step of the Oahu-side protocol; returns true when this thread may terminate. */
    private boolean onChildOahu() {
        TheBoat boat = getBoat();
        if (getIsland().getNumChild() == 1) {
            // Lone child: let adults use the boat; just wait for it to cycle.
            boat.waitOahuLeave(this);
            boat.waitOahuArrive(this);
            return false;
        }
        int rc = boat.tryGetOn(this, false);
        if (rc == 1) {
            // Became pilot: wait for a sibling passenger when one is available.
            if (getIsland().getNumChild() > 1) {
                boat.waitGoPilot(this);
            }
            // The boat must come back unless this trip empties Oahu.
            boolean needBack = true;
            if (getIsland().getNumChild() <= 2 && getIsland().getNumAdult() == 0) {
                needBack = false;
            }
            boat.go(this);
            if (!needBack) {
                getWorld().announceNewComer(needBack);
            }
            this.isLastPilot = true;
            this.lastNeedBack = needBack;
            return !needBack;
        } else if (rc == 2) {
            // Became passenger: compute needBack before departing, then ride along
            // (go() itself is performed by the pilot).
            boolean needBack = true;
            if (getIsland().getNumChild() <= 2 && getIsland().getNumAdult() == 0) {
                needBack = false;
            }
            boat.getOnPassenger(this);
            this.isLastPassenger = true;
            this.lastNeedBack = needBack;
            return false;
        } else {
            // Boat full: wait for it to leave and return.
            boat.waitOahuLeave(this);
            boat.waitOahuArrive(this);
        }
        return false;
    }
    /** One step of the Molokai-side protocol; returns true when this thread may terminate. */
    private boolean onChildMolokai() {
        TheBoat boat = getBoat();
        boolean needBack;
        if (isLastPilot) {
            // Just piloted a crossing with needBack == true (otherwise run() would have
            // exited), so a return trip is required.
            needBack = true;
            this.isLastPilot = false;
        } else if (isLastPassenger) {
            // Complete the hand-shake so the pilot's go() can finish.
            boat.getOffPassenger(this);
            if (!lastNeedBack) {
                return true;
            }
            this.isLastPassenger = false;
            needBack = getWorld().waitNewComer();
        } else {
            // Idle on Molokai: wait for the next arrival announcement.
            needBack = getWorld().waitNewComer();
        }
        if (needBack) {
            // Race to row the boat back; losing (rc == 0) just means someone else went.
            int rc = boat.tryGetOn(this, true);
            if (rc == 1) {
                boat.go(this);
            } else {
                Lib.assertTrue(rc == 0);
            }
            return false;
        } else {
            return true;
        }
    }
    // Set when this child was the pilot / passenger of the boat's most recent crossing.
    private boolean isLastPilot = false;
    private boolean isLastPassenger = false;
    // needBack value computed on Oahu for the crossing this child took part in.
    private boolean lastNeedBack = false;
}
}
| |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.db.evolutions;
import play.api.db.evolutions.DatabaseEvolutions;
import play.db.Database;
import java.util.*;
/** Utilities for working with evolutions. */
public class Evolutions {
/**
 * Create an evolutions reader that reads evolution files from this class's own classloader.
 *
 * <p>Only useful in simple classloading environments, such as when the classloader structure is
 * flat.
 *
 * @return the evolutions reader.
 */
public static play.api.db.evolutions.EvolutionsReader fromClassLoader() {
    return fromClassLoader(Evolutions.class.getClassLoader());
}
/**
 * Create an evolutions reader that reads evolution files from a classloader.
 *
 * @param classLoader The classloader to read from.
 * @return the evolutions reader.
 */
public static play.api.db.evolutions.EvolutionsReader fromClassLoader(ClassLoader classLoader) {
    // Delegates with an empty resource-name prefix.
    return fromClassLoader(classLoader, "");
}
/**
 * Create an evolutions reader that reads evolution files from a classloader.
 *
 * @param classLoader The classloader to read from.
 * @param prefix A prefix that gets added to the resource file names, for example, this could be
 *     used to namespace evolutions in different environments to work with different databases.
 * @return the evolutions reader.
 */
public static play.api.db.evolutions.EvolutionsReader fromClassLoader(
    ClassLoader classLoader, String prefix) {
    return new play.api.db.evolutions.ClassLoaderEvolutionsReader(classLoader, prefix);
}
/**
 * Create an evolutions reader based on a simple map of database names to evolutions.
 *
 * @param evolutions The map of database names to evolutions.
 * @return the evolutions reader.
 */
public static play.api.db.evolutions.EvolutionsReader fromMap(
    Map<String, List<Evolution>> evolutions) {
    return new SimpleEvolutionsReader(evolutions);
}
/**
 * Create an evolutions reader for the default database from a list of evolutions.
 *
 * @param evolutions The list of evolutions.
 * @return the evolutions reader.
 */
public static play.api.db.evolutions.EvolutionsReader forDefault(Evolution... evolutions) {
    // Single-entry map keyed by the conventional "default" database name.
    // Diamond operator replaces the redundant explicit type arguments.
    Map<String, List<Evolution>> map = new HashMap<>();
    map.put("default", Arrays.asList(evolutions));
    return fromMap(map);
}
/**
* Apply evolutions for the given database.
*
* @param database The database to apply the evolutions to.
* @param reader The reader to read the evolutions.
* @param autocommit Whether autocommit should be used.
* @param schema The schema where all the play evolution tables are saved in
*/
public static void applyEvolutions(
Database database,
play.api.db.evolutions.EvolutionsReader reader,
boolean autocommit,
String schema) {
DatabaseEvolutions evolutions = new DatabaseEvolutions(database.asScala(), schema);
evolutions.evolve(evolutions.scripts(reader), autocommit);
}
/**
* Apply evolutions for the given database.
*
* @param database The database to apply the evolutions to.
* @param reader The reader to read the evolutions.
* @param autocommit Whether autocommit should be used.
* @param schema The schema where all the play evolution tables are saved in
* @param metaTable Table to keep evolutions' meta data
*/
public static void applyEvolutions(
Database database,
play.api.db.evolutions.EvolutionsReader reader,
boolean autocommit,
String schema,
String metaTable) {
DatabaseEvolutions evolutions = new DatabaseEvolutions(database.asScala(), schema, metaTable);
evolutions.evolve(evolutions.scripts(reader), autocommit);
}
  /**
   * Apply evolutions for the given database, with autocommit enabled.
   *
   * @param database The database to apply the evolutions to.
   * @param reader The reader to read the evolutions.
   * @param schema The schema where all the play evolution tables are saved in
   */
  public static void applyEvolutions(
      Database database, play.api.db.evolutions.EvolutionsReader reader, String schema) {
    applyEvolutions(database, reader, true, schema);
  }
  /**
   * Apply evolutions for the given database, with autocommit enabled.
   *
   * @param database The database to apply the evolutions to.
   * @param reader The reader to read the evolutions.
   * @param schema The schema where all the play evolution tables are saved in
   * @param metaTable Table to keep evolutions' meta data
   */
  public static void applyEvolutions(
      Database database,
      play.api.db.evolutions.EvolutionsReader reader,
      String schema,
      String metaTable) {
    applyEvolutions(database, reader, true, schema, metaTable);
  }
  /**
   * Apply evolutions for the given database, using the default (empty) schema.
   *
   * @param database The database to apply the evolutions to.
   * @param reader The reader to read the evolutions.
   * @param autocommit Whether autocommit should be used.
   */
  public static void applyEvolutions(
      Database database, play.api.db.evolutions.EvolutionsReader reader, boolean autocommit) {
    applyEvolutions(database, reader, autocommit, "");
  }
  /**
   * Apply evolutions for the given database, with autocommit enabled and the default schema.
   *
   * @param database The database to apply the evolutions to.
   * @param reader The reader to read the evolutions.
   */
  public static void applyEvolutions(
      Database database, play.api.db.evolutions.EvolutionsReader reader) {
    applyEvolutions(database, reader, true);
  }
  /**
   * Apply evolutions for the given database, reading the evolutions from the classpath.
   *
   * @param database The database to apply the evolutions to.
   * @param schema The schema where all the play evolution tables are saved in
   */
  public static void applyEvolutions(Database database, String schema) {
    applyEvolutions(database, fromClassLoader(), schema);
  }
  /**
   * Apply evolutions for the given database, reading the evolutions from the classpath.
   *
   * @param database The database to apply the evolutions to.
   * @param schema The schema where all the play evolution tables are saved in
   * @param metaTable Table to keep evolutions' meta data
   */
  public static void applyEvolutions(Database database, String schema, String metaTable) {
    applyEvolutions(database, fromClassLoader(), schema, metaTable);
  }
  /**
   * Apply evolutions for the given database using all defaults: classpath reader,
   * autocommit enabled, empty schema.
   *
   * @param database The database to apply the evolutions to.
   */
  public static void applyEvolutions(Database database) {
    applyEvolutions(database, "");
  }
/**
* Cleanup evolutions for the given database.
*
* <p>This will run the down scripts for all the applied evolutions.
*
* @param database The database to apply the evolutions to.
* @param autocommit Whether autocommit should be used.
* @param schema The schema where all the play evolution tables are saved in
*/
public static void cleanupEvolutions(Database database, boolean autocommit, String schema) {
DatabaseEvolutions evolutions = new DatabaseEvolutions(database.asScala(), schema);
evolutions.evolve(evolutions.resetScripts(), autocommit);
}
/**
* Cleanup evolutions for the given database.
*
* <p>This will run the down scripts for all the applied evolutions.
*
* @param database The database to apply the evolutions to.
* @param autocommit Whether autocommit should be used.
* @param schema The schema where all the play evolution tables are saved in
* @param metaTable Table to keep evolutions' meta data
*/
public static void cleanupEvolutions(
Database database, boolean autocommit, String schema, String metaTable) {
DatabaseEvolutions evolutions = new DatabaseEvolutions(database.asScala(), schema, metaTable);
evolutions.evolve(evolutions.resetScripts(), autocommit);
}
  /**
   * Cleanup evolutions for the given database, using the default (empty) schema.
   *
   * <p>This will run the down scripts for all the applied evolutions.
   *
   * @param database The database to apply the evolutions to.
   * @param autocommit Whether autocommit should be used.
   */
  public static void cleanupEvolutions(Database database, boolean autocommit) {
    cleanupEvolutions(database, autocommit, "");
  }
  /**
   * Cleanup evolutions for the given database, with autocommit enabled.
   *
   * <p>This will run the down scripts for all the applied evolutions.
   *
   * @param database The database to apply the evolutions to.
   * @param schema The schema where all the play evolution tables are saved in
   */
  public static void cleanupEvolutions(Database database, String schema) {
    cleanupEvolutions(database, true, schema);
  }
  /**
   * Cleanup evolutions for the given database, with autocommit enabled.
   *
   * <p>This will run the down scripts for all the applied evolutions.
   *
   * @param database The database to apply the evolutions to.
   * @param schema The schema where all the play evolution tables are saved in
   * @param metaTable Table to keep evolutions' meta data
   */
  public static void cleanupEvolutions(Database database, String schema, String metaTable) {
    cleanupEvolutions(database, true, schema, metaTable);
  }
  /**
   * Cleanup evolutions for the given database, with autocommit enabled and the default schema.
   *
   * <p>This will run the down scripts for all the applied evolutions.
   *
   * @param database The database to apply the evolutions to.
   */
  public static void cleanupEvolutions(Database database) {
    cleanupEvolutions(database, "");
  }
}
| |
/*
* Copyright 2013-2016, Kasra Faghihi
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.offbynull.portmapper.mappers.upnpigd;
import com.offbynull.portmapper.mapper.MappedPort;
import com.offbynull.portmapper.mapper.PortType;
import com.offbynull.portmapper.gateway.Bus;
import com.offbynull.portmapper.mapper.MapperIoUtils.BytesToResponseTransformer;
import com.offbynull.portmapper.mapper.MapperIoUtils.TcpRequest;
import static com.offbynull.portmapper.mapper.MapperIoUtils.performTcpRequests;
import com.offbynull.portmapper.mappers.upnpigd.externalmessages.AddPinholeUpnpIgdRequest;
import com.offbynull.portmapper.mappers.upnpigd.externalmessages.AddPinholeUpnpIgdResponse;
import com.offbynull.portmapper.mappers.upnpigd.externalmessages.DeletePinholeUpnpIgdRequest;
import com.offbynull.portmapper.mappers.upnpigd.externalmessages.DeletePinholeUpnpIgdResponse;
import com.offbynull.portmapper.mappers.upnpigd.externalmessages.UpdatePinholeUpnpIgdRequest;
import com.offbynull.portmapper.mappers.upnpigd.externalmessages.UpdatePinholeUpnpIgdResponse;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URL;
import java.util.Collections;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.Range;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Port mapper implementation that interfaces with a UPnP-IGD IPv6 firewall service.
* <p>
* Note that this port mapper doesn't care what the service type is. So long as the service type exposes AddPinhole, DeletePinhole, and
* RefreshPinhole actions (and defines them as they're defined in WANIPv6FirewallControl:1), this port mapper will be able to call those
* actions to expose ports.
* @author Kasra Faghihi
*/
public final class FirewallUpnpIgdPortMapper extends UpnpIgdPortMapper {
private static final Logger LOG = LoggerFactory.getLogger(FirewallUpnpIgdPortMapper.class);
private final InetSocketAddress controlUrlAddress;
/**
* Constructs a {@link FirewallUpnpIgdPortMapper} object.
* @param networkBus bus to network component
* @param internalAddress local address accessing gateway device
* @param controlUrl service control URL
* @param serverName server name (can be {@code null}
* @param serviceType service type
* @param externalPortRange external port range
* @param leaseDurationRange lease duration range
* @throws NullPointerException if any argument other than {@code severName} is {@code null}
* @throws IllegalArgumentException if {@code 0 > leaseDurationRange > 0xFFFFFFFFL || 0 > externalPortRange > 0xFFFFL} (note that
* a 0 lease duration means either default value or infinite, and a 0 external port means wildcard), or if {@code controlUrl}'s protocol
* was not {@code http}
*/
public FirewallUpnpIgdPortMapper(Bus networkBus, InetAddress internalAddress, URL controlUrl, String serverName, String serviceType,
Range<Long> externalPortRange, Range<Long> leaseDurationRange) {
super(networkBus, internalAddress, controlUrl, serverName, serviceType, externalPortRange, leaseDurationRange);
controlUrlAddress = getAddressFromUrl(controlUrl);
}
public MappedPort mapPort(PortType portType, int internalPort, int externalPort, long lifetime) throws InterruptedException {
LOG.debug("Attempting to map {} Internal:{} External:{} Lifetime:{}", portType, internalPort, externalPort, lifetime);
Validate.notNull(portType);
Validate.inclusiveBetween(1, 65535, internalPort);
Validate.inclusiveBetween(0L, Long.MAX_VALUE, lifetime);
Bus networkBus = getNetworkBus();
URL controlUrl = getControlUrl();
String serviceType = getServiceType();
InetAddress internalAddress = getInternalAddress();
// attempt to map 5 times -- first attempt should be 3 tries to map the externalPort passed in... anything after that is 1 attempt
// to map a randomized externalPort
long[] retryDurations = new long[] {5000L, 5000L, 5000L};
for (int i = 0; i < 5; i++) {
Range<Long> externalPortRange = getExternalPortRange();
Range<Long> leaseDurationRange = getLeaseDurationRange();
long leaseDuration;
if (leaseDurationRange.isBefore(lifetime)) {
leaseDuration = leaseDurationRange.getMaximum();
} else if (leaseDurationRange.isAfter(lifetime)) {
leaseDuration = leaseDurationRange.getMinimum();
} else {
leaseDuration = lifetime;
}
Validate.validState(externalPortRange.contains((long) externalPort),
"Router reports external port mappings as %s", externalPortRange);
TcpRequest mapHttpRequest = new TcpRequest(
internalAddress,
controlUrlAddress,
new AddPinholeUpnpIgdRequest(
controlUrl.getAuthority(),
controlUrl.getFile(),
serviceType,
null,
externalPort,
internalAddress,
internalPort,
portType,
leaseDuration),
new BasicRequestTransformer(),
new BytesToResponseTransformer() {
public Object create(byte[] buffer) {
return new AddPinholeUpnpIgdResponse(buffer);
}
});
performTcpRequests(
networkBus,
Collections.singleton(mapHttpRequest),
retryDurations);
if (mapHttpRequest.getResponse() != null) {
// server responded, so we're good to go
String key = ((AddPinholeUpnpIgdResponse) mapHttpRequest.getResponse()).getUniqueId();
MappedPort mappedPort = new FirewallMappedPort(key, internalPort, externalPort, portType, leaseDuration);
LOG.debug("Map successful {}", mappedPort);
return mappedPort;
}
// choose another external port for next try -- next try only make 1 attempt
retryDurations = new long[] {5000L};
externalPort = RandomUtils.nextInt(
externalPortRange.getMinimum().intValue(), // should never be < 1
externalPortRange.getMaximum().intValue() + 1); // should never be > 65535
}
throw new IllegalStateException();
}
public void unmapPort(MappedPort mappedPort) throws InterruptedException {
LOG.debug("Attempting to unmap {}", mappedPort);
Validate.notNull(mappedPort);
Validate.isTrue(mappedPort instanceof FirewallMappedPort);
Bus networkBus = getNetworkBus();
URL controlUrl = getControlUrl();
String serviceType = getServiceType();
String key = ((FirewallMappedPort) mappedPort).getKey();
InetAddress internalAddress = getInternalAddress();
TcpRequest httpRequest = new TcpRequest(
internalAddress,
controlUrlAddress,
new DeletePinholeUpnpIgdRequest(
controlUrl.getAuthority(),
controlUrl.getFile(),
serviceType,
key),
new BasicRequestTransformer(),
new BytesToResponseTransformer() {
public Object create(byte[] buffer) {
return new DeletePinholeUpnpIgdResponse(buffer);
}
});
performTcpRequests(
networkBus,
Collections.singleton(httpRequest),
5000L, 5000L, 5000L);
if (httpRequest.getResponse() == null) {
throw new IllegalStateException("No response/invalid response to unmapping");
}
LOG.debug("Unmap successful {}", mappedPort);
}
public MappedPort refreshPort(MappedPort mappedPort, long lifetime) throws InterruptedException {
LOG.debug("Attempting to refresh mapping {} for {}", mappedPort, lifetime);
Validate.notNull(mappedPort);
Validate.isTrue(mappedPort instanceof FirewallMappedPort);
Bus networkBus = getNetworkBus();
URL controlUrl = getControlUrl();
String serviceType = getServiceType();
Range<Long> leaseDurationRange = getLeaseDurationRange();
long leaseDuration;
if (leaseDurationRange.isBefore(lifetime)) {
leaseDuration = leaseDurationRange.getMaximum();
} else if (leaseDurationRange.isAfter(lifetime)) {
leaseDuration = leaseDurationRange.getMinimum();
} else {
leaseDuration = lifetime;
}
String key = ((FirewallMappedPort) mappedPort).getKey();
InetAddress internalAddress = getInternalAddress();
TcpRequest httpRequest = new TcpRequest(
internalAddress,
controlUrlAddress,
new UpdatePinholeUpnpIgdRequest(
controlUrl.getAuthority(),
controlUrl.getFile(),
serviceType,
key,
leaseDuration),
new BasicRequestTransformer(),
new BytesToResponseTransformer() {
public Object create(byte[] buffer) {
return new UpdatePinholeUpnpIgdResponse(buffer);
}
});
performTcpRequests(
networkBus,
Collections.singleton(httpRequest),
5000L, 5000L, 5000L);
if (httpRequest.getResponse() == null) {
throw new IllegalStateException("No response/invalid response to refresh");
}
FirewallMappedPort newMappedPort = new FirewallMappedPort(key, mappedPort.getInternalPort(), mappedPort.getExternalPort(),
mappedPort.getPortType(), leaseDuration);
LOG.warn("Mapping refreshed {}: ", mappedPort, newMappedPort);
return newMappedPort;
}
public String toString() {
return "FirewallUpnpIgdPortMapper{super=" + super.toString() + '}';
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.browsers;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.SmartList;
import com.intellij.util.xmlb.SkipDefaultValuesSerializationFilters;
import com.intellij.util.xmlb.XmlSerializer;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
@State(name = "WebBrowsersConfiguration", storages = @Storage("web-browsers.xml"))
public class WebBrowserManager extends SimpleModificationTracker implements PersistentStateComponent<Element> {
  private static final Logger LOG = Logger.getInstance(WebBrowserManager.class);
  // default standard browser ID must be constant across all IDE versions on all machines for all users
  private static final UUID PREDEFINED_CHROME_ID = UUID.fromString("98CA6316-2F89-46D9-A9E5-FA9E2B0625B3");
  // public, but only internal use
  public static final UUID PREDEFINED_FIREFOX_ID = UUID.fromString("A7BB68E0-33C0-4D6F-A81A-AAC1FDB870C8");
  private static final UUID PREDEFINED_SAFARI_ID = UUID.fromString("E5120D43-2C3F-47EF-9F26-65E539E05186");
  private static final UUID PREDEFINED_OPERA_ID = UUID.fromString("53E2F627-B1A7-4DFA-BFA7-5B83CC034776");
  private static final UUID PREDEFINED_YANDEX_ID = UUID.fromString("B1B2EC2C-20BD-4EE2-89C4-616DB004BCD4");
  private static final UUID PREDEFINED_EXPLORER_ID = UUID.fromString("16BF23D4-93E0-4FFC-BFD6-CB13575177B0");
  private static final List<ConfigurableWebBrowser> PREDEFINED_BROWSERS = Arrays.asList(
    new ConfigurableWebBrowser(PREDEFINED_CHROME_ID, BrowserFamily.CHROME),
    new ConfigurableWebBrowser(PREDEFINED_FIREFOX_ID, BrowserFamily.FIREFOX),
    new ConfigurableWebBrowser(PREDEFINED_SAFARI_ID, BrowserFamily.SAFARI),
    new ConfigurableWebBrowser(PREDEFINED_OPERA_ID, BrowserFamily.OPERA),
    new ConfigurableWebBrowser(PREDEFINED_YANDEX_ID, BrowserFamily.CHROME, "Yandex", SystemInfo.isWindows ? "browser" : (SystemInfo.isMac ? "Yandex" : "yandex"), false, BrowserFamily.CHROME.createBrowserSpecificSettings()),
    new ConfigurableWebBrowser(PREDEFINED_EXPLORER_ID, BrowserFamily.EXPLORER)
  );
  private List<ConfigurableWebBrowser> browsers;
  private boolean myShowBrowserHover = true;
  DefaultBrowserPolicy defaultBrowserPolicy = DefaultBrowserPolicy.SYSTEM;
  public WebBrowserManager() {
    browsers = new ArrayList<ConfigurableWebBrowser>(PREDEFINED_BROWSERS);
  }
  public static WebBrowserManager getInstance() {
    return ServiceManager.getService(WebBrowserManager.class);
  }
  public static boolean isYandexBrowser(@NotNull WebBrowser browser) {
    return browser.getFamily().equals(BrowserFamily.CHROME) && (browser.getId().equals(PREDEFINED_YANDEX_ID) || checkNameAndPath("Yandex", browser));
  }
  public static boolean isDartium(@NotNull WebBrowser browser) {
    return browser.getFamily().equals(BrowserFamily.CHROME) && checkNameAndPath("Dartium", browser);
  }
  static boolean checkNameAndPath(@NotNull String what, @NotNull WebBrowser browser) {
    if (StringUtil.containsIgnoreCase(browser.getName(), what)) {
      return true;
    }
    String path = browser.getPath();
    if (path != null) {
      int index = path.lastIndexOf('/');
      // only match against the file name component when the path has directories
      return index > 0 ? path.indexOf(what, index + 1) != -1 : path.contains(what);
    }
    return false;
  }
  boolean isPredefinedBrowser(@NotNull ConfigurableWebBrowser browser) {
    UUID id = browser.getId();
    for (ConfigurableWebBrowser predefinedBrowser : PREDEFINED_BROWSERS) {
      if (id.equals(predefinedBrowser.getId())) {
        return true;
      }
    }
    return false;
  }
  @NotNull
  public DefaultBrowserPolicy getDefaultBrowserPolicy() {
    return defaultBrowserPolicy;
  }
  @Override
  public Element getState() {
    Element state = new Element("state");
    if (defaultBrowserPolicy != DefaultBrowserPolicy.SYSTEM) {
      state.setAttribute("default", defaultBrowserPolicy.name().toLowerCase(Locale.ENGLISH));
    }
    if (!myShowBrowserHover) {
      state.setAttribute("showHover", "false");
    }
    for (ConfigurableWebBrowser browser : browsers) {
      Element entry = new Element("browser");
      entry.setAttribute("id", browser.getId().toString());
      entry.setAttribute("name", browser.getName());
      entry.setAttribute("family", browser.getFamily().name());
      String path = browser.getPath();
      // only persist a path that differs from the family default
      if (path != null && !path.equals(browser.getFamily().getExecutionPath())) {
        entry.setAttribute("path", path);
      }
      if (!browser.isActive()) {
        entry.setAttribute("active", "false");
      }
      BrowserSpecificSettings specificSettings = browser.getSpecificSettings();
      if (specificSettings != null) {
        Element settingsElement = new Element("settings");
        XmlSerializer.serializeInto(specificSettings, settingsElement, new SkipDefaultValuesSerializationFilters());
        if (!JDOMUtil.isEmpty(settingsElement)) {
          entry.addContent(settingsElement);
        }
      }
      state.addContent(entry);
    }
    return state;
  }
  @Nullable
  private static BrowserFamily readFamily(String value) {
    try {
      return BrowserFamily.valueOf(value);
    }
    catch (RuntimeException e) {
      LOG.warn(e);
      // fall back to a case-insensitive match against the display names
      for (BrowserFamily family : BrowserFamily.values()) {
        if (family.getName().equalsIgnoreCase(value)) {
          return family;
        }
      }
      return null;
    }
  }
  @Nullable
  private static UUID readId(String value, @NotNull BrowserFamily family, @NotNull List<ConfigurableWebBrowser> existingBrowsers) {
    if (StringUtil.isEmpty(value)) {
      UUID id;
      switch (family) {
        case CHROME:
          id = PREDEFINED_CHROME_ID;
          break;
        case EXPLORER:
          id = PREDEFINED_EXPLORER_ID;
          break;
        case FIREFOX:
          id = PREDEFINED_FIREFOX_ID;
          break;
        case OPERA:
          id = PREDEFINED_OPERA_ID;
          break;
        case SAFARI:
          id = PREDEFINED_SAFARI_ID;
          break;
        default:
          return null;
      }
      for (ConfigurableWebBrowser browser : existingBrowsers) {
        // BUGFIX: was `browser.getId() == id` — ids read from XML are created by
        // UUID.fromString() and are distinct instances, so reference equality missed
        // duplicates of predefined browsers; compare by value instead.
        if (id.equals(browser.getId())) {
          // duplicated entry, skip
          return null;
        }
      }
      return id;
    }
    else {
      try {
        return UUID.fromString(value);
      }
      catch (Exception e) {
        LOG.warn(e);
      }
    }
    return null;
  }
  @Override
  public void loadState(Element element) {
    String defaultValue = element.getAttributeValue("default");
    if (!StringUtil.isEmpty(defaultValue)) {
      try {
        defaultBrowserPolicy = DefaultBrowserPolicy.valueOf(defaultValue.toUpperCase(Locale.ENGLISH));
      }
      catch (IllegalArgumentException e) {
        LOG.warn(e);
      }
    }
    myShowBrowserHover = !"false".equals(element.getAttributeValue("showHover"));
    List<ConfigurableWebBrowser> list = new ArrayList<ConfigurableWebBrowser>();
    for (Element child : element.getChildren("browser")) {
      BrowserFamily family = readFamily(child.getAttributeValue("family"));
      if (family == null) {
        continue;
      }
      UUID id = readId(child.getAttributeValue("id"), family, list);
      if (id == null) {
        continue;
      }
      Element settingsElement = child.getChild("settings");
      BrowserSpecificSettings specificSettings = family.createBrowserSpecificSettings();
      if (specificSettings != null && settingsElement != null) {
        try {
          XmlSerializer.deserializeInto(specificSettings, settingsElement);
        }
        catch (Exception e) {
          LOG.warn(e);
        }
      }
      String activeValue = child.getAttributeValue("active");
      String path = StringUtil.nullize(child.getAttributeValue("path"), true);
      if (path == null) {
        path = family.getExecutionPath();
      }
      list.add(new ConfigurableWebBrowser(id,
                                          family,
                                          StringUtil.notNullize(child.getAttributeValue("name"), family.getName()),
                                          path,
                                          activeValue == null || Boolean.parseBoolean(activeValue),
                                          specificSettings));
    }
    // add removed/new predefined browsers
    int n = list.size();
    pb: for (ConfigurableWebBrowser predefinedBrowser : PREDEFINED_BROWSERS) {
      //noinspection ForLoopReplaceableByForEach
      for (int i = 0; i < n; i++) {
        if (list.get(i).getId().equals(predefinedBrowser.getId())) {
          continue pb;
        }
      }
      list.add(predefinedBrowser);
    }
    setList(list);
  }
  @NotNull
  public List<WebBrowser> getBrowsers() {
    return Collections.<WebBrowser>unmodifiableList(browsers);
  }
  @NotNull
  List<ConfigurableWebBrowser> getList() {
    return browsers;
  }
  void setList(@NotNull List<ConfigurableWebBrowser> value) {
    browsers = value;
    incModificationCount();
  }
  @NotNull
  public List<WebBrowser> getActiveBrowsers() {
    return getBrowsers(Conditions.<WebBrowser>alwaysTrue(), true);
  }
  @NotNull
  public List<WebBrowser> getBrowsers(@NotNull Condition<WebBrowser> condition) {
    return getBrowsers(condition, true);
  }
  @NotNull
  public List<WebBrowser> getBrowsers(@NotNull Condition<WebBrowser> condition, boolean onlyActive) {
    List<WebBrowser> result = new SmartList<WebBrowser>();
    for (ConfigurableWebBrowser browser : browsers) {
      if ((!onlyActive || browser.isActive()) && condition.value(browser)) {
        result.add(browser);
      }
    }
    return result;
  }
  public void setBrowserSpecificSettings(@NotNull WebBrowser browser, @NotNull BrowserSpecificSettings specificSettings) {
    ((ConfigurableWebBrowser)browser).setSpecificSettings(specificSettings);
  }
  public void setBrowserPath(@NotNull WebBrowser browser, @Nullable String path, boolean isActive) {
    ((ConfigurableWebBrowser)browser).setPath(path);
    ((ConfigurableWebBrowser)browser).setActive(isActive);
  }
  public WebBrowser addBrowser(final @NotNull UUID id,
                               final @NotNull BrowserFamily family,
                               final @NotNull String name,
                               final @Nullable String path,
                               final boolean active,
                               final BrowserSpecificSettings specificSettings) {
    final ConfigurableWebBrowser browser = new ConfigurableWebBrowser(id, family, name, path, active, specificSettings);
    browsers.add(browser);
    incModificationCount();
    return browser;
  }
  @Nullable
  private static UUID parseUuid(@NotNull String id) {
    // cheap pre-check: a UUID string always contains dashes
    if (id.indexOf('-') == -1) {
      return null;
    }
    try {
      return UUID.fromString(id);
    }
    catch (IllegalArgumentException ignored) {
      return null;
    }
  }
  /**
   * @param idOrFamilyName UUID or, due to backward compatibility, browser family name or JS debugger engine ID
   */
  @Nullable
  public WebBrowser findBrowserById(@Nullable String idOrFamilyName) {
    if (StringUtil.isEmpty(idOrFamilyName)) {
      return null;
    }
    UUID id = parseUuid(idOrFamilyName);
    if (id == null) {
      for (ConfigurableWebBrowser browser : browsers) {
        if (browser.getFamily().name().equalsIgnoreCase(idOrFamilyName) ||
            browser.getFamily().getName().equalsIgnoreCase(idOrFamilyName)) {
          return browser;
        }
      }
      return null;
    }
    for (ConfigurableWebBrowser browser : browsers) {
      if (browser.getId().equals(id)) {
        return browser;
      }
    }
    return null;
  }
  @NotNull
  public WebBrowser getFirstBrowser(@NotNull BrowserFamily family) {
    for (ConfigurableWebBrowser browser : browsers) {
      if (browser.isActive() && family.equals(browser.getFamily())) {
        return browser;
      }
    }
    for (ConfigurableWebBrowser browser : browsers) {
      if (family.equals(browser.getFamily())) {
        return browser;
      }
    }
    throw new IllegalStateException("Must be at least one browser per family");
  }
  public boolean isActive(@NotNull WebBrowser browser) {
    return !(browser instanceof ConfigurableWebBrowser) || ((ConfigurableWebBrowser)browser).isActive();
  }
  @Nullable
  public WebBrowser getFirstActiveBrowser() {
    for (ConfigurableWebBrowser browser : browsers) {
      if (browser.isActive() && browser.getPath() != null) {
        return browser;
      }
    }
    return null;
  }
  public void setShowBrowserHover(boolean showBrowserHover) {
    myShowBrowserHover = showBrowserHover;
  }
  public boolean isShowBrowserHover() {
    return myShowBrowserHover;
  }
}
| |
/*
* EarthTimeTracerDialog.java
*
*
* Copyright 2006-2015 James F. Bowring and www.Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.dialogs;
import org.earthtime.dialogs.DialogEditor;
/**
*
* @author James F. Bowring
*/
public class EarthTimeTracerDialog extends DialogEditor {
// Fields
private String[] tracerList;
private String selectedTracerName;
/** Creates new form EarthTimeTracerDialog
* @param parent
* @param modal
* @param tracerList
*/
public EarthTimeTracerDialog
(java.awt.Frame parent,
boolean modal,
String[] tracerList) {
super(parent, modal);
setLocationRelativeTo(parent);
setTracerList(tracerList);
setSelectedTracerName("");
initComponents();
tracers_list.setListData(tracerList);
}
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
tracers_scrollPane = new javax.swing.JScrollPane();
tracers_list = new javax.swing.JList<String>();
buttonsPanel = new javax.swing.JPanel();
save_button = new javax.swing.JButton();
close_button = new javax.swing.JButton();
chooseTracer_label = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
setTitle("Current Tracers from Earth-Time.org");
setResizable(false);
tracers_list.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION);
tracers_list.addMouseListener(new java.awt.event.MouseAdapter() {
public void mouseClicked(java.awt.event.MouseEvent evt) {
tracers_listMouseClicked(evt);
}
});
tracers_scrollPane.setViewportView(tracers_list);
buttonsPanel.setBackground(new java.awt.Color(252, 236, 235));
buttonsPanel.setBorder(javax.swing.BorderFactory.createBevelBorder(javax.swing.border.BevelBorder.LOWERED));
save_button.setForeground(new java.awt.Color(255, 51, 0));
save_button.setText("OK");
save_button.setMargin(new java.awt.Insets(0, 1, 0, 1));
save_button.setPreferredSize(new java.awt.Dimension(140, 23));
save_button.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
save_buttonActionPerformed(evt);
}
});
close_button.setForeground(new java.awt.Color(255, 51, 0));
close_button.setText("Cancel");
close_button.setMargin(new java.awt.Insets(0, 1, 0, 1));
close_button.setPreferredSize(new java.awt.Dimension(140, 23));
close_button.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
close_buttonActionPerformed(evt);
}
});
org.jdesktop.layout.GroupLayout buttonsPanelLayout = new org.jdesktop.layout.GroupLayout(buttonsPanel);
buttonsPanel.setLayout(buttonsPanelLayout);
buttonsPanelLayout.setHorizontalGroup(
buttonsPanelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(buttonsPanelLayout.createSequentialGroup()
.add(17, 17, 17)
.add(save_button, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 115, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 80, Short.MAX_VALUE)
.add(close_button, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 115, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
buttonsPanelLayout.setVerticalGroup(
buttonsPanelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(buttonsPanelLayout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
.add(save_button, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 28, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.add(close_button, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 28, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
);
chooseTracer_label.setText("Choose a tracer to import from Earth-Time.org:");
org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(buttonsPanel, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.add(layout.createSequentialGroup()
.add(55, 55, 55)
.add(tracers_scrollPane, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 238, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addContainerGap(55, Short.MAX_VALUE))
.add(layout.createSequentialGroup()
.add(chooseTracer_label, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 328, Short.MAX_VALUE)
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
.add(org.jdesktop.layout.GroupLayout.TRAILING, layout.createSequentialGroup()
.addContainerGap()
.add(chooseTracer_label)
.add(26, 26, 26)
.add(tracers_scrollPane, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED, 73, Short.MAX_VALUE)
.add(buttonsPanel, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void close_buttonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_close_buttonActionPerformed
close();
}//GEN-LAST:event_close_buttonActionPerformed
    // GUI-builder-generated handler: the Save button records the current
    // list selection (OK()) and then dismisses the dialog (close()).
    private void save_buttonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_save_buttonActionPerformed
        OK();
        close();
    }//GEN-LAST:event_save_buttonActionPerformed
    // GUI-builder-generated handler: double-clicking a list row has the
    // same effect as pressing Save (record selection, then close).
    private void tracers_listMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tracers_listMouseClicked
        if (evt.getClickCount() == 2){
            OK();
            close();
        }
    }//GEN-LAST:event_tracers_listMouseClicked
private void OK(){
if (tracers_list.getSelectedIndex() > -1)
setSelectedTracerName((String)tracers_list.getSelectedValue());
}
    /**
     * Returns the tracer names offered in the selection list.
     *
     * @return the backing array itself, not a copy — NOTE(review): callers
     *         can mutate the dialog's state through it; confirm whether a
     *         defensive copy is wanted here.
     */
    public String[] getTracerList() {
        return tracerList;
    }
    /**
     * Sets the tracer names to offer in the selection list.
     *
     * @param tracerList names to display; stored by reference, not copied
     */
    public void setTracerList(String[] tracerList) {
        this.tracerList = tracerList;
    }
    /**
     * Returns the tracer name the user last confirmed (via Save or
     * double-click), as recorded by {@code OK()}.
     *
     * @return the selected tracer name, or whatever value the field holds
     *         if no selection has been confirmed yet
     */
    public String getSelectedTracerName() {
        return selectedTracerName;
    }
    /**
     * Stores the confirmed tracer selection.
     *
     * @param selectedTracerName the tracer name to record
     */
    public void setSelectedTracerName(String selectedTracerName) {
        this.selectedTracerName = selectedTracerName;
    }
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JPanel buttonsPanel;
private javax.swing.JLabel chooseTracer_label;
private javax.swing.JButton close_button;
private javax.swing.JButton save_button;
private javax.swing.JList<String> tracers_list;
private javax.swing.JScrollPane tracers_scrollPane;
// End of variables declaration//GEN-END:variables
}
| |
package net.wolfesoftware.jax;
import java.lang.reflect.Field;
import java.util.*;
import java.util.Map.Entry;
import net.wolfesoftware.jax.util.Util;
/**
 * Command-line options for the jax compiler. Each public field is an option
 * value paired with a {@code DEFAULT_}-prefixed constant of the same name.
 * Options are discovered reflectively, so adding a new {@code String[]} or
 * {@code boolean} field (plus its {@code DEFAULT_} constant) makes it
 * parseable automatically.
 */
public class JaxcOptions
{
    public static final String[] DEFAULT_classPath = { "." };
    /** Search path for referenced classes; set via "-classPath=dir" or "-cp dir". */
    public String[] classPath = DEFAULT_classPath;

    public static final boolean DEFAULT_javaCompatabilityMode = false;
    /** Flag option; set via "-javaCompatabilityMode" or "-javaMode". */
    public boolean javaCompatabilityMode = DEFAULT_javaCompatabilityMode;

    /** Short switch names mapped to the canonical field names above. */
    private static final HashMap<String, String> aliases = new HashMap<String, String>();
    static {
        aliases.put("cp", "classPath");
        aliases.put("javaMode", "javaCompatabilityMode");
    }

    /**
     * Names of all recognized options: every declared String[]/boolean
     * field except the DEFAULT_* constants.
     */
    private static final HashSet<String> fieldNames = new HashSet<String>();
    static {
        for (Field field : JaxcOptions.class.getDeclaredFields()) {
            Class<?> fieldType = field.getType();
            if (!(fieldType == String[].class || fieldType == boolean.class))
                continue;
            String fieldName = field.getName();
            if (fieldName.startsWith("DEFAULT_"))
                continue;
            fieldNames.add(fieldName);
        }
    }

    /**
     * Renders every option whose value differs from its default in
     * command-line form, e.g. {@code -classPath=lib -javaCompatabilityMode}.
     */
    @Override
    public String toString()
    {
        ArrayList<String> options = new ArrayList<String>();
        for (String fieldName : fieldNames) {
            try {
                Field field = JaxcOptions.class.getField(fieldName);
                Field defaultField = JaxcOptions.class.getField("DEFAULT_" + fieldName);
                Class<?> fieldType = defaultField.getType();
                if (fieldType == String[].class) {
                    String[] values = (String[])field.get(this);
                    String[] defaultValue = (String[])defaultField.get(null);
                    if (Arrays.equals(values, defaultValue))
                        continue;
                    for (String value : values)
                        options.add("-" + fieldName + "=" + value);
                } else if (fieldType == boolean.class) {
                    boolean value = field.getBoolean(this);
                    boolean defaultBooleanValue = defaultField.getBoolean(this);
                    if (value == defaultBooleanValue)
                        continue;
                    options.add("-" + fieldName);
                } else {
                    // fieldNames is restricted to String[]/boolean at class load,
                    // so this branch is unreachable (was "throw null").
                    throw new AssertionError("unexpected option type: " + fieldType);
                }
            } catch (NoSuchFieldException e) {
                throw new RuntimeException(e);
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        }
        return Util.join(options, " ");
    }

    /**
     * Removes options from args and returns an options object.
     * @param args list of string arguments like from <code>main(String[] args)</code>.
     *        Options are removed from the list leaving only non-option arguments. Array-based
     *        List implementations are recommended as random access is performed and 'removal'
     *        is actually implemented as a {@link List#clear() clear} and
     *        {@link List#addAll(Collection) addAll}.
     * @return a {@link JaxcOptions} object representing the options removed from args
     * @throws IllegalArgumentException if a parameter is unrecognized or needs a value it didn't get.
     */
    @SuppressWarnings("unchecked")
    public static JaxcOptions parse(List<String> args) throws IllegalArgumentException
    {
        // results
        HashMap<String, Object> argsMap = new HashMap<String, Object>();
        ArrayList<String> keepArgs = new ArrayList<String>();
        // error collecting: all problems are gathered so one exception can
        // report every bad argument at once
        LinkedList<String> unknownArgs = new LinkedList<String>();
        LinkedList<String> needValueArgs = new LinkedList<String>();
        LinkedList<String> needNoValueArgs = new LinkedList<String>();
        LinkedList<String> duplicateArgs = new LinkedList<String>();
        // analyze all the arguments
        for (int i = 0; i < args.size(); i++) {
            String arg = args.get(i);
            if (!arg.startsWith("-")) {
                // not a switch. leave this one in args
                keepArgs.add(arg);
                continue;
            }
            // split "-name=value" on the first equals sign
            int equalsIndex = arg.indexOf('=');
            String argName, argValue;
            if (equalsIndex != -1) {
                argName = arg.substring(1, equalsIndex);
                argValue = arg.substring(equalsIndex + 1);
            } else {
                argName = arg.substring(1);
                argValue = null;
            }
            // check name and resolve any alias
            if (!fieldNames.contains(argName)) {
                String realArgName = aliases.get(argName);
                if (realArgName == null) {
                    // bad name
                    unknownArgs.add(argName);
                    continue;
                }
                argName = realArgName;
            }
            // String[] options need a value; boolean options must not have one
            Class<?> fieldType = getFieldType(argName);
            boolean needsValue;
            if (fieldType == String[].class)
                needsValue = true;
            else if (fieldType == boolean.class)
                needsValue = false;
            else
                throw new AssertionError("unexpected option type: " + fieldType);
            // get argValue from next arg if needed
            if (needsValue) {
                if (argValue == null) {
                    if (i == args.size() - 1) {
                        needValueArgs.add(arg);
                        continue;
                    }
                    String nextArg = args.get(i + 1);
                    if (nextArg.startsWith("-")) {
                        needValueArgs.add(arg);
                        continue;
                    }
                    argValue = nextArg;
                    i++; // consume the value argument as well
                }
            } else {
                if (argValue != null) {
                    // value specified even though you don't need one.
                    needNoValueArgs.add(arg);
                    continue;
                }
            }
            // try to store the value
            Object cachedValue = argsMap.get(argName);
            if (fieldType == String[].class) {
                // String[] options may repeat; values accumulate in order
                LinkedList<String> argValues = (LinkedList<String>)cachedValue;
                if (argValues == null) {
                    argValues = new LinkedList<String>();
                    argsMap.put(argName, argValues);
                }
                argValues.add(argValue);
            } else if (fieldType == boolean.class) {
                if (cachedValue != null) {
                    duplicateArgs.add(arg);
                    continue;
                }
                argsMap.put(argName, true);
            } else
                throw new AssertionError("unexpected option type: " + fieldType);
        }
        // report problems
        ArrayList<String> errorMessages = new ArrayList<String>();
        for (String unknownArg : unknownArgs)
            errorMessages.add("Unknown option: " + unknownArg);
        for (String needValueArg : needValueArgs)
            errorMessages.add("Option needs value: " + needValueArg);
        for (String needNoValueArg : needNoValueArgs)
            errorMessages.add("Option doesn't take value: " + needNoValueArg);
        for (String duplicateArg : duplicateArgs)
            errorMessages.add("Duplicate option: " + duplicateArg);
        if (!errorMessages.isEmpty())
            throw new IllegalArgumentException(Util.join(errorMessages, "\n"));
        // remove all the switches and leave only the non-switches.
        args.clear();
        args.addAll(keepArgs);
        // create an options object, set the fields as needed, and return it
        JaxcOptions options = new JaxcOptions();
        try {
            for (Entry<String, Object> kvp : argsMap.entrySet()) {
                Field field = JaxcOptions.class.getField(kvp.getKey());
                Class<?> fieldType = field.getType();
                if (fieldType == String[].class) {
                    LinkedList<String> valueList = (LinkedList<String>)kvp.getValue();
                    field.set(options, valueList.toArray(new String[valueList.size()]));
                } else if (fieldType == boolean.class) {
                    field.set(options, true);
                } else
                    throw new AssertionError("unexpected option type: " + fieldType);
            }
        } catch (NoSuchFieldException e) {
            throw new RuntimeException(e);
        } catch (IllegalAccessException e) {
            throw new RuntimeException(e);
        }
        return options;
    }

    /** Reflectively looks up the declared type of a public option field. */
    private static Class<?> getFieldType(String name)
    {
        try {
            return JaxcOptions.class.getField(name).getType();
        } catch (NoSuchFieldException e) {
            throw new RuntimeException(e);
        }
    }
}
| |
package com.nsysmon.demo;
import com.nsysmon.NSysMon;
import com.nsysmon.data.ACorrelationId;
import com.nsysmon.measure.ASimpleMeasurement;
import com.nsysmon.measure.jdbc.NSysMonDataSource;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.sql.DataSource;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.*;
import java.util.Random;
import java.util.UUID;
import java.util.logging.Logger;
/**
* @author arno
*/
/**
 * Demo servlet that generates N-SysMon measurement data: nested
 * measurements, JDBC queries against an in-memory H2 database, correlation
 * flows, and (unless "/small" is requested) a huge measurement tree.
 *
 * @author arno
 */
public class AppServlet extends HttpServlet {
    private static final DataSource dataSource = createDataSource();

    // Kept open for the lifetime of the class so the in-memory H2 database
    // (and the table created below) survives between requests.
    static Connection conn;
    static {
        try {
            // store__ the connection to keep the in-memory database
            conn = getConnection();
            conn.createStatement().execute("create table A (oid number primary key)");
            conn.commit();
        } catch (SQLException e) {
            // No printStackTrace(): the rethrown exception already carries
            // the full stack trace; logging it here just duplicated output.
            throw new RuntimeException(e);
        }
    }

    @Override protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        // "/small" in the URL suppresses the huge measurement tree at the end.
        boolean onlySmallTree = req.getRequestURL().toString().contains("/small");
        final PrintWriter out = resp.getWriter();
        out.println("<html><head><title>N-SysMon demo content</title></head><body>");
        out.println("<h1>N-SysMon demo content</h1>");
        if (onlySmallTree) {
            out.println("generating small content");
        }else{
            out.println("generating huge content");
        }
        out.println("</body></html>");

        // Explicit measurement bracketing the block of nested measurements
        // below; finished after they all complete.
        final ASimpleMeasurement parMeasurement = NSysMon.get().start("parallel", false);
        sleep();
        NSysMon.get().measure("a", m -> {
            return sleep();
        });
        NSysMon.get().measure("b", m -> {
            doQuery(); doQuery(); doQuery(); return sleep();
        });
        NSysMon.get().measure("q", m -> {
            doQuery(); doQuery(); doQuery(); doQuery(); doQuery(); doQuery(); doQuery(); doQuery(); return sleep();
        });
        NSysMon.get().measure("b", m -> {
            return sleep();
        });
        NSysMon.get().measure("b", m -> {
            return sleep();
        });
        NSysMon.get().measure("b", m -> {
            return sleep();
        });
        NSysMon.get().measure("b", m -> {
            return sleep();
        });
        NSysMon.get().measure("a", m -> {
            return sleep();
        });
        NSysMon.get().measure("a", m -> {
            doQuery(); return sleep();
        });
        NSysMon.get().measure("b", m -> {
            NSysMon.get().measure("x", m1 -> {
                doQuery(); return sleep();
            });
            doQuery(); return sleep();
        });
        parMeasurement.finish();

        NSysMon.get().measure("c", m -> {
            return sleep();
        });
        NSysMon.get().measure("d", m -> {
            doQueryWithMultipleParameters();
            return sleep();
        });
        correlations("e");
        if (!onlySmallTree) {
            hugeTree(8, 6);
        }
    }

    /**
     * Emits three example measurement trees demonstrating correlation
     * flows of increasing branching (chain, single fork, nested forks).
     */
    private void correlations(String nameOfStartingNode) {
        // mainReason -> subReason -> subReason
        final String mainReasonT1_1 = UUID.randomUUID().toString();
        final String subReasonT1_1 = UUID.randomUUID().toString();
        final String subReasonT1_1_1 = UUID.randomUUID().toString();
        NSysMon.get().measure(nameOfStartingNode + 0, m1 -> {
            NSysMon.get().startFlow(new ACorrelationId("mainReasonT1_1", mainReasonT1_1, null));
            NSysMon.get().measure(nameOfStartingNode + 1, m2 -> {
                NSysMon.get().joinFlow(new ACorrelationId("subReasonT1_1", subReasonT1_1, mainReasonT1_1));
                NSysMon.get().measure(nameOfStartingNode + 2, m3 -> {
                    NSysMon.get().joinFlow(new ACorrelationId("subReasonT1_1_1", subReasonT1_1_1, subReasonT1_1));
                });
            });
        });

        // mainReason -> subReason -> subReason1
        //                         -> subReason2
        final String mainReasonT2_1 = UUID.randomUUID().toString();
        final String subReasonT2_1 = UUID.randomUUID().toString();
        final String subReasonT2_1_1 = UUID.randomUUID().toString();
        final String subReasonT2_1_2 = UUID.randomUUID().toString();
        NSysMon.get().measure(nameOfStartingNode + 1, m1 -> {
            NSysMon.get().startFlow(new ACorrelationId("mainReasonT2_1", mainReasonT2_1, null));
            NSysMon.get().measure(nameOfStartingNode + 2, m2 -> {
                NSysMon.get().joinFlow(new ACorrelationId("subReasonT2_1", subReasonT2_1, mainReasonT2_1));
                NSysMon.get().measure(nameOfStartingNode + 3, m3 -> {
                    NSysMon.get().joinFlow(new ACorrelationId("subReasonT2_1_1", subReasonT2_1_1, subReasonT2_1));
                    NSysMon.get().joinFlow(new ACorrelationId("subReasonT2_1_2", subReasonT2_1_2, subReasonT2_1));
                });
            });
        });

        // mainReason -> subReason -> subReason1 -> subReason1_1
        //                         -> subReason2 -> subReason2_1
        //                         -> subReason2 -> subReason2_2
        final String mainReasonT3_1 = UUID.randomUUID().toString();
        final String subReasonT3_1 = UUID.randomUUID().toString();
        final String subReasonT3_2 = UUID.randomUUID().toString();
        final String subReasonT3_2_1 = UUID.randomUUID().toString();
        final String subReasonT3_1_1 = UUID.randomUUID().toString();
        final String subReasonT3_2_2 = UUID.randomUUID().toString();
        NSysMon.get().measure(nameOfStartingNode + 1, m1 -> {
            NSysMon.get().startFlow(new ACorrelationId("mainReasonT3_1", mainReasonT3_1, null));
            NSysMon.get().measure(nameOfStartingNode + 2, m2 -> {
                NSysMon.get().joinFlow(new ACorrelationId("subReasonT3_1", subReasonT3_1, mainReasonT3_1));
                NSysMon.get().joinFlow(new ACorrelationId("subReasonT3_2", subReasonT3_2, mainReasonT3_1));
                NSysMon.get().measure(nameOfStartingNode + 3, m3 -> {
                    NSysMon.get().joinFlow(new ACorrelationId("subReasonT3_1_1", subReasonT3_1_1, subReasonT3_1));
                    NSysMon.get().joinFlow(new ACorrelationId("subReasonT3_2_1", subReasonT3_2_1, subReasonT3_2));
                    NSysMon.get().joinFlow(new ACorrelationId("subReasonT3_2_2", subReasonT3_2_2, subReasonT3_2));
                });
            });
        });
    }

    /** Recursively emits a tree of nested measurements, width^depth leaves. */
    private void hugeTree(final int width, final int depth) {
        for(int i=0; i<width; i++) {
            if(depth > 0) {
                NSysMon.get().measure("Q-" + depth + "-" + i, m -> {
                    hugeTree(width, depth-1);
                });
            }
        }
    }

    /**
     * Sleeps 20-39 ms to simulate work.
     *
     * @return always null, so it can serve as a lambda's return value
     */
    private Object sleep() {
        try {
            Thread.sleep(20 + new Random().nextInt(20));
        } catch (InterruptedException exc) {
            // Restore the interrupt status before translating the exception,
            // so callers can still observe the interruption.
            Thread.currentThread().interrupt();
            throw new RuntimeException(exc);
        }
        return null;
    }

    /** Runs a fixed single-parameter SELECT so a JDBC measurement is recorded. */
    private void doQuery() {
        // try-with-resources replaces the original nested try/finally; the
        // close order (ResultSet, Statement, Connection) is preserved.
        try (Connection connection = getConnection()) {
            try (PreparedStatement ps = connection.prepareStatement("select * from A where oid < ? and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1 and 1=1")) {
                ps.setLong(1, 25);
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        // Just ignore the result, goal is only to generate the sql
                    }
                }
            } finally {
                // commit unconditionally, matching the original finally block
                connection.commit();
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** Runs a SELECT with parameters of several JDBC types, for the demo data. */
    private void doQueryWithMultipleParameters() {
        try (Connection connection = getConnection()) {
            try (PreparedStatement ps = connection.prepareStatement("select * from A where oid < ? and oid < ? and oid < ? and 1=? and 2=? and 3=? and 4=? and 5=? and 6=?")) {
                ps.setLong(1, 25);
                ps.setLong(2, 25);
                ps.setLong(3, 25);
                ps.setLong(4, 1);
                ps.setLong(5, 2);
                ps.setLong(6, 3);
                ps.setString(7, "str4");
                ps.setInt(8, 5);
                ps.setByte(9, (byte)6);
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        // Just ignore the result, goal is only to generate the sql
                    }
                }
            } finally {
                // commit unconditionally, matching the original finally block
                connection.commit();
            }
        } catch (SQLException e) {
            throw new RuntimeException(e);
        }
    }

    /** Obtains a connection from the N-SysMon-instrumented DataSource. */
    private static Connection getConnection() throws SQLException {
        return dataSource.getConnection();
    }

    /**
     * Builds an N-SysMon-instrumented DataSource around a minimal inner
     * DataSource that hands out fresh H2 in-memory connections with
     * auto-commit disabled. Only {@code getConnection()} is really used.
     */
    private static DataSource createDataSource() {
        final DataSource inner = new DataSource() {
            @Override public Connection getConnection() throws SQLException {
                final Connection result = DriverManager.getConnection("jdbc:h2:mem:demo", "sa", "");
                result.setAutoCommit(false);
                return result;
            }
            @Override
            public Connection getConnection(String username, String password) throws SQLException {
                return null; // not used by the demo
            }
            @Override
            public PrintWriter getLogWriter() throws SQLException {
                return null; // not used by the demo
            }
            @Override
            public void setLogWriter(PrintWriter out) throws SQLException {
                // not used by the demo
            }
            @Override
            public void setLoginTimeout(int seconds) throws SQLException {
                // not used by the demo
            }
            @Override
            public int getLoginTimeout() throws SQLException {
                return 0; // not used by the demo
            }
            @Override
            public Logger getParentLogger() throws SQLFeatureNotSupportedException {
                return null; // not used by the demo
            }
            @Override
            public <T> T unwrap(Class<T> iface) throws SQLException {
                return null; // not used by the demo
            }
            @Override
            public boolean isWrapperFor(Class<?> iface) throws SQLException {
                return false; // not used by the demo
            }
        };
        return new NSysMonDataSource(inner, "234", NSysMon.get());
    }
}
| |
package ca.uhn.fhir.rest.server.interceptor;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.containsStringIgnoringCase;
import static org.hamcrest.Matchers.matchesPattern;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.stringContainsInOrder;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.TimeUnit;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.*;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.springframework.web.cors.CorsConfiguration;
import com.phloc.commons.collections.iterate.ArrayEnumeration;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.api.BundleInclusionRule;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.dstu2.composite.HumanNameDt;
import ca.uhn.fhir.model.dstu2.composite.IdentifierDt;
import ca.uhn.fhir.model.dstu2.resource.*;
import ca.uhn.fhir.model.dstu2.resource.OperationOutcome.Issue;
import ca.uhn.fhir.model.dstu2.valueset.IdentifierUseEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.UriDt;
import ca.uhn.fhir.rest.annotation.*;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.server.*;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.*;
public class ResponseHighlightingInterceptorTest {
    // Interceptor under test; its header-display flags are reset in before().
    private static ResponseHighlighterInterceptor ourInterceptor = new ResponseHighlighterInterceptor();
    private static CloseableHttpClient ourClient;
    private static FhirContext ourCtx = FhirContext.forDstu2();
    private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(ResponseHighlightingInterceptorTest.class);
    // NOTE(review): port/server/servlet are presumably initialized in a
    // @BeforeClass method outside this view — confirm.
    private static int ourPort;
    private static Server ourServer;
    private static RestfulServer ourServlet;
    @AfterClass
    public static void afterClassClearContext() {
        // Clear static fields held by HAPI test utilities so state does not
        // leak into other test classes in the same JVM.
        TestUtil.clearAllStaticFieldsForUnitTest();
    }
    @Before
    public void before() {
        // Reset the shared interceptor's header-display flags to a fresh
        // instance's defaults, so tests that toggle them don't affect others.
        ourInterceptor.setShowRequestHeaders(new ResponseHighlighterInterceptor().isShowRequestHeaders());
        ourInterceptor.setShowResponseHeaders(new ResponseHighlighterInterceptor().isShowResponseHeaders());
    }
/**
* See #464
*/
@Test
public void testPrettyPrintDefaultsToTrue() throws Exception {
ourServlet.setDefaultPrettyPrint(false);
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1");
httpGet.addHeader("Accept", "text/html");
HttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertThat(responseContent, (stringContainsInOrder("<body>", "<pre>", "<div", "</pre>")));
}
/**
* See #464
*/
@Test
public void testPrettyPrintDefaultsToTrueWithExplicitTrue() throws Exception {
ourServlet.setDefaultPrettyPrint(false);
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_pretty=true");
httpGet.addHeader("Accept", "text/html");
HttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertThat(responseContent, (stringContainsInOrder("<body>", "<pre>", "<div", "</pre>")));
}
/**
* See #464
*/
@Test
public void testPrettyPrintDefaultsToTrueWithExplicitFalse() throws Exception {
ourServlet.setDefaultPrettyPrint(false);
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_pretty=false");
httpGet.addHeader("Accept", "text/html");
HttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertThat(responseContent, not(stringContainsInOrder("<body>", "<pre>", "\n", "</pre>")));
}
@Test
public void testForceResponseTime() throws Exception {
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/json");
HttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent.replace('\n', ' ').replace('\r', ' '), matchesPattern(".*Response generated in [0-9]+ms.*"));
}
@Test
public void testShowNeither() throws Exception {
ourInterceptor.setShowRequestHeaders(false);
ourInterceptor.setShowResponseHeaders(false);
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/json");
HttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsStringIgnoringCase("Accept")));
assertThat(responseContent, not(containsStringIgnoringCase("Content-Type")));
}
@Test
public void testShowResponse() throws Exception {
ourInterceptor.setShowResponseHeaders(true);
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/json");
HttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, not(containsStringIgnoringCase("Accept")));
assertThat(responseContent, (containsStringIgnoringCase("Content-Type")));
}
@Test
public void testShowRequest() throws Exception {
ourInterceptor.setShowRequestHeaders(true);
ourInterceptor.setShowResponseHeaders(false);
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/json");
HttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, (containsStringIgnoringCase("Accept")));
assertThat(responseContent, not(containsStringIgnoringCase("Content-Type")));
}
@Test
public void testShowRequestAndResponse() throws Exception {
ourInterceptor.setShowRequestHeaders(true);
ourInterceptor.setShowResponseHeaders(true);
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/json");
HttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info(responseContent);
assertEquals(200, status.getStatusLine().getStatusCode());
assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
assertThat(responseContent, (containsStringIgnoringCase("Accept")));
assertThat(responseContent, (containsStringIgnoringCase("Content-Type")));
}
@Test
public void testGetInvalidResource() throws Exception {
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Foobar/123");
httpGet.addHeader("Accept", "text/html");
CloseableHttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info("Resp: {}", responseContent);
assertEquals(404, status.getStatusLine().getStatusCode());
assertThat(responseContent, stringContainsInOrder("<span class='hlTagName'>OperationOutcome</span>", "Unknown resource type 'Foobar' - Server knows how to handle"));
}
@Test
public void testGetInvalidResourceNoAcceptHeader() throws Exception {
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Foobar/123");
CloseableHttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info("Resp: {}", responseContent);
assertEquals(404, status.getStatusLine().getStatusCode());
assertThat(responseContent, not(stringContainsInOrder("<span class='hlTagName'>OperationOutcome</span>", "Unknown resource type 'Foobar' - Server knows how to handle")));
assertThat(responseContent, (stringContainsInOrder("Unknown resource type 'Foobar'")));
assertThat(status.getFirstHeader("Content-Type").getValue(), containsString("application/xml+fhir"));
}
@Test
public void testGetRoot() throws Exception {
HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/");
httpGet.addHeader("Accept", "text/html");
CloseableHttpResponse status = ourClient.execute(httpGet);
String responseContent = IOUtils.toString(status.getEntity().getContent());
IOUtils.closeQuietly(status.getEntity().getContent());
ourLog.info("Resp: {}", responseContent);
assertEquals(400, status.getStatusLine().getStatusCode());
assertThat(responseContent, stringContainsInOrder("<span class='hlTagName'>OperationOutcome</span>", "This is the base URL of FHIR server. Unable to handle this request, as it does not contain a resource type or operation name."));
}
    @Test
    public void testHighlightException() throws Exception {
        // Calls the interceptor directly (no HTTP server): an exception
        // carrying an OperationOutcome should be rendered as highlighted
        // HTML when the client's Accept header prefers text/html.
        ResponseHighlighterInterceptor ic = ourInterceptor;
        HttpServletRequest req = mock(HttpServletRequest.class);
        when(req.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
            @Override
            public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
                // Browser-style Accept header: HTML preferred over XML.
                return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
            }
        });
        HttpServletResponse resp = mock(HttpServletResponse.class);
        StringWriter sw = new StringWriter();
        when(resp.getWriter()).thenReturn(new PrintWriter(sw));
        // NOTE(review): this Patient is never passed to handleException below
        // — looks like leftover setup; confirm whether it can be removed.
        Patient resource = new Patient();
        resource.addName().addFamily("FAMILY");
        ServletRequestDetails reqDetails = new TestServletRequestDetails();
        reqDetails.setRequestType(RequestTypeEnum.GET);
        reqDetails.setServer(new RestfulServer(ourCtx));
        reqDetails.setServletRequest(req);
        // This can be null depending on the exception type
        // reqDetails.setParameters(null);
        ResourceNotFoundException exception = new ResourceNotFoundException("Not found");
        exception.setOperationOutcome(new OperationOutcome().addIssue(new Issue().setDiagnostics("Hello")));
        // false = the interceptor handled (wrote) the response itself.
        assertFalse(ic.handleException(reqDetails, exception, req, resp));
        String output = sw.getBuffer().toString();
        ourLog.info(output);
        assertThat(output, containsString("<span class='hlTagName'>OperationOutcome</span>"));
    }
    @Test
    public void testHighlightNormalResponseForcePrettyPrint() throws Exception {
        // Direct interceptor call: a normal resource response with
        // _pretty=true and an HTML-preferring Accept header should be
        // rendered as pretty-printed, highlighted HTML.
        ResponseHighlighterInterceptor ic = ourInterceptor;
        HttpServletRequest req = mock(HttpServletRequest.class);
        when(req.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
            @Override
            public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
                // Browser-style Accept header: HTML preferred over XML.
                return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
            }
        });
        HttpServletResponse resp = mock(HttpServletResponse.class);
        StringWriter sw = new StringWriter();
        when(resp.getWriter()).thenReturn(new PrintWriter(sw));
        Patient resource = new Patient();
        resource.addName().addFamily("FAMILY");
        ServletRequestDetails reqDetails = new TestServletRequestDetails();
        reqDetails.setRequestType(RequestTypeEnum.GET);
        HashMap<String, String[]> params = new HashMap<String, String[]>();
        params.put(Constants.PARAM_PRETTY, new String[] { Constants.PARAM_PRETTY_VALUE_TRUE });
        reqDetails.setParameters(params);
        reqDetails.setServer(new RestfulServer(ourCtx));
        reqDetails.setServletRequest(req);
        // false = the interceptor handled (wrote) the response itself.
        assertFalse(ic.outgoingResponse(reqDetails, resource, req, resp));
        String output = sw.getBuffer().toString();
        ourLog.info(output);
        assertThat(output, containsString("<span class='hlTagName'>Patient</span>"));
        assertThat(output, stringContainsInOrder("<body>", "<pre>", "<div", "</pre>"));
    }
    @Test
    public void testHighlightForceRaw() throws Exception {
        // When the raw-output parameter is present, the interceptor must
        // step aside even though the Accept header prefers HTML.
        ResponseHighlighterInterceptor ic = ourInterceptor;
        HttpServletRequest req = mock(HttpServletRequest.class);
        when(req.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
            @Override
            public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
                // Browser-style Accept header: HTML preferred over XML.
                return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
            }
        });
        HttpServletResponse resp = mock(HttpServletResponse.class);
        StringWriter sw = new StringWriter();
        when(resp.getWriter()).thenReturn(new PrintWriter(sw));
        Patient resource = new Patient();
        resource.addName().addFamily("FAMILY");
        ServletRequestDetails reqDetails = new TestServletRequestDetails();
        reqDetails.setRequestType(RequestTypeEnum.GET);
        HashMap<String, String[]> params = new HashMap<String, String[]>();
        params.put(Constants.PARAM_PRETTY, new String[] { Constants.PARAM_PRETTY_VALUE_TRUE });
        params.put(Constants.PARAM_FORMAT, new String[] { Constants.CT_XML });
        params.put(ResponseHighlighterInterceptor.PARAM_RAW, new String[] { ResponseHighlighterInterceptor.PARAM_RAW_TRUE });
        reqDetails.setParameters(params);
        reqDetails.setServer(new RestfulServer(ourCtx));
        reqDetails.setServletRequest(req);
        // true means it decided to not handle the request..
        assertTrue(ic.outgoingResponse(reqDetails, resource, req, resp));
    }
    @Test
    public void testDontHighlightWhenOriginHeaderPresent() throws Exception {
        // A request carrying an Origin header (i.e. a CORS/AJAX call) must
        // not be rewritten into HTML, even if Accept prefers text/html.
        ResponseHighlighterInterceptor ic = ourInterceptor;
        HttpServletRequest req = mock(HttpServletRequest.class);
        when(req.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
            @Override
            public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
                // Browser-style Accept header: HTML preferred over XML.
                return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
            }
        });
        when(req.getHeader(Constants.HEADER_ORIGIN)).thenAnswer(new Answer<String>() {
            @Override
            public String answer(InvocationOnMock theInvocation) throws Throwable {
                return "http://example.com";
            }
        });
        HttpServletResponse resp = mock(HttpServletResponse.class);
        StringWriter sw = new StringWriter();
        when(resp.getWriter()).thenReturn(new PrintWriter(sw));
        Patient resource = new Patient();
        resource.addName().addFamily("FAMILY");
        ServletRequestDetails reqDetails = new TestServletRequestDetails();
        reqDetails.setRequestType(RequestTypeEnum.GET);
        HashMap<String, String[]> params = new HashMap<String, String[]>();
        reqDetails.setParameters(params);
        reqDetails.setServer(new RestfulServer(ourCtx));
        reqDetails.setServletRequest(req);
        // true means it decided to not handle the request..
        assertTrue(ic.outgoingResponse(reqDetails, resource, req, resp));
    }
/**
 * See #346
 */
@Test
public void testHighlightForceHtmlCt() throws Exception {
	ResponseHighlighterInterceptor interceptor = ourInterceptor;

	// Client does NOT accept HTML...
	HttpServletRequest request = mock(HttpServletRequest.class);
	when(request.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("application/xml+fhir");
		}
	});

	StringWriter responseWriter = new StringWriter();
	HttpServletResponse response = mock(HttpServletResponse.class);
	when(response.getWriter()).thenReturn(new PrintWriter(responseWriter));

	Patient patient = new Patient();
	patient.addName().addFamily("FAMILY");

	// ...but _format=html (as a content type name) forces the HTML view
	HashMap<String, String[]> parameters = new HashMap<String, String[]>();
	parameters.put(Constants.PARAM_FORMAT, new String[] { Constants.FORMAT_HTML });

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(parameters);
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(request);

	// false means the interceptor handled the request
	assertFalse(interceptor.outgoingResponse(requestDetails, patient, request, response));
}
/**
 * See #346
 */
@Test
public void testHighlightForceHtmlFormat() throws Exception {
	ResponseHighlighterInterceptor interceptor = ourInterceptor;

	// Client does NOT accept HTML...
	HttpServletRequest request = mock(HttpServletRequest.class);
	when(request.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("application/xml+fhir");
		}
	});

	StringWriter responseWriter = new StringWriter();
	HttpServletResponse response = mock(HttpServletResponse.class);
	when(response.getWriter()).thenReturn(new PrintWriter(responseWriter));

	Patient patient = new Patient();
	patient.addName().addFamily("FAMILY");

	// ...but _format carrying the text/html content type forces the HTML view
	HashMap<String, String[]> parameters = new HashMap<String, String[]>();
	parameters.put(Constants.PARAM_FORMAT, new String[] { Constants.CT_HTML });

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(parameters);
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(request);

	// false means the interceptor handled the request
	assertFalse(interceptor.outgoingResponse(requestDetails, patient, request, response));
}
@Test
public void testHighlightNormalResponse() throws Exception {
	ResponseHighlighterInterceptor interceptor = ourInterceptor;

	// Plain browser request: HTML ranked first in Accept
	HttpServletRequest request = mock(HttpServletRequest.class);
	when(request.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
		}
	});

	StringWriter responseWriter = new StringWriter();
	HttpServletResponse response = mock(HttpServletResponse.class);
	when(response.getWriter()).thenReturn(new PrintWriter(responseWriter));

	Patient patient = new Patient();
	patient.addName().addFamily("FAMILY");

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(new HashMap<String, String[]>());
	requestDetails.setServer(new RestfulServer(ourCtx));
	requestDetails.setServletRequest(request);

	// false means the interceptor handled the request and rendered HTML
	assertFalse(interceptor.outgoingResponse(requestDetails, patient, request, response));

	String output = responseWriter.getBuffer().toString();
	ourLog.info(output);
	assertThat(output, containsString("<span class='hlTagName'>Patient</span>"));
	assertThat(output, stringContainsInOrder("<body>", "<pre>", "<div", "</pre>"));
	// The rendered page offers a link to switch the payload encoding
	assertThat(output, containsString("<a href=\"?_format=json\">"));
}
/**
 * Browsers declare XML but not JSON in their accept header, we should still respond using JSON if that's the default
 */
@Test
public void testHighlightProducesDefaultJsonWithBrowserRequest() throws Exception {
	ResponseHighlighterInterceptor interceptor = ourInterceptor;

	HttpServletRequest request = mock(HttpServletRequest.class);
	when(request.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html,application/xhtml+xml,application/xml;q=0.9");
		}
	});

	StringWriter responseWriter = new StringWriter();
	HttpServletResponse response = mock(HttpServletResponse.class);
	when(response.getWriter()).thenReturn(new PrintWriter(responseWriter));

	Patient patient = new Patient();
	patient.addName().addFamily("FAMILY");

	// Server default encoding is JSON; the highlighted payload should follow it
	RestfulServer server = new RestfulServer(ourCtx);
	server.setDefaultResponseEncoding(EncodingEnum.JSON);

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(new HashMap<String, String[]>());
	requestDetails.setServer(server);
	requestDetails.setServletRequest(request);

	assertFalse(interceptor.outgoingResponse(requestDetails, patient, request, response));

	String output = responseWriter.getBuffer().toString();
	ourLog.info(output);
	// JSON serialization is recognizable by the resourceType property
	assertThat(output, containsString("resourceType"));
}
@Test
public void testHighlightProducesDefaultJsonWithBrowserRequest2() throws Exception {
	ResponseHighlighterInterceptor interceptor = ourInterceptor;

	// HTML is present but NOT top-ranked (q=0.8 < 0.9 for XML)
	HttpServletRequest request = mock(HttpServletRequest.class);
	when(request.getHeaders(Constants.HEADER_ACCEPT)).thenAnswer(new Answer<Enumeration<String>>() {
		@Override
		public Enumeration<String> answer(InvocationOnMock theInvocation) throws Throwable {
			return new ArrayEnumeration<String>("text/html;q=0.8,application/xhtml+xml,application/xml;q=0.9");
		}
	});

	StringWriter responseWriter = new StringWriter();
	HttpServletResponse response = mock(HttpServletResponse.class);
	when(response.getWriter()).thenReturn(new PrintWriter(responseWriter));

	Patient patient = new Patient();
	patient.addName().addFamily("FAMILY");

	RestfulServer server = new RestfulServer(ourCtx);
	server.setDefaultResponseEncoding(EncodingEnum.JSON);

	ServletRequestDetails requestDetails = new TestServletRequestDetails();
	requestDetails.setRequestType(RequestTypeEnum.GET);
	requestDetails.setParameters(new HashMap<String, String[]>());
	requestDetails.setServer(server);
	requestDetails.setServletRequest(request);

	// True here means the interceptor didn't handle the request, because HTML wasn't the top ranked accept header
	assertTrue(interceptor.outgoingResponse(requestDetails, patient, request, response));
}
@Test
public void testSearchWithSummaryParam() throws Exception {
	HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient?_query=searchWithWildcardRetVal&_summary=count");
	get.addHeader("Accept", "html");

	CloseableHttpResponse response = ourClient.execute(get);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());
	ourLog.info("Resp: {}", body);

	// _summary=count must suppress the individual result entries
	assertEquals(200, response.getStatusLine().getStatusCode());
	assertThat(body, not(containsString("entry")));
}
@Test
public void testBinaryReadAcceptMissing() throws Exception {
	// No Accept header at all: the raw Binary bytes are served as a download
	HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Binary/foo");

	HttpResponse response = ourClient.execute(get);
	byte[] body = IOUtils.toByteArray(response.getEntity().getContent());
	IOUtils.closeQuietly(response.getEntity().getContent());

	assertEquals(200, response.getStatusLine().getStatusCode());
	// The Binary's own contentType (taken from the resource id) is echoed back
	assertEquals("foo", response.getFirstHeader("content-type").getValue());
	assertEquals("Attachment;", response.getFirstHeader("Content-Disposition").getValue());
	assertArrayEquals(new byte[] { 1, 2, 3, 4 }, body);
}
@Test
public void testBinaryReadAcceptBrowser() throws Exception {
	// A browser User-Agent/Accept pair must still receive the raw Binary
	// bytes (as an attachment), never a highlighted HTML rendering
	HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Binary/foo");
	get.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	get.addHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8");

	HttpResponse response = ourClient.execute(get);
	byte[] body = IOUtils.toByteArray(response.getEntity().getContent());
	IOUtils.closeQuietly(response.getEntity().getContent());

	assertEquals(200, response.getStatusLine().getStatusCode());
	assertEquals("foo", response.getFirstHeader("content-type").getValue());
	assertEquals("Attachment;", response.getFirstHeader("Content-Disposition").getValue());
	assertArrayEquals(new byte[] { 1, 2, 3, 4 }, body);
}
@Test
public void testBinaryReadAcceptFhirJson() throws Exception {
	// Explicitly accepting FHIR JSON returns the Binary as a JSON resource,
	// not as a raw attachment
	HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Binary/foo");
	get.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	get.addHeader("Accept", Constants.CT_FHIR_JSON);

	HttpResponse response = ourClient.execute(get);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());

	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertNull(response.getFirstHeader("Content-Disposition"));
	assertEquals("{\"resourceType\":\"Binary\",\"id\":\"1\",\"contentType\":\"foo\",\"content\":\"AQIDBA==\"}", body);
}
@Test
public void testForceApplicationJson() throws Exception {
	HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=application/json");
	get.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");

	HttpResponse response = ourClient.execute(get);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());

	// An explicit _format overrides the browser heuristics: raw JSON, no HTML
	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertThat(body, not(containsString("html")));
}
@Test
public void testForceApplicationJsonFhir() throws Exception {
	HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=application/json+fhir");
	get.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");

	HttpResponse response = ourClient.execute(get);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());

	// The unescaped "+" in the query string must still be recognized as the
	// FHIR JSON content type: raw JSON, no highlighted HTML
	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertThat(body, not(containsString("html")));
}
@Test
public void testForceApplicationJsonPlusFhir() throws Exception {
	// Same as testForceApplicationJsonFhir but with the "+" URL-escaped
	HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=" + UrlUtil.escape("application/json+fhir"));
	get.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");

	HttpResponse response = ourClient.execute(get);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());

	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertThat(body, not(containsString("html")));
}
@Test
public void testForceJson() throws Exception {
	// The short-form _format=json alias must behave like the full content type
	HttpGet get = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=json");
	get.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");

	HttpResponse response = ourClient.execute(get);
	String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(response.getEntity().getContent());

	assertEquals(200, response.getStatusLine().getStatusCode());
	String contentType = response.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase();
	assertEquals(Constants.CT_FHIR_JSON + ";charset=utf-8", contentType);
	assertThat(body, not(containsString("html")));
}
@Test
public void testForceHtmlJson() throws Exception {
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/json");
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, containsString("html"));
	// The highlighted payload is JSON: an opening brace inside the markup
	assertThat(responseContent, containsString(">{<"));
	// FIX: the previous assertion not(containsString("<")) contradicted
	// containsString(">{<") above (that literal contains "<") and could never
	// pass together with it. The intent is that a JSON payload contains no
	// HTML-escaped angle brackets, i.e. no "&lt;".
	assertThat(responseContent, not(containsString("&lt;")));
	ourLog.info(responseContent);
}
@Test
public void testForceHtmlXml() throws Exception {
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=html/xml");
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	// FIX: pass an explicit charset — the charset-less IOUtils.toString
	// overload is deprecated and platform-dependent; the other tests in this
	// class already decode with UTF-8
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals("text/html;charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, containsString("html"));
	// The payload is XML, not JSON: no brace token in the markup
	assertThat(responseContent, not(containsString(">{<")));
	// FIX: assert on the HTML-escaped form of the XML tags ("&lt;"); a bare
	// containsString("<") is trivially satisfied by the page's own markup and
	// asserts nothing about the payload (mirror of testForceHtmlJson)
	assertThat(responseContent, containsString("&lt;"));
}
@Test
public void testForceApplicationXml() throws Exception {
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=application/xml");
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	// FIX: pass an explicit charset — the charset-less IOUtils.toString
	// overload is deprecated and uses the platform default encoding; the
	// JSON-variant tests in this class already decode with UTF-8
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	// An explicit _format overrides the browser heuristics: raw XML, no HTML
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, not(containsString("html")));
}
@Test
public void testForceApplicationXmlFhir() throws Exception {
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=application/xml+fhir");
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	// FIX: pass an explicit charset — the charset-less IOUtils.toString
	// overload is deprecated and uses the platform default encoding
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, not(containsString("html")));
}
@Test
public void testForceApplicationXmlPlusFhir() throws Exception {
	// Same as testForceApplicationXmlFhir but with the "+" URL-escaped
	HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient/1?_format=" + UrlUtil.escape("application/xml+fhir"));
	httpGet.addHeader("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1");
	HttpResponse status = ourClient.execute(httpGet);
	// FIX: pass an explicit charset — the charset-less IOUtils.toString
	// overload is deprecated and uses the platform default encoding
	String responseContent = IOUtils.toString(status.getEntity().getContent(), StandardCharsets.UTF_8);
	IOUtils.closeQuietly(status.getEntity().getContent());
	assertEquals(200, status.getStatusLine().getStatusCode());
	assertEquals(Constants.CT_FHIR_XML + ";charset=utf-8", status.getFirstHeader("content-type").getValue().replace(" ", "").toLowerCase());
	assertThat(responseContent, not(containsString("html")));
}
@BeforeClass
public static void beforeClass() throws Exception {
	ourPort = PortUtil.findFreePort();
	ourServer = new Server(ourPort);

	ourServlet = new RestfulServer(ourCtx);

	// Enable CORS for the embedded server
	CorsConfiguration config = new CorsConfiguration();
	for (String allowedHeader : new String[] { "Origin", "Accept", "X-Requested-With", "Content-Type", "Access-Control-Request-Method", "Access-Control-Request-Headers" }) {
		config.addAllowedHeader(allowedHeader);
	}
	config.addAllowedOrigin("*");
	config.addExposedHeader("Location");
	config.addExposedHeader("Content-Location");
	config.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "DELETE", "OPTIONS"));
	ourServlet.registerInterceptor(new CorsInterceptor(config));
	ourServlet.registerInterceptor(ourInterceptor);

	ourServlet.setResourceProviders(new DummyPatientResourceProvider(), new DummyBinaryResourceProvider());
	ourServlet.setBundleInclusionRule(BundleInclusionRule.BASED_ON_RESOURCE_PRESENCE);

	ServletHandler proxyHandler = new ServletHandler();
	proxyHandler.addServletWithMapping(new ServletHolder(ourServlet), "/*");
	ourServer.setHandler(proxyHandler);
	ourServer.start();

	// Pooled client with a 5 second connection time-to-live
	PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS);
	HttpClientBuilder clientBuilder = HttpClientBuilder.create();
	clientBuilder.setConnectionManager(connectionManager);
	ourClient = clientBuilder.build();
}
/**
 * Test provider serving Binary resources whose content type is taken
 * from the requested resource id.
 */
public static class DummyBinaryResourceProvider implements IResourceProvider {

	@Override
	public Class<? extends IResource> getResourceType() {
		return Binary.class;
	}

	@Read
	public Binary read(@IdParam IdDt theId) {
		// The id part doubles as the content type so tests can request
		// arbitrary types (e.g. GET /Binary/foo -> contentType "foo")
		return buildBinary(theId.getIdPart());
	}

	@Search
	public List<Binary> search() {
		return Collections.singletonList(buildBinary("text/plain"));
	}

	// Shared fixture: a 4-byte Binary with id "1"
	private Binary buildBinary(String theContentType) {
		Binary binary = new Binary();
		binary.setId("1");
		binary.setContent(new byte[] { 1, 2, 3, 4 });
		binary.setContentType(theContentType);
		return binary;
	}
}
/**
 * Test provider serving Patient resources. Search methods are registered by
 * the annotation scanner, so signatures and annotations must stay as-is.
 */
public static class DummyPatientResourceProvider implements IResourceProvider {

	// Builds the canonical fixture patient with id "1"
	private Patient createPatient1() {
		Patient patient = new Patient();
		patient.addIdentifier();
		patient.getIdentifier().get(0).setUse(IdentifierUseEnum.OFFICIAL);
		patient.getIdentifier().get(0).setSystem(new UriDt("urn:hapitest:mrns"));
		patient.getIdentifier().get(0).setValue("00001");
		patient.addName();
		patient.getName().get(0).addFamily("Test");
		patient.getName().get(0).addGiven("PatientOne");
		patient.getId().setValue("1");
		return patient;
	}

	// Named query returning a patient with an absolute (cross-server) id and
	// a contained/linked organization, also with an absolute id
	@Search(queryName = "findPatientsWithAbsoluteIdSpecified")
	public List<Patient> findPatientsWithAbsoluteIdSpecified() {
		Patient p = new Patient();
		p.addIdentifier().setSystem("foo");
		p.setId("http://absolute.com/Patient/123/_history/22");
		Organization o = new Organization();
		o.setId("http://foo.com/Organization/222/_history/333");
		p.getManagingOrganization().setResource(o);
		return Collections.singletonList(p);
	}

	// Named query returning a patient that has no id at all
	@Search(queryName = "findPatientsWithNoIdSpecified")
	public List<Patient> findPatientsWithNoIdSpecified() {
		Patient p = new Patient();
		p.addIdentifier().setSystem("foo");
		return Collections.singletonList(p);
	}

	// Builds the id -> patient fixture map afresh on every call
	public Map<String, Patient> getIdToPatient() {
		Map<String, Patient> idToPatient = new HashMap<String, Patient>();
		{
			Patient patient = createPatient1();
			idToPatient.put("1", patient);
		}
		{
			Patient patient = new Patient();
			patient.getIdentifier().add(new IdentifierDt());
			patient.getIdentifier().get(0).setUse(IdentifierUseEnum.OFFICIAL);
			patient.getIdentifier().get(0).setSystem(new UriDt("urn:hapitest:mrns"));
			patient.getIdentifier().get(0).setValue("00002");
			patient.getName().add(new HumanNameDt());
			patient.getName().get(0).addFamily("Test");
			patient.getName().get(0).addGiven("PatientTwo");
			patient.getId().setValue("2");
			idToPatient.put("2", patient);
		}
		return idToPatient;
	}

	/**
	 * Retrieve the resource by its identifier
	 *
	 * @param theId
	 *            The resource identity
	 * @return The resource, or null when the id is unknown
	 */
	@Read()
	public Patient getResourceById(@IdParam IdDt theId) {
		String key = theId.getIdPart();
		Patient retVal = getIdToPatient().get(key);
		return retVal;
	}

	/**
	 * Retrieve the resource by its identifier
	 *
	 * @param theId
	 *            The resource identity
	 * @return The resource
	 */
	@Search()
	public List<Patient> getResourceById(@RequiredParam(name = "_id") String theId) {
		Patient patient = getIdToPatient().get(theId);
		if (patient != null) {
			return Collections.singletonList(patient);
		} else {
			// Unknown id: empty (never null) result set
			return Collections.emptyList();
		}
	}

	@Override
	public Class<Patient> getResourceType() {
		return Patient.class;
	}

	// Declares a wildcard return type on purpose; used by
	// testSearchWithSummaryParam via _query=searchWithWildcardRetVal
	@Search(queryName = "searchWithWildcardRetVal")
	public List<? extends IResource> searchWithWildcardRetVal() {
		Patient p = new Patient();
		p.setId("1234");
		p.addName().addFamily("searchWithWildcardRetVal");
		return Collections.singletonList(p);
	}
}
// Test double pinning the server base path so links rendered into the
// highlighted output are stable regardless of the mocked servlet request.
class TestServletRequestDetails extends ServletRequestDetails {
	@Override
	public String getServerBaseForRequest() {
		return "/baseDstu3";
	}
}
}
| |
/*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.awt.windows;
import java.awt.Image;
import java.awt.Graphics2D;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.datatransfer.DataFlavor;
import java.awt.datatransfer.FlavorTable;
import java.awt.datatransfer.Transferable;
import java.awt.datatransfer.UnsupportedFlavorException;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.awt.image.ColorModel;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.DataBufferByte;
import java.awt.image.DataBufferInt;
import java.awt.image.DirectColorModel;
import java.awt.image.ImageObserver;
import java.awt.image.Raster;
import java.awt.image.WritableRaster;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.io.File;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import sun.awt.Mutex;
import sun.awt.datatransfer.DataTransferer;
import sun.awt.datatransfer.ToolkitThreadBlockedHandler;
import sun.awt.image.ImageRepresentation;
import sun.awt.image.ToolkitImage;
import java.util.ArrayList;
import java.io.ByteArrayOutputStream;
/**
* Platform-specific support for the data transfer subsystem.
*
* @author David Mendenhall
* @author Danila Sinopalnikov
*
* @since 1.3.1
*/
public class WDataTransferer extends DataTransferer {
// Names of the standard Win32 clipboard formats, indexed by their CF_* id.
// Index 0 is unused because predefined format ids start at 1 (winuser.h).
private static final String[] predefinedClipboardNames = {
	"",
	"TEXT",
	"BITMAP",
	"METAFILEPICT",
	"SYLK",
	"DIF",
	"TIFF",
	"OEM TEXT",
	"DIB",
	"PALETTE",
	"PENDATA",
	"RIFF",
	"WAVE",
	"UNICODE TEXT",
	"ENHMETAFILE",
	"HDROP",
	"LOCALE",
	"DIBV5"
};

// Reverse lookup: format name -> CF_* id (as Long); synchronized because it
// is read from multiple toolkit threads.
private static final Map <String, Long> predefinedClipboardNameMap;
static {
	Map <String,Long> tempMap =
		new HashMap <> (predefinedClipboardNames.length, 1.0f);
	// start at 1: index 0 is the unused placeholder above
	for (int i = 1; i < predefinedClipboardNames.length; i++) {
		tempMap.put(predefinedClipboardNames[i], Long.valueOf(i));
	}
	predefinedClipboardNameMap =
		Collections.synchronizedMap(tempMap);
}

/**
 * from winuser.h
 */
public static final int CF_TEXT = 1;
public static final int CF_METAFILEPICT = 3;
public static final int CF_DIB = 8;
public static final int CF_ENHMETAFILE = 14;
public static final int CF_HDROP = 15;
public static final int CF_LOCALE = 16;

// Non-standard formats registered at class-load time via the Win32
// RegisterClipboardFormat API; ids are assigned by the OS at runtime.
public static final long CF_HTML = registerClipboardFormat("HTML Format");
public static final long CFSTR_INETURL = registerClipboardFormat("UniformResourceLocator");
public static final long CF_PNG = registerClipboardFormat("PNG");
public static final long CF_JFIF = registerClipboardFormat("JFIF");
public static final long CF_FILEGROUPDESCRIPTORW = registerClipboardFormat("FileGroupDescriptorW");
public static final long CF_FILEGROUPDESCRIPTORA = registerClipboardFormat("FileGroupDescriptor");
//CF_FILECONTENTS supported as mandatory associated clipboard

// Boxed id of CF_LOCALE, cached for the getFormatsForFlavors removal
private static final Long L_CF_LOCALE =
	predefinedClipboardNameMap.get(predefinedClipboardNames[CF_LOCALE]);

// 24-bit xRGB color model used when converting images to DIBs
private static final DirectColorModel directColorModel =
	new DirectColorModel(24,
						 0x00FF0000,  /* red mask   */
						 0x0000FF00,  /* green mask */
						 0x000000FF); /* blue mask  */

private static final int[] bandmasks = new int[] {
	directColorModel.getRedMask(),
	directColorModel.getGreenMask(),
	directColorModel.getBlueMask() };
/**
 * Singleton constructor
 */
private WDataTransferer() {
}

// FIX: volatile is required for safe double-checked locking; without it the
// Java Memory Model permits another thread to observe a reference to a
// partially-constructed instance (JLS §17.4).
private static volatile WDataTransferer transferer;

/**
 * Returns the process-wide {@code WDataTransferer}, creating it lazily on
 * first use. Thread-safe via the double-checked locking idiom.
 */
public static WDataTransferer getInstanceImpl() {
	// Read the volatile field once into a local to avoid a second volatile
	// read on the fast path
	WDataTransferer result = transferer;
	if (result == null) {
		synchronized (WDataTransferer.class) {
			result = transferer;
			if (result == null) {
				result = new WDataTransferer();
				transferer = result;
			}
		}
	}
	return result;
}
/**
 * Maps the given flavors to native clipboard formats, then strips the
 * LOCALE entry, which the Win32 native layer does not export.
 */
public SortedMap <Long, DataFlavor> getFormatsForFlavors(
	DataFlavor[] flavors, FlavorTable map)
{
	SortedMap <Long, DataFlavor> retval =
		super.getFormatsForFlavors(flavors, map);
	// The Win32 native code does not support exporting LOCALE data, nor
	// should it.
	retval.remove(L_CF_LOCALE);
	return retval;
}
/**
 * Returns the charset name for native Unicode clipboard text. Windows
 * stores CF_UNICODETEXT as little-endian UTF-16, hence "utf-16le".
 */
public String getDefaultUnicodeEncoding() {
	final String windowsWideCharEncoding = "utf-16le";
	return windowsWideCharEncoding;
}
/**
 * Serializes the given transferable into the byte layout expected by the
 * requested native clipboard format. For CF_HTML the data is additionally
 * wrapped in the Windows "HTML Format" envelope when the transferable does
 * not already supply a selection- or document-scoped HTML flavor.
 */
public byte[] translateTransferable(Transferable contents,
									DataFlavor flavor,
									long format) throws IOException
{
	byte[] bytes = null;
	if (format == CF_HTML) {
		if (contents.isDataFlavorSupported(DataFlavor.selectionHtmlFlavor)) {
			// if a user provides data represented by
			// DataFlavor.selectionHtmlFlavor format, we use this
			// type to store the data in the native clipboard
			bytes = super.translateTransferable(contents,
												DataFlavor.selectionHtmlFlavor,
												format);
		} else if (contents.isDataFlavorSupported(DataFlavor.allHtmlFlavor)) {
			// if we cannot get data represented by the
			// DataFlavor.selectionHtmlFlavor format
			// but the DataFlavor.allHtmlFlavor format is available
			// we believe that the user knows how to represent
			// the data and how to mark up selection in a
			// system specific manner. Therefore, we use this data
			bytes = super.translateTransferable(contents,
												DataFlavor.allHtmlFlavor,
												format);
		} else {
			// handle other html flavor types, including custom and
			// fragment ones; wrap in the native "HTML Format" envelope
			bytes = HTMLCodec.convertToHTMLFormat(super.translateTransferable(contents, flavor, format));
		}
	} else {
		// we handle non-html types basing on their
		// flavors
		bytes = super.translateTransferable(contents, flavor, format);
	}
	return bytes;
}
// The stream is closed as a closable object
/**
 * Deserializes native clipboard data arriving as a stream. CF_HTML text is
 * first unwrapped from the Windows "HTML Format" envelope by HTMLCodec; all
 * other formats are passed straight to the superclass.
 */
public Object translateStream(InputStream str,
							  DataFlavor flavor, long format,
							  Transferable localeTransferable)
	throws IOException
{
	if (format == CF_HTML && flavor.isFlavorTextType()) {
		str = new HTMLCodec(str,
							EHTMLReadMode.getEHTMLReadMode(flavor));
	}
	return super.translateStream(str, flavor, format,
									localeTransferable);
}
/**
 * Deserializes native clipboard bytes into a Java object for the given
 * flavor. Handles two Windows-specific cases locally — file group
 * descriptors (a NUL-separated UTF-16LE list of temp-file paths) and
 * UniformResourceLocator data — and delegates everything else to the
 * superclass.
 */
public Object translateBytes(byte[] bytes, DataFlavor flavor, long format,
							 Transferable localeTransferable) throws IOException
{
	if (format == CF_FILEGROUPDESCRIPTORA || format == CF_FILEGROUPDESCRIPTORW) {
		if (bytes == null || !DataFlavor.javaFileListFlavor.equals(flavor)) {
			throw new IOException("data translation failed");
		}
		// The native layer hands over NUL-separated wide-char paths
		String st = new String(bytes, 0, bytes.length, "UTF-16LE");
		String[] filenames = st.split("\0");
		if( 0 == filenames.length ){
			return null;
		}

		// Convert the strings to File objects
		File[] files = new File[filenames.length];
		for (int i = 0; i < filenames.length; ++i) {
			files[i] = new File(filenames[i]);
			//They are temp-files from memory Stream, so they have to be removed on exit
			files[i].deleteOnExit();
		}

		// Turn the list of Files into a List and return
		return Arrays.asList(files);
	}

	if (format == CFSTR_INETURL &&
		URL.class.equals(flavor.getRepresentationClass()))
	{
		String charset = getDefaultTextCharset();
		// Prefer the charset advertised by the source application, when any
		if (localeTransferable != null && localeTransferable.
			isDataFlavorSupported(javaTextEncodingFlavor))
		{
			try {
				charset = new String((byte[])localeTransferable.
					getTransferData(javaTextEncodingFlavor), "UTF-8");
			} catch (UnsupportedFlavorException cannotHappen) {
				// javaTextEncodingFlavor support was checked just above
			}
		}
		return new URL(new String(bytes, charset));
	}

	return super.translateBytes(bytes , flavor, format,
								localeTransferable);
}
/**
 * Returns true for clipboard formats whose text interpretation depends on
 * the source locale (ANSI text and UniformResourceLocator data).
 */
public boolean isLocaleDependentTextFormat(long format) {
	if (format == CF_TEXT) {
		return true;
	}
	return format == CFSTR_INETURL;
}
/**
 * Returns true for clipboard formats that carry file lists: HDROP and
 * both FileGroupDescriptor variants.
 */
public boolean isFileFormat(long format) {
	if (format == CF_HDROP) {
		return true;
	}
	return format == CF_FILEGROUPDESCRIPTORA || format == CF_FILEGROUPDESCRIPTORW;
}
/**
 * Resolves a native format name to its numeric id: predefined names come
 * from the cached map, anything else is registered with the OS on demand.
 */
protected Long getFormatForNativeAsLong(String str) {
	Long predefined = predefinedClipboardNameMap.get(str);
	if (predefined != null) {
		return predefined;
	}
	return Long.valueOf(registerClipboardFormat(str));
}
/**
 * Resolves a numeric format id to its name: predefined ids are looked up
 * in the static table, registered ids are queried from the OS.
 */
protected String getNativeForFormat(long format) {
	if (format < predefinedClipboardNames.length) {
		return predefinedClipboardNames[(int) format];
	}
	return getClipboardFormatName(format);
}
// Single handler instance used to block the toolkit thread during data
// transfer operations on this platform.
private final ToolkitThreadBlockedHandler handler =
	new WToolkitThreadBlockedHandler();

/** Returns the Windows toolkit-thread blocked handler. */
public ToolkitThreadBlockedHandler getToolkitThreadBlockedHandler() {
	return handler;
}
/**
 * Calls the Win32 RegisterClipboardFormat function to register
 * a non-standard format. Returns the id assigned by the OS (stable for
 * the lifetime of the Windows session).
 */
private static native long registerClipboardFormat(String str);

/**
 * Calls the Win32 GetClipboardFormatName function which is
 * the reverse operation of RegisterClipboardFormat.
 */
private static native String getClipboardFormatName(long format);
/**
 * Returns true for clipboard formats that carry image data (DIB, the two
 * metafile formats, and the registered PNG/JFIF formats).
 */
public boolean isImageFormat(long format) {
	final long[] imageFormats = {
		CF_DIB, CF_ENHMETAFILE, CF_METAFILEPICT, CF_PNG, CF_JFIF
	};
	for (long candidate : imageFormats) {
		if (format == candidate) {
			return true;
		}
	}
	return false;
}
/**
 * Converts an Image into the byte layout of the requested native format.
 * PNG/JFIF are delegated to the standard encoder; everything else is
 * rendered into a bottom-up 24-bpp BGR raster and handed to native code.
 */
protected byte[] imageToPlatformBytes(Image image, long format)
	throws IOException {
	String mimeType = null;
	if (format == CF_PNG) {
		mimeType = "image/png";
	} else if (format == CF_JFIF) {
		mimeType = "image/jpeg";
	}
	if (mimeType != null) {
		return imageToStandardBytes(image, mimeType);
	}

	int width = 0;
	int height = 0;
	if (image instanceof ToolkitImage) {
		// Force complete production before asking for dimensions
		ImageRepresentation ir = ((ToolkitImage)image).getImageRep();
		ir.reconstruct(ImageObserver.ALLBITS);
		width = ir.getWidth();
		height = ir.getHeight();
	} else {
		width = image.getWidth(null);
		height = image.getHeight(null);
	}

	// Fix for 4919639.
	// Some Windows native applications (e.g. clipbrd.exe) do not handle
	// 32-bpp DIBs correctly.
	// As a workaround we switched to 24-bpp DIBs.
	// MSDN prescribes that the bitmap array for a 24-bpp should consist of
	// 3-byte triplets representing blue, green and red components of a
	// pixel respectively. Additionally each scan line must be padded with
	// zeroes to end on a LONG data-type boundary. LONG is always 32-bit.
	// We render the given Image to a BufferedImage of type TYPE_3BYTE_BGR
	// with non-default scanline stride and pass the resulting data buffer
	// to the native code to fill the BITMAPINFO structure.
	int mod = (width * 3) % 4;
	int pad = mod > 0 ? 4 - mod : 0;

	ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
	int[] nBits = {8, 8, 8};
	int[] bOffs = {2, 1, 0};  // BGR band order, per the DIB spec above
	ColorModel colorModel =
		new ComponentColorModel(cs, nBits, false, false,
								Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
	WritableRaster raster =
		Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, width, height,
										width * 3 + pad, 3, bOffs, null);

	BufferedImage bimage = new BufferedImage(colorModel, raster, false, null);

	// Some Windows native applications (e.g. clipbrd.exe) do not understand
	// top-down DIBs.
	// So we flip the image vertically and create a bottom-up DIB.
	AffineTransform imageFlipTransform =
		new AffineTransform(1, 0, 0, -1, 0, height);

	Graphics2D g2d = bimage.createGraphics();

	try {
		g2d.drawImage(image, imageFlipTransform, null);
	} finally {
		g2d.dispose();
	}

	DataBufferByte buffer = (DataBufferByte)raster.getDataBuffer();
	byte[] imageData = buffer.getData();
	return imageDataToPlatformImageBytes(imageData, width, height, format);
}
// One Unicode NUL terminator: a pair of zero bytes (one UTF-16 code unit).
private static final byte [] UNICODE_NULL_TERMINATOR = new byte [] {0,0};
/**
 * Serializes a list of file paths into the layout Windows expects: a
 * sequence of NUL-terminated Unicode strings, with the whole block
 * terminated by an additional Unicode NUL.
 */
protected ByteArrayOutputStream convertFileListToBytes(ArrayList<String> fileList)
    throws IOException
{
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    if (fileList.isEmpty()) {
        //store empty unicode string (null terminator)
        bos.write(UNICODE_NULL_TERMINATOR);
    } else {
        for (String file : fileList) {
            //store unicode string with null terminator
            byte[] bytes = file.getBytes(getDefaultUnicodeEncoding());
            bos.write(bytes, 0, bytes.length);
            bos.write(UNICODE_NULL_TERMINATOR);
        }
    }
    // According to MSDN the byte array have to be double NULL-terminated.
    // The array contains Unicode characters, so each NULL-terminator is
    // a pair of bytes
    bos.write(UNICODE_NULL_TERMINATOR);
    return bos;
}
/**
 * Returns a byte array which contains data special for the given format
 * and for the given image data.
 *
 * @param imageData 24-bpp BGR pixel data, bottom-up, with scanlines padded
 *                  to a 4-byte boundary (as produced by imageToPlatformBytes)
 * @param width     image width in pixels
 * @param height    image height in pixels
 * @param format    target native clipboard format id
 */
private native byte[] imageDataToPlatformImageBytes(byte[] imageData,
                                                    int width, int height,
                                                    long format);
/**
 * Translates either a byte array or an input stream which contain
 * platform-specific image data in the given format into an Image.
 *
 * @param bytes  native image data
 * @param format native clipboard format the data arrived in
 * @throws IOException if the native data cannot be decoded
 */
protected Image platformImageBytesToImage(byte[] bytes, long format)
    throws IOException {
    String mimeType = null;
    if (format == CF_PNG) {
        mimeType = "image/png";
    } else if (format == CF_JFIF) {
        mimeType = "image/jpeg";
    }
    if (mimeType != null) {
        // PNG/JPEG: decode through the standard MIME-based path.
        return standardImageBytesToImage(bytes, mimeType);
    }
    // Native formats: decode to ARGB pixels in native code. The returned
    // array carries width and height as its last two elements.
    int[] imageData = platformImageBytesToImageData(bytes, format);
    if (imageData == null) {
        throw new IOException("data translation failed");
    }
    int len = imageData.length - 2;
    int width = imageData[len];
    int height = imageData[len + 1];
    // Wrap only the first 'len' ints (the pixels) in the data buffer.
    DataBufferInt buffer = new DataBufferInt(imageData, len);
    WritableRaster raster = Raster.createPackedRaster(buffer, width,
                                                      height, width,
                                                      bandmasks, null);
    return new BufferedImage(directColorModel, raster, false, null);
}
/**
 * Translates a byte array which contains platform-specific image data in
 * the given format into an integer array which contains pixel values in
 * ARGB format. The two last elements in the array specify width and
 * height of the image respectively.
 */
private native int[] platformImageBytesToImageData(byte[] bytes,
                                                   long format)
    throws IOException;
// Extracts the file paths contained in a native file-list data block.
// NOTE(review): presumably wraps the Win32 DragQueryFile API — confirm in
// the native source.
protected native String[] dragQueryFile(byte[] bytes);
}
/**
 * Win32 implementation of ToolkitThreadBlockedHandler. While the toolkit
 * thread is blocked, a secondary native event loop keeps dispatching
 * pending native events.
 */
final class WToolkitThreadBlockedHandler extends Mutex
    implements ToolkitThreadBlockedHandler {
    // Releases the mutex, runs a secondary native event loop until exit()
    // quits it, then re-acquires the mutex. Caller must own the mutex.
    public void enter() {
        if (!isOwned()) {
            throw new IllegalMonitorStateException();
        }
        unlock();
        startSecondaryEventLoop();
        lock();
    }
    // Asks the secondary event loop started by enter() to quit. Caller
    // must own the mutex.
    public void exit() {
        if (!isOwned()) {
            throw new IllegalMonitorStateException();
        }
        WToolkit.quitSecondaryEventLoop();
    }
    private native void startSecondaryEventLoop();
}
/**
 * How much of a CF_HTML payload should be exposed to the caller: the whole
 * document, just the fragment, or just the selection (the default).
 */
enum EHTMLReadMode {
    HTML_READ_ALL,
    HTML_READ_FRAGMENT,
    HTML_READ_SELECTION;

    /**
     * Maps the "document" MIME parameter of the flavor to a read mode;
     * anything other than "all" or "fragment" (including a missing
     * parameter) selects HTML_READ_SELECTION.
     */
    public static EHTMLReadMode getEHTMLReadMode (DataFlavor df) {
        String documentParameter = df.getParameter("document");
        if ("all".equals(documentParameter)) {
            return HTML_READ_ALL;
        }
        if ("fragment".equals(documentParameter)) {
            return HTML_READ_FRAGMENT;
        }
        return HTML_READ_SELECTION;
    }
}
/**
 * on decode: This stream takes an InputStream which provides data in CF_HTML format,
 * strips off the description and context to extract the original HTML data.
 *
 * on encode: static convertToHTMLFormat is responsible for HTML clipboard header creation
 */
class HTMLCodec extends InputStream {
    //static section
    public static final String ENCODING = "UTF-8";
    // CF_HTML header keywords.
    public static final String VERSION = "Version:";
    public static final String START_HTML = "StartHTML:";
    public static final String END_HTML = "EndHTML:";
    public static final String START_FRAGMENT = "StartFragment:";
    public static final String END_FRAGMENT = "EndFragment:";
    public static final String START_SELECTION = "StartSelection:"; //optional
    public static final String END_SELECTION = "EndSelection:"; //optional
    public static final String START_FRAGMENT_CMT = "<!--StartFragment-->";
    public static final String END_FRAGMENT_CMT = "<!--EndFragment-->";
    public static final String SOURCE_URL = "SourceURL:";
    public static final String DEF_SOURCE_URL = "about:blank";
    public static final String EOLN = "\r\n";
    private static final String VERSION_NUM = "1.0";
    // All numeric offsets in the header are zero-padded to this width.
    private static final int PADDED_WIDTH = 10;
    // Left-pads a non-negative number with zeroes to the given width.
    private static String toPaddedString(int n, int width) {
        String string = "" + n;
        int len = string.length();
        if (n >= 0 && len < width) {
            char[] array = new char[width - len];
            Arrays.fill(array, '0');
            StringBuffer buffer = new StringBuffer(width);
            buffer.append(array);
            buffer.append(string);
            string = buffer.toString();
        }
        return string;
    }
    /**
     * convertToHTMLFormat adds the MS HTML clipboard header to byte array that
     * contains the parameters pairs.
     *
     * The sequence of parameters is fixed, but some or all of them could be
     * omitted. One parameter per one text line.
     * It looks like that:
     *
     * Version:1.0\r\n                -- current supported version
     * StartHTML:000000192\r\n        -- shift in array to the first byte after the header
     * EndHTML:000000757\r\n          -- shift in array of last byte for HTML syntax analysis
     * StartFragment:000000396\r\n    -- shift in array just after <!--StartFragment-->
     * EndFragment:000000694\r\n      -- shift in array before start <!--EndFragment-->
     * StartSelection:000000398\r\n   -- shift in array of the first char in copied selection
     * EndSelection:000000692\r\n     -- shift in array of the last char in copied selection
     * SourceURL:http://sun.com/\r\n  -- base URL for related references
     * <HTML>...<BODY>...<!--StartFragment-->.....................<!--EndFragment-->...</BODY><HTML>
     * ^                 ^ ^                 ^^                  ^
     * \ StartHTML       | \-StartSelection  | \EndFragment      EndHTML/
     *                   \-StartFragment     \EndSelection
     *
     * Combinations with tags sequence
     * <!--StartFragment--><HTML>...<BODY>...</BODY><HTML><!--EndFragment-->
     *  or
     * <HTML>...<!--StartFragment-->...<BODY>...</BODY><!--EndFragment--><HTML>
     * are valid too.
     */
    public static byte[] convertToHTMLFormat(byte[] bytes) {
        // Calculate section offsets
        String htmlPrefix = "";
        String htmlSuffix = "";
        {
            //we have to extend the fragment to a full HTML document
            //without doubling the HTML and BODY tags
            String stContext = new String(bytes);
            String stUpContext = stContext.toUpperCase();
            if( -1 == stUpContext.indexOf("<HTML") ) {
                htmlPrefix = "<HTML>";
                htmlSuffix = "</HTML>";
                if( -1 == stUpContext.indexOf("<BODY") ) {
                    htmlPrefix = htmlPrefix +"<BODY>";
                    htmlSuffix = "</BODY>" + htmlSuffix;
                };
            };
        }
        String stBaseUrl = DEF_SOURCE_URL;
        // Header size is fully determined because every offset is padded
        // to PADDED_WIDTH digits.
        int nStartHTML =
            VERSION.length() + VERSION_NUM.length() + EOLN.length()
            + START_HTML.length() + PADDED_WIDTH + EOLN.length()
            + END_HTML.length() + PADDED_WIDTH + EOLN.length()
            + START_FRAGMENT.length() + PADDED_WIDTH + EOLN.length()
            + END_FRAGMENT.length() + PADDED_WIDTH + EOLN.length()
            + SOURCE_URL.length() + stBaseUrl.length() + EOLN.length()
            ;
        int nStartFragment = nStartHTML + htmlPrefix.length();
        // NOTE: the input's trailing byte (a NUL, re-appended at the very
        // end below) is excluded from the fragment span.
        int nEndFragment = nStartFragment + bytes.length - 1;
        int nEndHTML = nEndFragment + htmlSuffix.length();
        StringBuilder header = new StringBuilder(
            nStartFragment
            + START_FRAGMENT_CMT.length()
        );
        //header
        header.append(VERSION);
        header.append(VERSION_NUM);
        header.append(EOLN);
        header.append(START_HTML);
        header.append(toPaddedString(nStartHTML, PADDED_WIDTH));
        header.append(EOLN);
        header.append(END_HTML);
        header.append(toPaddedString(nEndHTML, PADDED_WIDTH));
        header.append(EOLN);
        header.append(START_FRAGMENT);
        header.append(toPaddedString(nStartFragment, PADDED_WIDTH));
        header.append(EOLN);
        header.append(END_FRAGMENT);
        header.append(toPaddedString(nEndFragment, PADDED_WIDTH));
        header.append(EOLN);
        header.append(SOURCE_URL);
        header.append(stBaseUrl);
        header.append(EOLN);
        //HTML
        header.append(htmlPrefix);
        byte[] headerBytes = null, trailerBytes = null;
        try {
            headerBytes = header.toString().getBytes(ENCODING);
            trailerBytes = htmlSuffix.getBytes(ENCODING);
        } catch (UnsupportedEncodingException cannotHappen) {
            // UTF-8 is guaranteed to be supported by the JVM.
        }
        byte[] retval = new byte[headerBytes.length + bytes.length +
                                 trailerBytes.length];
        System.arraycopy(headerBytes, 0, retval, 0, headerBytes.length);
        // Copy the payload without its last byte; the final NUL is
        // re-appended after the trailer below.
        System.arraycopy(bytes, 0, retval, headerBytes.length,
                         bytes.length - 1);
        System.arraycopy(trailerBytes, 0, retval,
                         headerBytes.length + bytes.length - 1,
                         trailerBytes.length);
        retval[retval.length-1] = 0;
        return retval;
    }
    ////////////////////////////////////
    //decoder instance data and methods:
    private final BufferedInputStream bufferedStream;
    private boolean descriptionParsed = false;
    private boolean closed = false;
    // InputStreamReader uses an 8K buffer. The size is not customizable.
    public static final int BYTE_BUFFER_LEN = 8192;
    // CharToByteUTF8.getMaxBytesPerChar returns 3, so we should not buffer
    // more chars than 3 times the number of bytes we can buffer.
    public static final int CHAR_BUFFER_LEN = BYTE_BUFFER_LEN / 3;
    private static final String FAILURE_MSG =
        "Unable to parse HTML description: ";
    private static final String INVALID_MSG =
        " invalid";
    //HTML header mapping:
    private long iHTMLStart,// StartHTML -- shift in array to the first byte after the header
                 iHTMLEnd,  // EndHTML -- shift in array of last byte for HTML syntax analysis
                 iFragStart,// StartFragment -- shift in array just after <!--StartFragment-->
                 iFragEnd,  // EndFragment -- shift in array before start <!--EndFragment-->
                 iSelStart, // StartSelection -- shift in array of the first char in copied selection
                 iSelEnd;   // EndSelection -- shift in array of the last char in copied selection
    private String stBaseURL; // SourceURL -- base URL for related references
    private String stVersion; // Version -- current supported version
    //Stream reader markers:
    private long iStartOffset,
                 iEndOffset,
                 iReadCount;
    private EHTMLReadMode readMode;
    // Wraps the raw CF_HTML byte stream; parsing of the header is deferred
    // until the first read or accessor call.
    public HTMLCodec(
        InputStream _bytestream,
        EHTMLReadMode _readMode) throws IOException
    {
        bufferedStream = new BufferedInputStream(_bytestream, BYTE_BUFFER_LEN);
        readMode = _readMode;
    }
    // Returns the SourceURL header value, or null when absent.
    public synchronized String getBaseURL() throws IOException
    {
        if( !descriptionParsed ) {
            parseDescription();
        }
        return stBaseURL;
    }
    // Returns the Version header value, or null when absent.
    public synchronized String getVersion() throws IOException
    {
        if( !descriptionParsed ) {
            parseDescription();
        }
        return stVersion;
    }
    /**
     * parseDescription parsing HTML clipboard header as it described in
     * comment to convertToHTMLFormat
     */
    private void parseDescription() throws IOException
    {
        stBaseURL = null;
        stVersion = null;
        // initialization of array offset pointers
        // to the same "uninitialized" state.
        iHTMLEnd =
            iHTMLStart =
                iFragEnd =
                    iFragStart =
                        iSelEnd =
                            iSelStart = -1;
        // Mark so the stream can be rewound once the header size is known.
        bufferedStream.mark(BYTE_BUFFER_LEN);
        String astEntries[] = new String[] {
            //common
            VERSION,
            START_HTML,
            END_HTML,
            START_FRAGMENT,
            END_FRAGMENT,
            //ver 1.0
            START_SELECTION,
            END_SELECTION,
            SOURCE_URL
        };
        BufferedReader bufferedReader = new BufferedReader(
            new InputStreamReader(
                bufferedStream,
                ENCODING
            ),
            CHAR_BUFFER_LEN
        );
        long iHeadSize = 0;
        long iCRSize = EOLN.length();
        int iEntCount = astEntries.length;
        boolean bContinue = true; // NOTE(review): never read — dead local.
        for( int iEntry = 0; iEntry < iEntCount; ++iEntry ){
            String stLine = bufferedReader.readLine();
            if( null==stLine ) {
                break;
            }
            //some header entries are optional, but the order is fixed.
            for( ; iEntry < iEntCount; ++iEntry ){
                if( !stLine.startsWith(astEntries[iEntry]) ) {
                    continue;
                }
                iHeadSize += stLine.length() + iCRSize;
                String stValue = stLine.substring(astEntries[iEntry].length()).trim();
                // NOTE(review): substring() never returns null, so this
                // guard is always true; kept as-is.
                if( null!=stValue ) {
                    try{
                        switch( iEntry ){
                        case 0:
                            stVersion = stValue;
                            break;
                        case 1:
                            iHTMLStart = Integer.parseInt(stValue);
                            break;
                        case 2:
                            iHTMLEnd = Integer.parseInt(stValue);
                            break;
                        case 3:
                            iFragStart = Integer.parseInt(stValue);
                            break;
                        case 4:
                            iFragEnd = Integer.parseInt(stValue);
                            break;
                        case 5:
                            iSelStart = Integer.parseInt(stValue);
                            break;
                        case 6:
                            iSelEnd = Integer.parseInt(stValue);
                            break;
                        case 7:
                            stBaseURL = stValue;
                            break;
                        };
                    } catch ( NumberFormatException e ) {
                        throw new IOException(FAILURE_MSG + astEntries[iEntry]+ " value " + e + INVALID_MSG);
                    }
                }
                break;
            }
        }
        //some entries could be absent in the HTML header,
        //so we have to derive them from the ones present.
        if( -1 == iHTMLStart )
            iHTMLStart = iHeadSize;
        if( -1 == iFragStart )
            iFragStart = iHTMLStart;
        if( -1 == iFragEnd )
            iFragEnd = iHTMLEnd;
        if( -1 == iSelStart )
            iSelStart = iFragStart;
        if( -1 == iSelEnd )
            iSelEnd = iFragEnd;
        //one of possible modes
        switch( readMode ){
        case HTML_READ_ALL:
            iStartOffset = iHTMLStart;
            iEndOffset = iHTMLEnd;
            break;
        case HTML_READ_FRAGMENT:
            iStartOffset = iFragStart;
            iEndOffset = iFragEnd;
            break;
        case HTML_READ_SELECTION:
        default:
            iStartOffset = iSelStart;
            iEndOffset = iSelEnd;
            break;
        }
        // Rewind to the start of the data and skip up to the chosen offset.
        bufferedStream.reset();
        if( -1 == iStartOffset ){
            throw new IOException(FAILURE_MSG + "invalid HTML format.");
        }
        int curOffset = 0;
        while (curOffset < iStartOffset){
            curOffset += bufferedStream.skip(iStartOffset - curOffset);
        }
        iReadCount = curOffset;
        if( iStartOffset != iReadCount ){
            throw new IOException(FAILURE_MSG + "Byte stream ends in description.");
        }
        descriptionParsed = true;
    }
    // Reads one payload byte; returns -1 at the configured end offset or
    // at the end of the underlying stream.
    public synchronized int read() throws IOException {
        if( closed ){
            throw new IOException("Stream closed");
        }
        if( !descriptionParsed ){
            parseDescription();
        }
        if( -1 != iEndOffset && iReadCount >= iEndOffset ) {
            return -1;
        }
        int retval = bufferedStream.read();
        if( retval == -1 ) {
            return -1;
        }
        ++iReadCount;
        return retval;
    }
    // Closes the underlying stream; idempotent.
    public synchronized void close() throws IOException {
        if( !closed ){
            closed = true;
            bufferedStream.close();
        }
    }
}
| |
package br.ufsc.ftsm.method.msm;
import java.util.ArrayDeque;
import java.util.Queue;
import br.ufsc.core.trajectory.SemanticTrajectory;
import br.ufsc.ftsm.base.TrajectorySimilarityCalculator;
import br.ufsc.ftsm.related.MSM;
import br.ufsc.ftsm.related.MSM.MSMSemanticParameter;
/**
 * FTSM-based computation of MSM similarity between two semantic
 * trajectories. The shorter trajectory is organised as an implicit
 * quad-split tree (NodeQMSM) over its cumulative-distance array, which is
 * traversed breadth-first; candidate index intervals of the longer
 * trajectory (IntervalQMSM) are pruned by the triangle inequality before
 * points are matched at the leaves.
 */
public class FTSMQMSM extends TrajectorySimilarityCalculator<SemanticTrajectory> {
    private MSMSemanticParameter param;

    public FTSMQMSM(MSM.MSMSemanticParameter param){
        this.param = param;
    }

    /**
     * Returns the MSM similarity of R and S in [0, 1]: the number of
     * points of each trajectory that have at least one match within the
     * threshold in the other, divided by the total number of points.
     */
    public double getSimilarity(SemanticTrajectory R, SemanticTrajectory S) {
        double[] resultT1;
        double[] resultT2;
        int n = R.length();
        int m = S.length();
        SemanticTrajectory T1;
        SemanticTrajectory T2;
        // T1 is always the shorter trajectory (the one the tree is built on).
        if (n <= m) {
            T1 = R;
            T2 = S;
            resultT1 = new double[n];
            resultT2 = new double[m];
        } else {
            T1 = S;
            T2 = R;
            resultT1 = new double[m];
            resultT2 = new double[n];
            n = T1.length();
            m = T2.length();
        }
        // Cumulative distance along T1; used to bound a subtree's radius.
        double dist[] = new double[n];
        dist[0] = 0;
        for (int i = 1; i < n; i++) {
            Object iPoint = param.semantic.getData(T1, i);
            Object iPreviousPoint = param.semantic.getData(T1, i - 1);
            dist[i] = dist[i - 1] + param.semantic.distance(iPoint, iPreviousPoint);
        }
        // Hoisted out of the traversal loops: the matching threshold is
        // constant for the whole computation.
        final double threshold = ((Number) param.threshlod).doubleValue();
        Queue<NodeQMSM> queue = new ArrayDeque<>();
        Queue<IntervalQMSM> toCompare = new ArrayDeque<>();
        toCompare.add(new IntervalQMSM(0, (m - 1)));
        NodeQMSM root = new NodeQMSM(0, (n / 2), (n - 1), toCompare);
        queue.add(root);
        while (!queue.isEmpty()) {
            NodeQMSM node = queue.poll();
            if (!node.isLeaf) {
                // pruning step: any point of T2 farther than radius from
                // the node's mid point cannot match any point in the node.
                double radius = Math.max(dist[node.mid] - dist[node.begin], (dist[node.end] - dist[node.mid]))
                        + threshold;
                Queue<IntervalQMSM> matchingList = new ArrayDeque<>();
                for (IntervalQMSM interval : node.toCompare) {
                    int k = interval.begin;
                    int start = -1;
                    // Collect maximal runs of T2 indices within the radius.
                    while (k <= interval.end) {
                        Object kPoint = param.semantic.getData(T2, k);
                        Object midPoint = param.semantic.getData(T1, node.mid);
                        if (param.semantic.distance(kPoint, midPoint) <= radius) {
                            if (start == -1) {
                                start = k;
                            }
                        } else {
                            if (start != -1) {
                                matchingList.add(new IntervalQMSM(start, k-1));
                            }
                            start = -1;
                        }
                        k++;
                    }
                    if (start != -1) {
                        // Close the run that reached the interval's end.
                        matchingList.add(new IntervalQMSM(start, k-1));
                    }
                }
                // splitting step: expand the node only if candidates survive.
                if (!matchingList.isEmpty()) {
                    int total = node.end - node.begin;
                    if (total == 1) {
                        queue.add(new NodeQMSM(node.begin, node.begin, node.begin, matchingList));
                        queue.add(new NodeQMSM(node.end, node.end, node.end, matchingList));
                    } else if (total == 2) {
                        queue.add(new NodeQMSM(node.begin, node.begin, node.begin, matchingList));
                        queue.add(new NodeQMSM(node.mid, node.mid, node.mid, matchingList));
                        queue.add(new NodeQMSM(node.end, node.end, node.end, matchingList));
                    } else {
                        // Split the span into four quarters.
                        int n2 = node.begin + node.end;
                        int q2 = n2 / 2;
                        int q1 = (node.begin + q2) / 2;
                        int q3 = ((q2 + 1) + node.end) / 2;
                        int mid1 = (node.begin + q1) / 2;
                        int begin2 = q1 + 1;
                        int mid2 = (begin2 + q2) / 2;
                        int begin3 = q2 + 1;
                        int mid3 = (begin3 + q3) / 2;
                        int begin4 = q3 + 1;
                        int mid4 = (begin4 + node.end) / 2;
                        queue.add(new NodeQMSM(node.begin, mid1, q1, matchingList));
                        queue.add(new NodeQMSM(begin2, mid2, q2, matchingList));
                        queue.add(new NodeQMSM(begin3, mid3, q3, matchingList));
                        queue.add(new NodeQMSM(begin4, mid4, node.end, matchingList));
                    }
                }
            } else {
                // matching step: mark matched points on both trajectories.
                for (IntervalQMSM interval : node.toCompare) {
                    int k = interval.begin;
                    while (k <= interval.end) {
                        Object kPoint = param.semantic.getData(T2, k);
                        Object midPoint = param.semantic.getData(T1, node.mid);
                        if (param.semantic.distance(kPoint, midPoint) <= threshold) {
                            resultT1[node.mid] = 1;
                            resultT2[k] = 1;
                        }
                        k++;
                    }
                }
            }
        }
        double parityAB = 0.0;
        for (int j = 0; j < resultT1.length; j++) {
            parityAB += resultT1[j];
        }
        double parityBA = 0.0;
        for (int j = 0; j < resultT2.length; j++) {
            parityBA += resultT2[j];
        }
        double similarity = (parityAB + parityBA) / (n + m);
        return similarity;
    }

    @Override
    public String parametrization() {
        return param.toString();
    }
}
/**
 * Inclusive index range [begin, end] of candidate points in the longer
 * trajectory that may still match a tree node during the FTSM search.
 */
class IntervalQMSM {
    // First index of the range (inclusive).
    int begin;
    // Last index of the range (inclusive).
    int end;
    public IntervalQMSM(int begin, int end) {
        this.begin = begin;
        this.end = end;
    }
}
/**
 * Node of the implicit split tree built over the shorter trajectory:
 * the inclusive index span [begin, end], its representative mid point,
 * and the candidate intervals of the longer trajectory still to compare.
 */
class NodeQMSM {
    int begin;
    int end;
    int mid;
    // True when the node covers a single point (begin == end).
    boolean isLeaf;
    Queue<IntervalQMSM> toCompare;

    public NodeQMSM(int begin, int mid, int end, Queue<IntervalQMSM> toCompare) {
        this.mid = mid;
        this.begin = begin;
        this.end = end;
        this.toCompare = toCompare;
        // Simplified from the redundant "end - begin == 0 ? true : false".
        isLeaf = end == begin;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.core;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import jakarta.servlet.ServletException;
import jakarta.servlet.ServletRequestEvent;
import jakarta.servlet.ServletRequestListener;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.junit.Assert;
import org.junit.Test;
import org.apache.catalina.Context;
import org.apache.catalina.connector.Response;
import org.apache.catalina.startup.Tomcat;
import org.apache.catalina.startup.TomcatBaseTest;
import org.apache.tomcat.util.buf.ByteChunk;
import org.apache.tomcat.util.descriptor.web.ErrorPage;
/**
 * Tests for StandardHostValve's error-page dispatching: mapping of status
 * codes to configured error pages, rejection of invalid error-page codes,
 * ServletRequestListener notification after errors, and behaviour when the
 * response was already committed before the error.
 */
public class TestStandardHostValve extends TomcatBaseTest {

    // A 500 must hit the specific /report/500 page; any other error code
    // must fall through to the default error page.
    @Test
    public void testErrorPageHandling() throws Exception {
        // Set up a container
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = tomcat.addContext("", null);

        // Add the error page
        Tomcat.addServlet(ctx, "error", new ErrorServlet());
        ctx.addServletMappingDecoded("/error", "error");

        // Add the error handling page
        Tomcat.addServlet(ctx, "report", new ReportServlet());
        ctx.addServletMappingDecoded("/report/*", "report");

        // And the handling for 500 responses
        ErrorPage errorPage500 = new ErrorPage();
        errorPage500.setErrorCode(Response.SC_INTERNAL_SERVER_ERROR);
        errorPage500.setLocation("/report/500");
        ctx.addErrorPage(errorPage500);

        // And the default error handling
        ErrorPage errorPageDefault = new ErrorPage();
        errorPageDefault.setLocation("/report/default");
        ctx.addErrorPage(errorPageDefault);

        tomcat.start();

        doTestErrorPageHandling(500, "/500");
        doTestErrorPageHandling(501, "/default");
    }

    // Setting a non-numeric error code on an ErrorPage must be rejected.
    @Test(expected=IllegalArgumentException.class)
    public void testInvalidErrorPage() throws Exception {
        // Set up a container
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = tomcat.addContext("", null);

        // Add a broken error page configuration
        ErrorPage errorPage500 = new ErrorPage();
        errorPage500.setErrorCode("java.lang.Exception");
        errorPage500.setLocation("/report/500");
        ctx.addErrorPage(errorPage500);
    }

    // Both requestInitialized and requestDestroyed must still fire when the
    // request ends in an error response.
    @Test
    public void testSRLAfterError() throws Exception {
        // Set up a container
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = tomcat.addContext("", null);

        // Add the error page
        Tomcat.addServlet(ctx, "error", new ErrorServlet());
        ctx.addServletMappingDecoded("/error", "error");

        final List<String> result = new ArrayList<>();

        // Add the request listener
        ServletRequestListener servletRequestListener = new ServletRequestListener() {

            @Override
            public void requestDestroyed(ServletRequestEvent sre) {
                result.add("Visit requestDestroyed");
            }

            @Override
            public void requestInitialized(ServletRequestEvent sre) {
                result.add("Visit requestInitialized");
            }
        };
        ((StandardContext) ctx).addApplicationEventListener(servletRequestListener);

        tomcat.start();

        // Request a page that triggers an error
        ByteChunk bc = new ByteChunk();
        int rc = getUrl("http://localhost:" + getPort() + "/error?errorCode=400", bc, null);

        Assert.assertEquals(400, rc);
        Assert.assertTrue(result.contains("Visit requestInitialized"));
        Assert.assertTrue(result.contains("Visit requestDestroyed"));
    }

    // Triggers the given error code and asserts both the status code and
    // the body produced by the mapped error page.
    private void doTestErrorPageHandling(int error, String report)
            throws Exception {

        // Request a page that triggers an error
        ByteChunk bc = new ByteChunk();
        int rc = getUrl("http://localhost:" + getPort() +
                "/error?errorCode=" + error, bc, null);

        Assert.assertEquals(error, rc);
        Assert.assertEquals(report, bc.toString());
    }

    // If the response was committed before the exception, the container
    // cannot render an error page; the client must see a broken response.
    @Test
    public void testIncompleteResponse() throws Exception {
        // Set up a container
        Tomcat tomcat = getTomcatInstance();

        // No file system docBase required
        Context ctx = tomcat.addContext("", null);

        // Add the error page
        Tomcat.addServlet(ctx, "error", new ExceptionServlet());
        ctx.addServletMappingDecoded("/error", "error");

        // Add the error handling page
        Tomcat.addServlet(ctx, "report", new ReportServlet());
        ctx.addServletMappingDecoded("/report/*", "report");

        // And the handling for 500 responses
        ErrorPage errorPage500 = new ErrorPage();
        errorPage500.setErrorCode(Response.SC_INTERNAL_SERVER_ERROR);
        errorPage500.setLocation("/report/500");
        ctx.addErrorPage(errorPage500);

        // And the default error handling
        ErrorPage errorPageDefault = new ErrorPage();
        errorPageDefault.setLocation("/report/default");
        ctx.addErrorPage(errorPageDefault);

        tomcat.start();

        // Request a page that triggers an error
        ByteChunk bc = new ByteChunk();
        Throwable t = null;
        try {
            getUrl("http://localhost:" + getPort() + "/error", bc, null);
            System.out.println(bc.toString());
        } catch (IOException ioe) {
            t = ioe;
        }
        Assert.assertNotNull(t);
    }

    // Responds with the error code given in the "errorCode" query parameter.
    private static class ErrorServlet extends HttpServlet {

        private static final long serialVersionUID = 1L;

        @Override
        protected void doGet(HttpServletRequest req, HttpServletResponse resp)
                throws ServletException, IOException {
            int error = Integer.parseInt(req.getParameter("errorCode"));
            resp.sendError(error);
        }
    }

    // Commits the response, then throws, so no error page can be shown.
    private static class ExceptionServlet extends HttpServlet {

        private static final long serialVersionUID = 1L;

        @Override
        protected void doGet(HttpServletRequest req, HttpServletResponse resp)
                throws ServletException, IOException {
            resp.flushBuffer();
            throw new IOException();
        }
    }

    // Error page target: echoes its own path info as the response body.
    private static class ReportServlet extends HttpServlet {

        private static final long serialVersionUID = 1L;

        @Override
        protected void doGet(HttpServletRequest req, HttpServletResponse resp)
                throws ServletException, IOException {
            String pathInfo = req.getPathInfo();
            resp.setContentType("text/plain");
            resp.getWriter().print(pathInfo);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.wasp.meta;
import com.alibaba.wasp.DeserializationException;
import com.alibaba.wasp.EntityGroupInfo;
import com.alibaba.wasp.EntityGroupLocation;
import com.alibaba.wasp.FConstants;
import com.alibaba.wasp.MetaException;
import com.alibaba.wasp.ServerName;
import com.alibaba.wasp.TableNotFoundException;
import com.alibaba.wasp.storage.StorageActionManager;
import com.alibaba.wasp.storage.StorageTableNotFoundException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
/**
 * Implementation of {@link FMetaServices} that reads table info and
 * entity-group info directly with plain HBase scans, performing each
 * operation without any retry logic.
 */
public class FMetaServicesImplWithoutRetry extends FMetaServices {
public static final Log LOG = LogFactory
.getLog(FMetaServicesImplWithoutRetry.class);
protected String metaTable;
StorageActionManager hbaseActionManager;
// No-arg constructor for reflective instantiation; connect() must be
// invoked separately before the instance is used.
public FMetaServicesImplWithoutRetry() throws MetaException {
}
// Convenience constructor: stores the configuration and connects
// immediately.
public FMetaServicesImplWithoutRetry(Configuration conf) throws MetaException {
  this.setConf(conf);
  this.connect();
}
/**
 * Sets up access to the meta table: resolves the meta table name from the
 * configuration and creates the underlying StorageActionManager used for
 * all subsequent create/drop/alter/get operations.
 *
 * @return true when the connection was established, false on any failure
 */
@Override
public boolean connect() {
  try {
    Configuration conf = getConf();
    metaTable = conf.get(FConstants.METASTORE_TABLE,
        FConstants.DEFAULT_METASTORE_TABLE);
    hbaseActionManager = new StorageActionManager(conf);
    return true;
  } catch (Exception e) {
    // Any failure during setup is reported as a simple false result.
    return false;
  }
}
// Releases the underlying HBase connection resources.
@Override
public void close() {
  hbaseActionManager.close();
}
// Obtains a handle to the meta table, wrapping the checked storage
// exception into a MetaException.
private HTableInterface getHTable() throws MetaException {
  try {
    return hbaseActionManager.getTable(metaTable);
  } catch (StorageTableNotFoundException e) {
    throw new MetaException(e);
  }
}
// Closes a meta-table handle; null is tolerated so callers can use this
// unconditionally in finally blocks.
private void closeHTable(HTableInterface htable) throws MetaException {
  if (htable == null) {
    return;
  }
  try {
    htable.close();
  } catch (IOException e) {
    throw new MetaException(e);
  }
}
/**
 * Builds the HBase table descriptor backing the given FTable: one column
 * family per family name produced by RowBuilder, each with ROW bloom
 * filters and GZ compression.
 */
@Override
public HTableDescriptor getStorageTableDesc(FTable tbl) {
  String entityTableName = StorageTableNameBuilder.buildEntityTableName(tbl
      .getTableName());
  HTableDescriptor descriptor = new HTableDescriptor(entityTableName);
  for (String family : RowBuilder.buildFamilyName(tbl)) {
    HColumnDescriptor column = new HColumnDescriptor(family);
    column.setBloomFilterType(BloomType.ROW);
    column.setCompressionType(Algorithm.GZ);
    descriptor.addFamily(column);
  }
  return descriptor;
}
// Fetches the descriptor of an existing storage table by name,
// translating IOExceptions into MetaException.
@Override
public HTableDescriptor getStorageTableDesc(String storageTable)
    throws MetaException {
  try {
    return hbaseActionManager.getStorageTableDesc(storageTable);
  } catch (IOException e) {
    LOG.error("getStorageTableDesc ", e);
    throw new MetaException(e);
  }
}
// Lists the descriptors of all storage tables, translating IOExceptions
// into MetaException.
@Override
public HTableDescriptor[] listStorageTables() throws MetaException {
  try {
    return hbaseActionManager.listStorageTables();
  } catch (IOException e) {
    LOG.error("listStorageTables ", e);
    throw new MetaException(e);
  }
}
// create table in HBase
/**
 * Creates the backing storage table in HBase.
 *
 * @param desc descriptor of the HBase table to create
 * @throws MetaException wrapping any underlying IOException
 */
@Override
public void createStorageTable(HTableDescriptor desc) throws MetaException {
  try {
    hbaseActionManager.createStorageTable(desc);
  } catch (IOException e) {
    // Fixed copy-paste defect: the log tag previously said
    // "listStorageTables" although this is createStorageTable.
    LOG.error("createStorageTable ", e);
    throw new MetaException(e);
  }
}
// delete tables in HBase
// Best-effort bulk delete: failures for individual tables are logged and
// deliberately swallowed so the remaining tables are still attempted.
@Override
public void deleteStorageTables(List<HTableDescriptor> deleteEntityTables)
    throws MetaException {
  for (HTableDescriptor htable : deleteEntityTables) {
    try {
      hbaseActionManager.deleteStorageTable(htable.getName());
    } catch (IOException e) {
      LOG.error("deleteStorageTables " + htable.getNameAsString(), e);
    }
  }
}
// delete table in HBase
// Best-effort single delete: an IOException is logged and swallowed, so
// this method never signals deletion failure to the caller.
@Override
public void deleteStorageTable(String deleteStorageTable)
    throws MetaException {
  try {
    hbaseActionManager.deleteStorageTable(Bytes.toBytes(deleteStorageTable));
  } catch (IOException e) {
    LOG.error("deleteStorageTables " + deleteStorageTable, e);
  }
}
// Check table exists in HBase
// Returns whether the named storage table exists, translating
// IOExceptions into MetaException.
@Override
public boolean storageTableExists(String deleteStorageTable)
    throws MetaException {
  try {
    return hbaseActionManager.storageTableExists(deleteStorageTable);
  } catch (IOException e) {
    LOG.error("storageTableExists ", e);
    throw new MetaException(e);
  }
}
@Override
public void createTable(FTable tbl) throws MetaException {
  // Register the table's metadata row. checkAndPut with a null expected
  // value guarantees we never overwrite an existing definition.
  byte[] rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR
      + tbl.getTableName());
  Put tableRow = new Put(rowKey);
  tableRow.add(FConstants.CATALOG_FAMILY, FConstants.TABLEINFO, tbl.toByte());
  boolean created = checkAndPut(FConstants.CATALOG_FAMILY,
      FConstants.TABLEINFO, null, tableRow);
  if (!created) {
    throw new MetaException(tbl.getTableName() + " is already exists.");
  }
}
@Override
public void dropTable(String tableName) throws MetaException {
  // Deleting the whole catalog row removes the table definition together
  // with any index columns stored on it.
  byte[] rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR + tableName);
  if (!exists(new Get(rowKey))) {
    throw new MetaException(tableName + " is not exists.");
  }
  delete(new Delete(rowKey));
}
@Override
public void alterTable(String tableName, FTable newFTable)
    throws MetaException {
  // Rewrites a table definition in place, or renames it when the new
  // definition carries a different table name.
  byte[] rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR + tableName);
  Get get = new Get(rowKey);
  if (!exists(get)) {
    throw new MetaException(tableName + " is not exists.");
  }
  if (tableName.equals(newFTable.getTableName())) {
    // In-place update: overwrite the existing info column.
    Put put = new Put(rowKey);
    put.add(FConstants.CATALOG_FAMILY, FConstants.TABLEINFO,
        newFTable.toByte());
    put(put);
  } else {
    // rename (1) tableName exists (2)newFTable.getTableName() is not exists
    rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR
        + newFTable.getTableName());
    get = new Get(rowKey);
    if (exists(get)) {
      // Bug fix: report the conflicting NEW name; the old code reported
      // tableName, which always exists and made the error misleading.
      throw new MetaException(newFTable.getTableName() + " is already exists.");
    }
    // put into a new row
    // NOTE(review): only the info column is copied; any index columns on
    // the old row are dropped by the rename — confirm this is intended.
    Put put = new Put(rowKey);
    put.add(FConstants.CATALOG_FAMILY, FConstants.TABLEINFO,
        newFTable.toByte());
    put(put);
    // delete the former row
    rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR + tableName);
    delete(new Delete(rowKey));
  }
}
@Override
public FTable getTable(String tableName) throws MetaException {
  // String variant: prefix the name and delegate to the row-based lookup.
  return getTableByRow(Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR
      + tableName));
}
public FTable getTable(byte[] tableName) throws MetaException {
  // byte[] variant: prepend the binary row prefix and delegate.
  return getTableByRow(Bytes.add(FConstants.TABLEROW_PREFIX, tableName));
}
public FTable getTableByRow(byte[] rowKey) throws MetaException {
  // Load and deserialize a table definition plus its indexes from one row.
  Result row = get(new Get(rowKey));
  FTable ftable = FTable.convert(row.getValue(FConstants.CATALOG_FAMILY,
      FConstants.TABLEINFO));
  if (ftable == null) {
    return null;
  }
  ftable.setIndex(parseIndex(row));
  return ftable;
}
@Override
public List<FTable> getTables(String regex) throws MetaException {
  // Filter the full table list down to names matching the given regex.
  Pattern namePattern = Pattern.compile(regex);
  List<FTable> result = new LinkedList<FTable>();
  for (FTable candidate : getAllTables()) {
    if (!namePattern.matcher(candidate.getTableName()).matches()) {
      continue;
    }
    result.add(candidate);
  }
  return result;
}
@Override
public List<FTable> getAllTables() throws MetaException {
  // Prefix-scan the catalog for every FTable row, deserializing the table
  // definition and its index columns from each.
  List<FTable> tables = new LinkedList<FTable>();
  Scan scan = new Scan(FConstants.TABLEROW_PREFIX);
  scan.addFamily(FConstants.CATALOG_FAMILY);
  int rows = getConf().getInt(HConstants.HBASE_META_SCANNER_CACHING,
      HConstants.DEFAULT_HBASE_META_SCANNER_CACHING);
  scan.setCaching(rows);
  FilterList allFilters = new FilterList();
  allFilters.addFilter(new PrefixFilter(FConstants.TABLEROW_PREFIX));
  scan.setFilter(allFilters);
  HTableInterface htable = null;
  ResultScanner scanner = null;
  try {
    htable = getHTable();
    scanner = htable.getScanner(scan);
    for (Result r = scanner.next(); r != null; r = scanner.next()) {
      byte[] value = r.getValue(FConstants.CATALOG_FAMILY,
          FConstants.TABLEINFO);
      FTable ftable = FTable.convert(value);
      if (ftable == null) {
        continue;
      }
      LinkedHashMap<String, Index> indexs = parseIndex(r);
      ftable.setIndex(indexs);
      tables.add(ftable);
    }
  } catch (IOException e) {
    throw new MetaException(e);
  } finally {
    // Fixed resource leak: the ResultScanner was never closed before.
    if (scanner != null) {
      scanner.close();
    }
    closeHTable(htable);
  }
  return tables;
}
@Override
public List<FTable> getChildTables(String tableName) throws MetaException {
  // A child table references its parent by name; collect every table whose
  // parent is the given one.
  List<FTable> childTables = new LinkedList<FTable>();
  for (FTable candidate : getAllTables()) {
    String parent = candidate.getParentName();
    if (parent != null && parent.equals(tableName)) {
      childTables.add(candidate);
    }
  }
  return childTables;
}
@Override
public boolean tableExists(String tableName) throws MetaException {
  // A table exists iff its prefixed catalog row exists.
  return exists(new Get(Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR
      + tableName)));
}
// ///////////////////////////////////////////////////
// Index operation
// ///////////////////////////////////////////////////
@Override
public void addIndex(String tableName, Index index) throws MetaException {
  // Indexes live as extra columns on their table's catalog row, keyed by
  // a prefixed qualifier.
  byte[] rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR + tableName);
  // check if the table exists
  if (!exists(new Get(rowKey))) {
    throw new MetaException(tableName + " is not exists.");
  }
  byte[] qualifier = Bytes.toBytes(FConstants.INDEXQUALIFIER_PREFIX_STR
      + index.getIndexName());
  Put put = new Put(rowKey);
  put.add(FConstants.CATALOG_FAMILY, qualifier, index.toByte());
  put(put);
}
@Override
public void deleteIndex(String tableName, Index index) throws MetaException {
  // Remove the index's column from the owning table's catalog row.
  byte[] rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR + tableName);
  // check if the table exists
  if (!exists(new Get(rowKey))) {
    throw new MetaException(tableName + " is not exists.");
  }
  byte[] qualifier = Bytes.toBytes(FConstants.INDEXQUALIFIER_PREFIX_STR
      + index.getIndexName());
  Delete delete = new Delete(rowKey);
  delete.deleteColumns(FConstants.CATALOG_FAMILY, qualifier);
  delete(delete);
}
@Override
public LinkedHashMap<String, Index> getAllIndex(String tableName)
    throws MetaException {
  // Read the table's catalog row and pull every index column out of it.
  byte[] rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR + tableName);
  Get get = new Get(rowKey);
  get.addFamily(FConstants.CATALOG_FAMILY);
  // check if the table exists
  if (!exists(get)) {
    throw new MetaException(tableName + " is not exists.");
  }
  return parseIndex(get(get));
}
/**
 * Extracts every index definition stored on a table's catalog row.
 * Index columns are identified by the INDEXQUALIFIER prefix.
 *
 * @param rs catalog row result; may lack the catalog family entirely
 * @return index name to Index, in column order (possibly empty)
 */
public LinkedHashMap<String, Index> parseIndex(Result rs) {
  LinkedHashMap<String, Index> indexs = new LinkedHashMap<String, Index>();
  NavigableMap<byte[], NavigableMap<byte[], byte[]>> familyMap = rs
      .getNoVersionMap();
  if (familyMap == null) {
    return indexs;
  }
  NavigableMap<byte[], byte[]> kvs = familyMap.get(FConstants.CATALOG_FAMILY);
  if (kvs == null) {
    // Fixed: a row without the catalog family previously caused an NPE in
    // the loop below.
    return indexs;
  }
  for (Map.Entry<byte[], byte[]> kv : kvs.entrySet()) {
    byte[] cq = kv.getKey();
    byte[] value = kv.getValue();
    if (Bytes.startsWith(cq, FConstants.INDEXQUALIFIER_PREFIX)) {
      Index index = Index.convert(value);
      indexs.put(index.getIndexName(), index);
    }
  }
  return indexs;
}
@Override
public Index getIndex(String tableName, String indexName)
    throws MetaException {
  // Fetch the single index column for indexName from the table's row.
  byte[] rowKey = Bytes.toBytes(FConstants.TABLEROW_PREFIX_STR + tableName);
  Get get = new Get(rowKey);
  // check if the table exists
  if (!exists(get)) {
    throw new MetaException(tableName + " is not exists.");
  }
  byte[] qualifier = Bytes.toBytes(FConstants.INDEXQUALIFIER_PREFIX_STR
      + indexName);
  get.addColumn(FConstants.CATALOG_FAMILY, qualifier);
  Result rs = get(get);
  return Index.convert(rs.getValue(FConstants.CATALOG_FAMILY, qualifier));
}
// Row key of an entity group's catalog row is simply its entity group name.
public static byte[] getRowKey(EntityGroupInfo entityGroupInfo) {
return entityGroupInfo.getEntityGroupName();
}
// ///////////////////////////////////////////////////
// EntityGroup operation
// ///////////////////////////////////////////////////
@Override
public void addEntityGroup(EntityGroupInfo entityGroupInfo)
    throws MetaException {
  // Upsert the serialized entity group info into its catalog row.
  put(makePutFromEntityGroupInfo(entityGroupInfo));
}
@Override
public void addEntityGroup(List<EntityGroupInfo> entityGroupInfos)
    throws MetaException {
  // Batch variant: one Put per entity group, issued in a single call.
  List<Put> batch = new LinkedList<Put>();
  for (EntityGroupInfo egi : entityGroupInfos) {
    byte[] rowKey = getRowKey(egi);
    LOG.debug(" Put rowKey : " + Bytes.toString(rowKey));
    Put put = new Put(rowKey);
    put.add(FConstants.CATALOG_FAMILY, FConstants.EGINFO, egi.toByte());
    batch.add(put);
  }
  put(batch);
}
@Override
public boolean exists(EntityGroupInfo entityGroupInfo) throws MetaException {
  // An entity group exists iff its catalog row does.
  return exists(new Get(getRowKey(entityGroupInfo)));
}
@Override
public void deleteEntityGroup(EntityGroupInfo entityGroupInfo)
    throws MetaException {
  // Remove the entity group's entire catalog row.
  delete(new Delete(getRowKey(entityGroupInfo)));
}
@Override
public void deleteEntityGroups(String tableName) throws MetaException {
  // parent and child share EntityGroup, we just delete EntityGroup for root
  // table
  FTable table = getTable(tableName);
  if (table == null) {
    // Fixed: a missing table previously caused a NullPointerException.
    throw new MetaException(tableName + " is not exists.");
  }
  if (table.isChildTable()) {
    LOG.info("Delete a child's EntityGroups, do nothing " + tableName);
    return;
  }
  List<EntityGroupInfo> entityGroupInfos = getTableEntityGroups(tableName);
  if (entityGroupInfos == null) {
    // Neither root nor child yields no groups to delete.
    return;
  }
  List<Delete> allDelete = new LinkedList<Delete>();
  for (EntityGroupInfo egi : entityGroupInfos) {
    allDelete.add(new Delete(getRowKey(egi)));
  }
  // (the former "allDelete != null" guard was dead code; the list is
  // always constructed above)
  if (!allDelete.isEmpty()) {
    delete(allDelete);
  }
}
// String convenience overload of getRootTable(byte[]).
public FTable getRootTable(String tableName) throws MetaException {
return getRootTable(Bytes.toBytes(tableName));
}
public FTable getRootTable(byte[] tableName) throws MetaException {
  // Resolve a table to its root: child tables delegate to their parent.
  FTable table = getTable(tableName);
  return table.isRootTable() ? table : getTable(table.getParentName());
}
public List<EntityGroupInfo> getTableEntityGroups(String tableName,
    boolean root) throws MetaException {
  // When the caller already knows the table is a root table, skip the
  // lookup; otherwise resolve the parent first.
  if (root) {
    return getTableEntityGroups(Bytes.toBytes(tableName));
  }
  FTable table = getTable(tableName);
  return getTableEntityGroups(Bytes.toBytes(table.getParentName()));
}
@Override
public List<EntityGroupInfo> getTableEntityGroups(String tableName)
    throws MetaException {
  // Child tables share their parent's entity groups; only root tables own
  // catalog rows of their own. Any other table yields null.
  FTable table = getTable(tableName);
  if (table.isChildTable()) {
    return getTableEntityGroups(Bytes.toBytes(table.getParentName()));
  }
  if (table.isRootTable()) {
    return getTableEntityGroups(Bytes.toBytes(tableName));
  }
  return null;
}
// Scans the catalog for every entity group row belonging to the given
// table. Entity group names are prefixed with the table name, so a prefix
// filter plus a matching start key bounds the scan.
@Override
public List<EntityGroupInfo> getTableEntityGroups(final byte[] tableByte)
throws MetaException {
final List<EntityGroupInfo> entityGroupInfos = new LinkedList<EntityGroupInfo>();
final byte[] startKey = tableByte;
FilterList allFilters = new FilterList();
allFilters.addFilter(new PrefixFilter(tableByte));
FMetaVisitor visitor = new FMetaVisitor() {
@Override
public boolean visit(Result r) throws IOException {
// Null/empty rows are skipped; scanning continues.
if (r == null || r.isEmpty()) {
return true;
}
byte[] value = r.getValue(FConstants.CATALOG_FAMILY, FConstants.EGINFO);
EntityGroupInfo eginfo = EntityGroupInfo.parseFromOrNull(value);
if (eginfo == null) {
return true;
}
if (!Bytes.equals(eginfo.getTableName(), tableByte)) {
// this is another table, we can exit search.
return false;
}
entityGroupInfos.add(eginfo);
// Returning true means "keep scanning"
return true;
}
};
fullScan(visitor, startKey, null, allFilters);
return entityGroupInfos;
}
// Modification is an overwrite: addEntityGroup issues a Put against the
// same row key, replacing the stored info column.
@Override
public void modifyEntityGroupInfo(EntityGroupInfo entityGroupInfo)
throws MetaException {
addEntityGroup(entityGroupInfo);
}
@Override
public EntityGroupInfo getEntityGroupInfo(EntityGroupInfo entityGroupInfo)
    throws MetaException {
  // Re-read the persisted info column for this entity group's row.
  Get get = new Get(getRowKey(entityGroupInfo));
  get.addColumn(FConstants.CATALOG_FAMILY, FConstants.EGINFO);
  byte[] value = get(get).getValue(FConstants.CATALOG_FAMILY,
      FConstants.EGINFO);
  return EntityGroupInfo.parseFromOrNull(value);
}
@Override
public ServerName getEntityGroupLocation(EntityGroupInfo entityGroupInfo)
    throws MetaException {
  // Fetch only the location column and decode it into a ServerName.
  Get get = new Get(getRowKey(entityGroupInfo));
  get.addColumn(FConstants.CATALOG_FAMILY, FConstants.EGLOCATION);
  byte[] value = get(get).getValue(FConstants.CATALOG_FAMILY,
      FConstants.EGLOCATION);
  try {
    return ServerName.convert(value);
  } catch (DeserializationException de) {
    throw new MetaException(de);
  }
}
@Override
public Pair<EntityGroupInfo, ServerName> getEntityGroupAndLocation(
    byte[] entityGroupName) throws MetaException {
  // Read both the info and location columns of one row in a single Get.
  Get get = new Get(entityGroupName);
  get.addColumn(FConstants.CATALOG_FAMILY, FConstants.EGINFO);
  get.addColumn(FConstants.CATALOG_FAMILY, FConstants.EGLOCATION);
  Result row = get(get);
  byte[] infoValue = row.getValue(FConstants.CATALOG_FAMILY,
      FConstants.EGINFO);
  byte[] locationValue = row.getValue(FConstants.CATALOG_FAMILY,
      FConstants.EGLOCATION);
  try {
    EntityGroupInfo info = EntityGroupInfo.parseFromOrNull(infoValue);
    ServerName location = ServerName.convert(locationValue);
    return new Pair<EntityGroupInfo, ServerName>(info, location);
  } catch (DeserializationException de) {
    throw new MetaException(de);
  }
}
// Full catalog scan collecting entity group -> server assignments, skipping
// disabled tables and (optionally) offlined split parents. Rows that fail
// to deserialize are logged and skipped rather than aborting the scan.
@Override
public Map<EntityGroupInfo, ServerName> fullScan(
final Set<String> disabledTables,
final boolean excludeOfflinedSplitParents) throws MetaException {
final Map<EntityGroupInfo, ServerName> entityGroups = new TreeMap<EntityGroupInfo, ServerName>();
FMetaVisitor v = new FMetaVisitor() {
@Override
public boolean visit(Result r) throws IOException {
try {
if (r == null || r.isEmpty()) {
return true;
}
byte[] infoValue = r.getValue(FConstants.CATALOG_FAMILY,
FConstants.EGINFO);
byte[] locationValue = r.getValue(FConstants.CATALOG_FAMILY,
FConstants.EGLOCATION);
Pair<EntityGroupInfo, ServerName> eginfoSN = new Pair<EntityGroupInfo, ServerName>(
EntityGroupInfo.parseFromOrNull(infoValue),
ServerName.convert(locationValue));
EntityGroupInfo eginfo = eginfoSN.getFirst();
if (eginfo == null) {
return true;
}
if (eginfo.getTableNameAsString() == null) {
return true;
}
if (disabledTables.contains(eginfo.getTableNameAsString())) {
return true;
}
// Are we to include split parents in the list?
if (excludeOfflinedSplitParents && eginfo.isSplitParent()) {
return true;
}
entityGroups.put(eginfo, eginfoSN.getSecond());
return true;
} catch (DeserializationException de) {
// Malformed row: warn and keep scanning.
LOG.warn("Failed parse " + r, de);
return true;
}
}
};
fullScan(v);
return entityGroups;
}
@Override
public List<EntityGroupInfo> getAllEntityGroupInfos() throws MetaException {
  // Full scan collecting every parseable entity group info in the catalog.
  final List<EntityGroupInfo> entityGroups = new ArrayList<EntityGroupInfo>();
  FMetaVisitor collector = new FMetaVisitor() {
    @Override
    public boolean visit(Result r) throws IOException {
      if (r == null || r.isEmpty()) {
        return true;
      }
      EntityGroupInfo eginfo = EntityGroupInfo.getEntityGroupInfo(r);
      if (eginfo == null) {
        return true;
      }
      if (LOG.isDebugEnabled()) {
        LOG.debug("EntityGroupInfo : " + eginfo.toString());
      }
      entityGroups.add(eginfo);
      return true; // keep scanning
    }
  };
  fullScan(collector);
  return entityGroups;
}
@Override
public List<Result> fullScan() throws MetaException {
  // Collect every catalog row via a full scan.
  CollectAllVisitor collector = new CollectAllVisitor();
  fullScan(collector);
  return collector.getResults();
}
@Override
public Map<EntityGroupInfo, Result> getOfflineSplitParents()
    throws MetaException {
  // Collects every entity group that is both offline and split — i.e. a
  // parent whose daughters have taken over — keyed by its info.
  final Map<EntityGroupInfo, Result> offlineSplitParents = new HashMap<EntityGroupInfo, Result>();
  FMetaVisitor collector = new FMetaVisitor() {
    @Override
    public boolean visit(Result r) throws IOException {
      if (r == null || r.isEmpty()) {
        return true;
      }
      EntityGroupInfo info = EntityGroupInfo.getEntityGroupInfo(r);
      if (info != null && info.isOffline() && info.isSplit()) {
        offlineSplitParents.put(info, r);
      }
      return true; // keep scanning
    }
  };
  // Run full scan of .FMETA. catalog table passing in our custom visitor
  fullScan(collector);
  return offlineSplitParents;
}
// Collects all catalog rows hosted on the given server, then converts them
// to a map keyed by entity group info.
@Override
public NavigableMap<EntityGroupInfo, Result> getServerUserEntityGroups(
final ServerName serverName) throws MetaException {
final NavigableMap<EntityGroupInfo, Result> egis = new TreeMap<EntityGroupInfo, Result>();
// Fill the above egis map with entries from .FMETA. that have the passed
// servername.
CollectingVisitor<Result> v = new CollectingVisitor<Result>() {
@Override
void add(Result r) {
if (r == null || r.isEmpty())
return;
// Keep only rows whose location column matches the requested server.
ServerName sn = ServerName.getServerName(r);
if (sn != null && sn.equals(serverName))
this.results.add(r);
}
};
fullScan(v);
List<Result> results = v.getResults();
if (results != null && !results.isEmpty()) {
// Convert results to Map keyed by HRI
for (Result r : results) {
Pair<EntityGroupInfo, ServerName> p = EntityGroupInfo
.getEntityGroupInfoAndServerName(r);
if (p != null && p.getFirst() != null)
egis.put(p.getFirst(), r);
}
}
return egis;
}
// Unbounded full scan: no start/stop row, no extra filters.
@Override
public void fullScan(final FMetaVisitor visitor) throws MetaException {
fullScan(visitor, null, null);
}
// Bounded full scan without additional filters.
@Override
public void fullScan(final FMetaVisitor visitor, final byte[] startrow,
final byte[] endrow) throws MetaException {
fullScan(visitor, startrow, endrow, null);
}
/**
 * Core scan primitive: walks the catalog family between the optional
 * start/stop rows, skipping FTable rows, and feeds each non-empty row to
 * the visitor until it returns false.
 *
 * @param visitor callback per row; return false to stop early
 * @param startrow inclusive start row, or null for the table start
 * @param endrow exclusive stop row, or null for the table end
 * @param allFilters additional filters, or null
 * @throws MetaException wrapping any underlying IOException
 */
@Override
public void fullScan(final FMetaVisitor visitor, final byte[] startrow,
    final byte[] endrow, FilterList allFilters) throws MetaException {
  Scan scan = new Scan();
  if (startrow != null) {
    scan.setStartRow(startrow);
  }
  if (endrow != null) {
    scan.setStopRow(endrow);
  }
  int caching = getConf().getInt(HConstants.HBASE_META_SCANNER_CACHING,
      HConstants.DEFAULT_HBASE_META_SCANNER_CACHING);
  scan.setCaching(caching);
  if (allFilters != null) {
    scan.setFilter(allFilters);
  }
  scan.addFamily(FConstants.CATALOG_FAMILY);
  HTableInterface metaTable = getHTable();
  ResultScanner scanner = null;
  try {
    scanner = metaTable.getScanner(scan);
    Result data;
    while ((data = scanner.next()) != null) {
      if (data.isEmpty()) {
        continue;
      }
      // FTable definition rows are not entity group rows; skip them.
      if (Bytes.startsWith(data.getRow(), FConstants.TABLEROW_PREFIX)) {
        continue;
      }
      // Break if visit returns false.
      if (!visitor.visit(data)) {
        break;
      }
    }
  } catch (IOException e) {
    throw new MetaException(e);
  } finally {
    // Fixed: previously the table handle leaked when getScanner threw,
    // because both closes lived in an inner finally. closeHTable is used
    // for consistency with the other data-access helpers.
    if (scanner != null) {
      scanner.close();
    }
    closeHTable(metaTable);
  }
}
@Override
public void updateEntityGroupLocation(EntityGroupInfo entityGroupInfo,
    ServerName sn) throws MetaException {
  // Only the location column is rewritten; the info column is untouched.
  put(addLocation(new Put(getRowKey(entityGroupInfo)), sn));
}
// Locates the entity group of the root table that should contain the given
// row. First tries the deprecated getRowOrBefore shortcut; if the row it
// lands on belongs to a different table, falls back to a full scan of the
// root table's entity group rows.
@Override
@SuppressWarnings("deprecation")
public EntityGroupLocation scanEntityGroupLocation(final byte[] tableName,
final byte[] row) throws MetaException {
FTable root = getRootTable(tableName);
final byte[] rootTableName = Bytes.toBytes(root.getTableName());
byte[] metaKey = EntityGroupInfo.createEntityGroupName(rootTableName, row,
FConstants.NINES, false);
try {
// NOTE(review): getRowOrBefore may return null when no row sorts at or
// before metaKey; r.getRow() would then NPE — confirm callers always
// scan a non-empty catalog.
Result r = this.getHTable().getRowOrBefore(metaKey,
FConstants.CATALOG_FAMILY);
String rowString = Bytes.toString(r.getRow());
if (!rowString.startsWith(Bytes.toString(rootTableName))) {
final List<EntityGroupLocation> results = new ArrayList<EntityGroupLocation>();
FMetaVisitor visitor = new FMetaVisitor() {
@Override
public boolean visit(Result r) throws IOException {
if (r == null || r.isEmpty()) {
return true;
}
EntityGroupInfo info = EntityGroupInfo.getEntityGroupInfo(r);
if (info == null) {
return true; // Keep scanning
}
if (info.isOffline()) {
return true; // Keep scanning
}
if (info.isSplit()) {
return true; // Keep scanning
}
if (LOG.isDebugEnabled()) {
LOG.debug("EntityGroupInfo : " + info.toString());
}
if (Bytes.equals(info.getTableName(), rootTableName)) {
// find it, so end search
ServerName sn = ServerName.getServerName(r);
EntityGroupLocation egLoc = new EntityGroupLocation(info,
sn.getHostname(), sn.getPort());
results.add(egLoc);
return false;
}
return true;
}
};
// Run full scan of .FMETA. catalog table passing in our custom visitor
fullScan(visitor, rootTableName, null);
return results.size() == 0 ? null : results.get(0);
} else {
// Fast path: the row-or-before hit belongs to the root table.
EntityGroupInfo info = EntityGroupInfo.getEntityGroupInfo(r);
if (info == null) {
throw new TableNotFoundException(Bytes.toString(tableName));
}
ServerName sn = ServerName.getServerName(r);
EntityGroupLocation egLoc = new EntityGroupLocation(info,
sn.getHostname(), sn.getPort());
return egLoc;
}
} catch (IOException e) {
throw new MetaException(e);
}
}
// Collects the location of every entity group of the given table by
// prefix-scanning the catalog from the table name.
@Override
public List<EntityGroupLocation> getEntityGroupLocations(
final byte[] tableName) throws MetaException {
final List<EntityGroupLocation> egLocations = new LinkedList<EntityGroupLocation>();
final byte[] startKey = tableName;
FilterList allFilters = new FilterList();
allFilters.addFilter(new PrefixFilter(tableName));
FMetaVisitor visitor = new FMetaVisitor() {
@Override
public boolean visit(Result r) throws IOException {
if (r == null || r.isEmpty()) {
return true;
}
byte[] value = r.getValue(FConstants.CATALOG_FAMILY, FConstants.EGINFO);
EntityGroupInfo eginfo = EntityGroupInfo.parseFromOrNull(value);
if (eginfo == null) {
return true;
}
if (!Bytes.equals(eginfo.getTableName(), tableName)) {
// this is another table, we can exit search.
return false;
}
ServerName sn = ServerName.getServerName(r);
EntityGroupLocation egLoc = new EntityGroupLocation(eginfo,
sn.getHostname(), sn.getPort());
egLocations.add(egLoc);
// Returning true means "keep scanning"
return true;
}
};
fullScan(visitor, startKey, null, allFilters);
return egLocations;
}
/**
 * Returns the entity groups of a table together with the servers hosting
 * them, by prefix-scanning the catalog from the table name.
 *
 * @param tableName table whose entity groups to collect
 * @param excludeOfflinedSplitParents skip offlined split parents when true
 * @return list of EntityGroupInfo / ServerName pairs
 * @throws MetaException if the underlying scan fails
 */
@Override
public List<Pair<EntityGroupInfo, ServerName>> getTableEntityGroupsAndLocations(
final byte[] tableName, final boolean excludeOfflinedSplitParents)
throws MetaException {
byte[] startrow = tableName;
FilterList allFilters = new FilterList();
allFilters.addFilter(new PrefixFilter(tableName));
final List<Pair<EntityGroupInfo, ServerName>> entityGroupInfos = new ArrayList<Pair<EntityGroupInfo, ServerName>>();
FMetaVisitor visitor = new FMetaVisitor() {
@Override
public boolean visit(Result r) throws IOException {
byte[] value = r.getValue(FConstants.CATALOG_FAMILY, FConstants.EGINFO);
EntityGroupInfo egi = EntityGroupInfo.parseFromOrNull(value);
if (egi == null) {
LOG.warn("No serialized EntityGroupInfo in " + r);
return true; // Keep scanning
}
if (excludeOfflinedSplitParents && egi.isSplitParent()) {
return true; // Keep scanning
}
if (!Bytes.equals(egi.getTableName(), tableName)) {
// this is another table, we can exit search.
return false;
}
ServerName sn = ServerName.getServerName(r);
entityGroupInfos.add(new Pair<EntityGroupInfo, ServerName>(egi, sn));
// Returning true means "keep scanning"
return true;
}
};
// Run full scan of .FMETA. catalog table passing in our custom visitor
fullScan(visitor, startrow, null, allFilters);
return entityGroupInfos;
}
public void offlineParentInMeta(EntityGroupInfo parent,
    final EntityGroupInfo a, final EntityGroupInfo b) throws MetaException {
  // Mark a copy of the parent as offline+split and rewrite its META row
  // with references to both daughters.
  EntityGroupInfo offlinedParent = new EntityGroupInfo(parent);
  offlinedParent.setOffline(true);
  offlinedParent.setSplit(true);
  addEntityGroupToMeta(offlinedParent, a, b);
  LOG.info("Offlined parent entityGroup "
      + parent.getEntityGroupNameAsString() + " in META");
}
/**
 * Writes a single META row for the given entityGroup, attaching its two
 * split daughters as extra columns on the same row rather than as rows of
 * their own. Use offlineParentInMeta / addDaughter when separate daughter
 * rows are wanted.
 *
 * @param entityGroupInfo parent EntityGroupInfo information
 * @param splitA first split daughter of the parent, may be null
 * @param splitB second split daughter of the parent, may be null
 * @throws MetaException if problem connecting or updating meta
 */
public void addEntityGroupToMeta(EntityGroupInfo entityGroupInfo,
    EntityGroupInfo splitA, EntityGroupInfo splitB) throws MetaException {
  Put row = makePutFromEntityGroupInfo(entityGroupInfo);
  put(addDaughtersToPut(row, splitA, splitB));
  if (LOG.isDebugEnabled()) {
    LOG.debug("Added entityGroup "
        + entityGroupInfo.getEntityGroupNameAsString() + " to META");
  }
}
/**
 * Records a daughter entityGroup row, optionally with its location.
 *
 * @param entityGroupInfo daughter entityGroup to record
 * @param sn hosting server; may be null when the daughter is unassigned
 * @throws com.alibaba.wasp.MetaException
 */
@Override
public void addDaughter(final EntityGroupInfo entityGroupInfo,
    final ServerName sn) throws MetaException {
  Put put = addEntityGroupInfo(new Put(getRowKey(entityGroupInfo)),
      entityGroupInfo);
  if (sn != null) {
    addLocation(put, sn);
  }
  put(put);
  LOG.info("Added daughter "
      + entityGroupInfo.getEntityGroupNameAsString()
      + (sn == null ? ", entityGroupLocation=null" : ", entityGroupLocation="
          + sn.toString()));
}
/**
 * Adds split daughters to the Put
 */
// Attaches the SPLITA/SPLITB columns for whichever daughters are non-null,
// returning the same Put for chaining.
public Put addDaughtersToPut(Put put, EntityGroupInfo splitA,
EntityGroupInfo splitB) {
if (splitA != null) {
put.add(FConstants.CATALOG_FAMILY, FConstants.SPLITA_QUALIFIER,
splitA.toByte());
}
if (splitB != null) {
put.add(FConstants.CATALOG_FAMILY, FConstants.SPLITB_QUALIFIER,
splitB.toByte());
}
return put;
}
/**
 * Deletes daughters references in offlined split parent.
 *
 * @param parent
 *          Parent row we're to remove daughter reference from
 * @throws com.alibaba.wasp.MetaException
 */
public void deleteDaughtersReferencesInParent(final EntityGroupInfo parent)
    throws MetaException {
  Delete delete = new Delete(getRowKey(parent));
  // Drop both daughter-reference columns from the parent's row.
  delete.deleteColumns(FConstants.CATALOG_FAMILY, FConstants.SPLITA_QUALIFIER);
  delete.deleteColumns(FConstants.CATALOG_FAMILY, FConstants.SPLITB_QUALIFIER);
  delete(delete);
  LOG.info("Deleted daughters references, qualifier="
      + Bytes.toStringBinary(FConstants.SPLITA_QUALIFIER) + " and qualifier="
      + Bytes.toStringBinary(FConstants.SPLITB_QUALIFIER) + ", from parent "
      + parent.getEntityGroupNameAsString());
}
/**
 * Generates and returns a Put containing the EntityGroupInfo for the
 * catalog table
 */
public static Put makePutFromEntityGroupInfo(EntityGroupInfo entityGroupInfo) {
  // Seed a Put keyed by the entity group name and attach its serialized info.
  return addEntityGroupInfo(new Put(getRowKey(entityGroupInfo)),
      entityGroupInfo);
}
// Adds the serialized entity group info column to the Put; returns the same
// Put for chaining.
private static Put addEntityGroupInfo(final Put p,
final EntityGroupInfo entityGroupInfo) {
p.add(FConstants.CATALOG_FAMILY, FConstants.EGINFO,
entityGroupInfo.toByte());
return p;
}
// Adds the serialized server location column to the Put; returns the same
// Put for chaining.
private static Put addLocation(final Put p, final ServerName sn) {
p.add(FConstants.CATALOG_FAMILY, FConstants.EGLOCATION, sn.toByte());
return p;
}
private void put(final Put p) throws MetaException {
  // Apply a single mutation to the meta table, always releasing the handle.
  HTableInterface metaTable = getHTable();
  try {
    if (LOG.isDebugEnabled()) {
      LOG.debug("FMeta Put " + p);
    }
    metaTable.put(p);
  } catch (IOException ioe) {
    throw new MetaException(ioe);
  } finally {
    closeHTable(metaTable);
  }
}
private boolean checkAndPut(final byte[] family, final byte[] qualifier,
    final byte[] value, final Put p) throws MetaException {
  // Atomic compare-and-set against the Put's own row; a null expected
  // value means "only if the cell is absent".
  HTableInterface metaTable = getHTable();
  try {
    if (LOG.isDebugEnabled()) {
      LOG.debug("FMeta checkAndPut " + p);
    }
    return metaTable.checkAndPut(p.getRow(), family, qualifier, value, p);
  } catch (IOException ioe) {
    throw new MetaException(ioe);
  } finally {
    closeHTable(metaTable);
  }
}
private void put(final List<Put> p) throws MetaException {
  // Batch variant: issue all mutations in one round trip.
  HTableInterface metaTable = getHTable();
  try {
    metaTable.put(p);
    if (LOG.isDebugEnabled()) {
      LOG.debug("FMeta Put " + p);
    }
  } catch (IOException ioe) {
    throw new MetaException(ioe);
  } finally {
    closeHTable(metaTable);
  }
}
private Result get(final Get get) throws MetaException {
  // Read one row from the meta table, always releasing the handle.
  HTableInterface metaTable = getHTable();
  try {
    if (LOG.isDebugEnabled()) {
      LOG.debug("FMeta Get " + get);
    }
    return metaTable.get(get);
  } catch (IOException ioe) {
    throw new MetaException(ioe);
  } finally {
    closeHTable(metaTable);
  }
}
private void delete(final Delete delete) throws MetaException {
  // Delete one row (or selected cells) from the meta table.
  HTableInterface metaTable = getHTable();
  try {
    if (LOG.isDebugEnabled()) {
      LOG.debug("FMeta Delete " + delete);
    }
    metaTable.delete(delete);
  } catch (IOException ioe) {
    throw new MetaException(ioe);
  } finally {
    closeHTable(metaTable);
  }
}
private void delete(final List<Delete> allDelete) throws MetaException {
  // Batch variant: issue all deletes in one round trip.
  HTableInterface metaTable = getHTable();
  try {
    metaTable.delete(allDelete);
    if (LOG.isDebugEnabled()) {
      LOG.debug("FMeta Delete " + allDelete);
    }
  } catch (IOException ioe) {
    throw new MetaException(ioe);
  } finally {
    closeHTable(metaTable);
  }
}
private boolean exists(final Get get) throws MetaException {
  // Probe row existence without fetching its contents.
  HTableInterface metaTable = getHTable();
  try {
    boolean found = metaTable.exists(get);
    if (LOG.isDebugEnabled()) {
      LOG.debug("FMeta " + get + " exists " + "=" + found);
    }
    return found;
  } catch (IOException ioe) {
    throw new MetaException(ioe);
  } finally {
    closeHTable(metaTable);
  }
}
// A table is "available" when it has at least one entity group and every
// one of its entity groups has an assigned server. The meta scan aborts
// early on the first unassigned group.
public boolean isTableAvailable(final byte[] tableName) throws IOException {
final AtomicBoolean available = new AtomicBoolean(true);
final AtomicInteger entityGroupCount = new AtomicInteger(0);
FMetaScanner.MetaScannerVisitor visitor = new FMetaScanner.MetaScannerVisitorBase() {
@Override
public boolean processRow(Result row) throws IOException {
EntityGroupInfo info = FMetaScanner.getEntityGroupInfo(row);
if (info != null) {
if (Bytes.equals(tableName, info.getTableName())) {
// Unassigned entity group: the whole table is unavailable.
ServerName sn = ServerName.getServerName(row);
if (sn == null) {
available.set(false);
return false;
}
entityGroupCount.incrementAndGet();
}
}
return true;
}
};
FMetaScanner.metaScan(getConf(), visitor);
return available.get() && (entityGroupCount.get() > 0);
}
// Thin adapter delegating to updateEntityGroupLocation.
// NOTE(review): the conf parameter is accepted but never used here —
// confirm whether callers expect it to influence the update.
public void updateLocation(Configuration conf,
EntityGroupInfo entityGroupInfo, ServerName serverNameFromMasterPOV)
throws MetaException {
updateEntityGroupLocation(entityGroupInfo, serverNameFromMasterPOV);
}
}
| |
/**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.policy.quota;
import io.gravitee.common.http.HttpStatusCode;
import io.gravitee.common.util.Maps;
import io.gravitee.gateway.api.ExecutionContext;
import io.gravitee.gateway.api.Request;
import io.gravitee.gateway.api.Response;
import io.gravitee.policy.api.PolicyChain;
import io.gravitee.policy.api.PolicyResult;
import io.gravitee.policy.api.annotations.OnRequest;
import io.gravitee.policy.quota.configuration.QuotaConfiguration;
import io.gravitee.policy.quota.configuration.QuotaPolicyConfiguration;
import io.gravitee.policy.quota.utils.DateUtils;
import io.gravitee.repository.ratelimit.api.RateLimitService;
import io.gravitee.repository.ratelimit.model.RateLimit;
import io.reactivex.SingleObserver;
import io.reactivex.disposables.Disposable;
import io.vertx.reactivex.core.Context;
import io.vertx.reactivex.core.RxHelper;
import io.vertx.reactivex.core.Vertx;
import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The quota policy, also known as throttling, ensures that a user (given its API key or IP address) is allowed
 * to make x requests per y time period.
 *
 * Useful when you want to ensure that your APIs do not get flooded with requests.
*
* @author David BRASSELY (david.brassely at graviteesource.com)
* @author GraviteeSource Team
*/
@SuppressWarnings("unused")
public class QuotaPolicy {
/**
 * LOGGER
 */
private static final Logger LOGGER = LoggerFactory.getLogger(QuotaPolicy.class);
// Error key reported when the quota is exceeded.
private static final String QUOTA_TOO_MANY_REQUESTS = "QUOTA_TOO_MANY_REQUESTS";
/**
 * The maximum number of requests that the consumer is permitted to make per time unit.
 */
public static final String X_QUOTA_LIMIT = "X-Quota-Limit";
/**
 * The number of requests remaining in the current rate limit window.
 */
public static final String X_QUOTA_REMAINING = "X-Quota-Remaining";
/**
 * The time at which the current rate limit window resets in UTC epoch seconds.
 */
public static final String X_QUOTA_RESET = "X-Quota-Reset";
// Execution-context attribute holding the OAuth client id.
public static final String ATTR_OAUTH_CLIENT_ID = "oauth.client_id";
// NOTE(review): the two chars below look like constants and are never
// reassigned in this view — consider declaring them static final; confirm
// no reassignment elsewhere in the class.
private static char KEY_SEPARATOR = ':';
private static char RATE_LIMIT_TYPE = 'q';
/**
 * Rate limit policy configuration
 */
private final QuotaPolicyConfiguration quotaPolicyConfiguration;
// Builds the policy around its (framework-injected) configuration.
public QuotaPolicy(QuotaPolicyConfiguration quotaPolicyConfiguration) {
this.quotaPolicyConfiguration = quotaPolicyConfiguration;
}
@OnRequest
public void onRequest(Request request, Response response, ExecutionContext executionContext, PolicyChain policyChain) {
RateLimitService rateLimitService = executionContext.getComponent(RateLimitService.class);
QuotaConfiguration quotaConfiguration = quotaPolicyConfiguration.getQuota();
if (rateLimitService == null) {
policyChain.failWith(PolicyResult.failure("No rate-limit service has been installed."));
return;
}
String key = createRateLimitKey(request, executionContext, quotaConfiguration);
long limit = (quotaConfiguration.getLimit() > 0)
? quotaConfiguration.getLimit()
: executionContext.getTemplateEngine().getValue(quotaConfiguration.getDynamicLimit(), Long.class);
Context context = Vertx.currentContext();
rateLimitService
.incrementAndGet(
key,
quotaPolicyConfiguration.isAsync(),
new Supplier<RateLimit>() {
@Override
public RateLimit get() {
// Set the time at which the current rate limit window resets in UTC epoch seconds.
long resetTimeMillis = DateUtils.getEndOfPeriod(
request.timestamp(),
quotaConfiguration.getPeriodTime(),
quotaConfiguration.getPeriodTimeUnit()
);
RateLimit rate = new RateLimit(key);
rate.setCounter(0);
rate.setLimit(limit);
rate.setResetTime(resetTimeMillis);
rate.setSubscription((String) executionContext.getAttribute(ExecutionContext.ATTR_SUBSCRIPTION_ID));
return rate;
}
}
)
.observeOn(RxHelper.scheduler(context))
.subscribe(
new SingleObserver<RateLimit>() {
@Override
public void onSubscribe(Disposable d) {}
@Override
public void onSuccess(RateLimit rateLimit) {
// Set Rate Limit headers on response
long remaining = Math.max(0, limit - rateLimit.getCounter());
if (quotaPolicyConfiguration.isAddHeaders()) {
response.headers().set(X_QUOTA_LIMIT, Long.toString(limit));
response.headers().set(X_QUOTA_REMAINING, Long.toString(remaining));
response.headers().set(X_QUOTA_RESET, Long.toString(rateLimit.getResetTime()));
}
executionContext.setAttribute(ExecutionContext.ATTR_QUOTA_COUNT, rateLimit.getCounter());
executionContext.setAttribute(ExecutionContext.ATTR_QUOTA_REMAINING, remaining);
executionContext.setAttribute(ExecutionContext.ATTR_QUOTA_LIMIT, quotaConfiguration.getLimit());
executionContext.setAttribute(ExecutionContext.ATTR_QUOTA_RESET_TIME, rateLimit.getResetTime());
if (rateLimit.getCounter() <= limit) {
policyChain.doNext(request, response);
} else {
policyChain.failWith(createLimitExceeded(quotaConfiguration, limit));
}
}
@Override
public void onError(Throwable e) {
// Set Rate Limit headers on response
if (quotaPolicyConfiguration.isAddHeaders()) {
response.headers().set(X_QUOTA_LIMIT, Long.toString(limit));
// We don't know about the remaining calls, let's assume it is the same as the limit
response.headers().set(X_QUOTA_REMAINING, Long.toString(limit));
response.headers().set(X_QUOTA_RESET, Long.toString(-1));
}
executionContext.setAttribute(ExecutionContext.ATTR_QUOTA_REMAINING, limit);
executionContext.setAttribute(ExecutionContext.ATTR_QUOTA_LIMIT, limit);
// If an errors occurs at the repository level, we accept the call
policyChain.doNext(request, response);
}
}
);
}
private String createRateLimitKey(Request request, ExecutionContext executionContext, QuotaConfiguration quotaConfiguration) {
// Rate limit key contains the following:
// 1_ (PLAN_ID, SUBSCRIPTION_ID) pair, note that for keyless plans this is evaluated to (1, CLIENT_IP)
// 2_ User-defined key, if it exists
// 3_ Rate Type (rate-limit / quota)
// 4_ RESOLVED_PATH (policy attached to a path rather than a plan)
String resolvedPath = (String) executionContext.getAttribute(ExecutionContext.ATTR_RESOLVED_PATH);
StringBuilder key = new StringBuilder();
String plan = (String) executionContext.getAttribute(ExecutionContext.ATTR_PLAN);
if (plan != null) {
key
.append(executionContext.getAttribute(ExecutionContext.ATTR_PLAN))
.append(executionContext.getAttribute(ExecutionContext.ATTR_SUBSCRIPTION_ID));
} else if (executionContext.getAttributes().containsKey(ATTR_OAUTH_CLIENT_ID)) { // TODO manage also APIKey when managed by K8S plugins
key.append(executionContext.getAttribute(ATTR_OAUTH_CLIENT_ID));
} else {
key.append(executionContext.getAttribute(ExecutionContext.ATTR_API));
}
if (quotaConfiguration.getKey() != null && !quotaConfiguration.getKey().isEmpty()) {
key.append(KEY_SEPARATOR).append(executionContext.getTemplateEngine().getValue(quotaConfiguration.getKey(), String.class));
}
key.append(KEY_SEPARATOR).append(RATE_LIMIT_TYPE);
if (resolvedPath != null) {
key.append(KEY_SEPARATOR).append(resolvedPath.hashCode());
}
return key.toString();
}
private PolicyResult createLimitExceeded(QuotaConfiguration quotaConfiguration, long actualLimit) {
return PolicyResult.failure(
QUOTA_TOO_MANY_REQUESTS,
HttpStatusCode.TOO_MANY_REQUESTS_429,
"Quota exceeded ! You reach the limit of " +
actualLimit +
" requests per " +
quotaConfiguration.getPeriodTime() +
' ' +
quotaConfiguration.getPeriodTimeUnit().name().toLowerCase(),
Maps
.<String, Object>builder()
.put("limit", actualLimit)
.put("period_time", quotaConfiguration.getPeriodTime())
.put("period_unit", quotaConfiguration.getPeriodTimeUnit())
.build()
);
}
}
| |
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.reflect;
import static com.google.common.collect.Maps.immutableEntry;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.testing.MapTestSuiteBuilder;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import com.google.common.reflect.ImmutableTypeToInstanceMapTest.TestTypeToInstanceMapGenerator;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import java.util.Map;
import java.util.Map.Entry;
/**
* Unit test of {@link MutableTypeToInstanceMap}.
*
* @author Ben Yu
*/
public class MutableTypeToInstanceMapTest extends TestCase {

  @AndroidIncompatible // problem with suite builders on Android
  public static Test suite() {
    TestSuite suite = new TestSuite();
    suite.addTestSuite(MutableTypeToInstanceMapTest.class);
    suite.addTest(MapTestSuiteBuilder
        .using(new TestTypeToInstanceMapGenerator() {
          // Other tests will verify what real, warning-free usage looks like
          // but here we have to do some serious fudging
          @Override
          @SuppressWarnings("unchecked")
          public Map<TypeToken, Object> create(Object... elements) {
            MutableTypeToInstanceMap<Object> map
                = new MutableTypeToInstanceMap<Object>();
            for (Object object : elements) {
              Entry<TypeToken, Object> entry = (Entry<TypeToken, Object>) object;
              map.putInstance(entry.getKey(), entry.getValue());
            }
            return (Map) map;
          }
        })
        .named("MutableTypeToInstanceMap")
        .withFeatures(
            MapFeature.SUPPORTS_REMOVE,
            MapFeature.RESTRICTS_KEYS,
            MapFeature.ALLOWS_NULL_VALUES,
            CollectionFeature.SUPPORTS_ITERATOR_REMOVE,
            CollectionSize.ANY,
            MapFeature.ALLOWS_ANY_NULL_QUERIES)
        .createTestSuite());
    return suite;
  }

  // Map under test; recreated for every test method.
  private TypeToInstanceMap<Object> map;

  @Override protected void setUp() throws Exception {
    map = new MutableTypeToInstanceMap<Object>();
  }

  // Direct Map.put is forbidden; callers must go through putInstance.
  public void testPutThrows() {
    try {
      // Fix: Integer.valueOf instead of the deprecated new Integer(...) constructor.
      map.put(TypeToken.of(Integer.class), Integer.valueOf(5));
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testPutAllThrows() {
    try {
      map.putAll(ImmutableMap.of(TypeToken.of(Integer.class), Integer.valueOf(5)));
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testEntrySetMutationThrows() {
    map.putInstance(String.class, "test");
    assertEquals(TypeToken.of(String.class), map.entrySet().iterator().next().getKey());
    assertEquals("test", map.entrySet().iterator().next().getValue());
    try {
      map.entrySet().iterator().next().setValue(1);
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testEntrySetToArrayMutationThrows() {
    map.putInstance(String.class, "test");
    @SuppressWarnings("unchecked") // Should get a CCE later if cast is wrong
    Entry<Object, Object> entry = (Entry<Object, Object>) map.entrySet().toArray()[0];
    assertEquals(TypeToken.of(String.class), entry.getKey());
    assertEquals("test", entry.getValue());
    try {
      entry.setValue(1);
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testEntrySetToTypedArrayMutationThrows() {
    map.putInstance(String.class, "test");
    @SuppressWarnings("unchecked") // Should get a CCE later if cast is wrong
    Entry<Object, Object> entry = map.entrySet().toArray(new Entry[0])[0];
    assertEquals(TypeToken.of(String.class), entry.getKey());
    assertEquals("test", entry.getValue());
    try {
      entry.setValue(1);
      fail();
    } catch (UnsupportedOperationException expected) {}
  }

  public void testPutAndGetInstance() {
    assertNull(map.putInstance(Integer.class, Integer.valueOf(5)));

    Integer oldValue = map.putInstance(Integer.class, Integer.valueOf(7));
    assertEquals(5, (int) oldValue);

    Integer newValue = map.getInstance(Integer.class);
    assertEquals(7, (int) newValue);
    assertEquals(7, (int) map.getInstance(TypeToken.of(Integer.class)));

    // Won't compile: map.putInstance(Double.class, new Long(42));
  }

  public void testNull() {
    try {
      map.putInstance((TypeToken) null, Integer.valueOf(1));
      fail();
    } catch (NullPointerException expected) {
    }
    map.putInstance(Integer.class, null);
    assertTrue(map.containsKey(TypeToken.of(Integer.class)));
    assertTrue(map.entrySet().contains(immutableEntry(TypeToken.of(Integer.class), null)));
    assertNull(map.get(TypeToken.of(Integer.class)));
    assertNull(map.getInstance(Integer.class));

    map.putInstance(Long.class, null);
    assertTrue(map.containsKey(TypeToken.of(Long.class)));
    assertTrue(map.entrySet().contains(immutableEntry(TypeToken.of(Long.class), null)));
    assertNull(map.get(TypeToken.of(Long.class)));
    assertNull(map.getInstance(Long.class));
  }

  // int.class and Integer.class are distinct keys.
  public void testPrimitiveAndWrapper() {
    assertNull(map.getInstance(int.class));
    assertNull(map.getInstance(Integer.class));

    assertNull(map.putInstance(int.class, 0));
    assertNull(map.putInstance(Integer.class, 1));
    assertEquals(2, map.size());

    assertEquals(0, (int) map.getInstance(int.class));
    assertEquals(1, (int) map.getInstance(Integer.class));

    assertEquals(0, (int) map.putInstance(int.class, null));
    assertEquals(1, (int) map.putInstance(Integer.class, null));

    assertNull(map.getInstance(int.class));
    assertNull(map.getInstance(Integer.class));
    assertEquals(2, map.size());
  }

  public void testParameterizedType() {
    TypeToken<ImmutableList<Integer>> type = new TypeToken<ImmutableList<Integer>>() {};
    map.putInstance(type, ImmutableList.of(1));
    assertEquals(1, map.size());
    assertEquals(ImmutableList.of(1), map.getInstance(type));
  }

  public void testGenericArrayType() {
    @SuppressWarnings("unchecked") // Trying to test generic array
    ImmutableList<Integer>[] array = new ImmutableList[] {ImmutableList.of(1)};
    TypeToken<ImmutableList<Integer>[]> type = new TypeToken<ImmutableList<Integer>[]>() {};
    map.putInstance(type, array);
    assertEquals(1, map.size());
    assertThat(map.getInstance(type)).asList().containsExactly(array[0]);
  }

  public void testWildcardType() {
    TypeToken<ImmutableList<?>> type = new TypeToken<ImmutableList<?>>() {};
    map.putInstance(type, ImmutableList.of(1));
    assertEquals(1, map.size());
    assertEquals(ImmutableList.of(1), map.getInstance(type));
  }

  // Keys containing type variables are rejected: they cannot name a concrete instance type.
  public void testGetInstance_withTypeVariable() {
    try {
      map.getInstance(this.<Number>anyIterableType());
      fail();
    } catch (IllegalArgumentException expected) {}
  }

  public void testPutInstance_withTypeVariable() {
    try {
      map.putInstance(this.<Integer>anyIterableType(), ImmutableList.of(1));
      fail();
    } catch (IllegalArgumentException expected) {}
  }

  private <T> TypeToken<Iterable<T>> anyIterableType() {
    return new TypeToken<Iterable<T>>() {};
  }
}
| |
package psidev.psi.mi.jami.xml.io.writer.elements.impl.extended.expanded.xml25;
import junit.framework.Assert;
import org.junit.Ignore;
import org.junit.Test;
import psidev.psi.mi.jami.binary.ModelledBinaryInteraction;
import psidev.psi.mi.jami.exception.IllegalRangeException;
import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.model.impl.*;
import psidev.psi.mi.jami.utils.CvTermUtils;
import psidev.psi.mi.jami.utils.InteractorUtils;
import psidev.psi.mi.jami.utils.RangeUtils;
import psidev.psi.mi.jami.xml.cache.PsiXmlObjectCache;
import psidev.psi.mi.jami.xml.cache.InMemoryIdentityObjectCache;
import psidev.psi.mi.jami.xml.io.writer.elements.impl.AbstractXmlWriterTest;
import psidev.psi.mi.jami.xml.model.extension.ExtendedPsiXmlInteraction;
import psidev.psi.mi.jami.xml.model.extension.binary.xml25.XmlModelledBinaryInteraction;
import javax.xml.stream.XMLStreamException;
import java.io.IOException;
import java.math.BigDecimal;
/**
* Unit tester for XmlModelledBinaryInteractionWriter
*
* @author Marine Dumousseau (marine@ebi.ac.uk)
* @version $Id$
* @since <pre>26/11/13</pre>
*/
public class XmlModelledBinaryInteractionWriterTest extends AbstractXmlWriterTest {
private String interaction = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_complex = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactionRef>4</interactionRef>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_complexAsInteractor ="<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>test complex</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>complex</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0314\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_shortName ="<interaction id=\"1\">\n" +
" <names>\n" +
" <shortLabel>interaction test</shortLabel>\n"+
" </names>\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_fullName ="<interaction id=\"1\">\n" +
" <names>\n" +
" <fullName>interaction test</fullName>\n"+
" </names>\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_aliases ="<interaction id=\"1\">\n" +
" <names>\n" +
" <alias type=\"synonym\">interaction synonym</alias>\n"+
" <alias>test</alias>\n"+
" </names>\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_identifier = "<interaction id=\"1\">\n" +
" <xref>\n" +
" <primaryRef db=\"intact\" id=\"EBI-xxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+
" </xref>\n"+
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_xref = "<interaction id=\"1\">\n" +
" <xref>\n" +
" <primaryRef db=\"test2\" id=\"xxxx2\"/>\n" +
" <secondaryRef db=\"test\" id=\"xxxx1\"/>\n"+
" </xref>\n"+
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
"</interaction>";
private String interaction_inferred = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" <featureList>\n" +
" <feature id=\"5\">\n" +
" <featureType>\n" +
" <names>\n" +
" <shortLabel>biological feature</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0252\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </featureType>\n" +
" <featureRangeList>\n" +
" <featureRange>\n" +
" <startStatus>\n" +
" <names>\n" +
" <shortLabel>certain</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </startStatus>\n" +
" <begin position=\"1\"/>\n"+
" <endStatus>\n" +
" <names>\n" +
" <shortLabel>certain</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </endStatus>\n" +
" <end position=\"4\"/>\n"+
" </featureRange>\n"+
" </featureRangeList>\n" +
" </feature>\n"+
" </featureList>\n" +
" </participant>\n"+
" <participant id=\"6\">\n" +
" <interactor id=\"7\">\n" +
" <names>\n" +
" <shortLabel>protein test2</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" <featureList>\n" +
" <feature id=\"8\">\n" +
" <featureType>\n" +
" <names>\n" +
" <shortLabel>biological feature</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0252\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </featureType>\n" +
" <featureRangeList>\n" +
" <featureRange>\n" +
" <startStatus>\n" +
" <names>\n" +
" <shortLabel>certain</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </startStatus>\n" +
" <begin position=\"1\"/>\n"+
" <endStatus>\n" +
" <names>\n" +
" <shortLabel>certain</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0335\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </endStatus>\n" +
" <end position=\"4\"/>\n"+
" </featureRange>\n"+
" </featureRangeList>\n" +
" </feature>\n"+
" </featureList>\n" +
" </participant>\n"+
" </participantList>\n" +
" <inferredInteractionList>\n" +
" <inferredInteraction>\n" +
" <participant>\n" +
" <participantFeatureRef>5</participantFeatureRef>\n" +
" </participant>\n"+
" <participant>\n" +
" <participantFeatureRef>8</participantFeatureRef>\n" +
" </participant>\n"+
" </inferredInteraction>\n"+
" </inferredInteractionList>\n" +
"</interaction>";
private String interaction_type = "<interaction id=\"1\">\n" +
" <experimentList>\n" +
" <experimentDescription id=\"2\">\n" +
" <bibref>\n" +
" <xref>\n" +
" <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </bibref>\n"+
" <interactionDetectionMethod>\n" +
" <names>\n" +
" <shortLabel>unspecified method</shortLabel>\n"+
" </names>\n"+
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
" </xref>\n"+
" </interactionDetectionMethod>\n"+
" </experimentDescription>\n"+
" </experimentList>\n" +
" <participantList>\n" +
" <participant id=\"3\">\n" +
" <interactor id=\"4\">\n" +
" <names>\n" +
" <shortLabel>protein test</shortLabel>\n" +
" </names>\n" +
" <interactorType>\n" +
" <names>\n" +
" <shortLabel>protein</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactorType>\n" +
" </interactor>\n" +
" <biologicalRole>\n" +
" <names>\n" +
" <shortLabel>unspecified role</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </biologicalRole>\n" +
" </participant>\n"+
" </participantList>\n" +
" <interactionType>\n" +
" <names>\n" +
" <shortLabel>association</shortLabel>\n" +
" </names>\n" +
" <xref>\n" +
" <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0914\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
" </xref>\n" +
" </interactionType>\n" +
"</interaction>";
// Expected output when the interaction carries free-text annotations plus a
// complex-expansion attribute (spoke expansion, MI:1060).
private String interaction_attributes = "<interaction id=\"1\">\n" +
        "  <experimentList>\n" +
        "    <experimentDescription id=\"2\">\n" +
        "      <bibref>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </bibref>\n"+
        "      <interactionDetectionMethod>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified method</shortLabel>\n"+
        "        </names>\n"+
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </interactionDetectionMethod>\n"+
        "    </experimentDescription>\n"+
        "  </experimentList>\n" +
        "  <participantList>\n" +
        "    <participant id=\"3\">\n" +
        "      <interactor id=\"4\">\n" +
        "        <names>\n" +
        "          <shortLabel>protein test</shortLabel>\n" +
        "        </names>\n" +
        "        <interactorType>\n" +
        "          <names>\n" +
        "            <shortLabel>protein</shortLabel>\n" +
        "          </names>\n" +
        "          <xref>\n" +
        "            <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "          </xref>\n" +
        "        </interactorType>\n" +
        "      </interactor>\n" +
        "      <biologicalRole>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified role</shortLabel>\n" +
        "        </names>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "        </xref>\n" +
        "      </biologicalRole>\n" +
        "    </participant>\n"+
        "  </participantList>\n" +
        "  <attributeList>\n" +
        "    <attribute name=\"test2\"/>\n"+
        "    <attribute name=\"test3\"/>\n"+
        "    <attribute name=\"spoke expansion\" nameAc=\"MI:1060\"/>\n"+
        "  </attributeList>\n"+
        "</interaction>";
// Expected output when another interaction was registered in the id cache first:
// every generated XML id is shifted by one (interaction=2, experiment=3, ...).
private String interaction_registered = "<interaction id=\"2\">\n" +
        "  <experimentList>\n" +
        "    <experimentDescription id=\"3\">\n" +
        "      <bibref>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </bibref>\n"+
        "      <interactionDetectionMethod>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified method</shortLabel>\n"+
        "        </names>\n"+
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </interactionDetectionMethod>\n"+
        "    </experimentDescription>\n"+
        "  </experimentList>\n" +
        "  <participantList>\n" +
        "    <participant id=\"4\">\n" +
        "      <interactor id=\"5\">\n" +
        "        <names>\n" +
        "          <shortLabel>protein test</shortLabel>\n" +
        "        </names>\n" +
        "        <interactorType>\n" +
        "          <names>\n" +
        "            <shortLabel>protein</shortLabel>\n" +
        "          </names>\n" +
        "          <xref>\n" +
        "            <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "          </xref>\n" +
        "        </interactorType>\n" +
        "      </interactor>\n" +
        "      <biologicalRole>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified role</shortLabel>\n" +
        "        </names>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "        </xref>\n" +
        "      </biologicalRole>\n" +
        "    </participant>\n"+
        "  </participantList>\n" +
        "</interaction>";
// Expected output for an interaction with a single intact-miscore confidence of 0.8.
private String interaction_confidence = "<interaction id=\"1\">\n" +
        "  <experimentList>\n" +
        "    <experimentDescription id=\"2\">\n" +
        "      <bibref>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </bibref>\n"+
        "      <interactionDetectionMethod>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified method</shortLabel>\n"+
        "        </names>\n"+
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </interactionDetectionMethod>\n"+
        "    </experimentDescription>\n"+
        "  </experimentList>\n" +
        "  <participantList>\n" +
        "    <participant id=\"3\">\n" +
        "      <interactor id=\"4\">\n" +
        "        <names>\n" +
        "          <shortLabel>protein test</shortLabel>\n" +
        "        </names>\n" +
        "        <interactorType>\n" +
        "          <names>\n" +
        "            <shortLabel>protein</shortLabel>\n" +
        "          </names>\n" +
        "          <xref>\n" +
        "            <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "          </xref>\n" +
        "        </interactorType>\n" +
        "      </interactor>\n" +
        "      <biologicalRole>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified role</shortLabel>\n" +
        "        </names>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "        </xref>\n" +
        "      </biologicalRole>\n" +
        "    </participant>\n"+
        "  </participantList>\n" +
        "  <confidenceList>\n" +
        "    <confidence>\n" +
        "      <unit>\n" +
        "        <names>\n" +
        "          <shortLabel>intact-miscore</shortLabel>\n"+
        "        </names>\n"+
        "      </unit>\n" +
        "      <value>0.8</value>\n" +
        "    </confidence>\n"+
        "  </confidenceList>\n" +
        "</interaction>";
// Expected output for an interaction with a kd parameter; the experimentRef points
// at the writer's default experiment (id 2).
private String interaction_parameter = "<interaction id=\"1\">\n" +
        "  <experimentList>\n" +
        "    <experimentDescription id=\"2\">\n" +
        "      <bibref>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </bibref>\n"+
        "      <interactionDetectionMethod>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified method</shortLabel>\n"+
        "        </names>\n"+
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </interactionDetectionMethod>\n"+
        "    </experimentDescription>\n"+
        "  </experimentList>\n" +
        "  <participantList>\n" +
        "    <participant id=\"3\">\n" +
        "      <interactor id=\"4\">\n" +
        "        <names>\n" +
        "          <shortLabel>protein test</shortLabel>\n" +
        "        </names>\n" +
        "        <interactorType>\n" +
        "          <names>\n" +
        "            <shortLabel>protein</shortLabel>\n" +
        "          </names>\n" +
        "          <xref>\n" +
        "            <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "          </xref>\n" +
        "        </interactorType>\n" +
        "      </interactor>\n" +
        "      <biologicalRole>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified role</shortLabel>\n" +
        "        </names>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "        </xref>\n" +
        "      </biologicalRole>\n" +
        "    </participant>\n"+
        "  </participantList>\n" +
        "  <parameterList>\n" +
        "    <parameter term=\"kd\" base=\"10\" exponent=\"0\" factor=\"5\">\n" +
        "      <experimentRef>2</experimentRef>\n" +
        "    </parameter>\n"+
        "  </parameterList>\n" +
        "</interaction>";
// Expected output for a pre-assembly cooperative effect, serialized as attributes;
// "affected interaction" references the registered affected interaction id (5).
private String interaction_preAssembly = "<interaction id=\"1\">\n" +
        "  <experimentList>\n" +
        "    <experimentDescription id=\"2\">\n" +
        "      <bibref>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </bibref>\n"+
        "      <interactionDetectionMethod>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified method</shortLabel>\n"+
        "        </names>\n"+
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </interactionDetectionMethod>\n"+
        "    </experimentDescription>\n"+
        "  </experimentList>\n" +
        "  <participantList>\n" +
        "    <participant id=\"3\">\n" +
        "      <interactor id=\"4\">\n" +
        "        <names>\n" +
        "          <shortLabel>protein test</shortLabel>\n" +
        "        </names>\n" +
        "        <interactorType>\n" +
        "          <names>\n" +
        "            <shortLabel>protein</shortLabel>\n" +
        "          </names>\n" +
        "          <xref>\n" +
        "            <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "          </xref>\n" +
        "        </interactorType>\n" +
        "      </interactor>\n" +
        "      <biologicalRole>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified role</shortLabel>\n" +
        "        </names>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "        </xref>\n" +
        "      </biologicalRole>\n" +
        "    </participant>\n"+
        "  </participantList>\n" +
        "  <attributeList>\n" +
        "    <attribute name=\"pre-assembly\" nameAc=\"MI:1158\"/>\n" +
        "    <attribute name=\"positive cooperative effect\" nameAc=\"MI:1154\"/>\n" +
        "    <attribute name=\"configurational pre-organization\" nameAc=\"MI:1174\"/>\n"+
        "    <attribute name=\"affected interaction\" nameAc=\"MI:1150\">5</attribute>\n" +
        "  </attributeList>\n" +
        "</interaction>";
// Expected output for an allostery cooperative effect, serialized as attributes
// (molecule/effector/affected-interaction values are registered XML ids).
private String interaction_allostery = "<interaction id=\"1\">\n" +
        "  <experimentList>\n" +
        "    <experimentDescription id=\"2\">\n" +
        "      <bibref>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </bibref>\n"+
        "      <interactionDetectionMethod>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified method</shortLabel>\n"+
        "        </names>\n"+
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </interactionDetectionMethod>\n"+
        "    </experimentDescription>\n"+
        "  </experimentList>\n" +
        "  <participantList>\n" +
        "    <participant id=\"3\">\n" +
        "      <interactor id=\"4\">\n" +
        "        <names>\n" +
        "          <shortLabel>protein test</shortLabel>\n" +
        "        </names>\n" +
        "        <interactorType>\n" +
        "          <names>\n" +
        "            <shortLabel>protein</shortLabel>\n" +
        "          </names>\n" +
        "          <xref>\n" +
        "            <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "          </xref>\n" +
        "        </interactorType>\n" +
        "      </interactor>\n" +
        "      <biologicalRole>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified role</shortLabel>\n" +
        "        </names>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "        </xref>\n" +
        "      </biologicalRole>\n" +
        "    </participant>\n"+
        "  </participantList>\n" +
        "  <attributeList>\n" +
        "    <attribute name=\"allostery\" nameAc=\"MI:1157\"/>\n" +
        "    <attribute name=\"allosteric molecule\" nameAc=\"MI:1159\">3</attribute>\n" +
        "    <attribute name=\"allosteric effector\" nameAc=\"MI:1160\">5</attribute>\n" +
        "    <attribute name=\"heterotropic allostery\" nameAc=\"MI:1168\"/>\n" +
        "    <attribute name=\"allosteric change in structure\" nameAc=\"MI:1165\"/>\n" +
        "    <attribute name=\"positive cooperative effect\" nameAc=\"MI:1154\"/>\n" +
        "    <attribute name=\"allosteric v-type response\" nameAc=\"MI:1163\"/>\n" +
        "    <attribute name=\"affected interaction\" nameAc=\"MI:1150\">6</attribute>\n" +
        "  </attributeList>\n" +
        "</interaction>";
// Expected output for an intra-molecular interaction (intraMolecular element set to true).
private String interaction_intra = "<interaction id=\"1\">\n" +
        "  <experimentList>\n" +
        "    <experimentDescription id=\"2\">\n" +
        "      <bibref>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"pubmed\" dbAc=\"MI:0446\" id=\"xxxxxx\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </bibref>\n"+
        "      <interactionDetectionMethod>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified method</shortLabel>\n"+
        "        </names>\n"+
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0686\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n"+
        "        </xref>\n"+
        "      </interactionDetectionMethod>\n"+
        "    </experimentDescription>\n"+
        "  </experimentList>\n" +
        "  <participantList>\n" +
        "    <participant id=\"3\">\n" +
        "      <interactor id=\"4\">\n" +
        "        <names>\n" +
        "          <shortLabel>protein test</shortLabel>\n" +
        "        </names>\n" +
        "        <interactorType>\n" +
        "          <names>\n" +
        "            <shortLabel>protein</shortLabel>\n" +
        "          </names>\n" +
        "          <xref>\n" +
        "            <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0326\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "          </xref>\n" +
        "        </interactorType>\n" +
        "      </interactor>\n" +
        "      <biologicalRole>\n" +
        "        <names>\n" +
        "          <shortLabel>unspecified role</shortLabel>\n" +
        "        </names>\n" +
        "        <xref>\n" +
        "          <primaryRef db=\"psi-mi\" dbAc=\"MI:0488\" id=\"MI:0499\" refType=\"identity\" refTypeAc=\"MI:0356\"/>\n" +
        "        </xref>\n" +
        "      </biologicalRole>\n" +
        "    </participant>\n"+
        "  </participantList>\n" +
        "  <intraMolecular>true</intraMolecular>\n" +
        "</interaction>";
// Identity-based id cache shared by all tests; assigns/reuses XML ids across writes.
// Each test clears it first so id numbering restarts at 1.
private PsiXmlObjectCache elementCache = new InMemoryIdentityObjectCache();
@Test
public void test_write_interaction() throws XMLStreamException, IOException, IllegalRangeException {
    // Minimal case: one modelled binary interaction with a single protein participant.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction, output.toString());
}
@Test
public void test_write_participant_complex() throws XMLStreamException, IOException, IllegalRangeException {
    // A participant whose interactor is a complex (with its own participant):
    // by default the complex is written as a nested interaction.
    elementCache.clear();
    Complex complex = new DefaultComplex("test complex");
    complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein")));

    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(complex));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_complex, output.toString());
}
@Test
public void test_write_participant_complex_as_interactor() throws XMLStreamException, IOException, IllegalRangeException {
    // Same complex participant as above, but the writer is told to flatten the
    // complex and write it as a plain interactor.
    elementCache.clear();
    Complex complex = new DefaultComplex("test complex");
    complex.getParticipants().add(new DefaultModelledParticipant(new DefaultProtein("test protein")));

    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(complex));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.setComplexAsInteractor(true);
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_complexAsInteractor, output.toString());
}
@Test
public void test_write_participant_complex_no_participants() throws XMLStreamException, IOException, IllegalRangeException {
    // A complex without any participant cannot be expanded into a nested
    // interaction, so the writer falls back to writing it as an interactor.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultComplex("test complex")));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_complexAsInteractor, output.toString());
}
@Test
public void test_write_interaction_shortName() throws XMLStreamException, IOException, IllegalRangeException {
    // The interaction short name should be written in the names element.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction("interaction test");
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_shortName, output.toString());
}
@Test
public void test_write_interaction_fullName() throws XMLStreamException, IOException, IllegalRangeException {
    // The interaction full name should be written in the names element.
    elementCache.clear();
    NamedInteraction named = new XmlModelledBinaryInteraction();
    named.setFullName("interaction test");
    ModelledParticipant p = new DefaultModelledParticipant(new DefaultProtein("protein test"));
    named.addParticipant(p);

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write((ModelledBinaryInteraction) named);
    streamWriter.flush();

    Assert.assertEquals(interaction_fullName, output.toString());
}
@Test
public void test_write_interaction_alias() throws XMLStreamException, IOException, IllegalRangeException {
    // Aliases (with and without an alias type) should be written as alias elements.
    elementCache.clear();
    NamedInteraction named = new XmlModelledBinaryInteraction();
    named.getAliases().add(new DefaultAlias(new DefaultCvTerm("synonym"), "interaction synonym"));
    named.getAliases().add(new DefaultAlias("test"));
    ModelledParticipant p = new DefaultModelledParticipant(new DefaultProtein("protein test"));
    named.addParticipant(p);

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write((ModelledBinaryInteraction) named);
    streamWriter.flush();

    Assert.assertEquals(interaction_aliases, output.toString());
}
@Test
public void test_write_interaction_identifier() throws XMLStreamException, IOException, IllegalRangeException {
    // An identifier becomes the primaryRef; any other xref becomes a secondaryRef.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));
    binary.getIdentifiers().add(new DefaultXref(new DefaultCvTerm("intact"), "EBI-xxx"));
    binary.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1"));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_identifier, output.toString());
}
@Test
public void test_write_interaction_xref() throws XMLStreamException, IOException, IllegalRangeException {
    // With no identifier, one xref is promoted to primaryRef and the rest are secondaryRefs.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));
    binary.getXrefs().add(new DefaultXref(new DefaultCvTerm("test2"), "xxxx2"));
    binary.getXrefs().add(new DefaultXref(new DefaultCvTerm("test"), "xxxx1"));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_xref, output.toString());
}
@Test
@Ignore
public void test_write_interaction_inferred() throws XMLStreamException, IOException, IllegalRangeException {
    // Two features on two participants, linked to each other (f1 <-> f2), should
    // be written as one inferred interaction.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    ModelledParticipant first = new DefaultModelledParticipant(new DefaultProtein("protein test"));
    ModelledParticipant second = new DefaultModelledParticipant(new DefaultProtein("protein test2"));

    ModelledFeature f1 = new DefaultModelledFeature();
    f1.getRanges().add(RangeUtils.createRangeFromString("1-4"));
    ModelledFeature f2 = new DefaultModelledFeature();
    f2.getRanges().add(RangeUtils.createRangeFromString("1-4"));
    f1.getLinkedFeatures().add(f2);
    f2.getLinkedFeatures().add(f1);

    first.addFeature(f1);
    second.addFeature(f2);
    binary.addParticipant(first);
    binary.addParticipant(second);

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_inferred, output.toString());
}
@Test
public void test_write_interaction_type() throws XMLStreamException, IOException, IllegalRangeException {
    // The interaction type should be serialized with its MI xref.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));
    binary.setInteractionType(CvTermUtils.createMICvTerm("association", "MI:0914"));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_type, output.toString());
}
@Test
public void test_write_interaction_attributes() throws XMLStreamException, IOException, IllegalRangeException {
    // Annotations and the complex-expansion method should both end up in attributeList.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));
    binary.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test2")));
    binary.getAnnotations().add(new DefaultAnnotation(new DefaultCvTerm("test3")));
    binary.setComplexExpansion(CvTermUtils.createMICvTerm("spoke expansion", "MI:1060"));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_attributes, output.toString());
}
@Test
public void test_write_interaction_registered() throws XMLStreamException, IOException, IllegalRangeException {
    // Pre-registering another interaction in the id cache shifts every generated
    // XML id by one; the written interaction must reuse its registered id.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));
    elementCache.extractIdForInteraction(new DefaultInteraction());
    elementCache.extractIdForInteraction(binary);

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_registered, output.toString());
}
@Test
public void test_write_interaction_parameter() throws XMLStreamException, IOException, IllegalRangeException {
    // A modelled parameter is written in parameterList with an experimentRef
    // pointing at the writer's default experiment.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));
    binary.getModelledParameters().add(
            new DefaultModelledParameter(new DefaultCvTerm("kd"), new ParameterValue(new BigDecimal(5))));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_parameter, output.toString());
}
@Test
public void test_write_interaction_confidence() throws XMLStreamException, IOException, IllegalRangeException {
    // A modelled confidence is written in confidenceList with its unit and value.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));
    binary.getModelledConfidences().add(
            new DefaultModelledConfidence(new DefaultCvTerm("intact-miscore"), "0.8"));

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_confidence, output.toString());
}
@Test
public void test_write_interaction_preassembly() throws XMLStreamException, IOException, IllegalRangeException {
    // A pre-assembly cooperative effect is serialized as a set of attributes.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));

    Preassembly assembly = new DefaultPreassemby(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154"));
    assembly.setResponse(CvTermUtils.createMICvTerm("configurational pre-organization", "MI:1174"));
    assembly.getAffectedInteractions().add(new DefaultModelledInteraction());
    binary.getCooperativeEffects().add(assembly);

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_preAssembly, output.toString());
}
@Test
public void test_write_interaction_preassembly_defaultExperiment() throws XMLStreamException, IOException, IllegalRangeException {
    // The cooperativity evidence publication (xxxxxx) must take precedence over the
    // writer's default experiment (12345) when writing the experiment description.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    binary.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));

    Preassembly assembly = new DefaultPreassemby(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154"));
    assembly.setResponse(CvTermUtils.createMICvTerm("configurational pre-organization", "MI:1174"));
    assembly.getAffectedInteractions().add(new DefaultModelledInteraction());
    assembly.getCooperativityEvidences().add(new DefaultCooperativityEvidence(new DefaultPublication("xxxxxx")));
    binary.getCooperativeEffects().add(assembly);

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("12345")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_preAssembly, output.toString());
}
@Test
public void test_write_interaction_allostery() throws XMLStreamException, IOException, IllegalRangeException {
    // An allostery cooperative effect (molecule, effector, mechanism, type and
    // response) is serialized as a set of attributes referencing registered ids.
    elementCache.clear();
    ModelledBinaryInteraction binary = new XmlModelledBinaryInteraction();
    ModelledParticipant allostericMolecule = new DefaultModelledParticipant(new DefaultProtein("protein test"));
    binary.addParticipant(allostericMolecule);

    Allostery allostery = new DefaultAllostery(CvTermUtils.createMICvTerm("positive cooperative effect", "MI:1154"),
            allostericMolecule, new DefaultMoleculeEffector(new DefaultModelledParticipant(InteractorUtils.createUnknownBasicInteractor())));
    allostery.setResponse(CvTermUtils.createMICvTerm("allosteric v-type response", "MI:1163"));
    allostery.getAffectedInteractions().add(new DefaultModelledInteraction());
    allostery.setAllostericMechanism(CvTermUtils.createMICvTerm("allosteric change in structure", "MI:1165"));
    allostery.setAllosteryType(CvTermUtils.createMICvTerm("heterotropic allostery", "MI:1168"));
    binary.getCooperativeEffects().add(allostery);

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write(binary);
    streamWriter.flush();

    Assert.assertEquals(interaction_allostery, output.toString());
}
@Test
public void test_write_interaction_intraMolecular() throws XMLStreamException, IOException, IllegalRangeException {
    // The intraMolecular flag (extended PSI-XML model) must be written as an element.
    elementCache.clear();
    ExtendedPsiXmlInteraction extended = new XmlModelledBinaryInteraction();
    extended.addParticipant(new DefaultModelledParticipant(new DefaultProtein("protein test")));
    extended.setIntraMolecular(true);

    XmlModelledBinaryInteractionWriter binaryWriter =
            new XmlModelledBinaryInteractionWriter(createStreamWriter(), elementCache);
    binaryWriter.setDefaultExperiment(new DefaultExperiment(new DefaultPublication("xxxxxx")));
    binaryWriter.write((ModelledBinaryInteraction) extended);
    streamWriter.flush();

    Assert.assertEquals(interaction_intra, output.toString());
}
}
| |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.p4runtime.packet.impl;
import org.onlab.packet.EthType;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.packet.DefaultOutboundPacket;
import org.onosproject.net.packet.DefaultPacketContext;
import org.onosproject.net.packet.InboundPacket;
import org.onosproject.net.packet.OutboundPacket;
import org.onosproject.net.packet.PacketContext;
import org.onosproject.net.packet.PacketProgrammable;
import org.onosproject.net.packet.PacketProvider;
import org.onosproject.net.packet.PacketProviderRegistry;
import org.onosproject.net.packet.PacketProviderService;
import org.onosproject.net.pi.model.PiPipelineInterpreter;
import org.onosproject.net.pi.runtime.PiPacketOperation;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.p4runtime.api.P4RuntimeController;
import org.onosproject.p4runtime.api.P4RuntimeEvent;
import org.onosproject.p4runtime.api.P4RuntimeEventListener;
import org.onosproject.p4runtime.api.P4RuntimePacketIn;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.slf4j.Logger;
import java.nio.ByteBuffer;
import static org.onosproject.net.flow.DefaultTrafficTreatment.emptyTreatment;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Implementation of a packet provider for P4Runtime device.
*/
@Component(immediate = true)
public class P4RuntimePacketProvider extends AbstractProvider implements PacketProvider {
private final Logger log = getLogger(getClass());
@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected P4RuntimeController controller;
@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected PacketProviderRegistry providerRegistry;
@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected DeviceService deviceService;
@Reference(cardinality = ReferenceCardinality.MANDATORY)
protected MastershipService mastershipService;
private PacketProviderService providerService;
private InternalPacketListener packetListener = new InternalPacketListener();
/**
* Creates a new P4Runtime packet provider.
*/
public P4RuntimePacketProvider() {
super(new ProviderId("p4runtime", "org.onosproject.provider.p4runtime.packet"));
}
@Activate
protected void activate() {
providerService = providerRegistry.register(this);
controller.addListener(packetListener);
log.info("Started");
}
@Deactivate
public void deactivate() {
controller.removeListener(packetListener);
providerRegistry.unregister(this);
providerService = null;
log.info("Stopped");
}
@Override
public void emit(OutboundPacket packet) {
if (packet != null) {
DeviceId deviceId = packet.sendThrough();
Device device = deviceService.getDevice(deviceId);
if (device.is(PacketProgrammable.class) && mastershipService.isLocalMaster(deviceId)) {
PacketProgrammable packetProgrammable = device.as(PacketProgrammable.class);
packetProgrammable.emit(packet);
} else {
log.warn("No PacketProgrammable behavior for device {}", deviceId);
}
}
}
private EthType.EtherType getEtherType(ByteBuffer data) {
final short shortEthType = data.getShort(12);
data.rewind();
return EthType.EtherType.lookup(shortEthType);
}
/**
* Internal packet context implementation.
*/
private class P4RuntimePacketContext extends DefaultPacketContext {
P4RuntimePacketContext(long time, InboundPacket inPkt, OutboundPacket outPkt, boolean block) {
super(time, inPkt, outPkt, block);
}
@Override
public void send() {
if (this.block()) {
log.info("Unable to send, packet context is blocked");
return;
}
DeviceId deviceId = outPacket().sendThrough();
ByteBuffer rawData = outPacket().data();
TrafficTreatment treatment;
if (outPacket().treatment() == null) {
treatment = (treatmentBuilder() == null) ? emptyTreatment() : treatmentBuilder().build();
} else {
treatment = outPacket().treatment();
}
OutboundPacket outboundPacket = new DefaultOutboundPacket(deviceId, treatment, rawData);
emit(outboundPacket);
}
}
/**
 * Internal packet listener handling packet-in events received from the
 * P4Runtime controller and pushing them to the core as packet contexts.
 */
private class InternalPacketListener implements P4RuntimeEventListener {

    @Override
    public void event(P4RuntimeEvent event) {
        // Packet-in events are delivered to every instance; only instances
        // able to interpret them for the device process them.
        // FIXME: we need the device ID in the P4Runtime event subject to check for mastership.
        if (!(event.subject() instanceof P4RuntimePacketIn) || event.type() != P4RuntimeEvent.Type.PACKET_IN) {
            // Not a packet-in event, ignore it.
            log.debug("Unrecognized event type {}, discarding", event.type());
            return;
        }
        P4RuntimePacketIn eventSubject = (P4RuntimePacketIn) event.subject();
        DeviceId deviceId = eventSubject.deviceId();
        Device device = deviceService.getDevice(deviceId);
        if (device == null) {
            log.warn("Unable to process packet-in from {}, device is null in the core", deviceId);
            return;
        }
        if (!device.is(PiPipelineInterpreter.class)) {
            log.warn("Unable to process packet-in from {}, device has no PiPipelineInterpreter behaviour",
                    deviceId);
            return;
        }
        PiPacketOperation operation = eventSubject.packetOperation();
        InboundPacket inPkt;
        try {
            inPkt = device.as(PiPipelineInterpreter.class).mapInboundPacket(operation, deviceId);
        } catch (PiPipelineInterpreter.PiInterpreterException e) {
            log.warn("Unable to interpret inbound packet from {}: {}", deviceId, e.getMessage());
            return;
        }
        // BUGFIX: the null check must precede the trace logging below — the
        // original called inPkt.unparsed() first and could NPE when the
        // interpreter returned null.
        if (inPkt == null) {
            log.debug("Received null inbound packet. Ignoring.");
            return;
        }
        if (log.isTraceEnabled()) {
            final EthType.EtherType etherType = getEtherType(inPkt.unparsed());
            log.trace("Received PACKET-IN <<< device={} ingress_port={} eth_type={}",
                    inPkt.receivedFrom().deviceId(), inPkt.receivedFrom().port(),
                    etherType.ethType().toString());
        }
        OutboundPacket outPkt = new DefaultOutboundPacket(eventSubject.deviceId(), null,
                operation.data().asReadOnlyBuffer());
        PacketContext pktCtx = new P4RuntimePacketContext(System.currentTimeMillis(), inPkt, outPkt, false);
        // Push the packet context up to the core for processing.
        providerService.processPacket(pktCtx);
    }
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simplesystemsmanagement.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the CreateAssociation operation: associates an SSM
 * document with a managed instance.
 */
public class CreateAssociationRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /**
     * <p>
     * The name of the SSM document.
     * </p>
     */
    private String name;

    /**
     * <p>
     * The instance ID.
     * </p>
     */
    private String instanceId;

    /**
     * <p>
     * The parameters for the document's runtime configuration.
     * </p>
     */
    private java.util.Map<String, java.util.List<String>> parameters;

    /**
     * <p>
     * The name of the SSM document.
     * </p>
     *
     * @param name
     *        The name of the SSM document.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * <p>
     * The name of the SSM document.
     * </p>
     *
     * @return The name of the SSM document.
     */
    public String getName() {
        return this.name;
    }

    /**
     * <p>
     * The name of the SSM document.
     * </p>
     *
     * @param name
     *        The name of the SSM document.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateAssociationRequest withName(String name) {
        setName(name);
        return this;
    }

    /**
     * <p>
     * The instance ID.
     * </p>
     *
     * @param instanceId
     *        The instance ID.
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /**
     * <p>
     * The instance ID.
     * </p>
     *
     * @return The instance ID.
     */
    public String getInstanceId() {
        return this.instanceId;
    }

    /**
     * <p>
     * The instance ID.
     * </p>
     *
     * @param instanceId
     *        The instance ID.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateAssociationRequest withInstanceId(String instanceId) {
        setInstanceId(instanceId);
        return this;
    }

    /**
     * <p>
     * The parameters for the document's runtime configuration.
     * </p>
     *
     * @return The parameters for the document's runtime configuration.
     */
    public java.util.Map<String, java.util.List<String>> getParameters() {
        return parameters;
    }

    /**
     * <p>
     * The parameters for the document's runtime configuration.
     * </p>
     *
     * @param parameters
     *        The parameters for the document's runtime configuration.
     */
    public void setParameters(
            java.util.Map<String, java.util.List<String>> parameters) {
        this.parameters = parameters;
    }

    /**
     * <p>
     * The parameters for the document's runtime configuration.
     * </p>
     *
     * @param parameters
     *        The parameters for the document's runtime configuration.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateAssociationRequest withParameters(
            java.util.Map<String, java.util.List<String>> parameters) {
        setParameters(parameters);
        return this;
    }

    /**
     * Adds a single entry to the Parameters map, creating the map lazily.
     *
     * @param key parameter name; must not already be present
     * @param value parameter values
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @throws IllegalArgumentException if {@code key} was already added
     */
    public CreateAssociationRequest addParametersEntry(String key,
            java.util.List<String> value) {
        if (null == this.parameters) {
            this.parameters = new java.util.HashMap<String, java.util.List<String>>();
        }
        if (this.parameters.containsKey(key)) {
            // BUGFIX: concatenate the key directly (was key.toString()) so a
            // null key yields a readable message instead of an NPE here.
            throw new IllegalArgumentException("Duplicated keys (" + key
                    + ") are provided.");
        }
        this.parameters.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Parameters.
     *
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public CreateAssociationRequest clearParametersEntries() {
        this.parameters = null;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getName() != null) {
            sb.append("Name: " + getName() + ",");
        }
        if (getInstanceId() != null) {
            sb.append("InstanceId: " + getInstanceId() + ",");
        }
        if (getParameters() != null) {
            sb.append("Parameters: " + getParameters());
        }
        sb.append("}");
        return sb.toString();
    }

    /**
     * Two requests are equal when name, instance ID and parameters are all
     * equal. Uses {@code java.util.Objects.equals}, which is semantically
     * identical to the original XOR/null-check chains.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null, covering the original's null check.
        if (!(obj instanceof CreateAssociationRequest)) {
            return false;
        }
        CreateAssociationRequest other = (CreateAssociationRequest) obj;
        return java.util.Objects.equals(getName(), other.getName())
                && java.util.Objects.equals(getInstanceId(), other.getInstanceId())
                && java.util.Objects.equals(getParameters(), other.getParameters());
    }

    /**
     * {@code Objects.hash} computes the same 31-based rolling hash (initial
     * value 1, null contributes 0) as the original manual loop, so the hash
     * codes produced are unchanged.
     */
    @Override
    public int hashCode() {
        return java.util.Objects.hash(getName(), getInstanceId(), getParameters());
    }

    @Override
    public CreateAssociationRequest clone() {
        return (CreateAssociationRequest) super.clone();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.workunit;
import java.util.List;
import java.util.Locale;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
/**
 * A class representing all the base attributes required by all tables types. Subclasses
 * will be expected to validate each table type for their respective required attributes.
 *
 * <p>
 * The extract ID only needs to be unique for {@link Extract}s belonging to the same
 * namespace/table. One or more {@link WorkUnit}s can share the same extract ID.
 * {@link WorkUnit}s that do share an extract ID will be considered parts of a single
 * {@link Extract} for the purpose of applying publishing policies.
 * </p>
 *
 * @author kgoodhop
 */
public class Extract extends State {

    /** Supported table types. */
    public enum TableType {
        SNAPSHOT_ONLY,
        SNAPSHOT_APPEND,
        APPEND_ONLY
    }

    /** Aggregated state of previous work units for the same namespace/table. */
    private final State previousTableState = new State();

    /**
     * Constructor.
     *
     * @param state a {@link SourceState} carrying properties needed to construct an {@link Extract}
     * @param type {@link TableType}
     * @param namespace dot separated namespace path
     * @param table table name
     *
     * @deprecated Extract does not use any property in {@link SourceState}.
     *             Use {@link #Extract(TableType, String, String)}
     */
    @Deprecated
    public Extract(SourceState state, TableType type, String namespace, String table) {
        // Values should only be null for deserialization
        if (state != null && type != null && !Strings.isNullOrEmpty(namespace) && !Strings.isNullOrEmpty(table)) {
            // The extract ID is the construction timestamp in the configured time zone.
            DateTimeZone timeZone = getTimeZoneHelper(state);
            DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyyMMddHHmmss").withLocale(Locale.US).withZone(timeZone);
            String extractId = dtf.print(new DateTime());

            super.addAll(state);
            super.setProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY, type.toString());
            super.setProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, namespace);
            super.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, table);
            super.setProp(ConfigurationKeys.EXTRACT_EXTRACT_ID_KEY, extractId);

            // Carry over state from previous work units of the same namespace/table.
            for (WorkUnitState pre : state.getPreviousWorkUnitStates()) {
                Extract previousExtract = pre.getWorkunit().getExtract();
                if (previousExtract.getNamespace().equals(namespace) && previousExtract.getTable().equals(table)) {
                    this.previousTableState.addAll(pre);
                }
            }

            // Setting full drop date if not already specified, the value can still be overridden if required.
            if (state.getPropAsBoolean(ConfigurationKeys.EXTRACT_IS_FULL_KEY)
                    && !state.contains(ConfigurationKeys.EXTRACT_FULL_RUN_TIME_KEY)) {
                super.setProp(ConfigurationKeys.EXTRACT_FULL_RUN_TIME_KEY, System.currentTimeMillis());
            }
        }
    }

    /** Resolves the time zone used when generating extract IDs. */
    DateTimeZone getTimeZoneHelper(SourceState state) {
        return DateTimeZone.forID(state.getProp(ConfigurationKeys.EXTRACT_ID_TIME_ZONE,
                ConfigurationKeys.DEFAULT_EXTRACT_ID_TIME_ZONE));
    }

    /**
     * Constructor.
     *
     * @param type {@link TableType}
     * @param namespace dot separated namespace path
     * @param table table name
     */
    public Extract(TableType type, String namespace, String table) {
        this(new SourceState(), type, namespace, table);
    }

    /**
     * Deep copy constructor.
     *
     * @param extract the other {@link Extract} instance
     */
    public Extract(Extract extract) {
        super.addAll(extract.getProperties());
    }

    @Override
    public boolean equals(Object object) {
        if (!(object instanceof Extract)) {
            return false;
        }
        Extract other = (Extract) object;
        return super.equals(other) && this.getNamespace().equals(other.getNamespace())
                && this.getTable().equals(other.getTable()) && this.getExtractId().equals(other.getExtractId());
    }

    @Override
    public int hashCode() {
        return (this.getNamespace() + this.getTable() + this.getExtractId()).hashCode();
    }

    /**
     * Get the writer output file path corresponding to this {@link Extract}.
     *
     * @return writer output file path corresponding to this {@link Extract}
     * @deprecated As {@code this.getIsFull} is deprecated.
     */
    @Deprecated
    public String getOutputFilePath() {
        return this.getNamespace().replaceAll("\\.", "/") + "/" + this.getTable() + "/" + this.getExtractId() + "_"
                + (this.getIsFull() ? "full" : "append");
    }

    /**
     * If this {@link Extract} has extract table type defined.
     *
     * @return <code>true</code> if it has, <code>false</code> otherwise.
     */
    public boolean hasType() {
        return contains(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY);
    }

    /**
     * Get the {@link TableType} of the table.
     *
     * @return {@link TableType} of the table
     */
    public TableType getType() {
        return TableType.valueOf(getProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY));
    }

    /**
     * Get the dot-separated namespace of the table.
     *
     * @return dot-separated namespace of the table
     */
    public String getNamespace() {
        return getProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, "");
    }

    /**
     * Get the name of the table.
     *
     * @return name of the table
     */
    public String getTable() {
        return getProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "");
    }

    /**
     * Get a (non-globally) unique ID for this {@link Extract}.
     *
     * @return unique ID for this {@link Extract}
     */
    public String getExtractId() {
        return getProp(ConfigurationKeys.EXTRACT_EXTRACT_ID_KEY, "");
    }

    /**
     * Set a (non-globally) unique ID for this {@link Extract}.
     *
     * @param extractId unique ID for this {@link Extract}
     */
    public void setExtractId(String extractId) {
        setProp(ConfigurationKeys.EXTRACT_EXTRACT_ID_KEY, extractId);
    }

    /**
     * Check if this {@link Extract} represents the full contents of the source table.
     *
     * @return <code>true</code> if this {@link Extract} represents the full contents
     *         of the source table and <code>false</code> otherwise
     * @deprecated It is recommend to get this information from {@code WorkUnit} instead of {@code Extract}.
     */
    @Deprecated
    public boolean getIsFull() {
        return getPropAsBoolean(ConfigurationKeys.EXTRACT_IS_FULL_KEY, false);
    }

    /**
     * Set full drop date from the given time.
     *
     * @param extractFullRunTime full extract time
     * @deprecated It is recommend to set this information in {@code WorkUnit} instead of {@code Extract}.
     */
    @Deprecated
    public void setFullTrue(long extractFullRunTime) {
        setProp(ConfigurationKeys.EXTRACT_IS_FULL_KEY, true);
        setProp(ConfigurationKeys.EXTRACT_FULL_RUN_TIME_KEY, extractFullRunTime);
    }

    /**
     * Set primary keys.
     *
     * <p>
     * The order of primary keys does not matter.
     * </p>
     *
     * @param primaryKeyFieldName primary key names
     * @deprecated It is recommended to set primary keys in {@code WorkUnit} instead of {@code Extract}.
     */
    @Deprecated
    public void setPrimaryKeys(String... primaryKeyFieldName) {
        setProp(ConfigurationKeys.EXTRACT_PRIMARY_KEY_FIELDS_KEY, Joiner.on(",").join(primaryKeyFieldName));
    }

    /**
     * Add more primary keys to the existing set of primary keys.
     *
     * @param primaryKeyFieldName primary key names
     * @deprecated It is recommended to add primary keys in {@code WorkUnit} instead of {@code Extract}.
     */
    @Deprecated
    public void addPrimaryKey(String... primaryKeyFieldName) {
        // BUGFIX: the previous implementation appended the new values to the
        // existing property with Joiner.appendTo and no "," between the old
        // content and the first new value, corrupting the list ("pk1pk2").
        appendToCsvProp(ConfigurationKeys.EXTRACT_PRIMARY_KEY_FIELDS_KEY, primaryKeyFieldName);
    }

    /**
     * Get the list of primary keys.
     *
     * @return list of primary keys
     * @deprecated It is recommended to obtain primary keys from {@code WorkUnit} instead of {@code Extract}.
     */
    @Deprecated
    public List<String> getPrimaryKeys() {
        return getPropAsList(ConfigurationKeys.EXTRACT_PRIMARY_KEY_FIELDS_KEY);
    }

    /**
     * Set delta fields.
     *
     * <p>
     * The order of delta fields does not matter.
     * </p>
     *
     * @param deltaFieldName delta field names
     * @deprecated It is recommended to set delta fields in {@code WorkUnit} instead of {@code Extract}.
     */
    @Deprecated
    public void setDeltaFields(String... deltaFieldName) {
        setProp(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY, Joiner.on(",").join(deltaFieldName));
    }

    /**
     * Add more delta fields to the existing set of delta fields.
     *
     * @param deltaFieldName delta field names
     * @deprecated It is recommended to add delta fields in {@code WorkUnit} instead of {@code Extract}.
     */
    @Deprecated
    public void addDeltaField(String... deltaFieldName) {
        // BUGFIX: same missing-separator bug as addPrimaryKey — see above.
        appendToCsvProp(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY, deltaFieldName);
    }

    /**
     * Get the list of delta fields.
     *
     * @return list of delta fields
     * @deprecated It is recommended to obtain delta fields from {@code WorkUnit} instead of {@code Extract}.
     */
    @Deprecated
    public List<String> getDeltaFields() {
        return getPropAsList(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY);
    }

    /**
     * Get the previous table {@link State}.
     *
     * @return previous table {@link State}
     */
    public State getPreviousTableState() {
        return this.previousTableState;
    }

    /**
     * Appends values to a comma-separated property, inserting a separator
     * between existing content and the new values.
     *
     * @param key property key holding the CSV list
     * @param values values to append; no-op when empty
     */
    private void appendToCsvProp(String key, String... values) {
        if (values.length == 0) {
            return;
        }
        String existing = getProp(key, "");
        String joined = Joiner.on(",").join(values);
        setProp(key, existing.isEmpty() ? joined : existing + "," + joined);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.h2.opt;
import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.CacheObjectContext;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.processors.query.GridQueryIndexDescriptor;
import org.apache.ignite.internal.processors.query.GridQueryTypeDescriptor;
import org.apache.ignite.internal.util.GridAtomicLong;
import org.apache.ignite.internal.util.GridCloseableIteratorAdapter;
import org.apache.ignite.internal.util.lang.GridCloseableIterator;
import org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.spi.indexing.IndexingQueryFilter;
import org.apache.ignite.spi.indexing.IndexingQueryCacheFilter;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.classic.MultiFieldQueryParser;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.util.BytesRef;
import org.h2.util.JdbcUtils;
import org.jetbrains.annotations.Nullable;
import static org.apache.ignite.internal.processors.query.QueryUtils.KEY_FIELD_NAME;
import static org.apache.ignite.internal.processors.query.QueryUtils.VAL_FIELD_NAME;
/**
 * Lucene fulltext index over cache entries. Each Lucene document stores the
 * serialized cache key, the entry version, an expiration timestamp (used to
 * range-filter expired entries at query time), and the value either as the
 * VAL_STR_FIELD_NAME text field or as serialized bytes in VAL_FIELD_NAME.
 */
public class GridLuceneIndex implements AutoCloseable {
    /** Field name for string representation of value. */
    public static final String VAL_STR_FIELD_NAME = "_gg_val_str__";
    /** Field name for value version. */
    public static final String VER_FIELD_NAME = "_gg_ver__";
    /** Field name for value expiration time. */
    public static final String EXPIRATION_TIME_FIELD_NAME = "_gg_expires__";
    /** Cache name this index belongs to (nullable, see constructor). */
    private final String cacheName;
    /** Descriptor of the indexed type (supplies text fields and value class). */
    private final GridQueryTypeDescriptor type;
    /** Shared Lucene index writer used by store/remove/query. */
    private final IndexWriter writer;
    /** Names of indexed text fields; the last slot is always VAL_STR_FIELD_NAME. */
    private final String[] idxdFields;
    /** Number of updates not yet committed to the index (see query()). */
    private final AtomicLong updateCntr = new GridAtomicLong();
    /** Lucene directory backed by GridUnsafeMemory. */
    private final GridLuceneDirectory dir;
    /** Kernal context; may be null (unmarshall() treats null context as a test setup). */
    private final GridKernalContext ctx;
    /**
     * Constructor.
     *
     * @param ctx Kernal context.
     * @param cacheName Cache name.
     * @param type Type descriptor.
     * @throws IgniteCheckedException If failed.
     */
    public GridLuceneIndex(GridKernalContext ctx, @Nullable String cacheName, GridQueryTypeDescriptor type)
        throws IgniteCheckedException {
        this.ctx = ctx;
        this.cacheName = cacheName;
        this.type = type;
        dir = new GridLuceneDirectory(new GridUnsafeMemory(0));
        try {
            writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()));
        }
        catch (IOException e) {
            throw new IgniteCheckedException(e);
        }
        GridQueryIndexDescriptor idx = type.textIndex();
        if (idx != null) {
            Collection<String> fields = idx.fields();
            // Reserve one extra slot for the value-string pseudo field.
            idxdFields = new String[fields.size() + 1];
            fields.toArray(idxdFields);
        }
        else {
            // Without an explicit text index, only the value string is indexed.
            assert type.valueTextIndex() || type.valueClass() == String.class;
            idxdFields = new String[1];
        }
        idxdFields[idxdFields.length - 1] = VAL_STR_FIELD_NAME;
    }
    /**
     * @return Cache object context.
     */
    private CacheObjectContext objectContext() {
        if (ctx == null)
            return null;
        return ctx.cache().internalCache(cacheName).context().cacheObjectContext();
    }
    /**
     * Stores given data in this fulltext index.
     *
     * @param k Key.
     * @param v Value.
     * @param ver Version.
     * @param expires Expiration time.
     * @throws IgniteCheckedException If failed.
     */
    @SuppressWarnings("ConstantConditions")
    public void store(CacheObject k, CacheObject v, GridCacheVersion ver, long expires) throws IgniteCheckedException {
        CacheObjectContext coctx = objectContext();
        // Unwrap platform types to their Java values; keep others as-is.
        Object key = k.isPlatformType() ? k.value(coctx, false) : k;
        Object val = v.isPlatformType() ? v.value(coctx, false) : v;
        Document doc = new Document();
        boolean stringsFound = false;
        if (type.valueTextIndex() || type.valueClass() == String.class) {
            doc.add(new TextField(VAL_STR_FIELD_NAME, val.toString(), Field.Store.YES));
            stringsFound = true;
        }
        // Index each configured text field that has a non-null value.
        for (int i = 0, last = idxdFields.length - 1; i < last; i++) {
            Object fieldVal = type.value(idxdFields[i], key, val);
            if (fieldVal != null) {
                doc.add(new TextField(idxdFields[i], fieldVal.toString(), Field.Store.YES));
                stringsFound = true;
            }
        }
        BytesRef keyByteRef = new BytesRef(k.valueBytes(coctx));
        try {
            final Term term = new Term(KEY_FIELD_NAME, keyByteRef);
            if (!stringsFound) {
                // Drop any stale document for this key rather than keep an
                // entry that no longer has indexable text.
                writer.deleteDocuments(term);
                return; // We did not find any strings to be indexed, will not store data at all.
            }
            doc.add(new StringField(KEY_FIELD_NAME, keyByteRef, Field.Store.YES));
            // Raw value bytes only needed when the value is not already stored
            // as a string in VAL_STR_FIELD_NAME.
            if (type.valueClass() != String.class)
                doc.add(new StoredField(VAL_FIELD_NAME, v.valueBytes(coctx)));
            doc.add(new StoredField(VER_FIELD_NAME, ver.toString().getBytes()));
            // LongPoint so query() can range-filter out expired entries.
            doc.add(new LongPoint(EXPIRATION_TIME_FIELD_NAME, expires));
            // Next implies remove than add atomically operation.
            writer.updateDocument(term, doc);
        }
        catch (IOException e) {
            throw new IgniteCheckedException(e);
        }
        finally {
            // Count the update even on failure so query() still commits.
            updateCntr.incrementAndGet();
        }
    }
    /**
     * Removes entry for given key from this index.
     *
     * @param key Key.
     * @throws IgniteCheckedException If failed.
     */
    public void remove(CacheObject key) throws IgniteCheckedException {
        try {
            writer.deleteDocuments(new Term(KEY_FIELD_NAME,
                new BytesRef(key.valueBytes(objectContext()))));
        }
        catch (IOException e) {
            throw new IgniteCheckedException(e);
        }
        finally {
            updateCntr.incrementAndGet();
        }
    }
    /**
     * Runs lucene fulltext query over this index.
     *
     * @param qry Query.
     * @param filters Filters over result.
     * @return Query result.
     * @throws IgniteCheckedException If failed.
     */
    public <K, V> GridCloseableIterator<IgniteBiTuple<K, V>> query(String qry,
        IndexingQueryFilter filters) throws IgniteCheckedException {
        IndexReader reader;
        try {
            long updates = updateCntr.get();
            if (updates != 0) {
                // Make pending updates visible to the new reader. Subtracting
                // (rather than resetting) preserves increments made
                // concurrently between get() and commit().
                writer.commit();
                updateCntr.addAndGet(-updates);
            }
            //We can cache reader\searcher and change this to 'openIfChanged'
            reader = DirectoryReader.open(writer);
        }
        catch (IOException e) {
            throw new IgniteCheckedException(e);
        }
        IndexSearcher searcher;
        TopDocs docs;
        try {
            searcher = new IndexSearcher(reader);
            MultiFieldQueryParser parser = new MultiFieldQueryParser(idxdFields,
                writer.getAnalyzer());
            // parser.setAllowLeadingWildcard(true);
            // Filter expired items.
            Query filter = LongPoint.newRangeQuery(EXPIRATION_TIME_FIELD_NAME, U.currentTimeMillis(), Long.MAX_VALUE);
            BooleanQuery query = new BooleanQuery.Builder()
                .add(parser.parse(qry), BooleanClause.Occur.MUST)
                .add(filter, BooleanClause.Occur.FILTER)
                .build();
            docs = searcher.search(query, Integer.MAX_VALUE);
        }
        catch (Exception e) {
            // Reader must not leak when parsing/searching fails.
            U.closeQuiet(reader);
            throw new IgniteCheckedException(e);
        }
        IndexingQueryCacheFilter fltr = null;
        if (filters != null)
            fltr = filters.forCache(cacheName);
        // The iterator takes ownership of the reader and closes it in onClose().
        return new It<>(reader, searcher, docs.scoreDocs, fltr);
    }
    /** {@inheritDoc} */
    @Override public void close() {
        U.closeQuiet(writer);
        // NOTE(review): dereferences ctx unconditionally; objectContext()
        // tolerates a null ctx but close() would NPE — confirm ctx is always
        // non-null on the close path.
        U.close(dir, ctx.log(GridLuceneIndex.class));
    }
    /**
     * Key-value iterator over fulltext search result.
     */
    private class It<K, V> extends GridCloseableIteratorAdapter<IgniteBiTuple<K, V>> {
        /** */
        private static final long serialVersionUID = 0L;
        /** Reader owned by this iterator; closed in onClose(). */
        private final IndexReader reader;
        /** Searcher over the owned reader. */
        private final IndexSearcher searcher;
        /** Matched documents, iterated in score order. */
        private final ScoreDoc[] docs;
        /** Optional per-cache filter applied to unmarshalled keys (nullable). */
        private final IndexingQueryCacheFilter filters;
        /** Index of the next document in {@link #docs}. */
        private int idx;
        /** Pre-fetched next tuple; null when iteration is exhausted. */
        private IgniteBiTuple<K, V> curr;
        /** Cache object context used for unmarshalling (null in tests). */
        private CacheObjectContext coctx;
        /**
         * Constructor.
         *
         * @param reader Reader.
         * @param searcher Searcher.
         * @param docs Docs.
         * @param filters Filters over result.
         * @throws IgniteCheckedException if failed.
         */
        private It(IndexReader reader, IndexSearcher searcher, ScoreDoc[] docs, IndexingQueryCacheFilter filters)
            throws IgniteCheckedException {
            this.reader = reader;
            this.searcher = searcher;
            this.docs = docs;
            this.filters = filters;
            coctx = objectContext();
            // Eagerly position on the first accepted result.
            findNext();
        }
        /**
         * @param bytes Bytes.
         * @param ldr Class loader.
         * @return Object.
         * @throws IgniteCheckedException If failed.
         */
        @SuppressWarnings("unchecked")
        private <Z> Z unmarshall(byte[] bytes, ClassLoader ldr) throws IgniteCheckedException {
            if (coctx == null) // For tests.
                return (Z)JdbcUtils.deserialize(bytes, null);
            return (Z)coctx.kernalContext().cacheObjects().unmarshal(coctx, bytes, ldr);
        }
        /**
         * Finds next element.
         *
         * @throws IgniteCheckedException If failed.
         */
        @SuppressWarnings("unchecked")
        private void findNext() throws IgniteCheckedException {
            curr = null;
            // Scan forward until a document passes the key filter (or docs end).
            while (idx < docs.length) {
                Document doc;
                try {
                    doc = searcher.doc(docs[idx++].doc);
                }
                catch (IOException e) {
                    throw new IgniteCheckedException(e);
                }
                ClassLoader ldr = null;
                // Use the deployment class loader when peer deployment is enabled.
                if (ctx != null && ctx.deploy().enabled())
                    ldr = ctx.cache().internalCache(cacheName).context().deploy().globalLoader();
                K k = unmarshall(doc.getBinaryValue(KEY_FIELD_NAME).bytes, ldr);
                if (filters != null && !filters.apply(k))
                    continue;
                // String values are read back from the text field; others are
                // deserialized from the stored value bytes.
                V v = type.valueClass() == String.class ?
                    (V)doc.get(VAL_STR_FIELD_NAME) :
                    this.<V>unmarshall(doc.getBinaryValue(VAL_FIELD_NAME).bytes, ldr);
                assert v != null;
                curr = new IgniteBiTuple<>(k, v);
                break;
            }
        }
        /** {@inheritDoc} */
        @Override protected IgniteBiTuple<K, V> onNext() throws IgniteCheckedException {
            IgniteBiTuple<K, V> res = curr;
            findNext();
            return res;
        }
        /** {@inheritDoc} */
        @Override protected boolean onHasNext() throws IgniteCheckedException {
            return curr != null;
        }
        /** {@inheritDoc} */
        @Override protected void onClose() throws IgniteCheckedException {
            U.closeQuiet(reader);
        }
    }
}
| |
package com.swe.zz_deprecated;
import com.jme3.app.Application;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.event.*;
import java.io.File;
import javax.swing.*;
import javax.swing.filechooser.FileFilter;
public class EditorMenuItems extends DeprecatedSwing {
/** No-arg constructor; menu construction is performed later in menuButtonz(). */
public EditorMenuItems() {
}
protected void menuButtonz() {
final JMenuItem itemOpen = new JMenuItem("Open");
menuTortureMethods.add(itemOpen);
itemOpen.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
}
});
final JMenuItem itemSave = new JMenuItem("Save");
menuTortureMethods.add(itemSave);
itemSave.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
}
});
final JMenuItem itemExit = new JMenuItem("Exit");
menuTortureMethods.add(itemExit);
itemExit.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent ae) {
frame.dispose();
app.stop();
}
});
// JButton loadDiffuseButton = new JButton("Load Diffuse Texture");
// loadDiffuseButton.setSize(200, 20);
// loadDiffuseButton.setPreferredSize(new Dimension(190, 20));
// loadDiffuseButton.setVerticalTextPosition(AbstractButton.CENTER);
// loadDiffuseButton.setHorizontalTextPosition(AbstractButton.LEADING);
// loadDiffuseButton.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// if (((BBSceneGrid)app).selectedEntity != null && strLst.size() > 0) loadDiffuseTexture();
//
//
// }
// });
// optionPanel.add(loadDiffuseButton);
//
// JButton loadNormalButton = new JButton("Load Normal Texture");
// loadNormalButton.setSize(200, 20);
// loadNormalButton.setPreferredSize(new Dimension(190, 20));
// loadNormalButton.setVerticalTextPosition(AbstractButton.CENTER);
// loadNormalButton.setHorizontalTextPosition(AbstractButton.LEADING);
// loadNormalButton.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// if (((BBSceneGrid)app).selectedEntity != null && strLst.size() > 0) loadNormalTexture();
// }
// });
// optionPanel.add(loadNormalButton);
//
// optionPanel.add(new JToolBar.Separator());
//
//
// JButton RemoveSelectedModel = new JButton("Remove Selected Model");
// RemoveSelectedModel.setSize(200, 20);
// RemoveSelectedModel.setPreferredSize(new Dimension(190, 20));
// RemoveSelectedModel.setVerticalTextPosition(AbstractButton.CENTER);
// RemoveSelectedModel.setHorizontalTextPosition(AbstractButton.LEADING);
// RemoveSelectedModel.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
//
//
// if (((BBSceneGrid)app).selectedEntity != null || listEntity.getSelectedValue() != null){
// modelGeo.clear();
// listGeo.repaint();
//
// modelEntity.removeElement(((BBSceneGrid)app).selectedEntity);
// listEntity.repaint();
//
// ((BBSceneGrid)app).RemoveSelectedEntity();
// }
// }
// });
// optionPanel.add(RemoveSelectedModel);
//
// JButton clearScene = new JButton("Clear Scene");
// clearScene.setSize(200, 20);
// clearScene.setPreferredSize(new Dimension(190, 20));
// clearScene.setVerticalTextPosition(AbstractButton.CENTER);
// clearScene.setHorizontalTextPosition(AbstractButton.LEADING);
// clearScene.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// ((BBSceneGrid)app).ClearScene();
// modelEntity.clear();
// listEntity.repaint();
// modelGeo.clear();
// listGeo.repaint();
// }
// });
// optionPanel.add(clearScene);
//
//
// optionPanel.add(new JToolBar.Separator());
//
//
//
// JButton Nor_Inv_X = new JButton("Normal InvertX");
// Nor_Inv_X.setSize(200, 20);
// Nor_Inv_X.setPreferredSize(new Dimension(190, 20));
// Nor_Inv_X.setVerticalTextPosition(AbstractButton.CENTER);
// Nor_Inv_X.setHorizontalTextPosition(AbstractButton.LEADING);
// Nor_Inv_X.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// if (((BBSceneGrid)app).selectedEntity != null && strLst.size() > 0)
// ((BBSceneGrid)app).setShaderParam("Nor_Inv_X", strLst);
// }
// });
// optionPanel.add(Nor_Inv_X);
//
// JButton Nor_Inv_Y = new JButton("Normal InvertY");
// Nor_Inv_Y.setSize(200, 20);
// Nor_Inv_Y.setPreferredSize(new Dimension(190, 20));
// Nor_Inv_Y.setVerticalTextPosition(AbstractButton.CENTER);
// Nor_Inv_Y.setHorizontalTextPosition(AbstractButton.LEADING);
// Nor_Inv_Y.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// if (((BBSceneGrid)app).selectedEntity != null && strLst.size() > 0)
// ((BBSceneGrid)app).setShaderParam("Nor_Inv_Y", strLst);
// }
// });
// optionPanel.add(Nor_Inv_Y);
//
// JButton Alpha_A_Dif = new JButton("Alpha Diffuse");
// Alpha_A_Dif.setSize(200, 20);
// Alpha_A_Dif.setPreferredSize(new Dimension(190, 20));
// Alpha_A_Dif.setVerticalTextPosition(AbstractButton.CENTER);
// Alpha_A_Dif.setHorizontalTextPosition(AbstractButton.LEADING);
// Alpha_A_Dif.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// if (((BBSceneGrid)app).selectedEntity != null && strLst.size() > 0)
// ((BBSceneGrid)app).setShaderParam("Alpha_A_Dif", strLst);
// }
// });
// optionPanel.add(Alpha_A_Dif);
//
// JButton EmissiveMap = new JButton("Emissive Alpha Diffuse");
// EmissiveMap.setSize(200, 20);
// EmissiveMap.setPreferredSize(new Dimension(190, 20));
// EmissiveMap.setVerticalTextPosition(AbstractButton.CENTER);
// EmissiveMap.setHorizontalTextPosition(AbstractButton.LEADING);
// EmissiveMap.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// if (((BBSceneGrid)app).selectedEntity != null && strLst.size() > 0)
// ((BBSceneGrid)app).setShaderParam("EmissiveMap", strLst);
// }
// });
// optionPanel.add(EmissiveMap);
//
// JButton Spec_A_Nor = new JButton("Specular Normal");
// Spec_A_Nor.setSize(200, 20);
// Spec_A_Nor.setPreferredSize(new Dimension(190, 20));
// Spec_A_Nor.setVerticalTextPosition(AbstractButton.CENTER);
// Spec_A_Nor.setHorizontalTextPosition(AbstractButton.LEADING);
// Spec_A_Nor.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// if (((BBSceneGrid)app).selectedEntity != null && strLst.size() > 0)
// ((BBSceneGrid)app).setShaderParam("Spec_A_Nor", strLst);
// }
// });
// optionPanel.add(Spec_A_Nor);
//
// JButton Spec_A_Dif = new JButton("Specular Diffuse");
// Spec_A_Dif.setSize(200, 20);
// Spec_A_Dif.setPreferredSize(new Dimension(190, 20));
// Spec_A_Dif.setVerticalTextPosition(AbstractButton.CENTER);
// Spec_A_Dif.setHorizontalTextPosition(AbstractButton.LEADING);
// Spec_A_Dif.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
// if (((BBSceneGrid)app).selectedEntity != null && strLst.size() > 0)
// ((BBSceneGrid)app).setShaderParam("Spec_A_Dif", strLst);
// }
// });
// optionPanel.add(Spec_A_Dif);
}
protected void panelButtonz(Container cnt){
JButton loadModelButton = new JButton("Load Model");
loadModelButton.setSize(200, 20);
loadModelButton.setPreferredSize(new Dimension(190, 20));
loadModelButton.setVerticalTextPosition(AbstractButton.CENTER);
loadModelButton.setHorizontalTextPosition(AbstractButton.LEADING);
loadModelButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
// loadModelFromFile();
}
});
cnt.add(loadModelButton);
// JMenuItem loadModelButton = new JMenuItem("Load Model");
// cnt.add(loadModelButton);
// loadModelButton.addActionListener(new ActionListener() {
// public void actionPerformed(ActionEvent e) {
//// loadModelFromFile();
// }
// });
}
// private void loadModelFromFile(){
// mFileCm.setFileFilter(modFilter);
// int returnVal = mFileCm.showOpenDialog(null);
//
// if (returnVal == JFileChooser.APPROVE_OPTION) {
// File file = mFileCm.getSelectedFile();
//// try{
//// mLogArea.append("Loading file : " + file.getCanonicalPath() +"\n");
//// ((BBSceneGrid)app).loadExternalModel(file.getName(), file.getParent());
////
//// //Load Entity list
////
//// modelEntity.addElement(((BBSceneGrid)app).selectedEntity);
//// listEntity.repaint();
//// // Load Geometries list
//// modelGeo.clear();
////
//// for (int i=0; i<BBWorldManager.getInstance().getEntity(((BBSceneGrid)app).selectedEntity).getAllGeometries().toArray().length; i++) {
//// Geometry geo = BBWorldManager.getInstance().getEntity(((BBSceneGrid)app).selectedEntity).getAllGeometries().get(i);
//// geo.setUserData("Model", file.getName());
//// modelGeo.add(i, geo.getName());
//// }
////
//// listGeo.repaint();
//// strLst.clear();
////
//// }catch (IOException ex){}
//
// }
//
// mFileCm.setSelectedFile(null);
// }
// private void loadDiffuseTexture(){
// mFileCm.setFileFilter(texFilter);
// int returnVal = mFileCm.showOpenDialog(null);
//
// if (returnVal == JFileChooser.APPROVE_OPTION) {
// File file = mFileCm.getSelectedFile();
//// mFileCt.setCurrentDirectory(file);
// try{
// mLogArea.append("Loading file : " + file.getCanonicalPath() +"\n");
//
// if (strLst.size() > 0 && ((BBSceneGrid)app).selectedEntity != null) {
//
// ((BBSceneGrid)app).loadTexture("DiffuseMap", file.getName(), file.getParent(), strLst);
// }
// }catch (IOException ex){}
// }
//
// mFileCm.setSelectedFile(null);
// }
//
// private void loadNormalTexture(){
// mFileCm.setFileFilter(texFilter);
// int returnVal = mFileCm.showOpenDialog(null);
//
// if (returnVal == JFileChooser.APPROVE_OPTION) {
// File file = mFileCm.getSelectedFile();
//// mFileCt.setCurrentDirectory(file);
// try{
// mLogArea.append("Loading file : " + file.getCanonicalPath() +"\n");
//
//
// if (strLst.size() > 0 && ((BBSceneGrid)app).selectedEntity != null) {
//
// ((BBSceneGrid)app).loadTexture("NormalMap", file.getName(), file.getParent(), strLst);
//
// }
// }catch (IOException ex){}
// }
//
// mFileCm.setSelectedFile(null);
// }
}
| |
/*
* Copyright (C) 2014 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.internal.codegen;
import com.google.auto.value.processor.AutoAnnotationProcessor;
import com.google.common.collect.ImmutableList;
import com.google.testing.compile.JavaFileObjects;
import javax.tools.JavaFileObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import static com.google.common.truth.Truth.assert_;
import static com.google.testing.compile.JavaSourcesSubjectFactory.javaSources;
/**
 * Tests for {@code @MapKey} processing: verifies that a creator class is
 * generated for unwrapped map keys (top-level and nested annotations) and
 * that components wire map bindings correctly when the key annotation's
 * fields are given out of order or left at their declared defaults.
 */
@RunWith(JUnit4.class)
public class MapKeyProcessorTest {
  @Test
  public void mapKeyCreatorFile() {
    JavaFileObject enumKeyFile = JavaFileObjects.forSourceLines("test.PathKey",
        "package test;",
        "import dagger.MapKey;",
        "import java.lang.annotation.Retention;",
        "import static java.lang.annotation.RetentionPolicy.RUNTIME;",
        "",
        "@MapKey(unwrapValue = false)",
        "@Retention(RUNTIME)",
        "public @interface PathKey {",
        " PathEnum value();",
        " String relativePath() default \"Defaultpath\";",
        "}");
    JavaFileObject pathEnumFile = JavaFileObjects.forSourceLines("test.PathEnum",
        "package test;",
        "",
        "public enum PathEnum {",
        " ADMIN,",
        " LOGIN;",
        "}");
    // Expected creator: one @AutoAnnotation factory per key annotation.
    JavaFileObject generatedKeyCreator = JavaFileObjects.forSourceLines("test.PathKeyCreator",
        "package test;",
        "",
        "import javax.annotation.Generated;",
        "",
        "@Generated(\"dagger.internal.codegen.ComponentProcessor\")",
        "public final class PathKeyCreator {",
        " @com.google.auto.value.AutoAnnotation",
        " public static PathKey createPathKey(PathEnum value, String relativePath) {",
        " return new AutoAnnotation_PathKeyCreator_createPathKey(value, relativePath);",
        " }",
        "}");
    assert_().about(javaSources())
        .that(ImmutableList.of(
            enumKeyFile,
            pathEnumFile))
        .processedWith(new ComponentProcessor(), new AutoAnnotationProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(generatedKeyCreator);
  }

  @Test
  public void nestedMapKeyCreatorFile() {
    JavaFileObject enumKeyFile = JavaFileObjects.forSourceLines("test.Container",
        "package test;",
        "import dagger.MapKey;",
        "import java.lang.annotation.Retention;",
        "import static java.lang.annotation.RetentionPolicy.RUNTIME;",
        "",
        "public interface Container {",
        "@MapKey(unwrapValue = false)",
        "@Retention(RUNTIME)",
        "public @interface PathKey {",
        " PathEnum value();",
        " String relativePath() default \"Defaultpath\";",
        "}",
        "}");
    JavaFileObject pathEnumFile = JavaFileObjects.forSourceLines("test.PathEnum",
        "package test;",
        "",
        "public enum PathEnum {",
        " ADMIN,",
        " LOGIN;",
        "}");
    JavaFileObject generatedKeyCreator =
        JavaFileObjects.forSourceLines("test.Container$PathKeyCreator",
            "package test;",
            "",
            "import javax.annotation.Generated;",
            // FIX: the import declaration was missing its terminating
            // semicolon, which made this expected source unparseable.
            "import test.Container.PathKey;",
            "",
            "@Generated(\"dagger.internal.codegen.ComponentProcessor\")",
            "public final class Container$PathKeyCreator {",
            " @com.google.auto.value.AutoAnnotation",
            " public static PathKey createPathKey(PathEnum value, String relativePath) {",
            " return new AutoAnnotation_Container$PathKeyCreator_createPathKey(",
            " value, relativePath);",
            " }",
            "}");
    assert_().about(javaSources())
        .that(ImmutableList.of(
            enumKeyFile,
            pathEnumFile))
        .processedWith(new ComponentProcessor(), new AutoAnnotationProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(generatedKeyCreator);
  }

  @Test
  public void mapKeyComponentFileWithDisorderedKeyField() {
    // The key fields are written in the reverse of their declaration order.
    JavaFileObject mapModuleOneFile = JavaFileObjects.forSourceLines("test.MapModuleOne",
        "package test;",
        "",
        "import static dagger.Provides.Type.MAP;",
        "",
        "import dagger.Module;",
        "import dagger.Provides;",
        "",
        "@Module",
        "final class MapModuleOne {",
        " @Provides(type = MAP) @PathKey(relativePath = \"AdminPath\", value = PathEnum.ADMIN)",
        " Handler provideAdminHandler() {",
        " return new AdminHandler();",
        " }",
        "}");
    JavaFileObject mapModuleTwoFile = JavaFileObjects.forSourceLines("test.MapModuleTwo",
        "package test;",
        "",
        "import static dagger.Provides.Type.MAP;",
        "",
        "import dagger.Module;",
        "import dagger.Provides;",
        "",
        "@Module",
        "final class MapModuleTwo {",
        " @Provides(type = MAP) @PathKey(value = PathEnum.LOGIN, relativePath = \"LoginPath\")",
        " Handler provideLoginHandler() {",
        " return new LoginHandler();",
        " }",
        "}");
    JavaFileObject enumKeyFile = JavaFileObjects.forSourceLines("test.PathKey",
        "package test;",
        "import dagger.MapKey;",
        "import java.lang.annotation.Retention;",
        "import static java.lang.annotation.RetentionPolicy.RUNTIME;",
        "",
        "@MapKey(unwrapValue = false)",
        "@Retention(RUNTIME)",
        "public @interface PathKey {",
        " PathEnum value();",
        " String relativePath() default \"DefaultPath\";",
        "}");
    JavaFileObject pathEnumFile = JavaFileObjects.forSourceLines("test.PathEnum",
        "package test;",
        "",
        "public enum PathEnum {",
        " ADMIN,",
        " LOGIN;",
        "}");
    JavaFileObject handlerFile = JavaFileObjects.forSourceLines("test.Handler",
        "package test;",
        "",
        "interface Handler {}");
    JavaFileObject loginHandlerFile = JavaFileObjects.forSourceLines("test.LoginHandler",
        "package test;",
        "",
        "class LoginHandler implements Handler {",
        " public LoginHandler() {}",
        "}");
    JavaFileObject adminHandlerFile = JavaFileObjects.forSourceLines("test.AdminHandler",
        "package test;",
        "",
        "class AdminHandler implements Handler {",
        " public AdminHandler() {}",
        "}");
    JavaFileObject componentFile = JavaFileObjects.forSourceLines("test.TestComponent",
        "package test;",
        "",
        "import dagger.Component;",
        "import java.util.Map;",
        "import javax.inject.Provider;",
        "",
        "@Component(modules = {MapModuleOne.class, MapModuleTwo.class})",
        "interface TestComponent {",
        " Map<PathKey, Provider<Handler>> dispatcher();",
        "}");
    // The generated component must pass the key fields to the creator in
    // declaration order regardless of their order at the use site.
    JavaFileObject generatedComponent = JavaFileObjects.forSourceLines("test.DaggerTestComponent",
        "package test;",
        "",
        "import dagger.internal.MapProviderFactory;",
        "import java.util.Map;",
        "import javax.annotation.Generated;",
        "import javax.inject.Provider;",
        "",
        "@Generated(\"dagger.internal.codegen.ComponentProcessor\")",
        "public final class DaggerTestComponent implements TestComponent {",
        " private Provider<Handler> mapOfPathKeyAndProviderOfHandlerContribution1;",
        " private Provider<Handler> mapOfPathKeyAndProviderOfHandlerContribution2;",
        " private Provider<Map<PathKey, Provider<Handler>>>",
        " mapOfPathKeyAndProviderOfHandlerProvider;",
        "",
        " private DaggerTestComponent(Builder builder) {",
        " assert builder != null;",
        " initialize(builder);",
        " }",
        "",
        " public static Builder builder() {",
        " return new Builder();",
        " }",
        "",
        " public static TestComponent create() {",
        " return builder().build();",
        " }",
        "",
        " private void initialize(final Builder builder) {",
        " this.mapOfPathKeyAndProviderOfHandlerContribution1 =",
        " MapModuleOne_ProvideAdminHandlerFactory.create(builder.mapModuleOne);",
        " this.mapOfPathKeyAndProviderOfHandlerContribution2 =",
        " MapModuleTwo_ProvideLoginHandlerFactory.create(builder.mapModuleTwo);",
        " this.mapOfPathKeyAndProviderOfHandlerProvider =",
        " MapProviderFactory.<PathKey, Handler>builder(2)",
        " .put(PathKeyCreator.createPathKey(PathEnum.ADMIN, \"AdminPath\"),",
        " mapOfPathKeyAndProviderOfHandlerContribution1)",
        " .put(PathKeyCreator.createPathKey(PathEnum.LOGIN, \"LoginPath\"),",
        " mapOfPathKeyAndProviderOfHandlerContribution2)",
        " .build();",
        " }",
        "",
        " @Override",
        " public Map<PathKey, Provider<Handler>> dispatcher() {",
        " return mapOfPathKeyAndProviderOfHandlerProvider.get();",
        " }",
        "",
        " public static final class Builder {",
        " private MapModuleOne mapModuleOne;",
        " private MapModuleTwo mapModuleTwo;",
        "",
        " private Builder() {",
        " }",
        "",
        " public TestComponent build() {",
        " if (mapModuleOne == null) {",
        " this.mapModuleOne = new MapModuleOne();",
        " }",
        " if (mapModuleTwo == null) {",
        " this.mapModuleTwo = new MapModuleTwo();",
        " }",
        " return new DaggerTestComponent(this);",
        " }",
        "",
        " public Builder mapModuleOne(MapModuleOne mapModuleOne) {",
        " if (mapModuleOne == null) {",
        " throw new NullPointerException(\"mapModuleOne\");",
        " }",
        " this.mapModuleOne = mapModuleOne;",
        " return this;",
        " }",
        "",
        " public Builder mapModuleTwo(MapModuleTwo mapModuleTwo) {",
        " if (mapModuleTwo == null) {",
        " throw new NullPointerException(\"mapModuleTwo\");",
        " }",
        " this.mapModuleTwo = mapModuleTwo;",
        " return this;",
        " }",
        " }",
        "}");
    assert_().about(javaSources())
        .that(ImmutableList.of(
            mapModuleOneFile,
            mapModuleTwoFile,
            enumKeyFile,
            pathEnumFile,
            handlerFile,
            loginHandlerFile,
            adminHandlerFile,
            componentFile))
        .processedWith(new ComponentProcessor(), new AutoAnnotationProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(generatedComponent);
  }

  @Test
  public void mapKeyComponentFileWithDefaultField() {
    // relativePath is omitted here, so the generated component must fall
    // back to the annotation's declared default ("DefaultPath").
    JavaFileObject mapModuleOneFile = JavaFileObjects.forSourceLines("test.MapModuleOne",
        "package test;",
        "",
        "import static dagger.Provides.Type.MAP;",
        "",
        "import dagger.Module;",
        "import dagger.Provides;",
        "",
        "@Module",
        "final class MapModuleOne {",
        " @Provides(type = MAP) @PathKey(value = PathEnum.ADMIN) Handler provideAdminHandler() {",
        " return new AdminHandler();",
        " }",
        "}");
    JavaFileObject mapModuleTwoFile = JavaFileObjects.forSourceLines("test.MapModuleTwo",
        "package test;",
        "",
        "import static dagger.Provides.Type.MAP;",
        "",
        "import dagger.Module;",
        "import dagger.Provides;",
        "",
        "@Module",
        "final class MapModuleTwo {",
        " @Provides(type = MAP) @PathKey(value = PathEnum.LOGIN, relativePath = \"LoginPath\")",
        " Handler provideLoginHandler() {",
        " return new LoginHandler();",
        " }",
        "}");
    JavaFileObject enumKeyFile = JavaFileObjects.forSourceLines("test.PathKey",
        "package test;",
        "import dagger.MapKey;",
        "import java.lang.annotation.Retention;",
        "import static java.lang.annotation.RetentionPolicy.RUNTIME;",
        "",
        "@MapKey(unwrapValue = false)",
        "@Retention(RUNTIME)",
        "public @interface PathKey {",
        " PathEnum value();",
        " String relativePath() default \"DefaultPath\";",
        "}");
    JavaFileObject pathEnumFile = JavaFileObjects.forSourceLines("test.PathEnum",
        "package test;",
        "",
        "public enum PathEnum {",
        " ADMIN,",
        " LOGIN;",
        "}");
    JavaFileObject handlerFile = JavaFileObjects.forSourceLines("test.Handler",
        "package test;",
        "",
        "interface Handler {}");
    JavaFileObject loginHandlerFile = JavaFileObjects.forSourceLines("test.LoginHandler",
        "package test;",
        "",
        "class LoginHandler implements Handler {",
        " public LoginHandler() {}",
        "}");
    JavaFileObject adminHandlerFile = JavaFileObjects.forSourceLines("test.AdminHandler",
        "package test;",
        "",
        "class AdminHandler implements Handler {",
        " public AdminHandler() {}",
        "}");
    JavaFileObject componentFile = JavaFileObjects.forSourceLines("test.TestComponent",
        "package test;",
        "",
        "import dagger.Component;",
        "import java.util.Map;",
        "import javax.inject.Provider;",
        "",
        "@Component(modules = {MapModuleOne.class, MapModuleTwo.class})",
        "interface TestComponent {",
        " Map<PathKey, Provider<Handler>> dispatcher();",
        "}");
    JavaFileObject generatedComponent = JavaFileObjects.forSourceLines("test.DaggerTestComponent",
        "package test;",
        "",
        "import dagger.internal.MapProviderFactory;",
        "import java.util.Map;",
        "import javax.annotation.Generated;",
        "import javax.inject.Provider;",
        "",
        "@Generated(\"dagger.internal.codegen.ComponentProcessor\")",
        "public final class DaggerTestComponent implements TestComponent {",
        " private Provider<Handler> mapOfPathKeyAndProviderOfHandlerContribution1;",
        " private Provider<Handler> mapOfPathKeyAndProviderOfHandlerContribution2;",
        " private Provider<Map<PathKey, Provider<Handler>>>",
        " mapOfPathKeyAndProviderOfHandlerProvider;",
        "",
        " private DaggerTestComponent(Builder builder) {",
        " assert builder != null;",
        " initialize(builder);",
        " }",
        "",
        " public static Builder builder() {",
        " return new Builder();",
        " }",
        "",
        " public static TestComponent create() {",
        " return builder().build();",
        " }",
        "",
        " private void initialize(final Builder builder) {",
        " this.mapOfPathKeyAndProviderOfHandlerContribution1 =",
        " MapModuleOne_ProvideAdminHandlerFactory.create(builder.mapModuleOne);",
        " this.mapOfPathKeyAndProviderOfHandlerContribution2 =",
        " MapModuleTwo_ProvideLoginHandlerFactory.create(builder.mapModuleTwo);",
        " this.mapOfPathKeyAndProviderOfHandlerProvider =",
        " MapProviderFactory.<PathKey, Handler>builder(2)",
        " .put(PathKeyCreator.createPathKey(PathEnum.ADMIN, \"DefaultPath\"),",
        " mapOfPathKeyAndProviderOfHandlerContribution1)",
        " .put(PathKeyCreator.createPathKey(PathEnum.LOGIN, \"LoginPath\"),",
        " mapOfPathKeyAndProviderOfHandlerContribution2)",
        " .build();",
        " }",
        "",
        " @Override",
        " public Map<PathKey, Provider<Handler>> dispatcher() {",
        " return mapOfPathKeyAndProviderOfHandlerProvider.get();",
        " }",
        "",
        " public static final class Builder {",
        " private MapModuleOne mapModuleOne;",
        " private MapModuleTwo mapModuleTwo;",
        "",
        " private Builder() {",
        " }",
        "",
        " public TestComponent build() {",
        " if (mapModuleOne == null) {",
        " this.mapModuleOne = new MapModuleOne();",
        " }",
        " if (mapModuleTwo == null) {",
        " this.mapModuleTwo = new MapModuleTwo();",
        " }",
        " return new DaggerTestComponent(this);",
        " }",
        "",
        " public Builder mapModuleOne(MapModuleOne mapModuleOne) {",
        " if (mapModuleOne == null) {",
        " throw new NullPointerException(\"mapModuleOne\");",
        " }",
        " this.mapModuleOne = mapModuleOne;",
        " return this;",
        " }",
        "",
        " public Builder mapModuleTwo(MapModuleTwo mapModuleTwo) {",
        " if (mapModuleTwo == null) {",
        " throw new NullPointerException(\"mapModuleTwo\");",
        " }",
        " this.mapModuleTwo = mapModuleTwo;",
        " return this;",
        " }",
        " }",
        "}");
    assert_().about(javaSources())
        .that(ImmutableList.of(
            mapModuleOneFile,
            mapModuleTwoFile,
            enumKeyFile,
            pathEnumFile,
            handlerFile,
            loginHandlerFile,
            adminHandlerFile,
            componentFile))
        .processedWith(new ComponentProcessor(), new AutoAnnotationProcessor())
        .compilesWithoutError()
        .and()
        .generatesSources(generatedComponent);
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.kafka.clients.producer.internals;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.kafka.clients.ClientRequest;
import org.apache.kafka.clients.ClientResponse;
import org.apache.kafka.clients.KafkaClient;
import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.InvalidMetadataException;
import org.apache.kafka.common.errors.RetriableException;
import org.apache.kafka.common.metrics.Measurable;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Avg;
import org.apache.kafka.common.metrics.stats.Max;
import org.apache.kafka.common.metrics.stats.Rate;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.requests.ProduceRequest;
import org.apache.kafka.common.requests.ProduceResponse;
import org.apache.kafka.common.requests.RequestSend;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The background thread that handles the sending of produce requests to the Kafka cluster. This thread makes metadata
* requests to renew its view of the cluster and then sends produce requests to the appropriate nodes.
*/
public class Sender implements Runnable {
    private static final Logger log = LoggerFactory.getLogger(Sender.class);
    /* the network client, which tracks the state of each node's connection */
    private final KafkaClient client;
    /* the record accumulator that batches records */
    private final RecordAccumulator accumulator;
    /* the metadata for the client */
    private final Metadata metadata;
    /* the maximum request size to attempt to send to the server */
    private final int maxRequestSize;
    /* the number of acknowledgements to request from the server */
    private final short acks;
    /* the max time in ms for the server to wait for acknowledgements */
    private final int requestTimeout;
    /* the number of times to retry a failed request before giving up */
    private final int retries;
    /* the clock instance used for getting the time */
    private final Time time;
    /* true while the sender thread is still running */
    private volatile boolean running;
    /* metrics */
    private final SenderMetrics sensors;
    /* the client id, used to tag this client's metrics */
    private String clientId;
public Sender(KafkaClient client,
Metadata metadata,
RecordAccumulator accumulator,
int maxRequestSize,
short acks,
int retries,
int requestTimeout,
Metrics metrics,
Time time,
String clientId) {
this.client = client;
this.accumulator = accumulator;
this.metadata = metadata;
this.maxRequestSize = maxRequestSize;
this.running = true;
this.requestTimeout = requestTimeout;
this.acks = acks;
this.retries = retries;
this.time = time;
this.clientId = clientId;
this.sensors = new SenderMetrics(metrics);
}
/**
* The main run loop for the sender thread
*/
public void run() {
log.debug("Starting Kafka producer I/O thread.");
// main loop, runs until close is called
while (running) {
try {
run(time.milliseconds());
} catch (Exception e) {
log.error("Uncaught error in kafka producer I/O thread: ", e);
}
}
log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records.");
// okay we stopped accepting requests but there may still be
// requests in the accumulator or waiting for acknowledgment,
// wait until these are completed.
while (this.accumulator.hasUnsent() || this.client.inFlightRequestCount() > 0) {
try {
run(time.milliseconds());
} catch (Exception e) {
log.error("Uncaught error in kafka producer I/O thread: ", e);
}
}
this.client.close();
log.debug("Shutdown of Kafka producer I/O thread has completed.");
}
/**
* Run a single iteration of sending
*
* @param now The current POSIX time in milliseconds
*/
public void run(long now) {
Cluster cluster = metadata.fetch();
// get the list of partitions with data ready to send
RecordAccumulator.ReadyCheckResult result = this.accumulator.ready(cluster, now);
// if there are any partitions whose leaders are not known yet, force metadata update
if (result.unknownLeadersExist)
this.metadata.requestUpdate();
// remove any nodes we aren't ready to send to
Iterator<Node> iter = result.readyNodes.iterator();
long notReadyTimeout = Long.MAX_VALUE;
while (iter.hasNext()) {
Node node = iter.next();
if (!this.client.ready(node, now)) {
iter.remove();
notReadyTimeout = Math.min(notReadyTimeout, this.client.connectionDelay(node, now));
}
}
// create produce requests
Map<Integer, List<RecordBatch>> batches = this.accumulator.drain(cluster, result.readyNodes, this.maxRequestSize, now);
List<ClientRequest> requests = createProduceRequests(batches, now);
sensors.updateProduceRequestMetrics(requests);
// If we have any nodes that are ready to send + have sendable data, poll with 0 timeout so this can immediately
// loop and try sending more data. Otherwise, the timeout is determined by nodes that have partitions with data
// that isn't yet sendable (e.g. lingering, backing off). Note that this specifically does not include nodes
// with sendable data that aren't ready to send since they would cause busy looping.
long pollTimeout = Math.min(result.nextReadyCheckDelayMs, notReadyTimeout);
if (result.readyNodes.size() > 0) {
log.trace("Nodes with data ready to send: {}", result.readyNodes);
log.trace("Created {} produce requests: {}", requests.size(), requests);
pollTimeout = 0;
}
// if some partitions are already ready to be sent, the select time would be 0;
// otherwise if some partition already has some data accumulated but not ready yet,
// the select time will be the time difference between now and its linger expiry time;
// otherwise the select time will be the time difference between now and the metadata expiry time;
List<ClientResponse> responses = this.client.poll(requests, pollTimeout, now);
for (ClientResponse response : responses) {
if (response.wasDisconnected())
handleDisconnect(response, now);
else
handleResponse(response, now);
}
}
/**
* Start closing the sender (won't actually complete until all data is sent out)
*/
public void initiateClose() {
this.running = false;
this.accumulator.close();
this.wakeup();
}
private void handleDisconnect(ClientResponse response, long now) {
log.trace("Cancelled request {} due to node {} being disconnected", response, response.request().request().destination());
int correlation = response.request().request().header().correlationId();
@SuppressWarnings("unchecked")
Map<TopicPartition, RecordBatch> responseBatches = (Map<TopicPartition, RecordBatch>) response.request().attachment();
for (RecordBatch batch : responseBatches.values())
completeBatch(batch, Errors.NETWORK_EXCEPTION, -1L, correlation, now);
}
/**
* Handle a produce response
*/
private void handleResponse(ClientResponse response, long now) {
int correlationId = response.request().request().header().correlationId();
log.trace("Received produce response from node {} with correlation id {}",
response.request().request().destination(),
correlationId);
@SuppressWarnings("unchecked")
Map<TopicPartition, RecordBatch> batches = (Map<TopicPartition, RecordBatch>) response.request().attachment();
// if we have a response, parse it
if (response.hasResponse()) {
ProduceResponse produceResponse = new ProduceResponse(response.responseBody());
for (Map.Entry<TopicPartition, ProduceResponse.PartitionResponse> entry : produceResponse.responses().entrySet()) {
TopicPartition tp = entry.getKey();
ProduceResponse.PartitionResponse partResp = entry.getValue();
Errors error = Errors.forCode(partResp.errorCode);
RecordBatch batch = batches.get(tp);
completeBatch(batch, error, partResp.baseOffset, correlationId, now);
}
this.sensors.recordLatency(response.request().request().destination(), response.requestLatencyMs());
} else {
// this is the acks = 0 case, just complete all requests
for (RecordBatch batch : batches.values())
completeBatch(batch, Errors.NONE, -1L, correlationId, now);
}
}
/**
* Complete or retry the given batch of records.
* @param batch The record batch
* @param error The error (or null if none)
* @param baseOffset The base offset assigned to the records if successful
* @param correlationId The correlation id for the request
* @param now The current POSIX time stamp in milliseconds
*/
private void completeBatch(RecordBatch batch, Errors error, long baseOffset, long correlationId, long now) {
if (error != Errors.NONE && canRetry(batch, error)) {
// retry
log.warn("Got error produce response with correlation id {} on topic-partition {}, retrying ({} attempts left). Error: {}",
correlationId,
batch.topicPartition,
this.retries - batch.attempts - 1,
error);
this.accumulator.reenqueue(batch, now);
this.sensors.recordRetries(batch.topicPartition.topic(), batch.recordCount);
} else {
// tell the user the result of their request
batch.done(baseOffset, error.exception());
this.accumulator.deallocate(batch);
if (error != Errors.NONE)
this.sensors.recordErrors(batch.topicPartition.topic(), batch.recordCount);
}
if (error.exception() instanceof InvalidMetadataException)
metadata.requestUpdate();
}
/**
* We can retry a send if the error is transient and the number of attempts taken is fewer than the maximum allowed
*/
private boolean canRetry(RecordBatch batch, Errors error) {
return batch.attempts < this.retries && error.exception() instanceof RetriableException;
}
/**
* Transfer the record batches into a list of produce requests on a per-node basis
*/
private List<ClientRequest> createProduceRequests(Map<Integer, List<RecordBatch>> collated, long now) {
List<ClientRequest> requests = new ArrayList<ClientRequest>(collated.size());
for (Map.Entry<Integer, List<RecordBatch>> entry : collated.entrySet())
requests.add(produceRequest(now, entry.getKey(), acks, requestTimeout, entry.getValue()));
return requests;
}
/**
* Create a produce request from the given record batches
*/
private ClientRequest produceRequest(long now, int destination, short acks, int timeout, List<RecordBatch> batches) {
Map<TopicPartition, ByteBuffer> produceRecordsByPartition = new HashMap<TopicPartition, ByteBuffer>(batches.size());
Map<TopicPartition, RecordBatch> recordsByPartition = new HashMap<TopicPartition, RecordBatch>(batches.size());
for (RecordBatch batch : batches) {
TopicPartition tp = batch.topicPartition;
ByteBuffer recordsBuffer = batch.records.buffer();
recordsBuffer.flip();
produceRecordsByPartition.put(tp, recordsBuffer);
recordsByPartition.put(tp, batch);
}
ProduceRequest request = new ProduceRequest(acks, timeout, produceRecordsByPartition);
RequestSend send = new RequestSend(destination, this.client.nextRequestHeader(ApiKeys.PRODUCE), request.toStruct());
return new ClientRequest(now, acks != 0, send, recordsByPartition);
}
/**
 * Wake up the selector associated with this send thread so that a blocking
 * network poll returns immediately (e.g. when new records become sendable).
 */
public void wakeup() {
this.client.wakeup();
}
/**
* A collection of sensors for the sender
*/
private class SenderMetrics {
// shared metrics registry; also used to register/look up per-topic sensors lazily
private final Metrics metrics;
public final Sensor retrySensor;
public final Sensor errorSensor;
public final Sensor queueTimeSensor;
public final Sensor requestTimeSensor;
public final Sensor recordsPerRequestSensor;
public final Sensor batchSizeSensor;
public final Sensor recompressionRateSensor;
public final Sensor compressionRateSensor;
public final Sensor maxRecordSizeSensor;
public SenderMetrics(Metrics metrics) {
this.metrics = metrics;
// all producer-level metrics carry the client-id tag
Map<String, String> metricTags = new LinkedHashMap<String, String>();
metricTags.put("client-id", clientId);
String metricGrpName = "producer-metrics";
// batch size: bytes sent per partition per request
this.batchSizeSensor = metrics.sensor("batch-size");
MetricName m = new MetricName("batch-size-avg", metricGrpName, "The average number of bytes sent per partition per-request.", metricTags);
this.batchSizeSensor.add(m, new Avg());
m = new MetricName("batch-size-max", metricGrpName, "The max number of bytes sent per partition per-request.", metricTags);
this.batchSizeSensor.add(m, new Max());
// compression ratio achieved on record batches
this.compressionRateSensor = metrics.sensor("compression-rate");
m = new MetricName("compression-rate-avg", metricGrpName, "The average compression rate of record batches.", metricTags);
this.compressionRateSensor.add(m, new Avg());
// time batches spent waiting in the record accumulator
this.queueTimeSensor = metrics.sensor("queue-time");
m = new MetricName("record-queue-time-avg", metricGrpName, "The average time in ms record batches spent in the record accumulator.", metricTags);
this.queueTimeSensor.add(m, new Avg());
m = new MetricName("record-queue-time-max", metricGrpName, "The maximum time in ms record batches spent in the record accumulator.", metricTags);
this.queueTimeSensor.add(m, new Max());
// request round-trip latency
this.requestTimeSensor = metrics.sensor("request-time");
m = new MetricName("request-latency-avg", metricGrpName, "The average request latency in ms", metricTags);
this.requestTimeSensor.add(m, new Avg());
m = new MetricName("request-latency-max", metricGrpName, "The maximum request latency in ms", metricTags);
this.requestTimeSensor.add(m, new Max());
// record throughput and records-per-request
this.recordsPerRequestSensor = metrics.sensor("records-per-request");
m = new MetricName("record-send-rate", metricGrpName, "The average number of records sent per second.", metricTags);
this.recordsPerRequestSensor.add(m, new Rate());
m = new MetricName("records-per-request-avg", metricGrpName, "The average number of records per request.", metricTags);
this.recordsPerRequestSensor.add(m, new Avg());
// retried and failed record sends
this.retrySensor = metrics.sensor("record-retries");
m = new MetricName("record-retry-rate", metricGrpName, "The average per-second number of retried record sends", metricTags);
this.retrySensor.add(m, new Rate());
this.errorSensor = metrics.sensor("errors");
m = new MetricName("record-error-rate", metricGrpName, "The average per-second number of record sends that resulted in errors", metricTags);
this.errorSensor.add(m, new Rate());
// record size extremes/averages
this.maxRecordSizeSensor = metrics.sensor("record-size-max");
m = new MetricName("record-size-max", metricGrpName, "The maximum record size", metricTags);
this.maxRecordSizeSensor.add(m, new Max());
m = new MetricName("record-size-avg", metricGrpName, "The average record size", metricTags);
this.maxRecordSizeSensor.add(m, new Avg());
// gauges sampled on read rather than recorded: in-flight requests and metadata age
m = new MetricName("requests-in-flight", metricGrpName, "The current number of in-flight requests awaiting a response.", metricTags);
this.metrics.addMetric(m, new Measurable() {
public double measure(MetricConfig config, long now) {
return client.inFlightRequestCount();
}
});
m = new MetricName("metadata-age", metricGrpName, "The age in seconds of the current producer metadata being used.", metricTags);
metrics.addMetric(m, new Measurable() {
public double measure(MetricConfig config, long now) {
return (now - metadata.lastUpdate()) / 1000.0;
}
});
}
/**
 * Lazily registers the per-topic sensors the first time a topic is seen.
 * Registration of the whole group is keyed off the records-per-batch sensor.
 */
public void maybeRegisterTopicMetrics(String topic) {
// if one sensor of the metrics has been registered for the topic,
// then all other sensors should have been registered; and vice versa
String topicRecordsCountName = "topic." + topic + ".records-per-batch";
Sensor topicRecordCount = this.metrics.getSensor(topicRecordsCountName);
if (topicRecordCount == null) {
Map<String, String> metricTags = new LinkedHashMap<String, String>();
metricTags.put("client-id", clientId);
metricTags.put("topic", topic);
String metricGrpName = "producer-topic-metrics";
topicRecordCount = this.metrics.sensor(topicRecordsCountName);
MetricName m = new MetricName("record-send-rate", metricGrpName , metricTags);
topicRecordCount.add(m, new Rate());
String topicByteRateName = "topic." + topic + ".bytes";
Sensor topicByteRate = this.metrics.sensor(topicByteRateName);
m = new MetricName("byte-rate", metricGrpName , metricTags);
topicByteRate.add(m, new Rate());
String topicCompressionRateName = "topic." + topic + ".compression-rate";
Sensor topicCompressionRate = this.metrics.sensor(topicCompressionRateName);
m = new MetricName("compression-rate", metricGrpName , metricTags);
topicCompressionRate.add(m, new Avg());
String topicRetryName = "topic." + topic + ".record-retries";
Sensor topicRetrySensor = this.metrics.sensor(topicRetryName);
m = new MetricName("record-retry-rate", metricGrpName , metricTags);
topicRetrySensor.add(m, new Rate());
String topicErrorName = "topic." + topic + ".record-errors";
Sensor topicErrorSensor = this.metrics.sensor(topicErrorName);
m = new MetricName("record-error-rate", metricGrpName , metricTags);
topicErrorSensor.add(m, new Rate());
}
}
/**
 * Records per-topic and global batch statistics for a list of produce
 * requests that are about to be sent.
 */
public void updateProduceRequestMetrics(List<ClientRequest> requests) {
long now = time.milliseconds();
for (int i = 0; i < requests.size(); i++) {
ClientRequest request = requests.get(i);
int records = 0;
if (request.attachment() != null) {
// unchecked cast: attachments on produce requests are set by produceRequest() above
Map<TopicPartition, RecordBatch> responseBatches = (Map<TopicPartition, RecordBatch>) request.attachment();
for (RecordBatch batch : responseBatches.values()) {
// register all per-topic metrics at once
String topic = batch.topicPartition.topic();
maybeRegisterTopicMetrics(topic);
// per-topic record send rate
String topicRecordsCountName = "topic." + topic + ".records-per-batch";
Sensor topicRecordCount = Utils.notNull(this.metrics.getSensor(topicRecordsCountName));
topicRecordCount.record(batch.recordCount);
// per-topic bytes send rate
String topicByteRateName = "topic." + topic + ".bytes";
Sensor topicByteRate = Utils.notNull(this.metrics.getSensor(topicByteRateName));
topicByteRate.record(batch.records.sizeInBytes());
// per-topic compression rate
String topicCompressionRateName = "topic." + topic + ".compression-rate";
Sensor topicCompressionRate = Utils.notNull(this.metrics.getSensor(topicCompressionRateName));
topicCompressionRate.record(batch.records.compressionRate());
// global metrics
this.batchSizeSensor.record(batch.records.sizeInBytes(), now);
this.queueTimeSensor.record(batch.drainedMs - batch.createdMs, now);
this.compressionRateSensor.record(batch.records.compressionRate());
this.maxRecordSizeSensor.record(batch.maxRecordSize, now);
records += batch.recordCount;
}
this.recordsPerRequestSensor.record(records, now);
}
}
}
/**
 * Records retried sends globally and, when registered, for the given topic.
 */
public void recordRetries(String topic, int count) {
long now = time.milliseconds();
this.retrySensor.record(count, now);
String topicRetryName = "topic." + topic + ".record-retries";
Sensor topicRetrySensor = this.metrics.getSensor(topicRetryName);
if (topicRetrySensor != null)
topicRetrySensor.record(count, now);
}
/**
 * Records failed sends globally and, when registered, for the given topic.
 */
public void recordErrors(String topic, int count) {
long now = time.milliseconds();
this.errorSensor.record(count, now);
String topicErrorName = "topic." + topic + ".record-errors";
Sensor topicErrorSensor = this.metrics.getSensor(topicErrorName);
if (topicErrorSensor != null)
topicErrorSensor.record(count, now);
}
/**
 * Records request latency globally and, for a known node (node >= 0),
 * against that node's latency sensor when one exists.
 */
public void recordLatency(int node, long latency) {
long now = time.milliseconds();
this.requestTimeSensor.record(latency, now);
if (node >= 0) {
String nodeTimeName = "node-" + node + ".latency";
Sensor nodeRequestTime = this.metrics.getSensor(nodeTimeName);
if (nodeRequestTime != null)
nodeRequestTime.record(latency, now);
}
}
}
}
| |
/**
*
*/
package com.atlassian.bamboo.plugins.checkstyle;
import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import com.atlassian.bamboo.build.Job;
import com.atlassian.bamboo.plugins.checkstyle.tasks.CheckStyleTaskConfigurator;
import com.atlassian.bamboo.resultsummary.ResultsSummary;
import com.atlassian.bamboo.task.TaskDefinition;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang.math.NumberUtils;
import com.atlassian.bamboo.build.logger.BuildLogger;
import com.atlassian.bamboo.builder.BuildState;
import com.atlassian.bamboo.v2.build.BuildContext;
import com.atlassian.bamboo.ww2.actions.build.admin.create.BuildConfiguration;
import org.aspectj.weaver.Iterators;
/**
* Helper class with some common method for Checkstyle plugin
*
* @author lauvigne
*/
public class CheckstylePluginHelper
    implements ICheckStyleBuildProcessor
{
    /** Plugin key under which the checkstyle task is registered in Bamboo. */
    public static final String CHECKSTYLE_TASK_PLUGIN_KEY = "com.atlassian.bamboo.plugins.checkstyle:checkStyleTask";

    /**
     * @param job the job whose task definitions are inspected
     * @return true if the checkstyle task is configured for the plan associated to this job
     */
    public static boolean isPluginActivated( Job job )
    {
        // Iterables.find with a null default yields null when no task matches
        return Iterables.find( job.getBuildDefinition().getTaskDefinitions(), new Predicate<TaskDefinition>()
        {
            @Override
            public boolean apply( TaskDefinition taskDefinition )
            {
                return taskDefinition.getPluginKey().equals( CHECKSTYLE_TASK_PLUGIN_KEY );
            }
        }, null ) != null;
    }

    /**
     * @param customBuildData Map that contains the metadata of a build (may be null)
     * @return true if the build has checkstyle results
     */
    public static boolean hasCheckstyleResults( Map<String, String> customBuildData )
    {
        return customBuildData != null
            && customBuildData.containsKey( ICheckStyleBuildProcessor.CHECKSTYLE_TOTAL_VIOLATIONS );
    }

    /**
     * @param summary a result of a build (may be null)
     * @return true if the build has checkstyle results
     */
    public static boolean hasCheckstyleResults( ResultsSummary summary )
    {
        return summary != null && hasCheckstyleResults( summary.getCustomBuildData() );
    }

    /**
     * Validates the checkstyle site URL of a plan configuration, if one is set.
     *
     * @param configuration The build plan configuration
     * @return null if the url is valid (or not configured), the error message if it is invalid
     */
    public static String validCheckstyleURL( BuildConfiguration configuration )
    {
        if ( configuration.getBoolean( CHECKSTYLE_EXISTS )
            && StringUtils.isNotBlank( configuration.getString( CheckStyleTaskConfigurator.CHECKSTYLE_SITE_URL ) ) )
        {
            String url = configuration.getString( CheckStyleTaskConfigurator.CHECKSTYLE_SITE_URL );
            return validHttpURL( url );
        }
        // Else it's valid (nothing configured, nothing to check)
        return null;
    }

    /**
     * Performs an HTTP GET on the given url with a short (2s) connect timeout.
     *
     * @param url the url to test
     * @return null if the url answers HTTP 200, the response body or exception message otherwise
     */
    protected static String validHttpURL( String url )
    {
        GetMethod get = null;
        try
        {
            HttpClient client = new HttpClient();
            client.getHttpConnectionManager().getParams().setConnectionTimeout( 2 * 1000 );
            get = new GetMethod( url );
            get.setFollowRedirects( true );
            int resultCode = client.executeMethod( get );
            if ( resultCode != 200 )
            {
                return get.getResponseBodyAsString();
            }
        }
        catch ( Exception e )
        {
            return ExceptionUtils.getMessage( e );
        }
        finally
        {
            // always release the pooled connection; the previous code leaked it
            if ( get != null )
            {
                get.releaseConnection();
            }
        }
        // Else it's valid
        return null;
    }

    /**
     * Transform topViolations with filename by topViolations with Http URL.
     *
     * @param sourceDirectory the working directory where the build has run
     * @param customConfiguration Map that contains the metadata of a plan configuration
     * @param checkstyleResults map that contains the checkStyle results; updated in place
     */
    public static void transformFilenameInHttpURL( File sourceDirectory,
                                                   Map<String, String> customConfiguration,
                                                   Map<String, String> checkstyleResults )
    {
        String baseURL = customConfiguration.get( CheckStyleTaskConfigurator.CHECKSTYLE_SITE_URL );
        String topViolations = checkstyleResults.get( ICheckStyleBuildProcessor.CHECKSTYLE_TOP_VIOLATIONS );
        String newTopViolations = transformFilenameInHttpURL( sourceDirectory, baseURL, topViolations );
        checkstyleResults.put( ICheckStyleBuildProcessor.CHECKSTYLE_TOP_VIOLATIONS, newTopViolations );
    }

    /**
     * @param sourceDirectory the working directory of the build
     * @param baseURL the base URL of the published checkstyle site
     * @param topViolation the top violation content with absolute filenames
     * @return the topViolation content with http URLs
     */
    public static String transformFilenameInHttpURL( File sourceDirectory, String baseURL,
                                                     String topViolation )
    {
        Map<String, Integer> topViolationInitial = CsvHelper.extractToCsv( topViolation );
        Map<String, Integer> topViolationFinal = new HashMap<String, Integer>();
        for ( Entry<String, Integer> entry : topViolationInitial.entrySet() )
        {
            topViolationFinal.put( convertFilenameInHttpURL( sourceDirectory, baseURL, entry.getKey() ),
                                   entry.getValue() );
        }
        return CsvHelper.convertTopViolationsToCsv( topViolationFinal );
    }

    /**
     * TODO improve performance and design. Be careful, this method works only for maven2 reports:
     * it assumes the source file lives under "src/main/java/".
     *
     * @param sourceDirectory the working directory of the build
     * @param baseURL the base URL of the published checkstyle site
     * @param key the absolute filename of the offending source file
     * @return the string representing the http URL of the checkstyle report anchor
     */
    public static String convertFilenameInHttpURL( File sourceDirectory, String baseURL, String key )
    {
        String newBaseURL = StringUtils.trimToEmpty( baseURL );
        newBaseURL = StringUtils.removeEnd( newBaseURL, "/" );
        String filename = FilenameUtils.normalize( key );
        String beginPath = StringUtils.remove( filename, sourceDirectory.getPath() );
        beginPath = beginPath.replace( '\\', '/' );
        String sourceDir = "src/main/java/";
        String[] splitUrl = StringUtils.splitByWholeSeparator( beginPath, sourceDir );
        if ( splitUrl == null || splitUrl.length < 2 )
        {
            // the file is not under the maven source root: previously this threw
            // ArrayIndexOutOfBoundsException; fall back to the report page itself
            return newBaseURL + "/checkstyle.html";
        }
        beginPath = splitUrl[0].replaceAll( "//", "/" );
        String classname = splitUrl[1].replace( '/', '.' );
        return newBaseURL + beginPath + "checkstyle.html#" + classname;
    }

    /**
     * Converts String to int.
     *
     * @return -1 if parse exception or non-positive integer else returns
     *         integer from String
     */
    public static int getThreshold(
        String value)
    {
        int returnValue = NumberUtils.toInt( value, -1 );
        if ( returnValue < 0 )
        {
            returnValue = -1;
        }
        return returnValue;
    }

    /**
     * Fails the build when the number of checkstyle violations of the given
     * priority ("error" by default, or "warning") exceeds the configured threshold.
     */
    public static void processThreshold(
        BuildContext context, Map<String, String> checkstyleResults, String type, BuildLogger buildLogger)
    {
        Map<String, String> customConfiguration = context.getBuildDefinition().getCustomConfiguration();
        String thresholdName = CheckStyleTaskConfigurator.CHECKSTYLE_ERROR_PRIORITY_THRESHOLD;
        String violationName = ICheckStyleBuildProcessor.CHECKSTYLE_ERROR_PRIORITY_VIOLATIONS;
        if ( "warning".equals( type ) )
        {
            thresholdName = CheckStyleTaskConfigurator.CHECKSTYLE_WARNING_PRIORITY_THRESHOLD;
            violationName = ICheckStyleBuildProcessor.CHECKSTYLE_WARNING_PRIORITY_VIOLATIONS;
        }
        int threshold = CheckstylePluginHelper.getThreshold( customConfiguration.get( thresholdName ) );
        double violations = NumberUtils.toDouble( checkstyleResults.get( violationName ), 0 );
        if ( threshold >= 0 && violations > threshold )
        {
            String msg = String.format( "Checkstyle %s violations [%s] exceed threshold [%s]", type, violations, threshold );
            //Save Why the build failed in build result
            context.getBuildResult().getBuildErrors().add( msg );
            context.getBuildResult().getCustomBuildData().put( "failedMessage", msg );
            //Save Why the build failed in the build log
            buildLogger.addErrorLogEntry( msg );
            //Failed this build
            context.getBuildResult().setBuildState( BuildState.FAILED );
        }
    }
}
| |
package sorcer.sml.services;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sorcer.test.ProjectContext;
import org.sorcer.test.SorcerTestRunner;
import sorcer.arithmetic.provider.impl.AdderImpl;
import sorcer.arithmetic.provider.impl.MultiplierImpl;
import sorcer.arithmetic.provider.impl.SubtractorImpl;
import sorcer.core.provider.rendezvous.ServiceJobber;
import sorcer.service.*;
import sorcer.service.modeling.Model;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static sorcer.co.operator.*;
import static sorcer.eo.operator.*;
import static sorcer.eo.operator.get;
import static sorcer.eo.operator.result;
import static sorcer.eo.operator.value;
import static sorcer.mo.operator.inputs;
import static sorcer.mo.operator.*;
import static sorcer.po.operator.invoker;
/**
* @author Mike Sobolewski
*/
@RunWith(SorcerTestRunner.class)
@ProjectContext("examples/sml")
public class Services {
private final static Logger logger = LoggerFactory.getLogger(Services.class);
// Evaluates a local model in place: two entries feed an invoker producing result/y.
@Test
public void evaluateModel() throws Exception {
Model context = model(ent("x1", 20.0), ent("x2", 80.0),
ent("result/y", invoker("x1 + x2", ents("x1", "x2"))));
// declare response paths
responseUp(context, "result/y");
assertTrue(get(context, "x1").equals(20.0));
assertTrue(get(context, "x2").equals(80.0));
Model out = response(context);
assertEquals(1, size(out));
assertTrue(get(out, "result/y").equals(100.0));
}
// Exercises input/output entry selectors and incremental response-path declaration.
@Test
public void modelInsOutsRsp() throws Exception {
Model context = model(inEnt("x1", 20.0), inEnt("x2", 80.0),
outEnt("result/y", invoker("x1 + x2", ents("x1", "x2"))));
Model inputs = inputs(context);
logger.info("inputs : " + inputs(context));
assertEquals(2, size(inputs));
Model outputs = outputs(context);
assertEquals(1, size(outputs));
logger.info("outputs : " + outputs(context));
// declare response paths
responseUp(context, "result/y");
Model out = response(context);
assertEquals(1, size(out));
assertTrue(get(out, "result/y").equals(100.0));
// more response paths
responseUp(context, "x1");
out = response(context);
assertEquals(2, size(out));
assertTrue(get(out, "x1").equals(20.0));
}
// Exerts a model bound to a provider signature and reads values from the result.
@Test
public void exertModel() throws Exception {
Model model = model(sig("add", AdderImpl.class),
inEnt("arg/x1", 20.0), inEnt("arg/x2", 80.0),
outEnt("result/y"));
Model out = exert(model);
assertEquals(6, size(out));
logger.info("out : " + out);
logger.info("out @ arg/x1: " + get(out, "arg/x1"));
logger.info("out @ arg/x2: " + value(out, "arg/x2"));
logger.info("out @ result/y: " + value(out, "result/y"));
assertEquals(100.0, value(out, "result/y"));
}
// A service model composing three providers; "subtract" consumes the other two outputs.
@Test
public void exertSrvModel() throws Exception {
Model m = model(
inEnt("multiply/x1", 10.0), inEnt("multiply/x2", 50.0),
inEnt("add/x1", 20.0), inEnt("add/x2", 80.0),
srv(sig("multiply", MultiplierImpl.class, result("multiply/out",
inPaths("multiply/x1", "multiply/x2")))),
srv(sig("add", AdderImpl.class, result("add/out",
inPaths("add/x1", "add/x2")))),
srv(sig("subtract", SubtractorImpl.class, result("model/response",
inPaths("multiply/out", "add/out")))),
aka("y1", "multiply/x1"),
response("subtract"));
//dependsOn(m, ent("subtract", paths("multiply", "add")));
logger.info("response: " + response(m));
Context out = response(m);
assertTrue(get(out, "subtract").equals(400.0));
}
// Exerts a generic mogram and inspects its output context.
@Test
public void exertMogram() throws Exception {
Mogram mogram = mogram(sig("add", AdderImpl.class),
cxt("add", inEnt("arg/x1", 20.0), inEnt("arg/x2", 80.0),
outEnt("result/y")));
Mogram out = exert(mogram);
Context cxt = context(out);
logger.info("out context: " + cxt);
logger.info("context @ arg/x1: " + get(cxt, "arg/x1"));
logger.info("context @ arg/x2: " + value(cxt, "arg/x2"));
logger.info("context @ result/y: " + value(cxt, "result/y"));
assertEquals(100.0, value(cxt, "result/y"));
}
// Exerts a single task and inspects its output context.
@Test
public void exertTask() throws Exception {
Mogram t5 = task("t5", sig("add", AdderImpl.class),
cxt("add", inEnt("arg/x1", 20.0), inEnt("arg/x2", 80.0),
outEnt("result/y")));
Mogram out = exert(t5);
Context cxt = context(out);
logger.info("out context: " + cxt);
logger.info("context @ arg/x1: " + value(cxt, "arg/x1"));
logger.info("context @ arg/x2: " + value(cxt, "arg/x2"));
logger.info("context @ result/y: " + value(cxt, "result/y"));
assertEquals(100.0, value(cxt, "result/y"));
}
// exec() returns either the single result value or a requested subcontext.
@Test
public void evaluateTask() throws Exception {
Service t5 = task("t5", sig("add", AdderImpl.class),
cxt("add", inEnt("arg/x1", 20.0), inEnt("arg/x2", 80.0),
result("result/y")));
// get a single context argument at the result path
assertEquals(100.0, exec(t5));
// get the subcontext output from the the result path
assertTrue(context(ent("arg/x1", 20.0), ent("result/z", 100.0)).equals(
exec(t5, result("result/z", outPaths("arg/x1", "result/z")))));
}
// A nested job j1(j2(t4, t5), t3) with pipes feeding t3 from t4/t5 outputs.
@Test
public void exertJob() throws Exception {
Mogram t3 = task("t3", sig("subtract", SubtractorImpl.class),
cxt("subtract", inEnt("arg/x1"), inEnt("arg/x2"), outEnt("result/y")));
Mogram t4 = task("t4", sig("multiply", MultiplierImpl.class),
// cxt("multiply", in("super/arg/x1"), in("arg/x2", 50.0),
cxt("multiply", inEnt("arg/x1", 10.0), inEnt("arg/x2", 50.0),
outEnt("result/y")));
Mogram t5 = task("t5", sig("add", AdderImpl.class),
cxt("add", inEnt("arg/x1", 20.0), inEnt("arg/x2", 80.0),
outEnt("result/y")));
Mogram job = //j1(j2(t4(x1, x2), t5(x1, x2)), t3(x1, x2))
job("j1", sig(ServiceJobber.class),
cxt(inEnt("arg/x1", 10.0),
result("job/result", outPaths("j1/t3/result/y"))),
job("j2", sig(ServiceJobber.class), t4, t5),
t3,
pipe(outPoint(t4, "result/y"), inPoint(t3, "arg/x1")),
pipe(outPoint(t5, "result/y"), inPoint(t3, "arg/x2")));
logger.info("srv job context: " + upcontext(job));
logger.info("srv j1/t3 context: " + context(job, "j1/t3"));
logger.info("srv j1/j2/t4 context: " + context(job, "j1/j2/t4"));
logger.info("srv j1/j2/t5 context: " + context(job, "j1/j2/t5"));
Mogram exertion = exert(job);
logger.info("srv job context: " + upcontext(exertion));
logger.info("exertion value @ j1/t3/arg/x2 = " + get(exertion, "j1/t3/arg/x2"));
assertEquals(100.0, get(exertion, "j1/t3/arg/x2"));
}
// Same job shape evaluated via exec(): single value and subcontext retrieval.
@Test
public void evaluateJob() throws Exception {
Mogram t3 = task("t3", sig("subtract", SubtractorImpl.class),
cxt("subtract", inEnt("arg/x1"), inEnt("arg/x2"), result("result/y")));
Mogram t4 = task("t4", sig("multiply", MultiplierImpl.class),
cxt("multiply", inEnt("arg/x1", 10.0), inEnt("arg/x2", 50.0), result("result/y")));
Mogram t5 = task("t5", sig("add", AdderImpl.class),
cxt("add", inEnt("arg/x1", 20.0), inEnt("arg/x2", 80.0), result("result/y")));
//TODO: CHECK Access.PULL doesn't work with ServiceJobber!!!
Mogram job = //j1(j2(t4(x1, x2), t5(x1, x2)), t3(x1, x2))
job("j1", sig(ServiceJobber.class), result("job/result", outPaths("j1/t3/result/y")),
job("j2", sig(ServiceJobber.class), t4, t5, strategy(Strategy.Flow.PAR, Strategy.Access.PUSH)),
t3,
pipe(outPoint(t4, "result/y"), inPoint(t3, "arg/x1")),
pipe(outPoint(t5, "result/y"), inPoint(t3, "arg/x2")));
// get the result value
assertEquals(400.0, exec(job));
// get the subcontext output from the exertion
assertTrue(context(ent("j1/j2/t4/result/y", 500.0),
ent("j1/j2/t5/result/y", 100.0),
ent("j1/t3/result/y", 400.0)).equals(
exec(job, result("result/z",
outPaths("j1/j2/t4/result/y", "j1/j2/t5/result/y", "j1/t3/result/y")))));
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.snapshots.blobstore;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static java.util.Collections.unmodifiableMap;
/**
* Contains information about all snapshot for the given shard in repository
* <p>
* This class is used to find files that were already snapshotted and clear out files that no longer referenced by any
* snapshots
*/
public class BlobStoreIndexShardSnapshots implements Iterable<SnapshotFiles>, ToXContentFragment {
private final List<SnapshotFiles> shardSnapshots;
private final Map<String, FileInfo> files;
private final Map<String, List<FileInfo>> physicalFiles;
/**
 * Builds the snapshot set from per-snapshot file lists, de-duplicating file
 * infos that refer to the same blob and indexing them both by blob name and
 * by original (physical) file name.
 */
public BlobStoreIndexShardSnapshots(List<SnapshotFiles> shardSnapshots) {
    this.shardSnapshots = Collections.unmodifiableList(new ArrayList<>(shardSnapshots));
    // blob name -> canonical file info
    Map<String, FileInfo> byBlobName = new HashMap<>();
    // physical (original) file name -> file infos that ever described it
    Map<String, List<FileInfo>> byPhysicalName = new HashMap<>();
    for (SnapshotFiles snapshot : shardSnapshots) {
        // First pass: pick one canonical FileInfo per blob name; infos loaded from
        // different snapshots that describe the same blob collapse to one instance.
        for (FileInfo fileInfo : snapshot.indexFiles()) {
            FileInfo previous = byBlobName.put(fileInfo.name(), fileInfo);
            assert previous == null || previous.isSame(fileInfo);
        }
        // Second pass: index the canonical infos under their original file names.
        for (FileInfo fileInfo : snapshot.indexFiles()) {
            byPhysicalName.computeIfAbsent(fileInfo.physicalName(), n -> new ArrayList<>())
                .add(byBlobName.get(fileInfo.name()));
        }
    }
    Map<String, List<FileInfo>> frozen = new HashMap<>();
    for (Map.Entry<String, List<FileInfo>> entry : byPhysicalName.entrySet()) {
        frozen.put(entry.getKey(), Collections.unmodifiableList(new ArrayList<>(entry.getValue())));
    }
    this.physicalFiles = unmodifiableMap(frozen);
    this.files = unmodifiableMap(byBlobName);
}
/**
 * Internal constructor used when the blob-name index is already known; only
 * the physical-name index needs rebuilding from the given snapshots.
 */
private BlobStoreIndexShardSnapshots(Map<String, FileInfo> files, List<SnapshotFiles> shardSnapshots) {
    this.shardSnapshots = shardSnapshots;
    this.files = files;
    Map<String, List<FileInfo>> byPhysicalName = new HashMap<>();
    for (SnapshotFiles snapshot : shardSnapshots) {
        for (FileInfo fileInfo : snapshot.indexFiles()) {
            // canonical info comes from the supplied blob-name index
            byPhysicalName.computeIfAbsent(fileInfo.physicalName(), n -> new ArrayList<>())
                .add(files.get(fileInfo.name()));
        }
    }
    Map<String, List<FileInfo>> frozen = new HashMap<>();
    for (Map.Entry<String, List<FileInfo>> entry : byPhysicalName.entrySet()) {
        frozen.put(entry.getKey(), Collections.unmodifiableList(new ArrayList<>(entry.getValue())));
    }
    this.physicalFiles = unmodifiableMap(frozen);
}
// Represents an empty snapshot set: no snapshots, no files, no physical index.
private BlobStoreIndexShardSnapshots() {
shardSnapshots = Collections.emptyList();
files = Collections.emptyMap();
physicalFiles = Collections.emptyMap();
}
/**
 * Returns the immutable list of snapshots known for this shard.
 *
 * @return list of snapshots
 */
public List<SnapshotFiles> snapshots() {
return this.shardSnapshots;
}
/**
 * Finds references to a snapshotted file by its original (physical) name.
 *
 * @param physicalName original name of the file
 * @return a list of file infos that match the specified physical file, or null if the file is not present in any of the snapshots
 */
public List<FileInfo> findPhysicalIndexFiles(String physicalName) {
return physicalFiles.get(physicalName);
}
/**
 * Finds a reference to a snapshotted file by its snapshot (blob) name.
 *
 * @param name blob name of the file
 * @return file info, or null if the file is not present in any of the snapshots
 */
public FileInfo findNameFile(String name) {
return files.get(name);
}
// Iterates over the snapshots in their stored order.
@Override
public Iterator<SnapshotFiles> iterator() {
return shardSnapshots.iterator();
}
// Raw field names used when serializing the shard index blob.
static final class Fields {
static final String FILES = "files";
static final String SNAPSHOTS = "snapshots";
}
// ParseField counterparts of Fields, used when reading the shard index blob.
static final class ParseFields {
static final ParseField FILES = new ParseField("files");
static final ParseField SNAPSHOTS = new ParseField("snapshots");
}
/**
* Writes index file for the shard in the following format.
* <pre>
* <code>
* {
* "files": [{
* "name": "__3",
* "physical_name": "_0.si",
* "length": 310,
* "checksum": "1tpsg3p",
* "written_by": "5.1.0",
* "meta_hash": "P9dsFxNMdWNlb......"
* }, {
* "name": "__2",
* "physical_name": "segments_2",
* "length": 150,
* "checksum": "11qjpz6",
* "written_by": "5.1.0",
* "meta_hash": "P9dsFwhzZWdtZ......."
* }, {
* "name": "__1",
* "physical_name": "_0.cfe",
* "length": 363,
* "checksum": "er9r9g",
* "written_by": "5.1.0"
* }, {
* "name": "__0",
* "physical_name": "_0.cfs",
* "length": 3354,
* "checksum": "491liz",
* "written_by": "5.1.0"
* }, {
* "name": "__4",
* "physical_name": "segments_3",
* "length": 150,
* "checksum": "134567",
* "written_by": "5.1.0",
* "meta_hash": "P9dsFwhzZWdtZ......."
* }],
* "snapshots": {
* "snapshot_1": {
* "files": ["__0", "__1", "__2", "__3"]
* },
* "snapshot_2": {
* "files": ["__0", "__1", "__2", "__4"]
* }
* }
* }
* }
* </code>
* </pre>
*/
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    // Section one: every known blob with its full file info.
    builder.startArray(Fields.FILES);
    for (FileInfo info : files.values()) {
        FileInfo.toXContent(info, builder, params);
    }
    builder.endArray();
    // Section two: per snapshot, the names of the blobs it references.
    builder.startObject(Fields.SNAPSHOTS);
    for (SnapshotFiles snapshot : shardSnapshots) {
        builder.startObject(snapshot.snapshot());
        builder.startArray(Fields.FILES);
        for (FileInfo info : snapshot.indexFiles()) {
            builder.value(info.name());
        }
        builder.endArray();
        builder.endObject();
    }
    builder.endObject();
    return builder;
}
/**
 * Reads a shard snapshot index previously written by {@code toXContent}.
 * Expects a top-level object with a "files" array of {@code FileInfo}
 * entries and a "snapshots" object mapping snapshot names to objects
 * containing a "files" array of blob names (see the format example on
 * {@code toXContent}).
 *
 * @param parser the x-content parser, positioned at or just before the
 *               top-level START_OBJECT
 * @return the parsed index
 * @throws IOException on a read failure
 * @throws ElasticsearchParseException on an unexpected token or field name
 */
public static BlobStoreIndexShardSnapshots fromXContent(XContentParser parser) throws IOException {
    XContentParser.Token token = parser.currentToken();
    if (token == null) { // New parser: advance onto the first token.
        token = parser.nextToken();
    }
    // snapshot name -> blob names referenced by that snapshot
    Map<String, List<String>> snapshotsMap = new HashMap<>();
    // blob name -> file info, filled from the top-level "files" array
    Map<String, FileInfo> files = new HashMap<>();
    if (token == XContentParser.Token.START_OBJECT) {
        // Walk the top-level object; each entry must be "files" (array)
        // or "snapshots" (object).
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token != XContentParser.Token.FIELD_NAME) {
                throw new ElasticsearchParseException("unexpected token [{}]", token);
            }
            String currentFieldName = parser.currentName();
            token = parser.nextToken();
            if (token == XContentParser.Token.START_ARRAY) {
                // Top-level "files": one FileInfo per array element.
                if (ParseFields.FILES.match(currentFieldName) == false) {
                    throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
                }
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    FileInfo fileInfo = FileInfo.fromXContent(parser);
                    files.put(fileInfo.name(), fileInfo);
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                // Top-level "snapshots": one nested object per snapshot name.
                if (ParseFields.SNAPSHOTS.match(currentFieldName) == false) {
                    throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
                }
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token != XContentParser.Token.FIELD_NAME) {
                        throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
                    }
                    String snapshot = parser.currentName();
                    if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
                        throw new ElasticsearchParseException("unknown object [{}]", currentFieldName);
                    }
                    // Inside one snapshot object: pick up its "files" array of
                    // blob names; unknown non-array fields are silently skipped.
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            currentFieldName = parser.currentName();
                            if (parser.nextToken() == XContentParser.Token.START_ARRAY) {
                                if (ParseFields.FILES.match(currentFieldName) == false) {
                                    throw new ElasticsearchParseException("unknown array [{}]", currentFieldName);
                                }
                                List<String> fileNames = new ArrayList<>();
                                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                                    fileNames.add(parser.text());
                                }
                                snapshotsMap.put(snapshot, fileNames);
                            }
                        }
                    }
                }
            } else {
                throw new ElasticsearchParseException("unexpected token [{}]", token);
            }
        }
    }
    // Resolve each snapshot's blob names against the file-info map.
    // Every referenced name is expected to be present in "files".
    List<SnapshotFiles> snapshots = new ArrayList<>(snapshotsMap.size());
    for (Map.Entry<String, List<String>> entry : snapshotsMap.entrySet()) {
        List<FileInfo> fileInfosBuilder = new ArrayList<>();
        for (String file : entry.getValue()) {
            FileInfo fileInfo = files.get(file);
            assert fileInfo != null;
            fileInfosBuilder.add(fileInfo);
        }
        snapshots.add(new SnapshotFiles(entry.getKey(), Collections.unmodifiableList(fileInfosBuilder)));
    }
    return new BlobStoreIndexShardSnapshots(files, Collections.unmodifiableList(snapshots));
}
}
| |
/*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import java.util.*;
import java.io.*;
import java.net.*;
import java.nio.file.*;
import java.nio.file.attribute.*;
import java.nio.charset.*;
import com.sun.tools.sjavac.Main;
/**
 * End-to-end test driver for the sjavac compiler. Runs a sequence of
 * scenarios — initial compile, several incremental compiles (dropped
 * classes, changed constants, added/removed/changed native methods),
 * source overriding, invisible sources, circular sources and
 * compile-time exclusion — each compiling into scratch directories and
 * comparing before/after snapshots of the output trees, where a
 * snapshot maps file path to last-modified time in millis.
 */
public class SJavac {

    public static void main(String... args) throws Exception {
        SJavac s = new SJavac();
        s.test();
    }

    FileSystem defaultfs = FileSystems.getDefault();

    // Where to put generated sources that will
    // test aspects of sjavac, ie JTWork/scratch/gensrc
    Path gensrc;

    // More gensrc dirs are used to test merging of several source roots.
    Path gensrc2;
    Path gensrc3;

    // Where to put compiled classes.
    Path bin;
    // Where to put c-header files.
    Path headers;

    // The sjavac compiler.
    Main main = new Main();

    // Remember the previous bin and headers state here.
    Map<String,Long> previous_bin_state;
    Map<String,Long> previous_headers_state;

    /** Runs every scenario in order, then removes the scratch directories. */
    public void test() throws Exception {
        gensrc = defaultfs.getPath("gensrc");
        gensrc2 = defaultfs.getPath("gensrc2");
        gensrc3 = defaultfs.getPath("gensrc3");
        bin = defaultfs.getPath("bin");
        headers = defaultfs.getPath("headers");

        Files.createDirectory(gensrc);
        Files.createDirectory(gensrc2);
        Files.createDirectory(gensrc3);
        Files.createDirectory(bin);
        Files.createDirectory(headers);

        initialCompile();
        incrementalCompileNoChanges();
        incrementalCompileDroppingClasses();
        incrementalCompileWithChange();
        incrementalCompileDropAllNatives();
        incrementalCompileAddNative();
        incrementalCompileChangeNative();
        compileWithOverrideSource();
        compileWithInvisibleSources();
        compileCircularSources();
        compileExcludingDependency();

        delete(gensrc);
        delete(gensrc2);
        delete(gensrc3);
        delete(bin);
        delete(headers);
    }

    /** Populates gensrc with the baseline sources and compiles them. */
    void initialCompile() throws Exception {
        System.out.println("\nInitial compile of gensrc.");
        System.out.println("----------------------------");
        populate(gensrc,
            "alfa/AINT.java",
            "package alfa; public interface AINT { void aint(); }",

            "alfa/A.java",
            "package alfa; public class A implements AINT { "+
            "public final static int DEFINITION = 17; public void aint() { } }",

            "alfa/AA.java",
            "package alfa;"+
            "// A package private class, not contributing to the public api.\n"+
            "class AA {"+
            "   // A properly nested static inner class.\n"+
            "    static class AAA { }\n"+
            "    // A properly nested inner class.\n"+
            "    class AAAA { }\n"+
            "    Runnable foo() {\n"+
            "        // A proper anonymous class.\n"+
            "        return new Runnable() { public void run() { } };\n"+
            "    }\n"+
            "    AAA aaa;\n"+
            "    AAAA aaaa;\n"+
            "    AAAAA aaaaa;\n"+
            "}\n"+
            "class AAAAA {\n"+
            "    // A bad auxiliary class, but no one is referencing it\n"+
            "    // from outside of this source file, therefore it is ok.\n"+
            "}\n",

            "beta/BINT.java",
            "package beta;public interface BINT { void foo(); }",

            "beta/B.java",
            "package beta; import alfa.A; public class B {"+
            "private int b() { return A.DEFINITION; } native void foo(); }");

        compile("gensrc", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false", "--log=debug");
        previous_bin_state = collectState(bin);
        previous_headers_state = collectState(headers);
    }

    /** Recompiling unchanged sources must leave bin and headers untouched. */
    void incrementalCompileNoChanges() throws Exception {
        System.out.println("\nTesting that no change in sources implies no change in binaries.");
        System.out.println("------------------------------------------------------------------");
        compile("gensrc", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false", "--log=debug");
        Map<String,Long> new_bin_state = collectState(bin);
        verifyEqual(new_bin_state, previous_bin_state);
        Map<String,Long> new_headers_state = collectState(headers);
        verifyEqual(previous_headers_state, new_headers_state);
    }

    /** Deleting a source file must remove its class file and all nested/auxiliary classes. */
    void incrementalCompileDroppingClasses() throws Exception {
        System.out.println("\nTesting that deleting AA.java deletes all");
        System.out.println("generated inner class as well as AA.class");
        System.out.println("-----------------------------------------");
        removeFrom(gensrc, "alfa/AA.java");
        compile("gensrc", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false", "--log=debug");
        Map<String,Long> new_bin_state = collectState(bin);
        verifyThatFilesHaveBeenRemoved(previous_bin_state, new_bin_state,
                                       "bin/alfa/AA$1.class",
                                       "bin/alfa/AA$AAAA.class",
                                       "bin/alfa/AA$AAA.class",
                                       "bin/alfa/AAAAA.class",
                                       "bin/alfa/AA.class");

        previous_bin_state = new_bin_state;
        Map<String,Long> new_headers_state = collectState(headers);
        verifyEqual(previous_headers_state, new_headers_state);
    }

    /** Changing a public constant must recompile dependents, but not touch headers. */
    void incrementalCompileWithChange() throws Exception {
        System.out.println("\nNow update the A.java file with a new timestamps and");
        System.out.println("new final static definition. This should trigger a recompile,");
        System.out.println("not only of alfa, but also beta.");
        System.out.println("But check that the generated native header was not updated!");
        System.out.println("Since we did not modify the native api of B.");
        System.out.println("-------------------------------------------------------------");

        populate(gensrc,"alfa/A.java",
                       "package alfa; public class A implements AINT { "+
                 "public final static int DEFINITION = 18; public void aint() { } private void foo() { } }");

        compile("gensrc", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false", "--log=debug");
        Map<String,Long> new_bin_state = collectState(bin);

        verifyNewerFiles(previous_bin_state, new_bin_state,
                         "bin/alfa/A.class",
                         "bin/alfa/AINT.class",
                         "bin/beta/B.class",
                         "bin/beta/BINT.class",
                         "bin/javac_state");
        previous_bin_state = new_bin_state;

        Map<String,Long> new_headers_state = collectState(headers);
        verifyEqual(new_headers_state, previous_headers_state);
    }

    /** Removing the last native method must remove the generated header. */
    void incrementalCompileDropAllNatives() throws Exception {
        System.out.println("\nNow update the B.java file with one less native method,");
        System.out.println("ie it has no longer any methods!");
        System.out.println("Verify that beta_B.h is removed!");
        System.out.println("---------------------------------------------------------");

        populate(gensrc,"beta/B.java",
                       "package beta; import alfa.A; public class B {"+
                       "private int b() { return A.DEFINITION; } }");

        compile("gensrc", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false", "--log=debug");
        Map<String,Long> new_bin_state = collectState(bin);
        verifyNewerFiles(previous_bin_state, new_bin_state,
                         "bin/beta/B.class",
                         "bin/beta/BINT.class",
                         "bin/javac_state");
        previous_bin_state = new_bin_state;

        Map<String,Long> new_headers_state = collectState(headers);
        verifyThatFilesHaveBeenRemoved(previous_headers_state, new_headers_state,
                                       "headers/beta_B.h");
        previous_headers_state = new_headers_state;
    }

    /** Adding an @Native constant must regenerate the header. */
    void incrementalCompileAddNative() throws Exception {
        System.out.println("\nNow update the B.java file with a final static annotated with @Native.");
        System.out.println("Verify that beta_B.h is added again!");
        System.out.println("------------------------------------------------------------------------");

        populate(gensrc,"beta/B.java",
                       "package beta; import alfa.A; public class B {"+
                       "private int b() { return A.DEFINITION; } "+
                 "@java.lang.annotation.Native final static int alfa = 42; }");

        compile("gensrc", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false", "--log=debug");
        Map<String,Long> new_bin_state = collectState(bin);
        verifyNewerFiles(previous_bin_state, new_bin_state,
                         "bin/beta/B.class",
                         "bin/beta/BINT.class",
                         "bin/javac_state");
        previous_bin_state = new_bin_state;

        Map<String,Long> new_headers_state = collectState(headers);
        verifyThatFilesHaveBeenAdded(previous_headers_state, new_headers_state,
                                     "headers/beta_B.h");
        previous_headers_state = new_headers_state;
    }

    /** Changing an @Native constant's value must rewrite the header. */
    void incrementalCompileChangeNative() throws Exception {
        System.out.println("\nNow update the B.java file with a new value for the final static"+
                           " annotated with @Native.");
        System.out.println("Verify that beta_B.h is rewritten again!");
        System.out.println("-------------------------------------------------------------------");

        populate(gensrc,"beta/B.java",
                       "package beta; import alfa.A; public class B {"+
                       "private int b() { return A.DEFINITION; } "+
                 "@java.lang.annotation.Native final static int alfa = 43; }");

        compile("gensrc", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false", "--log=debug");
        Map<String,Long> new_bin_state = collectState(bin);
        verifyNewerFiles(previous_bin_state, new_bin_state,
                         "bin/beta/B.class",
                         "bin/beta/BINT.class",
                         "bin/javac_state");
        previous_bin_state = new_bin_state;

        Map<String,Long> new_headers_state = collectState(headers);
        verifyNewerFiles(previous_headers_state, new_headers_state,
                         "headers/beta_B.h");
        previous_headers_state = new_headers_state;
    }

    /** Excluding a package from one source root lets another root supply it. */
    void compileWithOverrideSource() throws Exception {
        System.out.println("\nNow verify that we can override sources to be compiled.");
        System.out.println("Compile gensrc and gensrc2. However do not compile broken beta.B in gensrc,");
        System.out.println("only compile ok beta.B in gensrc2.");
        System.out.println("---------------------------------------------------------------------------");

        delete(gensrc);
        delete(gensrc2);
        delete(bin);
        previous_bin_state = collectState(bin);

        populate(gensrc,"alfa/A.java",
                 "package alfa; import beta.B; import gamma.C; public class A { B b; C c; }",
                 "beta/B.java",
                 "package beta; public class B { broken",
                 "gamma/C.java",
                 "package gamma; public class C { }");

        populate(gensrc2,
                 "beta/B.java",
                 "package beta; public class B { }");

        compile("-x", "beta", "gensrc", "gensrc2", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false");
        Map<String,Long> new_bin_state = collectState(bin);
        verifyThatFilesHaveBeenAdded(previous_bin_state, new_bin_state,
                                     "bin/alfa/A.class",
                                     "bin/beta/B.class",
                                     "bin/gamma/C.class",
                                     "bin/javac_state");

        System.out.println("----- Compile with exluded beta went well!");
        delete(bin);
        // Without the exclusion the broken beta.B in gensrc must break the build.
        compileExpectFailure("gensrc", "gensrc2", "-d", "bin", "-h", "headers", "-j", "1",
                             "--server:portfile=testserver,background=false");

        System.out.println("----- Compile without exluded beta failed, as expected! Good!");
        delete(bin);
    }

    /** -sourcepath roots are linked against but not compiled; exclusion still applies. */
    void compileWithInvisibleSources() throws Exception {
        System.out.println("\nNow verify that we can make sources invisible to linking (sourcepath).");
        System.out.println("Compile gensrc and link against gensrc2 and gensrc3, however");
        System.out.println("gensrc2 contains broken code in beta.B, thus we must exclude that package");
        System.out.println("fortunately gensrc3 contains a proper beta.B.");
        System.out.println("------------------------------------------------------------------------");

        // Start with a fresh gensrcs and bin.
        delete(gensrc);
        delete(gensrc2);
        delete(gensrc3);
        delete(bin);
        previous_bin_state = collectState(bin);

        populate(gensrc,"alfa/A.java",
                 "package alfa; import beta.B; import gamma.C; public class A { B b; C c; }");
        populate(gensrc2,"beta/B.java",
                 "package beta; public class B { broken",
                 "gamma/C.java",
                 "package gamma; public class C { }");
        populate(gensrc3, "beta/B.java",
                 "package beta; public class B { }");

        compile("gensrc", "-x", "beta", "-sourcepath", "gensrc2",
                "-sourcepath", "gensrc3", "-d", "bin", "-h", "headers", "-j", "1",
                "--server:portfile=testserver,background=false");

        System.out.println("The first compile went well!");
        Map<String,Long> new_bin_state = collectState(bin);
        verifyThatFilesHaveBeenAdded(previous_bin_state, new_bin_state,
                                     "bin/alfa/A.class",
                                     "bin/javac_state");

        System.out.println("----- Compile with exluded beta went well!");
        delete(bin);
        compileExpectFailure("gensrc", "-sourcepath", "gensrc2", "-sourcepath", "gensrc3",
                             "-d", "bin", "-h", "headers", "-j", "1",
                             "--server:portfile=testserver,background=false");

        System.out.println("----- Compile without exluded beta failed, as expected! Good!");
        delete(bin);
    }

    /** Mutually dependent packages must compile even when split over several cores. */
    void compileCircularSources() throws Exception {
        System.out.println("\nNow verify that circular sources split on multiple cores can be compiled.");
        System.out.println("---------------------------------------------------------------------------");

        // Start with a fresh gensrcs and bin.
        delete(gensrc);
        delete(gensrc2);
        delete(gensrc3);
        delete(bin);
        previous_bin_state = collectState(bin);

        populate(gensrc,"alfa/A.java",
                 "package alfa; public class A { beta.B b; }",
                 "beta/B.java",
                 "package beta; public class B { gamma.C c; }",
                 "gamma/C.java",
                 "package gamma; public class C { alfa.A a; }");

        compile("gensrc", "-d", "bin", "-h", "headers", "-j", "3",
                "--server:portfile=testserver,background=false","--log=debug");
        Map<String,Long> new_bin_state = collectState(bin);
        verifyThatFilesHaveBeenAdded(previous_bin_state, new_bin_state,
                                     "bin/alfa/A.class",
                                     "bin/beta/B.class",
                                     "bin/gamma/C.class",
                                     "bin/javac_state");
        delete(bin);
    }

    /**
     * Tests compiling class A that depends on class B without compiling class B
     * @throws Exception If test fails
     */
    void compileExcludingDependency() throws Exception {
        System.out.println("\nVerify that excluding classes from compilation but not from linking works.");
        System.out.println("---------------------------------------------------------------------------");

        delete(gensrc);
        delete(bin);
        previous_bin_state = collectState(bin);

        populate(gensrc,
                 "alfa/A.java",
                 "package alfa; public class A { beta.B b; }",
                 "beta/B.java",
                 "package beta; public class B { }");

        compile("-x", "beta", "-src", "gensrc", "-x", "alfa", "-sourcepath", "gensrc",
                "-d", "bin", "--server:portfile=testserver,background=false");

        Map<String,Long> new_bin_state = collectState(bin);
        verifyThatFilesHaveBeenAdded(previous_bin_state, new_bin_state,
                                     "bin/alfa/A.class",
                                     "bin/javac_state");
    }

    /** Deletes the named files (relative to dir); each must exist. */
    void removeFrom(Path dir, String... args) throws IOException {
        for (String filename : args) {
            Path p = dir.resolve(filename);
            Files.delete(p);
        }
    }

    /**
     * Writes source files under src. Varargs come in (filename, content)
     * pairs; parent directories are created as needed and existing files
     * are overwritten.
     */
    void populate(Path src, String... args) throws IOException {
        if (!Files.exists(src)) {
            Files.createDirectory(src);
        }
        String[] a = args;
        for (int i = 0; i < a.length; i += 2) {
            String filename = a[i];
            String content = a[i+1];
            Path p = src.resolve(filename);
            Files.createDirectories(p.getParent());
            // try-with-resources: the writer previously leaked if println threw.
            try (PrintWriter out = new PrintWriter(Files.newBufferedWriter(p,
                                                                          Charset.defaultCharset()))) {
                out.println(content);
            }
        }
    }

    /** Recursively deletes the contents of root (and root's subdirectories, but keeps root itself). */
    void delete(final Path root) throws IOException {
        if (!Files.exists(root)) return;
        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
                 @Override
                 public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException
                 {
                     Files.delete(file);
                     return FileVisitResult.CONTINUE;
                 }

                 @Override
                 public FileVisitResult postVisitDirectory(Path dir, IOException e) throws IOException
                 {
                     if (e == null) {
                         // Keep the root directory itself so it can be reused.
                         if (!dir.equals(root)) Files.delete(dir);
                         return FileVisitResult.CONTINUE;
                     } else {
                         // directory iteration failed
                         throw e;
                     }
                 }
            });
    }

    /** Runs sjavac with the given args; throws if the compile fails. */
    void compile(String... args) throws Exception {
        int rc = main.go(args, System.out, System.err);
        if (rc != 0) throw new Exception("Error during compile!");

        // Wait a second, to get around the (temporary) problem with
        // second resolution in the Java file api. But do not do this
        // on windows where the timestamps work.
        // (NOTE(review): the wait currently runs on all platforms — confirm
        // whether a windows check was intended here.)
        long in_a_sec = System.currentTimeMillis()+1000;
        while (in_a_sec > System.currentTimeMillis()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // Re-assert the interrupt flag and stop waiting early;
                // the timestamp fudge is best-effort only.
                Thread.currentThread().interrupt();
                break;
            }
        }
    }

    /** Runs sjavac with the given args; throws if the compile unexpectedly succeeds. */
    void compileExpectFailure(String... args) throws Exception {
        int rc = main.go(args, System.out, System.err);
        if (rc == 0) throw new Exception("Expected error during compile! Did not fail!");
    }

    /** Snapshots a directory tree: file path -> last-modified time in millis. */
    Map<String,Long> collectState(Path dir) throws IOException
    {
        final Map<String,Long> files = new HashMap<>();
        Files.walkFileTree(dir, new SimpleFileVisitor<Path>() {
                 @Override
                 public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
                     throws IOException
                 {
                     // Autoboxing instead of the deprecated new Long(...) constructor.
                     files.put(file.toString(), Files.getLastModifiedTime(file).toMillis());
                     return FileVisitResult.CONTINUE;
                 }
            });
        return files;
    }

    /**
     * Verifies that exactly the named files (and nothing else) disappeared
     * between the two snapshots. Paths use '/' and are translated to the
     * platform separator.
     */
    void verifyThatFilesHaveBeenRemoved(Map<String,Long> from,
                                       Map<String,Long> to,
                                       String... args) throws Exception {

        Set<String> froms = from.keySet();
        Set<String> tos = to.keySet();

        if (froms.equals(tos)) {
            throw new Exception("Expected new state to have fewer files than previous state!");
        }

        // No file may appear out of nowhere.
        for (String t : tos) {
            if (!froms.contains(t)) {
                throw new Exception("Expected "+t+" to exist in previous state!");
            }
        }

        for (String f : args) {
            f = f.replace("/", File.separator);
            if (!froms.contains(f)) {
                throw new Exception("Expected "+f+" to exist in previous state!");
            }
            if (tos.contains(f)) {
                throw new Exception("Expected "+f+" to have been removed from the new state!");
            }
        }

        // Only the listed files may have been removed.
        if (froms.size() - args.length != tos.size()) {
            throw new Exception("There are more removed files than the expected list!");
        }
    }

    /**
     * Verifies that exactly the named files (and nothing else) appeared
     * between the two snapshots. Paths use '/' and are translated to the
     * platform separator.
     */
    void verifyThatFilesHaveBeenAdded(Map<String,Long> from,
                                     Map<String,Long> to,
                                     String... args) throws Exception {

        Set<String> froms = from.keySet();
        Set<String> tos = to.keySet();

        if (froms.equals(tos)) {
            throw new Exception("Expected new state to have more files than previous state!");
        }

        // Nothing may have been removed.
        for (String t : froms) {
            if (!tos.contains(t)) {
                throw new Exception("Expected "+t+" to exist in new state!");
            }
        }

        for (String f : args) {
            f = f.replace("/", File.separator);
            if (!tos.contains(f)) {
                throw new Exception("Expected "+f+" to have been added to new state!");
            }
            if (froms.contains(f)) {
                throw new Exception("Expected "+f+" to not exist in previous state!");
            }
        }

        // Only the listed files may have been added.
        if (froms.size() + args.length != tos.size()) {
            throw new Exception("There are more added files than the expected list!");
        }
    }

    /**
     * Verifies that both snapshots cover the same files, that the named
     * files got newer timestamps, and that all other timestamps are
     * unchanged.
     */
    void verifyNewerFiles(Map<String,Long> from,
                          Map<String,Long> to,
                          String... args) throws Exception {
        if (!from.keySet().equals(to.keySet())) {
            throw new Exception("Expected the set of files to be identical!");
        }
        Set<String> files = new HashSet<>();
        for (String s : args) {
            files.add(s.replace("/", File.separator));
        }
        for (String fn : from.keySet()) {
            long f = from.get(fn);
            long t = to.get(fn);
            if (files.contains(fn)) {
                if (t <= f) {
                    throw new Exception("Expected "+fn+" to have a more recent timestamp!");
                }
            } else {
                if (t != f) {
                    throw new Exception("Expected "+fn+" to have the same timestamp!");
                }
            }
        }
    }

    /** Renders a snapshot map, one "path timestamp" entry per line. */
    String print(Map<String,Long> m) {
        StringBuilder b = new StringBuilder();
        Set<String> keys = m.keySet();
        for (String k : keys) {
            b.append(k).append(" ").append(m.get(k)).append("\n");
        }
        return b.toString();
    }

    /** Verifies two snapshots are identical, dumping both on mismatch. */
    void verifyEqual(Map<String,Long> from, Map<String,Long> to) throws Exception {
        if (!from.equals(to)) {
            System.out.println("FROM---"+print(from));
            System.out.println("TO-----"+print(to));
            throw new Exception("The dir should not differ! But it does!");
        }
    }
}
| |
package org.apollo.game.model.area;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apollo.game.message.impl.RegionUpdateMessage;
import org.apollo.game.model.Direction;
import org.apollo.game.model.Position;
import org.apollo.game.model.area.collision.CollisionMatrix;
import org.apollo.game.model.area.update.GroupableEntity;
import org.apollo.game.model.area.update.UpdateOperation;
import org.apollo.game.model.entity.Entity;
import org.apollo.game.model.entity.EntityType;
import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import org.apollo.game.model.entity.obj.DynamicGameObject;
/**
* An 8x8 area of the map.
*
* @author Major
*/
public final class Region {
/**
* A {@link RegionListener} for {@link UpdateOperation}s.
*
* @author Major
*/
private static final class UpdateRegionListener implements RegionListener {
@Override
public void execute(Region region, Entity entity, EntityUpdateType update) {
EntityType type = entity.getEntityType();
if (!type.isMob()) {
region.record((Entity & GroupableEntity) entity, update);
}
}
}
/**
* The width and length of a Region, in tiles.
*/
public static final int SIZE = 8;
/**
* The default size of newly-created Lists, to reduce memory usage.
*/
private static final int DEFAULT_LIST_SIZE = 2;
/**
* The RegionCoordinates of this Region.
*/
private final RegionCoordinates coordinates;
/**
* The Map of Positions to Entities in that Position.
*/
private final Map<Position, Set<Entity>> entities = new HashMap<>();
/**
* A List of RegionListeners registered to this Region.
*/
private final List<RegionListener> listeners = new ArrayList<>();
/**
* The CollisionMatrix.
*/
private final CollisionMatrix[] matrices = CollisionMatrix.createMatrices(Position.HEIGHT_LEVELS, SIZE, SIZE);
/**
* The List of Sets containing RegionUpdateMessages that specifically remove StaticGameObjects. The
* List is ordered based on the height level the RegionUpdateMessages concern.
*/
private final List<Set<RegionUpdateMessage>> removedObjects = new ArrayList<>(Position.HEIGHT_LEVELS);
/**
* The List of Sets containing RegionUpdateMessages. The List is ordered based on the height level the
* RegionUpdateMessages concern. This only contains the updates to this Region that have occurred in the last
* pulse.
*/
private final List<Set<RegionUpdateMessage>> updates = new ArrayList<>(Position.HEIGHT_LEVELS);
/**
* Creates a new Region.
*
* @param x The x coordinate of the Region.
* @param y The y coordinate of the Region.
*/
public Region(int x, int y) {
this(new RegionCoordinates(x, y));
}
/**
* Creates a new Region with the specified {@link RegionCoordinates}.
*
* @param coordinates The RegionCoordinates.
*/
public Region(RegionCoordinates coordinates) {
this.coordinates = coordinates;
listeners.add(new UpdateRegionListener());
for (int height = 0; height < Position.HEIGHT_LEVELS; height++) {
removedObjects.add(new HashSet<>());
updates.add(new HashSet<>(DEFAULT_LIST_SIZE));
}
}
/**
* Adds a {@link Entity} to the Region. Note that this does not spawn the Entity, or do any other action other than
* register it to this Region.
*
* @param entity The Entity.
* @param notify Whether or not the {@link RegionListener}s for this Region should be notified.
* @throws IllegalArgumentException If the Entity does not belong in this Region.
*/
public void addEntity(Entity entity, boolean notify) {
Position position = entity.getPosition();
checkPosition(position);
Set<Entity> local = entities.computeIfAbsent(position, key -> new HashSet<>(DEFAULT_LIST_SIZE));
local.add(entity);
if (notify) {
notifyListeners(entity, EntityUpdateType.ADD);
}
}
/**
* Adds a {@link Entity} to the Region. Note that this does not spawn the Entity, or do any other action other than
* register it to this Region.
*
* By default, this method notifies RegionListeners for this region of the addition.
*
* @param entity The Entity.
* @throws IllegalArgumentException If the Entity does not belong in this Region.
*/
public void addEntity(Entity entity) {
addEntity(entity, true);
}
/**
* Checks if this Region contains the specified Entity.
*
* This method operates in constant time.
*
* @param entity The Entity.
* @return {@code true} if this Region contains the Entity, otherwise {@code false}.
*/
public boolean contains(Entity entity) {
Position position = entity.getPosition();
Set<Entity> local = entities.get(position);
return local != null && local.contains(entity);
}
/**
* Returns whether or not the specified {@link Position} is inside this Region.
*
* @param position The Position.
* @return {@code true} iff the specified Position is inside this Region.
*/
public boolean contains(Position position) {
return coordinates.equals(position.getRegionCoordinates());
}
/**
* Encodes the contents of this Region into a {@link Set} of {@link RegionUpdateMessage}s, to be sent to a client.
*
* @return The Set of RegionUpdateMessages.
*/
public Set<RegionUpdateMessage> encode(int height) {
Set<RegionUpdateMessage> additions = entities.values().stream()
.flatMap(Set::stream) // TODO fix this to work for ground items + projectiles
.filter(entity -> entity instanceof DynamicGameObject && entity.getPosition().getHeight() == height)
.map(entity -> ((GroupableEntity) entity).toUpdateOperation(this, EntityUpdateType.ADD).toMessage())
.collect(Collectors.toSet());
ImmutableSet.Builder<RegionUpdateMessage> builder = ImmutableSet.builder();
builder.addAll(additions).addAll(updates.get(height)).addAll(removedObjects.get(height));
return builder.build();
}
/**
* Gets this Region's {@link RegionCoordinates}.
*
* @return The RegionCoordinates.
*/
public RegionCoordinates getCoordinates() {
return coordinates;
}
/**
* Gets a shallow copy of the {@link Set} of {@link Entity} objects at the specified {@link Position}. The returned
* type will be immutable.
*
* @param position The Position containing the entities.
* @return The Set. Will be immutable.
*/
public Set<Entity> getEntities(Position position) {
Set<Entity> set = entities.get(position);
return (set == null) ? ImmutableSet.of() : ImmutableSet.copyOf(set);
}
/**
* Gets a shallow copy of the {@link Set} of {@link Entity}s with the specified {@link EntityType}(s). The returned
* type will be immutable. Type will be inferred from the call, so ensure that the Entity type and the reference
* correspond, or this method will fail at runtime.
*
* @param position The {@link Position} containing the entities.
* @param types The {@link EntityType}s.
* @return The Set of Entity objects.
*/
public <T extends Entity> Set<T> getEntities(Position position, EntityType... types) {
Set<Entity> local = entities.get(position);
if (local == null) {
return ImmutableSet.of();
}
Set<EntityType> set = new HashSet<>(Arrays.asList(types));
@SuppressWarnings("unchecked")
Set<T> filtered = (Set<T>) local.stream().filter(entity -> set.contains(entity.getEntityType()))
.collect(Collectors.toSet());
return ImmutableSet.copyOf(filtered);
}
/**
* Gets the {@link CollisionMatrix} at the specified height level.
*
* @param height The height level.
* @return The CollisionMatrix.
*/
public CollisionMatrix getMatrix(int height) {
Preconditions.checkElementIndex(height, matrices.length, "Matrix height level must be [0, " + matrices.length
+ "), received " + height + ".");
return matrices[height];
}
/**
* Gets the {@link Set} of {@link RegionUpdateMessage}s that have occurred in the last pulse. This method can
* only be called <strong>once</strong> per pulse.
*
* @param height The height level to get the RegionUpdateMessages for.
* @return The Set of RegionUpdateMessages.
*/
public Set<RegionUpdateMessage> getUpdates(int height) {
Set<RegionUpdateMessage> updates = this.updates.get(height);
Set<RegionUpdateMessage> copy = ImmutableSet.copyOf(updates);
updates.clear();
return copy;
}
/**
* Notifies the {@link RegionListener}s registered to this Region that an update has occurred.
*
* @param entity The {@link Entity} that was updated.
* @param type The {@link EntityUpdateType} that occurred.
*/
public void notifyListeners(Entity entity, EntityUpdateType type) {
listeners.forEach(listener -> listener.execute(this, entity, type));
}
/**
* Removes an {@link Entity} from this Region.
*
* @param entity The Entity.
* @throws IllegalArgumentException If the Entity does not belong in this Region, or if it was never added.
*/
public void removeEntity(Entity entity) {
Position position = entity.getPosition();
checkPosition(position);
Set<Entity> local = entities.get(position);
if (local == null || !local.remove(entity)) {
throw new IllegalArgumentException("Entity (" + entity + ") belongs in (" + this + ") but does not exist.");
}
notifyListeners(entity, EntityUpdateType.REMOVE);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this).add("coordinates", coordinates).toString();
}
/**
* Returns whether or not an Entity of the specified {@link EntityType type} can traverse the tile at the specified
* coordinate pair.
*
* @param position The {@link Position} of the tile.
* @param entity The {@link EntityType}.
* @param direction The {@link Direction} the Entity is approaching from.
* @return {@code true} if the tile at the specified coordinate pair is traversable, {@code false} if not.
*/
public boolean traversable(Position position, EntityType entity, Direction direction) {
CollisionMatrix matrix = matrices[position.getHeight()];
int x = position.getX(), y = position.getY();
return !matrix.untraversable(x % SIZE, y % SIZE, entity, direction);
}
/**
 * Checks that the specified {@link Position} is included in this Region.
 *
 * @param position The position.
 * @throws IllegalArgumentException If the specified position is not included in this Region.
 */
private void checkPosition(Position position) {
    // Equivalent to Preconditions.checkArgument: same exception type and message.
    if (!coordinates.equals(RegionCoordinates.fromPosition(position))) {
        throw new IllegalArgumentException("Position is not included in this Region.");
    }
}
/**
 * Records the specified {@link GroupableEntity} as being updated this pulse.
 *
 * @param entity The GroupableEntity.
 * @param update The {@link EntityUpdateType}.
 * @throws UnsupportedOperationException If the specified Entity cannot be operated on in this manner.
 */
private <T extends Entity & GroupableEntity> void record(T entity, EntityUpdateType update) {
    UpdateOperation<?> operation = entity.toUpdateOperation(this, update);
    RegionUpdateMessage message = operation.toMessage();
    RegionUpdateMessage inverse = operation.inverse();

    int height = entity.getPosition().getHeight();
    Set<RegionUpdateMessage> pending = this.updates.get(height);

    if (entity.getEntityType() == EntityType.STATIC_OBJECT) { // TODO set/clear collision matrix values
        // Static objects additionally track removals on their plane so the
        // removal set can be replayed later.
        Set<RegionUpdateMessage> removed = removedObjects.get(height);
        if (update == EntityUpdateType.REMOVE) {
            removed.add(message);
        } else { // TODO should this really be possible?
            removed.remove(inverse);
        }
        pending.add(message);
    } else {
        // For everything else, a new message cancels out its inverse (e.g. add after remove).
        pending.add(message);
        pending.remove(inverse);
    }
}
}
| |
package de.peeeq.wurstscript.intermediatelang.interpreter;
import de.peeeq.wurstio.jassinterpreter.DebugPrintError;
import de.peeeq.wurstio.jassinterpreter.InterpreterException;
import de.peeeq.wurstio.jassinterpreter.VarargArray;
import de.peeeq.wurstscript.ast.Annotation;
import de.peeeq.wurstscript.ast.HasModifier;
import de.peeeq.wurstscript.ast.Modifier;
import de.peeeq.wurstscript.gui.WurstGui;
import de.peeeq.wurstscript.intermediatelang.*;
import de.peeeq.wurstscript.jassIm.*;
import de.peeeq.wurstscript.jassinterpreter.ReturnException;
import de.peeeq.wurstscript.jassinterpreter.TestFailException;
import de.peeeq.wurstscript.jassinterpreter.TestSuccessException;
import de.peeeq.wurstscript.parser.WPos;
import de.peeeq.wurstscript.translation.imtranslation.FunctionFlagEnum;
import de.peeeq.wurstscript.translation.imtranslation.ImHelper;
import org.eclipse.jdt.annotation.Nullable;
import java.io.File;
import java.util.Arrays;
import java.util.Optional;
import java.util.stream.Collectors;
/**
 * Interpreter for the WurstScript intermediate language (Im* AST).
 *
 * <p>Executes {@link ImFunction}s against a {@link ProgramState}, dispatching
 * native and {@code @compiletimenative} functions to registered
 * {@link NativesProvider}s and simulating timers via a {@link TimerMockHandler}.
 */
public class ILInterpreter implements AbstractInterpreter {

    // The intermediate program being interpreted; replaceable via setProgram().
    private ImProg prog;

    // Shared interpreter state: globals, stack frames, native providers, last statement.
    private final ProgramState globalState;

    // Mock handler used to simulate timers during compiletime execution.
    private final TimerMockHandler timerMockHandler = new TimerMockHandler();

    public ILInterpreter(ImProg prog, WurstGui gui, Optional<File> mapFile, ProgramState globalState) {
        this.prog = prog;
        this.globalState = globalState;
        globalState.addNativeProvider(new BuiltinFuncs(globalState));
//        globalState.addNativeProvider(new NativeFunctions());
    }

    public ILInterpreter(ImProg prog, WurstGui gui, Optional<File> mapFile, boolean isCompiletime) {
        this(prog, gui, mapFile, new ProgramState(gui, prog, isCompiletime));
    }

    /**
     * Runs the given function with the supplied arguments and returns the resulting
     * {@link LocalState} (which carries the return value, if any).
     *
     * @param globalState the shared interpreter state
     * @param f           the function to execute
     * @param caller      the element used for error positions, or {@code null} to use {@code f} itself
     * @param args        the call arguments; for vararg functions the surplus is packed into a {@link VarargArray}
     * @throws InterpreterException if execution is interrupted, the function does not
     *                              return a value, or an internal error occurs
     */
    public static LocalState runFunc(ProgramState globalState, ImFunction f, @Nullable Element caller,
                                     ILconst... args) {
        if (Thread.currentThread().isInterrupted()) {
            throw new InterpreterException(globalState, "Execution interrupted");
        }
        try {
            if (f.hasFlag(FunctionFlagEnum.IS_VARARG)) {
                // For vararg functions, rewrite args: copy the fixed parameters and pack
                // all remaining arguments into a VarargArray stored in the last slot.
                ILconst[] newArgs = new ILconst[f.getParameters().size()];
                if (newArgs.length - 1 >= 0) System.arraycopy(args, 0, newArgs, 0, newArgs.length - 1);
                ILconst[] varargArray = new ILconst[1 + args.length - newArgs.length];
                for (int i = newArgs.length - 1, j = 0; i < args.length; i++, j++) {
                    varargArray[j] = args[i];
                }
                newArgs[newArgs.length - 1] = new VarargArray(varargArray);
                args = newArgs;
            }
            if (f.getParameters().size() != args.length) {
                throw new Error("wrong number of parameters when calling func " + f.getName() + "(" +
                        Arrays.stream(args).map(Object::toString).collect(Collectors.joining(", ")) + ")");
            }
            for (int i = 0; i < f.getParameters().size(); i++) {
                // TODO could do typecheck here
                args[i] = adjustTypeOfConstant(args[i], f.getParameters().get(i).getType());
            }

            // Natives (both @compiletimenative and regular natives) are dispatched
            // to the registered NativesProviders instead of being interpreted.
            if (isCompiletimeNative(f)) {
                return runBuiltinFunction(globalState, f, args);
            }
            if (f.isNative()) {
                return runBuiltinFunction(globalState, f, args);
            }

            LocalState localState = new LocalState();
            int i = 0;
            for (ImVar p : f.getParameters()) {
                localState.setVal(p, args[i]);
                i++;
            }

            if (f.getBody().isEmpty()) {
                // Empty body: nothing to run, treat as returning null.
                return localState.setReturnVal(ILconstNull.instance());
            } else {
                globalState.setLastStatement(f.getBody().get(0));
            }

            globalState.pushStackframe(f, args, (caller == null ? f : caller).attrTrace().attrErrorPos());
            try {
                f.getBody().runStatements(globalState, localState);
                globalState.popStackframe();
            } catch (ReturnException e) {
                // Normal return path: a ReturnException carries the return value.
                globalState.popStackframe();
                ILconst retVal = e.getVal();
                retVal = adjustTypeOfConstant(retVal, f.getReturnType());
                return localState.setReturnVal(retVal);
            }
            if (f.getReturnType() instanceof ImVoid) {
                return localState;
            }
            throw new InterpreterException("function " + f.getName() + " did not return any value...");
        } catch (InterpreterException e) {
            String msg = buildStacktrace(globalState, e);
            e.setStacktrace(msg);
            e.setTrace(getTrace(globalState, f));
            throw e;
        } catch (TestSuccessException | TestFailException | DebugPrintError e) {
            // These are control-flow signals for the test runner / debug prints; pass through.
            throw e;
        } catch (Throwable e) {
            // Anything else is a bug in the interpreter itself; attach the interpreter stacktrace.
            String msg = buildStacktrace(globalState, e);
            de.peeeq.wurstscript.ast.Element trace = getTrace(globalState, f);
            throw new InterpreterException(trace, "You encountered a bug in the interpreter: " + e, e).setStacktrace(msg);
        }
    }

    /**
     * Returns the AST element to report errors at: the last executed statement's
     * trace, or the function's own trace if nothing was executed yet.
     */
    public static de.peeeq.wurstscript.ast.Element getTrace(ProgramState globalState, ImFunction f) {
        Element lastStatement = globalState.getLastStatement();
        return lastStatement == null ? f.attrTrace() : lastStatement.attrTrace();
    }

    /**
     * Builds a human-readable stacktrace from the interpreter's stack frames,
     * prefixed with the source position of the last executed statement (if known).
     */
    public static String buildStacktrace(ProgramState globalState, Throwable e) {
        StringBuilder err = new StringBuilder();
        try {
            WPos src = globalState.getLastStatement().attrTrace().attrSource();
            err.append("at : ").append(new File(src.getFile()).getName()).append(", line ").append(src.getLine()).append("\n");
        } catch (Exception ignored) {
            // Position information is best-effort; missing last statement / trace is fine.
        }
        globalState.getStackFrames().appendTo(err);
        return err.toString();
    }

    // Widens an integer constant to a real when the expected type is "real";
    // all other constants are passed through unchanged.
    @SuppressWarnings("null")
    private static ILconst adjustTypeOfConstant(@Nullable ILconst retVal, ImType expectedType) {
        if (retVal instanceof ILconstInt && isTypeReal(expectedType)) {
            ILconstInt retValI = (ILconstInt) retVal;
            retVal = new ILconstReal(retValI.getVal());
        }
        return retVal;
    }

    private static boolean isTypeReal(ImType t) {
        if (t instanceof ImSimpleType) {
            ImSimpleType st = (ImSimpleType) t;
            return st.getTypename().equals("real");
        }
        return false;
    }

    /**
     * Invokes a native function by name on the registered {@link NativesProvider}s,
     * trying each provider in turn. If none handles it, reports a compilation error
     * and returns a default value of the function's return type.
     */
    private static LocalState runBuiltinFunction(ProgramState globalState, ImFunction f, ILconst... args) {
        StringBuilder errors = new StringBuilder();
        for (NativesProvider natives : globalState.getNativeProviders()) {
            try {
                return new LocalState(natives.invoke(f.getName(), args));
            } catch (NoSuchNativeException e) {
                errors.append("\n").append(e.getMessage());
                // try the next provider
            }
        }
        globalState.compilationError("function " + f.getName() + " cannot be used from the Wurst interpreter.\n" + errors);
        if (f.getReturnType() instanceof ImVoid) {
            return new LocalState();
        }
        ILconst returnValue = ImHelper.defaultValueForComplexType(f.getReturnType()).evaluate(globalState, new LocalState());
        return new LocalState(returnValue);
    }

    // True if the function's source declaration carries the @compiletimenative annotation.
    private static boolean isCompiletimeNative(ImFunction f) {
        if (f.getTrace() instanceof HasModifier) {
            HasModifier f2 = (HasModifier) f.getTrace();
            for (Modifier m : f2.getModifiers()) {
                if (m instanceof Annotation) {
                    Annotation annotation = (Annotation) m;
                    if (annotation.getAnnotationType().equals("@compiletimenative")) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Looks up a function by name in the current program and executes it.
     *
     * @throws Error if no function with the given name exists
     */
    public LocalState executeFunction(String funcName, @Nullable Element trace) {
        globalState.resetStackframes();
        for (ImFunction f : prog.getFunctions()) {
            if (f.getName().equals(funcName)) {
                return runFunc(globalState, f, trace);
            }
        }
        // Fixed message: there was no space between the name and "was found".
        throw new Error("no function with name " + funcName + " was found.");
    }

    public void runVoidFunc(ImFunction f, @Nullable Element trace) {
        globalState.resetStackframes();
        ILconst[] args = {};
        if (!f.getParameters().isEmpty()) {
            // this should only happen because of added stacktrace parameter
            args = new ILconstString[]{new ILconstString("initial call")};
        }
        runFunc(globalState, f, trace, args);
    }

    public Element getLastStatement() {
        return globalState.getLastStatement();
    }

    /** Writes the interpreter's global state back (e.g. after compiletime execution). */
    public void writebackGlobalState(boolean injectObjects) {
        globalState.writeBack(injectObjects);
    }

    public ProgramState getGlobalState() {
        return globalState;
    }

    public void addNativeProvider(NativesProvider np) {
        globalState.addNativeProvider(np);
    }

    /** Replaces the interpreted program and resets all stack frames. */
    public void setProgram(ImProg imProg) {
        this.prog = imProg;
        this.getGlobalState().setProg(imProg);
        globalState.resetStackframes();
    }

    public ProgramState.StackTrace getStackFrames() {
        return globalState.getStackFrames();
    }

    @Override
    public void runFuncRef(ILconstFuncRef obj, @Nullable Element trace) {
        runVoidFunc(obj.getFunc(), trace);
    }

    @Override
    public TimerMockHandler getTimerMockHandler() {
        return timerMockHandler;
    }

    @Override
    public void completeTimers() {
        timerMockHandler.completeTimers();
    }

    @Override
    public ImProg getImProg() {
        return prog;
    }

    /** Counts live (not destroyed) objects whose class has the given type id. */
    @Override
    public int getInstanceCount(int val) {
        return (int) globalState.getAllObjects()
                .stream()
                .filter(o -> o.getType().getClassDef().attrTypeId() == val)
                .filter(o -> !o.isDestroyed())
                .count();
    }

    /** Counts all objects ever allocated whose class has the given type id, including destroyed ones. */
    @Override
    public int getMaxInstanceCount(int val) {
        return (int) globalState.getAllObjects()
                .stream()
                .filter(o -> o.getType().getClassDef().attrTypeId() == val)
                .count();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.extractor.json.render;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import org.apache.jmeter.gui.util.JSyntaxTextArea;
import org.apache.jmeter.gui.util.JTextScrollPane;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jmeter.visualizers.RenderAsJSON;
import org.apache.jmeter.visualizers.ResultRenderer;
import org.apache.jmeter.visualizers.ViewResultsFullVisualizer;
import org.apache.jorphan.gui.GuiUtils;
import org.apache.jorphan.gui.JLabeledTextField;
import org.fife.ui.rsyntaxtextarea.SyntaxConstants;
/**
* Abstract base class for implementation of a ResultsRenderer for a JSON tester
* @since 5.2
*/
abstract class AbstractRenderAsJsonRenderer implements ResultRenderer, ActionListener {
    protected static final String NO_MATCH = "NO MATCH"; //$NON-NLS-1$
    private static final String TAB_SEPARATOR = " "; //$NON-NLS-1$
    // Action command used by the tester button to trigger expression evaluation.
    private static final String TESTER_COMMAND = "TESTER_COMMAND"; // $NON-NLS-1$
    // Root panel of this renderer's tab (built lazily in init()).
    private JPanel jsonWithExtractorPanel;
    // Editable text area showing the (pretty-printed) JSON response.
    private JSyntaxTextArea jsonDataField;
    // Input field for the extractor expression (label supplied by subclass).
    private JLabeledTextField expressionField;
    // Read-only area showing the extraction result.
    private JTextArea resultField;
    private JTabbedPane rightSide;
    private SampleResult sampleResult;
    /** {@inheritDoc} */
    @Override
    public void clearData() {
        this.jsonDataField.setText(""); // $NON-NLS-1$
        // don't set empty to keep json path
        this.resultField.setText(""); // $NON-NLS-1$
    }
    /** {@inheritDoc} */
    @Override
    public void init() {
        // Create the panels for the json tab
        jsonWithExtractorPanel = createExtractorPanel();
    }
    /**
     * Display the response as text or as rendered HTML. Change the text on the
     * button appropriate to the current display.
     *
     * @param e the ActionEvent being processed
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        String command = e.getActionCommand();
        // Only react to the tester button, and only once a sample result has been set.
        if ((sampleResult != null) && TESTER_COMMAND.equals(command)) {
            String response = jsonDataField.getText();
            executeTester(response);
        }
    }
    /**
     * Launch JSON path engine to parse a input text
     * @param textToParse the text that will be parsed
     */
    protected void executeTester(String textToParse) {
        // Skip evaluation when either the text or the expression is empty.
        if (textToParse != null && textToParse.length() > 0
                && this.expressionField.getText().length() > 0) {
            this.resultField.setText(process(textToParse));
            this.resultField.setCaretPosition(0); // go to first line
        }
    }
    protected String getExpression() {
        return expressionField.getText();
    }
    /*================= internal business =================*/
    /** {@inheritDoc} */
    @Override
    public void renderResult(SampleResult sampleResult) {
        String response = ViewResultsFullVisualizer.getResponseAsString(sampleResult);
        // Pretty-print the response; fall back to empty text when there is no response.
        jsonDataField.setText(response == null ? "" : RenderAsJSON.prettyJSON(response, TAB_SEPARATOR)); //$NON-NLS-1$
        jsonDataField.setCaretPosition(0);
    }
    /** {@inheritDoc} */
    @Override
    public void setupTabPane() {
        // Add json-path tester pane
        if (rightSide.indexOfTab(getTabLabel()) < 0) { // $NON-NLS-1$
            rightSide.addTab(getTabLabel(), jsonWithExtractorPanel); // $NON-NLS-1$
        }
        clearData();
    }
    /**
     * @return Extractor panel
     */
    private JPanel createExtractorPanel() {
        jsonDataField = JSyntaxTextArea.getInstance(50, 80, true);
        jsonDataField.setCodeFoldingEnabled(true);
        jsonDataField.setEditable(true);
        jsonDataField.setBracketMatchingEnabled(false);
        // NOTE(review): setLanguage is called right after with the same constant —
        // the setSyntaxEditingStyle call looks redundant; confirm against JSyntaxTextArea.
        jsonDataField.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JSON);
        jsonDataField.setLanguage(SyntaxConstants.SYNTAX_STYLE_JSON);
        jsonDataField.setLineWrap(true);
        jsonDataField.setWrapStyleWord(true);
        JScrollPane jsonDataPane = JTextScrollPane.getInstance(jsonDataField, true);
        jsonDataPane.setPreferredSize(new Dimension(100, 200));
        JPanel panel = new JPanel(new BorderLayout(0, 5));
        // Top: the JSON data; bottom: the expression/tester/result panel.
        JSplitPane mainSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT,
                jsonDataPane, createTechnologyExtractorTasksPanel());
        // NOTE(review): a proportional divider location set before the component is
        // laid out typically has no effect in Swing — verify this behaves as intended.
        mainSplit.setDividerLocation(0.6d);
        mainSplit.setOneTouchExpandable(true);
        panel.add(mainSplit, BorderLayout.CENTER);
        return panel;
    }
    /**
     * Create the extractor task pane
     *
     * @return extractor task pane
     */
    private JPanel createTechnologyExtractorTasksPanel() {
        JPanel jsonPathActionPanel = new JPanel();
        jsonPathActionPanel.setLayout(new BoxLayout(jsonPathActionPanel, BoxLayout.X_AXIS));
        Border margin = new EmptyBorder(5, 5, 0, 5);
        jsonPathActionPanel.setBorder(margin);
        expressionField = new JLabeledTextField(getExpressionLabel()); // $NON-NLS-1$
        jsonPathActionPanel.add(expressionField, BorderLayout.WEST);
        // Button that fires TESTER_COMMAND back into actionPerformed().
        JButton testerButton = new JButton(getTestButtonLabel()); // $NON-NLS-1$
        testerButton.setActionCommand(TESTER_COMMAND);
        testerButton.addActionListener(this);
        jsonPathActionPanel.add(testerButton, BorderLayout.EAST);
        resultField = new JTextArea();
        resultField.setEditable(false);
        resultField.setLineWrap(true);
        resultField.setWrapStyleWord(true);
        resultField.setMinimumSize(new Dimension(100, 150));
        JPanel jsonPathTasksPanel = new JPanel(new BorderLayout(0, 5));
        jsonPathTasksPanel.add(jsonPathActionPanel, BorderLayout.NORTH);
        jsonPathTasksPanel.add(GuiUtils.makeScrollPane(resultField), BorderLayout.CENTER);
        return jsonPathTasksPanel;
    }
    /** {@inheritDoc} */
    @Override
    public synchronized void setRightSide(JTabbedPane side) {
        rightSide = side;
    }
    /** {@inheritDoc} */
    @Override
    public synchronized void setSamplerResult(Object userObject) {
        // Only SampleResult instances are accepted; anything else is ignored.
        if (userObject instanceof SampleResult) {
            sampleResult = (SampleResult) userObject;
        }
    }
    /** {@inheritDoc} */
    @Override
    public void setLastSelectedTab(int index) {
        // nothing to do
    }
    /** {@inheritDoc} */
    @Override
    public void renderImage(SampleResult sampleResult) {
        clearData();
        jsonDataField.setText(JMeterUtils.getResString("render_no_text")); // $NON-NLS-1$
    }
    /** {@inheritDoc} */
    @Override
    public void setBackgroundColor(Color backGround) {
        // NOOP
    }
    /**
     * @return Tab label
     */
    protected abstract String getTabLabel();
    /**
     * @return Test button label
     */
    protected abstract String getTestButtonLabel();
    /**
     * @return The label for the technology expression
     */
    protected abstract String getExpressionLabel();
    /**
     * @param textToParse String of the response to process
     * @return the extracted values using the technology
     */
    protected abstract String process(String textToParse);
    /**
     * @return the rightSide
     */
    protected synchronized JTabbedPane getRightSide() {
        return rightSide;
    }
    /**
     * @return the jsonWithExtractorPanel
     */
    protected JPanel getJsonWithExtractorPanel() {
        return jsonWithExtractorPanel;
    }
    /**
     * @return the jsonDataField
     */
    protected JSyntaxTextArea getJsonDataField() {
        return jsonDataField;
    }
    /**
     * @return the expressionField
     */
    protected JLabeledTextField getExpressionField() {
        return expressionField;
    }
    /**
     * @return the resultField
     */
    protected JTextArea getResultField() {
        return resultField;
    }
}
| |
/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.meeting;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.integration.junit4.JUnit4Mockery;
import org.jmock.lib.concurrent.Synchroniser;
import org.junit.Assert;
import org.junit.Test;
import org.kuali.coeus.common.committee.impl.bo.CommitteeMembershipRole;
import org.kuali.coeus.common.committee.impl.bo.MembershipRole;
import org.kuali.coeus.common.committee.impl.meeting.CommScheduleActItemBase;
import org.kuali.coeus.common.committee.impl.meeting.MemberAbsentBean;
import org.kuali.coeus.common.committee.impl.meeting.MemberPresentBean;
import org.kuali.coeus.common.committee.impl.meeting.MinuteEntryType;
import org.kuali.coeus.common.committee.impl.web.struts.form.schedule.Time12HrFmt;
import org.kuali.coeus.common.committee.impl.web.struts.form.schedule.Time12HrFmt.MERIDIEM;
import org.kuali.kra.committee.bo.Committee;
import org.kuali.kra.committee.bo.CommitteeMembership;
import org.kuali.kra.committee.bo.CommitteeSchedule;
import org.kuali.kra.irb.Protocol;
import org.kuali.kra.irb.actions.submit.ProtocolSubmission;
import org.kuali.kra.irb.correspondence.ProtocolCorrespondence;
import org.kuali.kra.irb.personnel.ProtocolPerson;
import org.kuali.kra.test.infrastructure.KcIntegrationTestBase;
import org.kuali.rice.core.api.datetime.DateTimeService;
import org.kuali.rice.krad.service.BusinessObjectService;
import org.kuali.rice.krad.service.SequenceAccessorService;
import java.sql.Date;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class MeetingServiceTest extends KcIntegrationTestBase {
private Mockery context = new JUnit4Mockery() {{ setThreadingPolicy(new Synchroniser()); }};
DateFormat dateFormat = new SimpleDateFormat("MM/dd/yyyy");
private static final String PERSON_ID = "jtester";
private static final String PERSON_ID_1 = "1";
private static final String PERSON_ID_2 = "2";
private static final String PERSON_ID_3 = "3";
private static final String PERSON_NAME_1 = "test 1";
private static final String PERSON_NAME_2 = "test 2";
private static final String PERSON_NAME_3 = "test 3";
private static final Integer ROLODEX_ID = 1746;
private static final String MEMBERSHIP_TYPE_CD = "1";
private static final Date TERM_START_DATE = Date.valueOf("2009-01-01");
private static final Date TERM_END_DATE = Date.valueOf("2009-01-31");
private static final Date SCHEDULE_DATE = Date.valueOf("2009-01-15");
private static final String MEMBERSHIP_ROLE_CD_1 = "1";
private static final String MEMBERSHIP_ROLE_CD_4 = "4";
private static final Date ROLE_START_DATE = Date.valueOf("2009-01-10");
private static final Date ROLE_END_DATE = Date.valueOf("2009-01-20");
/**
 * Builds the {@link CommitteeSchedule} fixture shared by the tests:
 * id 1, a "test"/"committeeName" committee, scheduled 10/01/2009 at "iu - poplar".
 */
private CommitteeSchedule getCommitteeSchedule() throws Exception {
    CommitteeSchedule schedule = new CommitteeSchedule();
    schedule.setId(1L);
    schedule.setCommittee(createCommittee("test", "committeeName"));
    Date scheduledDate = new Date(dateFormat.parse("10/01/2009").getTime());
    schedule.setScheduledDate(scheduledDate);
    schedule.setTime(new Timestamp(scheduledDate.getTime()));
    schedule.setPlace("iu - poplar");
    schedule.setScheduleStatusCode(1);
    return schedule;
}
/**
 * Creates a {@link Committee} with the given id and name and a maximum of 5 protocols.
 */
private Committee createCommittee(String committeeId, String committeeName) {
    Committee result = new Committee();
    result.setCommitteeId(committeeId);
    result.setCommitteeName(committeeName);
    result.setMaxProtocols(5);
    return result;
}
/**
 * Builds three {@link ScheduleAgenda} fixtures for schedule 1, inserted in
 * descending agenda-number order (3, 2, 1) with matching ids and create dates.
 */
private List<ScheduleAgenda> getAgendas() throws Exception {
    int[] agendaNumbers = {3, 2, 1};
    String[] createDates = {"10/08/2009", "10/05/2009", "10/02/2009"};

    List<ScheduleAgenda> scheduleAgendas = new ArrayList<ScheduleAgenda>();
    for (int i = 0; i < agendaNumbers.length; i++) {
        ScheduleAgenda agenda = new ScheduleAgenda();
        agenda.setScheduleIdFk(1L);
        agenda.setAgendaNumber(agendaNumbers[i]);
        agenda.setAgendaName("test");
        agenda.setScheduleAgendaId((long) agendaNumbers[i]);
        agenda.setCreateTimestamp(new Timestamp(new Date(dateFormat.parse(createDates[i]).getTime()).getTime()));
        scheduleAgendas.add(agenda);
    }
    return scheduleAgendas;
}
@Test
public void testSaveCommitteeSchedule() throws Exception {
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    // Mocked persistence layer: the test only verifies that save/delete are invoked.
    final BusinessObjectService businessObjectService = context.mock(BusinessObjectService.class);
    final CommitteeSchedule committeeSchedule = getCommitteeSchedule();
    committeeSchedule.setEndTime(committeeSchedule.getTime());
    committeeSchedule.setStartTime(committeeSchedule.getTime());
    committeeSchedule.setViewTime(new Time12HrFmt("01:00", MERIDIEM.PM));
    committeeSchedule.setViewStartTime(new Time12HrFmt("01:00", MERIDIEM.PM));
    committeeSchedule.setViewEndTime(new Time12HrFmt("02:00", MERIDIEM.PM));
    final List<CommScheduleActItem> deletedOtherActions = new ArrayList<CommScheduleActItem>();
    CommScheduleActItem actItem = new CommScheduleActItem();
    deletedOtherActions.add(actItem);
    // Expect exactly one delete of the removed actions and one save of the schedule.
    context.checking(new Expectations() {
        {
            one(businessObjectService).delete(deletedOtherActions);
            one(businessObjectService).save(committeeSchedule);
        }
    });
    meetingService.setBusinessObjectService(businessObjectService);
    meetingService.saveMeetingDetails(committeeSchedule, deletedOtherActions);
    // The schedule's fields should be unchanged by the save operation.
    Assert.assertEquals(committeeSchedule.getParentCommittee().getCommitteeId(), "test");
    Assert.assertEquals(committeeSchedule.getParentCommittee().getCommitteeName(), "committeeName");
    Assert.assertEquals(committeeSchedule.getPlace(), "iu - poplar");
    Assert.assertEquals(committeeSchedule.getScheduledDate(), new Date(dateFormat.parse("10/01/2009").getTime()));
    Assert.assertEquals(committeeSchedule.getMaxProtocols(), new Integer(5));
    Assert.assertEquals(committeeSchedule.getId(), new Long(1));
    Assert.assertNotSame(committeeSchedule.getTime(), new Timestamp(committeeSchedule.getScheduledDate().getTime()));
    Assert.assertEquals(committeeSchedule.getScheduleStatusCode(), new Integer(1));
    // TODO : need to set up protocolsubmission/otheractions/attendances/minutes for more testing
    // to check whetehr it is really persisted in DB ok or assume the mock 'save' and 'delete' are ok ?
}
@Test
public void testgetStandardReviewComment() throws Exception {
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    final BusinessObjectService businessObjectService = context.mock(BusinessObjectService.class);
    final ProtocolContingency protocolContingency = new ProtocolContingency();
    protocolContingency.setProtocolContingencyCode("1");
    protocolContingency.setDescription("Protocol Contingency comment #1");
    // Case 1: lookup of code "1" returns a contingency -> its description is returned.
    context.checking(new Expectations() {
        {
            Map<String, String> queryMap = new HashMap<String, String>();
            queryMap.put("protocolContingencyCode", "1");
            one(businessObjectService).findByPrimaryKey(ProtocolContingency.class, queryMap);
            will(returnValue(protocolContingency));
        }
    });
    meetingService.setBusinessObjectService(businessObjectService);
    String description = meetingService.getStandardReviewComment("1");
    Assert.assertEquals(description, "Protocol Contingency comment #1");
    // Case 2: lookup of code "2" finds nothing -> the service returns null.
    context.checking(new Expectations() {
        {
            Map<String, String> queryMap = new HashMap<String, String>();
            queryMap.put("protocolContingencyCode", "2");
            one(businessObjectService).findByPrimaryKey(ProtocolContingency.class, queryMap);
            will(returnValue(null));
        }
    });
    description = meetingService.getStandardReviewComment("2");
    Assert.assertTrue(description == null);
}
@Test
public void testAddOtherAction() throws Exception {
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    // Mocked sequence service: supplies the id for the newly added action.
    final SequenceAccessorService sequenceAccessorService = context.mock(SequenceAccessorService.class);
    final CommScheduleActItem newOtherAction = getOtherActionItem(1L, "1", 0);
    newOtherAction.setScheduleActItemTypeCode("1");
    context.checking(new Expectations() {
        {
            one(sequenceAccessorService).getNextAvailableSequenceNumber("SEQ_MEETING_ID", newOtherAction.getClass());
            will(returnValue(newOtherAction.getCommScheduleActItemsId()));
        }
    });
    meetingService.setSequenceAccessorService(sequenceAccessorService);
    CommitteeSchedule committeeSchedule = new CommitteeSchedule();
    committeeSchedule.setCommScheduleActItems(new ArrayList<CommScheduleActItemBase>());
    meetingService.addOtherAction(newOtherAction, committeeSchedule);
    // The action is appended and numbered 1 (it was created with number 0).
    Assert.assertTrue(committeeSchedule.getCommScheduleActItems().size() == 1);
    Assert.assertEquals(committeeSchedule.getCommScheduleActItems().get(0).getScheduleActItemTypeCode(), "1");
    Assert.assertEquals(committeeSchedule.getCommScheduleActItems().get(0).getActionItemNumber(), new Integer(1));
}
/**
 * Deleting the action at index 1 must move it to the deleted list and leave the
 * first action untouched on the schedule.
 */
@Test
public void testDeleteOtherAction() throws Exception {
    MeetingServiceImpl service = new MeetingServiceImpl();

    List<CommScheduleActItem> actions = new ArrayList<CommScheduleActItem>();
    actions.add(getOtherActionItem(1L, "1", 1));
    actions.add(getOtherActionItem(2L, "2", 2));

    List<CommScheduleActItem> removed = new ArrayList<CommScheduleActItem>();
    CommitteeSchedule schedule = new CommitteeSchedule();
    schedule.setCommScheduleActItems((List) actions);

    service.deleteOtherAction(schedule, 1, (List) removed);

    // First action stays on the schedule ...
    Assert.assertTrue(schedule.getCommScheduleActItems().size() == 1);
    Assert.assertEquals(schedule.getCommScheduleActItems().get(0).getScheduleActItemTypeCode(), "1");
    Assert.assertEquals(schedule.getCommScheduleActItems().get(0).getActionItemNumber(), new Integer(1));
    // ... and the second one lands in the deleted list.
    Assert.assertTrue(removed.size() == 1);
    Assert.assertEquals(removed.get(0).getScheduleActItemTypeCode(), "2");
    Assert.assertEquals(removed.get(0).getActionItemNumber(), new Integer(2));
}
/**
 * Creates a {@link CommScheduleActItem} whose {@code refreshReferenceObject} is
 * stubbed so tests never touch the persistence layer.
 */
private CommScheduleActItem getOtherActionItem(Long commScheduleActItemsId, String scheduleActItemTypeCode, int actionItemNumber) {
    CommScheduleActItem item = new CommScheduleActItem() {
        @Override
        public void refreshReferenceObject(String referenceObjectName) {
            // Build the reference locally instead of hitting the database.
            if (referenceObjectName.equals("scheduleActItemType")) {
                org.kuali.coeus.common.committee.impl.meeting.ScheduleActItemType scheduleActItemType =
                        new org.kuali.coeus.common.committee.impl.meeting.ScheduleActItemType();
                scheduleActItemType.setScheduleActItemTypeCode(this.getScheduleActItemTypeCode());
            }
        }
    };
    item.setCommScheduleActItemsId(commScheduleActItemsId);
    item.setScheduleActItemTypeCode(scheduleActItemTypeCode);
    item.setActionItemNumber(actionItemNumber);
    return item;
}
/**
 * Marking the present member at index 1 absent must move exactly that member
 * (person 2) to the absent list, keeping persons 1 and 3 present.
 */
@Test
public void testMarkAbsent() throws Exception {
    MeetingServiceImpl service = new MeetingServiceImpl();

    List<MemberPresentBean> present = new ArrayList<MemberPresentBean>();
    present.add(getMemberPresentBean(PERSON_ID_1, PERSON_NAME_1));
    present.add(getMemberPresentBean(PERSON_ID_2, PERSON_NAME_2));
    present.add(getMemberPresentBean(PERSON_ID_3, PERSON_NAME_3));
    List<MemberAbsentBean> absent = new ArrayList<MemberAbsentBean>();

    service.markAbsent(present, absent, 1);

    Assert.assertTrue(present.size() == 2);
    Assert.assertTrue(absent.size() == 1);
    Assert.assertEquals(present.get(0).getAttendance().getPersonId(), PERSON_ID_1);
    Assert.assertEquals(present.get(0).getAttendance().getPersonName(), PERSON_NAME_1);
    Assert.assertEquals(present.get(1).getAttendance().getPersonId(), PERSON_ID_3);
    Assert.assertEquals(present.get(1).getAttendance().getPersonName(), PERSON_NAME_3);
    Assert.assertEquals(absent.get(0).getAttendance().getPersonId(), PERSON_ID_2);
    Assert.assertEquals(absent.get(0).getAttendance().getPersonName(), PERSON_NAME_2);
}
/**
 * Builds a {@link MemberPresentBean} with an attendance record for the given person.
 */
private MemberPresentBean getMemberPresentBean(String personId, String personName) {
    CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance();
    attendance.setPersonId(personId);
    attendance.setPersonName(personName);

    MemberPresentBean bean = new MemberPresentBean();
    bean.setAttendance(attendance);
    return bean;
}
/**
 * Moving the absent member at index 1 (person 2) to "present voting" must leave
 * persons 1 and 3 absent and make person 2 the single present member.
 */
@Test
public void testPresentVoting() throws Exception {
    MeetingServiceImpl service = new MeetingServiceImpl();

    List<MemberAbsentBean> absent = new ArrayList<MemberAbsentBean>();
    absent.add(getMemberAbsentBean(PERSON_ID_1, PERSON_NAME_1));
    absent.add(getMemberAbsentBean(PERSON_ID_2, PERSON_NAME_2));
    absent.add(getMemberAbsentBean(PERSON_ID_3, PERSON_NAME_3));
    List<MemberPresentBean> present = new ArrayList<MemberPresentBean>();

    CommitteeSchedule schedule = new CommitteeSchedule();
    schedule.setCommittee(getCommitteeWithMember());
    // TODO : test if "alternate for" role ?
    schedule.setScheduledDate(SCHEDULE_DATE);

    MeetingHelper helper = new MeetingHelper(new MeetingForm());
    helper.setCommitteeSchedule(schedule);
    helper.setMemberAbsentBeans(absent);
    helper.setMemberPresentBeans(present);

    service.presentVoting(helper, 1);

    Assert.assertTrue(present.size() == 1);
    Assert.assertTrue(absent.size() == 2);
    Assert.assertEquals(absent.get(0).getAttendance().getPersonId(), PERSON_ID_1);
    Assert.assertEquals(absent.get(0).getAttendance().getPersonName(), PERSON_NAME_1);
    Assert.assertEquals(absent.get(1).getAttendance().getPersonId(), PERSON_ID_3);
    Assert.assertEquals(absent.get(1).getAttendance().getPersonName(), PERSON_NAME_3);
    Assert.assertEquals(present.get(0).getAttendance().getPersonId(), PERSON_ID_2);
    Assert.assertEquals(present.get(0).getAttendance().getPersonName(), PERSON_NAME_2);
}
@Test
public void testPresentOther() throws Exception {
    // Moves the absentee at index 1 (PERSON_ID_2) to the "other present" list
    // and verifies the remaining absentees keep their relative order.
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    List<OtherPresentBean> otherPresentBeans = new ArrayList<OtherPresentBean>();
    List<MemberAbsentBean> memberAbsentBeans = new ArrayList<MemberAbsentBean>();
    memberAbsentBeans.add(getMemberAbsentBean(PERSON_ID_1, PERSON_NAME_1));
    memberAbsentBeans.add(getMemberAbsentBean(PERSON_ID_2, PERSON_NAME_2));
    memberAbsentBeans.add(getMemberAbsentBean(PERSON_ID_3, PERSON_NAME_3));
    CommitteeSchedule committeeSchedule = new CommitteeSchedule();
    committeeSchedule.setCommittee(getCommitteeWithMember());
    committeeSchedule.setScheduledDate(SCHEDULE_DATE);
    MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm());
    meetingHelper.setCommitteeSchedule(committeeSchedule);
    meetingHelper.setMemberAbsentBeans(memberAbsentBeans);
    // raw cast required by the setter's declared signature
    meetingHelper.setOtherPresentBeans((List) otherPresentBeans);
    meetingService.presentOther(meetingHelper, 1);
    // JUnit convention: expected value first, actual second.
    Assert.assertEquals(1, otherPresentBeans.size());
    Assert.assertEquals(2, memberAbsentBeans.size());
    Assert.assertEquals(PERSON_ID_1, memberAbsentBeans.get(0).getAttendance().getPersonId());
    Assert.assertEquals(PERSON_NAME_1, memberAbsentBeans.get(0).getAttendance().getPersonName());
    Assert.assertEquals(PERSON_ID_3, memberAbsentBeans.get(1).getAttendance().getPersonId());
    Assert.assertEquals(PERSON_NAME_3, memberAbsentBeans.get(1).getAttendance().getPersonName());
    Assert.assertEquals(PERSON_ID_2, otherPresentBeans.get(0).getAttendance().getPersonId());
    Assert.assertEquals(PERSON_NAME_2, otherPresentBeans.get(0).getAttendance().getPersonName());
}
/**
 * Creates a committee membership carrying the given identity (person id or
 * rolodex id), membership type code and term start/end dates.
 */
private CommitteeMembership getMembership(String personID, Integer rolodexID, String membershipTypeCode, Date termStartDate,
        Date termEndDate) {
    CommitteeMembership membership = new CommitteeMembership();
    membership.setPersonId(personID);
    membership.setRolodexId(rolodexID);
    membership.setMembershipTypeCode(membershipTypeCode);
    membership.setTermStartDate(termStartDate);
    membership.setTermEndDate(termEndDate);
    return membership;
}
/**
 * Builds a {@code MemberAbsentBean} whose attendance record carries the
 * given person id and display name.
 */
private MemberAbsentBean getMemberAbsentBean(String personId, String personName) {
    CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance();
    attendance.setPersonId(personId);
    attendance.setPersonName(personName);
    MemberAbsentBean absentBean = new MemberAbsentBean();
    absentBean.setAttendance(attendance);
    return absentBean;
}
/**
 * Builds a {@code CommitteeMembershipRole} for the given role code and date
 * range, including an attached {@code MembershipRole} reference whose
 * description is derived from the code.
 */
private CommitteeMembershipRole getRole(String membershipRoleCode, Date startDate, Date endDate) {
    MembershipRole membershipRole = new MembershipRole();
    membershipRole.setMembershipRoleCode(membershipRoleCode);
    membershipRole.setDescription("Role " + membershipRoleCode);
    CommitteeMembershipRole roleAssignment = new CommitteeMembershipRole();
    roleAssignment.setMembershipRoleCode(membershipRoleCode);
    roleAssignment.setStartDate(startDate);
    roleAssignment.setEndDate(endDate);
    roleAssignment.setMembershipRole(membershipRole);
    return roleAssignment;
}
@Test
public void testAddOtherPresent() throws Exception {
    // Adding an "other present" who is also a member should remove the
    // matching entry from the absent list.
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    OtherPresentBean newOtherPresentBean = getOtherPresentBean(PERSON_ID_1, PERSON_NAME_1, true);
    CommitteeSchedule committeeSchedule = new CommitteeSchedule();
    List<OtherPresentBean> otherPresentBeans = new ArrayList<OtherPresentBean>();
    List<MemberAbsentBean> memberAbsentBeans = new ArrayList<MemberAbsentBean>();
    memberAbsentBeans.add(getMemberAbsentBean(PERSON_ID_1, PERSON_NAME_1));
    committeeSchedule.setCommittee(getCommitteeWithMember());
    committeeSchedule.setScheduledDate(SCHEDULE_DATE);
    MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm());
    meetingHelper.setMemberAbsentBeans(memberAbsentBeans);
    // raw cast required by the setter's declared signature
    meetingHelper.setOtherPresentBeans((List) otherPresentBeans);
    meetingHelper.setNewOtherPresentBean(newOtherPresentBean);
    meetingHelper.setCommitteeSchedule(committeeSchedule);
    meetingService.addOtherPresent(meetingHelper);
    // JUnit convention: expected value first, actual second.
    Assert.assertEquals(1, otherPresentBeans.size());
    Assert.assertEquals(0, memberAbsentBeans.size());
    Assert.assertEquals(PERSON_ID_1, otherPresentBeans.get(0).getAttendance().getPersonId());
    Assert.assertEquals(PERSON_NAME_1, otherPresentBeans.get(0).getAttendance().getPersonName());
}
/**
 * Builds a committee fixture with two memberships: a person-based one
 * (PERSON_ID_1) and a rolodex-based one.
 */
private Committee getCommitteeWithMember() {
    Committee committee = new Committee();
    CommitteeMembership committeeMembership = getMembership(PERSON_ID_1, null, MEMBERSHIP_TYPE_CD, TERM_START_DATE,
            TERM_END_DATE);
    committeeMembership.getMembershipRoles().add(getRole(MEMBERSHIP_ROLE_CD_1, ROLE_START_DATE, ROLE_END_DATE));
    committee.getCommitteeMemberships().add(committeeMembership);
    committee.getCommitteeMemberships()
            .add(getMembership(null, ROLODEX_ID, MEMBERSHIP_TYPE_CD, TERM_START_DATE, TERM_END_DATE));
    // NOTE(review): this adds a second role to the FIRST membership, not to the
    // rolodex membership created just above. It looks like a copy/paste slip,
    // but the same pattern appears in testPopulateFormHelper — confirm intent
    // before changing (the rolodex membership is left with no roles).
    committeeMembership.getMembershipRoles().add(getRole(MEMBERSHIP_ROLE_CD_4, ROLE_START_DATE, ROLE_END_DATE));
    return committee;
}
@Test
public void testdeleteOtherPresent() throws Exception {
    // Deleting an "other present" who is a member (index 0) should append
    // that person back onto the absent list.
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    CommitteeSchedule committeeSchedule = new CommitteeSchedule();
    List<OtherPresentBean> otherPresentBeans = new ArrayList<OtherPresentBean>();
    otherPresentBeans.add(getOtherPresentBean(PERSON_ID_1, PERSON_NAME_1, true));
    otherPresentBeans.add(getOtherPresentBean(PERSON_ID_3, PERSON_NAME_3, false));
    List<MemberAbsentBean> memberAbsentBeans = new ArrayList<MemberAbsentBean>();
    memberAbsentBeans.add(getMemberAbsentBean(PERSON_ID_2, PERSON_NAME_2));
    committeeSchedule.setCommittee(getCommitteeWithMember());
    committeeSchedule.setScheduledDate(SCHEDULE_DATE);
    MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm());
    meetingHelper.setMemberAbsentBeans(memberAbsentBeans);
    // raw cast required by the setter's declared signature
    meetingHelper.setOtherPresentBeans((List) otherPresentBeans);
    meetingHelper.setCommitteeSchedule(committeeSchedule);
    meetingService.deleteOtherPresent(meetingHelper, 0);
    // JUnit convention: expected value first, actual second.
    Assert.assertEquals(1, otherPresentBeans.size());
    Assert.assertEquals(2, memberAbsentBeans.size());
    Assert.assertEquals(PERSON_ID_3, otherPresentBeans.get(0).getAttendance().getPersonId());
    Assert.assertEquals(PERSON_NAME_3, otherPresentBeans.get(0).getAttendance().getPersonName());
    Assert.assertEquals(PERSON_ID_2, memberAbsentBeans.get(0).getAttendance().getPersonId());
    Assert.assertEquals(PERSON_NAME_2, memberAbsentBeans.get(0).getAttendance().getPersonName());
    Assert.assertEquals(PERSON_ID_1, memberAbsentBeans.get(1).getAttendance().getPersonId());
    Assert.assertEquals(PERSON_NAME_1, memberAbsentBeans.get(1).getAttendance().getPersonName());
}
/**
 * Builds an {@code OtherPresentBean} with the given attendee identity and
 * member flag.
 */
private OtherPresentBean getOtherPresentBean(String personId, String personName, boolean isMember) {
    CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance();
    attendance.setPersonId(personId);
    attendance.setPersonName(personName);
    OtherPresentBean presentBean = new OtherPresentBean();
    presentBean.setAttendance(attendance);
    presentBean.setMember(isMember);
    return presentBean;
}
@Test
public void testAddCommitteeScheduleMinute() throws Exception {
    // Adding a new minute should append it to the schedule's minute list,
    // stamped via the (mocked) DateTimeService.
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    final DateTimeService dateTimeService = context.mock(DateTimeService.class);
    context.checking(new Expectations() {{
        one(dateTimeService).getCurrentTimestamp();
        will(returnValue(new Timestamp(System.currentTimeMillis())));
    }});
    meetingService.setDateTimeService(dateTimeService);
    CommitteeScheduleMinute newCommitteeScheduleMinute = getCommitteeScheduleMinute(1L, "1", 1, 2L);
    CommitteeSchedule committeeSchedule = new CommitteeSchedule();
    committeeSchedule.setId(1L);
    committeeSchedule.setCommitteeScheduleMinutes(new ArrayList<CommitteeScheduleMinute>());
    List<ProtocolSubmission> protocolSubmissions = new ArrayList<ProtocolSubmission>();
    protocolSubmissions.add(getProtocolSubmission(1L));
    committeeSchedule.setProtocolSubmissions(protocolSubmissions);
    MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm());
    meetingHelper.setNewCommitteeScheduleMinute(newCommitteeScheduleMinute);
    meetingHelper.setCommitteeSchedule(committeeSchedule);
    meetingService.addCommitteeScheduleMinute(meetingHelper);
    // JUnit convention: expected value first. Integer.valueOf replaces the
    // deprecated new Integer(int) constructor.
    Assert.assertEquals(1, committeeSchedule.getCommitteeScheduleMinutes().size());
    Assert.assertEquals("1", committeeSchedule.getCommitteeScheduleMinutes().get(0).getMinuteEntryTypeCode());
    Assert.assertEquals(Integer.valueOf(1), committeeSchedule.getCommitteeScheduleMinutes().get(0).getEntryNumber());
}
@Test
public void testDeleteCommitteeScheduleMinute() throws Exception {
    // Deleting the minute at index 1 should remove it from the schedule and
    // collect it in the deletedItems list.
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    List<CommitteeScheduleMinute> items = new ArrayList<CommitteeScheduleMinute>();
    List<CommitteeScheduleMinute> deletedItems = new ArrayList<CommitteeScheduleMinute>();
    CommitteeScheduleMinute minute1 = getCommitteeScheduleMinute(1L, "1", 1, 3L);
    items.add(minute1);
    CommitteeScheduleMinute minute2 = getCommitteeScheduleMinute(2L, "2", 2, 3L);
    items.add(minute2);
    CommitteeSchedule committeeSchedule = new CommitteeSchedule();
    committeeSchedule.setCommitteeScheduleMinutes(items);
    meetingService.deleteCommitteeScheduleMinute(committeeSchedule, deletedItems, 1);
    // JUnit convention: expected value first. Integer.valueOf replaces the
    // deprecated new Integer(int) constructor.
    Assert.assertEquals(1, committeeSchedule.getCommitteeScheduleMinutes().size());
    Assert.assertEquals("1", committeeSchedule.getCommitteeScheduleMinutes().get(0).getMinuteEntryTypeCode());
    Assert.assertEquals(Integer.valueOf(1), committeeSchedule.getCommitteeScheduleMinutes().get(0).getEntryNumber());
    Assert.assertEquals(1, deletedItems.size());
    Assert.assertEquals("2", deletedItems.get(0).getMinuteEntryTypeCode());
    Assert.assertEquals(Integer.valueOf(2), deletedItems.get(0).getEntryNumber());
}
/**
 * Builds a {@code CommitteeScheduleMinute} fixture with the given ids and
 * entry data, wired to a protocol from {@link #getProtocolSubmission}.
 * The anonymous subclass stubs refreshReferenceObject so the test never
 * touches the persistence layer.
 */
private CommitteeScheduleMinute getCommitteeScheduleMinute(Long commScheduleMinutesId, String minuteEntryTypeCode,
        int entryNumber, Long submissionId) {
    CommitteeScheduleMinute committeeScheduleMinute = new CommitteeScheduleMinute() {
        @Override
        public void refreshReferenceObject(String referenceObjectName) {
            if (referenceObjectName.equals("minuteEntryType")) {
                // NOTE(review): this MinuteEntryType is constructed but never
                // assigned to the minute (no setMinuteEntryType call), so the
                // refresh is effectively a no-op — confirm whether an
                // assignment was intended here.
                MinuteEntryType minuteEntryType = new MinuteEntryType();
                minuteEntryType.setMinuteEntryTypeCode(this.getMinuteEntryTypeCode());
            }
        }
    };
    ProtocolSubmission submission = getProtocolSubmission(submissionId);
    committeeScheduleMinute.setProtocol(submission.getProtocol());
    committeeScheduleMinute.setEntryNumber(entryNumber);
    committeeScheduleMinute.setMinuteEntryTypeCode(minuteEntryTypeCode);
    committeeScheduleMinute.setCommScheduleMinutesId(commScheduleMinutesId);
    return committeeScheduleMinute;
}
@Test
public void testPopulateFormHelper() throws Exception {
    // Populates the meeting helper from a schedule with two memberships and
    // two protocol submissions; agendas/minute docs/correspondences come from
    // a mocked BusinessObjectService.
    MeetingServiceImpl meetingService = new MeetingServiceImpl();
    CommitteeSchedule committeeSchedule = new CommitteeSchedule();
    Committee committee = new Committee();
    committee.setCommitteeId("1");
    committee.setCommitteeName("Test Committee");
    CommitteeMembership committeeMembership = getMembership(PERSON_ID, null, MEMBERSHIP_TYPE_CD, TERM_START_DATE, TERM_END_DATE);
    committeeMembership.getMembershipRoles().add(getRole(MEMBERSHIP_ROLE_CD_1, ROLE_START_DATE, ROLE_END_DATE));
    committee.getCommitteeMemberships().add(committeeMembership);
    committee.getCommitteeMemberships()
            .add(getMembership(null, ROLODEX_ID, MEMBERSHIP_TYPE_CD, TERM_START_DATE, TERM_END_DATE));
    committeeMembership.getMembershipRoles().add(getRole(MEMBERSHIP_ROLE_CD_4, ROLE_START_DATE, ROLE_END_DATE));
    committeeSchedule.setCommittee(committee);
    // TODO : test if "alternate for" role ?
    committeeSchedule.setScheduledDate(SCHEDULE_DATE);
    committeeSchedule.setId(1L);
    List<ProtocolSubmission> protocolSubmissions = new ArrayList<ProtocolSubmission>();
    protocolSubmissions.add(getProtocolSubmission(1L));
    protocolSubmissions.add(getProtocolSubmission(2L));
    committeeSchedule.setProtocolSubmissions(protocolSubmissions);
    MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm());
    final BusinessObjectService businessObjectService = context.mock(BusinessObjectService.class);
    final List<ScheduleAgenda> agendas = getAgendas();
    final List<CommScheduleMinuteDoc> minuteDocs = getMinuteDocs();
    final List<ProtocolCorrespondence> correspondences = getCorrespondences();
    context.checking(new Expectations() {
        {
            // Typed maps instead of raw Map/HashMap; stray empty statements
            // (bare ';' lines) removed.
            Map<String, Object> queryMap = new HashMap<String, Object>();
            queryMap.put("scheduleIdFk", 1L);
            // The agenda lookup is expected twice by the service under test.
            one(businessObjectService).findMatchingOrderBy(ScheduleAgenda.class, queryMap, "createTimestamp", true);
            will(returnValue(agendas));
            one(businessObjectService).findMatchingOrderBy(ScheduleAgenda.class, queryMap, "createTimestamp", true);
            will(returnValue(agendas));
            one(businessObjectService).findMatchingOrderBy(CommScheduleMinuteDoc.class, queryMap, "createTimestamp", true);
            will(returnValue(minuteDocs));
            Map<String, Object> queryMap1 = new HashMap<String, Object>();
            queryMap1.put("protocolId", 1L);
            one(businessObjectService).findMatching(ProtocolCorrespondence.class, queryMap1);
            will(returnValue(correspondences));
        }
    });
    meetingService.setBusinessObjectService(businessObjectService);
    meetingService.populateFormHelper(meetingHelper, committeeSchedule, 1);
    // JUnit convention: expected value first, actual second.
    Assert.assertEquals(2, meetingHelper.getMemberAbsentBeans().size());
    Assert.assertEquals(2, meetingHelper.getProtocolSubmittedBeans().size());
    Assert.assertEquals(0, meetingHelper.getMemberPresentBeans().size());
    Assert.assertEquals(0, meetingHelper.getOtherPresentBeans().size());
    Assert.assertEquals("Test Committee #1 Meeting " + dateFormat.format(SCHEDULE_DATE), meetingHelper.getTabLabel());
    Assert.assertEquals(1, meetingHelper.getMinuteDocs().size());
    Assert.assertEquals(1, meetingHelper.getCorrespondences().size());
    Assert.assertEquals("1", meetingHelper.getMinuteDocs().get(0).getScheduleIdFk().toString());
    Assert.assertEquals("1", meetingHelper.getCorrespondences().get(0).getProtocolId().toString());
}
/** Returns a single-element list holding one minute doc linked to schedule id 1. */
private List<CommScheduleMinuteDoc> getMinuteDocs() {
    CommScheduleMinuteDoc minuteDoc = new CommScheduleMinuteDoc();
    minuteDoc.setScheduleIdFk(1L);
    List<CommScheduleMinuteDoc> minuteDocs = new ArrayList<CommScheduleMinuteDoc>();
    minuteDocs.add(minuteDoc);
    return minuteDocs;
}
/** Returns a single-element list holding one correspondence for protocol id 1. */
private List<ProtocolCorrespondence> getCorrespondences() {
    ProtocolCorrespondence correspondence = new ProtocolCorrespondence();
    correspondence.setProtocolId(1L);
    List<ProtocolCorrespondence> correspondences = new ArrayList<ProtocolCorrespondence>();
    correspondences.add(correspondence);
    return correspondences;
}
/**
 * Builds a {@code ProtocolSubmission} fixture with the given submission id,
 * attached to the protocol from {@link #getProtocol} and protocol id 1.
 */
private ProtocolSubmission getProtocolSubmission(Long submissionId) {
    // Anonymous subclass disables reference refreshing for the test.
    ProtocolSubmission submission = new ProtocolSubmission() {
        @Override
        public void refreshReferenceObject(String referenceObjectName) {
            // intentionally a no-op in tests
        }
    };
    submission.setProtocol(getProtocol());
    submission.setSubmissionId(submissionId);
    submission.setProtocolId(1L);
    return submission;
}
/**
 * Builds a {@code Protocol} fixture whose principal investigator is always
 * PERSON_ID_1 / PERSON_NAME_1, with reference refreshing disabled.
 */
private Protocol getProtocol() {
    return new Protocol() {
        @Override
        public void refreshReferenceObject(String referenceObjectName) {
            // intentionally a no-op in tests
        }
        @Override
        public ProtocolPerson getPrincipalInvestigator() {
            ProtocolPerson investigator = new ProtocolPerson();
            investigator.setPersonId(PERSON_ID_1);
            investigator.setPersonName(PERSON_NAME_1);
            return investigator;
        }
    };
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.